diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..3b4e477
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,7 @@
+*.*~
+*_flymake.py
+*.pyc
+_build
+_static
+_templates
+
diff --git a/COPYING b/COPYING
new file mode 100644
index 0000000..94a9ed0
--- /dev/null
+++ b/COPYING
@@ -0,0 +1,674 @@
+                    GNU GENERAL PUBLIC LICENSE
+                       Version 3, 29 June 2007
+
+ Copyright (C) 2007 Free Software Foundation, Inc. <http://fsf.org/>
+ Everyone is permitted to copy and distribute verbatim copies
+ of this license document, but changing it is not allowed.
+
+                            Preamble
+
+  The GNU General Public License is a free, copyleft license for
+software and other kinds of works.
+
+  The licenses for most software and other practical works are designed
+to take away your freedom to share and change the works.  By contrast,
+the GNU General Public License is intended to guarantee your freedom to
+share and change all versions of a program--to make sure it remains free
+software for all its users.  We, the Free Software Foundation, use the
+GNU General Public License for most of our software; it applies also to
+any other work released this way by its authors.  You can apply it to
+your programs, too.
+
+  When we speak of free software, we are referring to freedom, not
+price.  Our General Public Licenses are designed to make sure that you
+have the freedom to distribute copies of free software (and charge for
+them if you wish), that you receive source code or can get it if you
+want it, that you can change the software or use pieces of it in new
+free programs, and that you know you can do these things.
+
+  To protect your rights, we need to prevent others from denying you
+these rights or asking you to surrender the rights.  Therefore, you have
+certain responsibilities if you distribute copies of the software, or if
+you modify it: responsibilities to respect the freedom of others.
+
+  For example, if you distribute copies of such a program, whether
+gratis or for a fee, you must pass on to the recipients the same
+freedoms that you received.  You must make sure that they, too, receive
+or can get the source code.  And you must show them these terms so they
+know their rights.
+
+  Developers that use the GNU GPL protect your rights with two steps:
+(1) assert copyright on the software, and (2) offer you this License
+giving you legal permission to copy, distribute and/or modify it.
+
+  For the developers' and authors' protection, the GPL clearly explains
+that there is no warranty for this free software.  For both users' and
+authors' sake, the GPL requires that modified versions be marked as
+changed, so that their problems will not be attributed erroneously to
+authors of previous versions.
+
+  Some devices are designed to deny users access to install or run
+modified versions of the software inside them, although the manufacturer
+can do so.  This is fundamentally incompatible with the aim of
+protecting users' freedom to change the software.  The systematic
+pattern of such abuse occurs in the area of products for individuals to
+use, which is precisely where it is most unacceptable.  Therefore, we
+have designed this version of the GPL to prohibit the practice for those
+products.  If such problems arise substantially in other domains, we
+stand ready to extend this provision to those domains in future versions
+of the GPL, as needed to protect the freedom of users.
+
+  Finally, every program is threatened constantly by software patents.
+States should not allow patents to restrict development and use of +software on general-purpose computers, but in those that do, we wish to +avoid the special danger that patents applied to a free program could +make it effectively proprietary. To prevent this, the GPL assures that +patents cannot be used to render the program non-free. + + The precise terms and conditions for copying, distribution and +modification follow. + + TERMS AND CONDITIONS + + 0. Definitions. + + "This License" refers to version 3 of the GNU General Public License. + + "Copyright" also means copyright-like laws that apply to other kinds of +works, such as semiconductor masks. + + "The Program" refers to any copyrightable work licensed under this +License. Each licensee is addressed as "you". "Licensees" and +"recipients" may be individuals or organizations. + + To "modify" a work means to copy from or adapt all or part of the work +in a fashion requiring copyright permission, other than the making of an +exact copy. The resulting work is called a "modified version" of the +earlier work or a work "based on" the earlier work. + + A "covered work" means either the unmodified Program or a work based +on the Program. + + To "propagate" a work means to do anything with it that, without +permission, would make you directly or secondarily liable for +infringement under applicable copyright law, except executing it on a +computer or modifying a private copy. Propagation includes copying, +distribution (with or without modification), making available to the +public, and in some countries other activities as well. + + To "convey" a work means any kind of propagation that enables other +parties to make or receive copies. Mere interaction with a user through +a computer network, with no transfer of a copy, is not conveying. + + An interactive user interface displays "Appropriate Legal Notices" +to the extent that it includes a convenient and prominently visible +feature that (1) displays an appropriate copyright notice, and (2) +tells the user that there is no warranty for the work (except to the +extent that warranties are provided), that licensees may convey the +work under this License, and how to view a copy of this License. If +the interface presents a list of user commands or options, such as a +menu, a prominent item in the list meets this criterion. + + 1. Source Code. + + The "source code" for a work means the preferred form of the work +for making modifications to it. "Object code" means any non-source +form of a work. + + A "Standard Interface" means an interface that either is an official +standard defined by a recognized standards body, or, in the case of +interfaces specified for a particular programming language, one that +is widely used among developers working in that language. + + The "System Libraries" of an executable work include anything, other +than the work as a whole, that (a) is included in the normal form of +packaging a Major Component, but which is not part of that Major +Component, and (b) serves only to enable use of the work with that +Major Component, or to implement a Standard Interface for which an +implementation is available to the public in source code form. A +"Major Component", in this context, means a major essential component +(kernel, window system, and so on) of the specific operating system +(if any) on which the executable work runs, or a compiler used to +produce the work, or an object code interpreter used to run it. 
+ + The "Corresponding Source" for a work in object code form means all +the source code needed to generate, install, and (for an executable +work) run the object code and to modify the work, including scripts to +control those activities. However, it does not include the work's +System Libraries, or general-purpose tools or generally available free +programs which are used unmodified in performing those activities but +which are not part of the work. For example, Corresponding Source +includes interface definition files associated with source files for +the work, and the source code for shared libraries and dynamically +linked subprograms that the work is specifically designed to require, +such as by intimate data communication or control flow between those +subprograms and other parts of the work. + + The Corresponding Source need not include anything that users +can regenerate automatically from other parts of the Corresponding +Source. + + The Corresponding Source for a work in source code form is that +same work. + + 2. Basic Permissions. + + All rights granted under this License are granted for the term of +copyright on the Program, and are irrevocable provided the stated +conditions are met. This License explicitly affirms your unlimited +permission to run the unmodified Program. The output from running a +covered work is covered by this License only if the output, given its +content, constitutes a covered work. This License acknowledges your +rights of fair use or other equivalent, as provided by copyright law. + + You may make, run and propagate covered works that you do not +convey, without conditions so long as your license otherwise remains +in force. You may convey covered works to others for the sole purpose +of having them make modifications exclusively for you, or provide you +with facilities for running those works, provided that you comply with +the terms of this License in conveying all material for which you do +not control copyright. Those thus making or running the covered works +for you must do so exclusively on your behalf, under your direction +and control, on terms that prohibit them from making any copies of +your copyrighted material outside their relationship with you. + + Conveying under any other circumstances is permitted solely under +the conditions stated below. Sublicensing is not allowed; section 10 +makes it unnecessary. + + 3. Protecting Users' Legal Rights From Anti-Circumvention Law. + + No covered work shall be deemed part of an effective technological +measure under any applicable law fulfilling obligations under article +11 of the WIPO copyright treaty adopted on 20 December 1996, or +similar laws prohibiting or restricting circumvention of such +measures. + + When you convey a covered work, you waive any legal power to forbid +circumvention of technological measures to the extent such circumvention +is effected by exercising rights under this License with respect to +the covered work, and you disclaim any intention to limit operation or +modification of the work as a means of enforcing, against the work's +users, your or third parties' legal rights to forbid circumvention of +technological measures. + + 4. Conveying Verbatim Copies. 
+ + You may convey verbatim copies of the Program's source code as you +receive it, in any medium, provided that you conspicuously and +appropriately publish on each copy an appropriate copyright notice; +keep intact all notices stating that this License and any +non-permissive terms added in accord with section 7 apply to the code; +keep intact all notices of the absence of any warranty; and give all +recipients a copy of this License along with the Program. + + You may charge any price or no price for each copy that you convey, +and you may offer support or warranty protection for a fee. + + 5. Conveying Modified Source Versions. + + You may convey a work based on the Program, or the modifications to +produce it from the Program, in the form of source code under the +terms of section 4, provided that you also meet all of these conditions: + + a) The work must carry prominent notices stating that you modified + it, and giving a relevant date. + + b) The work must carry prominent notices stating that it is + released under this License and any conditions added under section + 7. This requirement modifies the requirement in section 4 to + "keep intact all notices". + + c) You must license the entire work, as a whole, under this + License to anyone who comes into possession of a copy. This + License will therefore apply, along with any applicable section 7 + additional terms, to the whole of the work, and all its parts, + regardless of how they are packaged. This License gives no + permission to license the work in any other way, but it does not + invalidate such permission if you have separately received it. + + d) If the work has interactive user interfaces, each must display + Appropriate Legal Notices; however, if the Program has interactive + interfaces that do not display Appropriate Legal Notices, your + work need not make them do so. + + A compilation of a covered work with other separate and independent +works, which are not by their nature extensions of the covered work, +and which are not combined with it such as to form a larger program, +in or on a volume of a storage or distribution medium, is called an +"aggregate" if the compilation and its resulting copyright are not +used to limit the access or legal rights of the compilation's users +beyond what the individual works permit. Inclusion of a covered work +in an aggregate does not cause this License to apply to the other +parts of the aggregate. + + 6. Conveying Non-Source Forms. + + You may convey a covered work in object code form under the terms +of sections 4 and 5, provided that you also convey the +machine-readable Corresponding Source under the terms of this License, +in one of these ways: + + a) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by the + Corresponding Source fixed on a durable physical medium + customarily used for software interchange. 
+ + b) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by a + written offer, valid for at least three years and valid for as + long as you offer spare parts or customer support for that product + model, to give anyone who possesses the object code either (1) a + copy of the Corresponding Source for all the software in the + product that is covered by this License, on a durable physical + medium customarily used for software interchange, for a price no + more than your reasonable cost of physically performing this + conveying of source, or (2) access to copy the + Corresponding Source from a network server at no charge. + + c) Convey individual copies of the object code with a copy of the + written offer to provide the Corresponding Source. This + alternative is allowed only occasionally and noncommercially, and + only if you received the object code with such an offer, in accord + with subsection 6b. + + d) Convey the object code by offering access from a designated + place (gratis or for a charge), and offer equivalent access to the + Corresponding Source in the same way through the same place at no + further charge. You need not require recipients to copy the + Corresponding Source along with the object code. If the place to + copy the object code is a network server, the Corresponding Source + may be on a different server (operated by you or a third party) + that supports equivalent copying facilities, provided you maintain + clear directions next to the object code saying where to find the + Corresponding Source. Regardless of what server hosts the + Corresponding Source, you remain obligated to ensure that it is + available for as long as needed to satisfy these requirements. + + e) Convey the object code using peer-to-peer transmission, provided + you inform other peers where the object code and Corresponding + Source of the work are being offered to the general public at no + charge under subsection 6d. + + A separable portion of the object code, whose source code is excluded +from the Corresponding Source as a System Library, need not be +included in conveying the object code work. + + A "User Product" is either (1) a "consumer product", which means any +tangible personal property which is normally used for personal, family, +or household purposes, or (2) anything designed or sold for incorporation +into a dwelling. In determining whether a product is a consumer product, +doubtful cases shall be resolved in favor of coverage. For a particular +product received by a particular user, "normally used" refers to a +typical or common use of that class of product, regardless of the status +of the particular user or of the way in which the particular user +actually uses, or expects or is expected to use, the product. A product +is a consumer product regardless of whether the product has substantial +commercial, industrial or non-consumer uses, unless such uses represent +the only significant mode of use of the product. + + "Installation Information" for a User Product means any methods, +procedures, authorization keys, or other information required to install +and execute modified versions of a covered work in that User Product from +a modified version of its Corresponding Source. The information must +suffice to ensure that the continued functioning of the modified object +code is in no case prevented or interfered with solely because +modification has been made. 
+ + If you convey an object code work under this section in, or with, or +specifically for use in, a User Product, and the conveying occurs as +part of a transaction in which the right of possession and use of the +User Product is transferred to the recipient in perpetuity or for a +fixed term (regardless of how the transaction is characterized), the +Corresponding Source conveyed under this section must be accompanied +by the Installation Information. But this requirement does not apply +if neither you nor any third party retains the ability to install +modified object code on the User Product (for example, the work has +been installed in ROM). + + The requirement to provide Installation Information does not include a +requirement to continue to provide support service, warranty, or updates +for a work that has been modified or installed by the recipient, or for +the User Product in which it has been modified or installed. Access to a +network may be denied when the modification itself materially and +adversely affects the operation of the network or violates the rules and +protocols for communication across the network. + + Corresponding Source conveyed, and Installation Information provided, +in accord with this section must be in a format that is publicly +documented (and with an implementation available to the public in +source code form), and must require no special password or key for +unpacking, reading or copying. + + 7. Additional Terms. + + "Additional permissions" are terms that supplement the terms of this +License by making exceptions from one or more of its conditions. +Additional permissions that are applicable to the entire Program shall +be treated as though they were included in this License, to the extent +that they are valid under applicable law. If additional permissions +apply only to part of the Program, that part may be used separately +under those permissions, but the entire Program remains governed by +this License without regard to the additional permissions. + + When you convey a copy of a covered work, you may at your option +remove any additional permissions from that copy, or from any part of +it. (Additional permissions may be written to require their own +removal in certain cases when you modify the work.) You may place +additional permissions on material, added by you to a covered work, +for which you have or can give appropriate copyright permission. 
+ + Notwithstanding any other provision of this License, for material you +add to a covered work, you may (if authorized by the copyright holders of +that material) supplement the terms of this License with terms: + + a) Disclaiming warranty or limiting liability differently from the + terms of sections 15 and 16 of this License; or + + b) Requiring preservation of specified reasonable legal notices or + author attributions in that material or in the Appropriate Legal + Notices displayed by works containing it; or + + c) Prohibiting misrepresentation of the origin of that material, or + requiring that modified versions of such material be marked in + reasonable ways as different from the original version; or + + d) Limiting the use for publicity purposes of names of licensors or + authors of the material; or + + e) Declining to grant rights under trademark law for use of some + trade names, trademarks, or service marks; or + + f) Requiring indemnification of licensors and authors of that + material by anyone who conveys the material (or modified versions of + it) with contractual assumptions of liability to the recipient, for + any liability that these contractual assumptions directly impose on + those licensors and authors. + + All other non-permissive additional terms are considered "further +restrictions" within the meaning of section 10. If the Program as you +received it, or any part of it, contains a notice stating that it is +governed by this License along with a term that is a further +restriction, you may remove that term. If a license document contains +a further restriction but permits relicensing or conveying under this +License, you may add to a covered work material governed by the terms +of that license document, provided that the further restriction does +not survive such relicensing or conveying. + + If you add terms to a covered work in accord with this section, you +must place, in the relevant source files, a statement of the +additional terms that apply to those files, or a notice indicating +where to find the applicable terms. + + Additional terms, permissive or non-permissive, may be stated in the +form of a separately written license, or stated as exceptions; +the above requirements apply either way. + + 8. Termination. + + You may not propagate or modify a covered work except as expressly +provided under this License. Any attempt otherwise to propagate or +modify it is void, and will automatically terminate your rights under +this License (including any patent licenses granted under the third +paragraph of section 11). + + However, if you cease all violation of this License, then your +license from a particular copyright holder is reinstated (a) +provisionally, unless and until the copyright holder explicitly and +finally terminates your license, and (b) permanently, if the copyright +holder fails to notify you of the violation by some reasonable means +prior to 60 days after the cessation. + + Moreover, your license from a particular copyright holder is +reinstated permanently if the copyright holder notifies you of the +violation by some reasonable means, this is the first time you have +received notice of violation of this License (for any work) from that +copyright holder, and you cure the violation prior to 30 days after +your receipt of the notice. + + Termination of your rights under this section does not terminate the +licenses of parties who have received copies or rights from you under +this License. 
If your rights have been terminated and not permanently +reinstated, you do not qualify to receive new licenses for the same +material under section 10. + + 9. Acceptance Not Required for Having Copies. + + You are not required to accept this License in order to receive or +run a copy of the Program. Ancillary propagation of a covered work +occurring solely as a consequence of using peer-to-peer transmission +to receive a copy likewise does not require acceptance. However, +nothing other than this License grants you permission to propagate or +modify any covered work. These actions infringe copyright if you do +not accept this License. Therefore, by modifying or propagating a +covered work, you indicate your acceptance of this License to do so. + + 10. Automatic Licensing of Downstream Recipients. + + Each time you convey a covered work, the recipient automatically +receives a license from the original licensors, to run, modify and +propagate that work, subject to this License. You are not responsible +for enforcing compliance by third parties with this License. + + An "entity transaction" is a transaction transferring control of an +organization, or substantially all assets of one, or subdividing an +organization, or merging organizations. If propagation of a covered +work results from an entity transaction, each party to that +transaction who receives a copy of the work also receives whatever +licenses to the work the party's predecessor in interest had or could +give under the previous paragraph, plus a right to possession of the +Corresponding Source of the work from the predecessor in interest, if +the predecessor has it or can get it with reasonable efforts. + + You may not impose any further restrictions on the exercise of the +rights granted or affirmed under this License. For example, you may +not impose a license fee, royalty, or other charge for exercise of +rights granted under this License, and you may not initiate litigation +(including a cross-claim or counterclaim in a lawsuit) alleging that +any patent claim is infringed by making, using, selling, offering for +sale, or importing the Program or any portion of it. + + 11. Patents. + + A "contributor" is a copyright holder who authorizes use under this +License of the Program or a work on which the Program is based. The +work thus licensed is called the contributor's "contributor version". + + A contributor's "essential patent claims" are all patent claims +owned or controlled by the contributor, whether already acquired or +hereafter acquired, that would be infringed by some manner, permitted +by this License, of making, using, or selling its contributor version, +but do not include claims that would be infringed only as a +consequence of further modification of the contributor version. For +purposes of this definition, "control" includes the right to grant +patent sublicenses in a manner consistent with the requirements of +this License. + + Each contributor grants you a non-exclusive, worldwide, royalty-free +patent license under the contributor's essential patent claims, to +make, use, sell, offer for sale, import and otherwise run, modify and +propagate the contents of its contributor version. + + In the following three paragraphs, a "patent license" is any express +agreement or commitment, however denominated, not to enforce a patent +(such as an express permission to practice a patent or covenant not to +sue for patent infringement). 
To "grant" such a patent license to a +party means to make such an agreement or commitment not to enforce a +patent against the party. + + If you convey a covered work, knowingly relying on a patent license, +and the Corresponding Source of the work is not available for anyone +to copy, free of charge and under the terms of this License, through a +publicly available network server or other readily accessible means, +then you must either (1) cause the Corresponding Source to be so +available, or (2) arrange to deprive yourself of the benefit of the +patent license for this particular work, or (3) arrange, in a manner +consistent with the requirements of this License, to extend the patent +license to downstream recipients. "Knowingly relying" means you have +actual knowledge that, but for the patent license, your conveying the +covered work in a country, or your recipient's use of the covered work +in a country, would infringe one or more identifiable patents in that +country that you have reason to believe are valid. + + If, pursuant to or in connection with a single transaction or +arrangement, you convey, or propagate by procuring conveyance of, a +covered work, and grant a patent license to some of the parties +receiving the covered work authorizing them to use, propagate, modify +or convey a specific copy of the covered work, then the patent license +you grant is automatically extended to all recipients of the covered +work and works based on it. + + A patent license is "discriminatory" if it does not include within +the scope of its coverage, prohibits the exercise of, or is +conditioned on the non-exercise of one or more of the rights that are +specifically granted under this License. You may not convey a covered +work if you are a party to an arrangement with a third party that is +in the business of distributing software, under which you make payment +to the third party based on the extent of your activity of conveying +the work, and under which the third party grants, to any of the +parties who would receive the covered work from you, a discriminatory +patent license (a) in connection with copies of the covered work +conveyed by you (or copies made from those copies), or (b) primarily +for and in connection with specific products or compilations that +contain the covered work, unless you entered into that arrangement, +or that patent license was granted, prior to 28 March 2007. + + Nothing in this License shall be construed as excluding or limiting +any implied license or other defenses to infringement that may +otherwise be available to you under applicable patent law. + + 12. No Surrender of Others' Freedom. + + If conditions are imposed on you (whether by court order, agreement or +otherwise) that contradict the conditions of this License, they do not +excuse you from the conditions of this License. If you cannot convey a +covered work so as to satisfy simultaneously your obligations under this +License and any other pertinent obligations, then as a consequence you may +not convey it at all. For example, if you agree to terms that obligate you +to collect a royalty for further conveying from those to whom you convey +the Program, the only way you could satisfy both those terms and this +License would be to refrain entirely from conveying the Program. + + 13. Use with the GNU Affero General Public License. 
+ + Notwithstanding any other provision of this License, you have +permission to link or combine any covered work with a work licensed +under version 3 of the GNU Affero General Public License into a single +combined work, and to convey the resulting work. The terms of this +License will continue to apply to the part which is the covered work, +but the special requirements of the GNU Affero General Public License, +section 13, concerning interaction through a network will apply to the +combination as such. + + 14. Revised Versions of this License. + + The Free Software Foundation may publish revised and/or new versions of +the GNU General Public License from time to time. Such new versions will +be similar in spirit to the present version, but may differ in detail to +address new problems or concerns. + + Each version is given a distinguishing version number. If the +Program specifies that a certain numbered version of the GNU General +Public License "or any later version" applies to it, you have the +option of following the terms and conditions either of that numbered +version or of any later version published by the Free Software +Foundation. If the Program does not specify a version number of the +GNU General Public License, you may choose any version ever published +by the Free Software Foundation. + + If the Program specifies that a proxy can decide which future +versions of the GNU General Public License can be used, that proxy's +public statement of acceptance of a version permanently authorizes you +to choose that version for the Program. + + Later license versions may give you additional or different +permissions. However, no additional obligations are imposed on any +author or copyright holder as a result of your choosing to follow a +later version. + + 15. Disclaimer of Warranty. + + THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY +APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT +HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY +OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, +THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR +PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM +IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF +ALL NECESSARY SERVICING, REPAIR OR CORRECTION. + + 16. Limitation of Liability. + + IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING +WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS +THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY +GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE +USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF +DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD +PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), +EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF +SUCH DAMAGES. + + 17. Interpretation of Sections 15 and 16. + + If the disclaimer of warranty and limitation of liability provided +above cannot be given local legal effect according to their terms, +reviewing courts shall apply local law that most closely approximates +an absolute waiver of all civil liability in connection with the +Program, unless a warranty or assumption of liability accompanies a +copy of the Program in return for a fee. 
+
+                     END OF TERMS AND CONDITIONS
+
+            How to Apply These Terms to Your New Programs
+
+  If you develop a new program, and you want it to be of the greatest
+possible use to the public, the best way to achieve this is to make it
+free software which everyone can redistribute and change under these terms.
+
+  To do so, attach the following notices to the program.  It is safest
+to attach them to the start of each source file to most effectively
+state the exclusion of warranty; and each file should have at least
+the "copyright" line and a pointer to where the full notice is found.
+
+    <one line to give the program's name and a brief idea of what it does.>
+    Copyright (C) <year>  <name of author>
+
+    This program is free software: you can redistribute it and/or modify
+    it under the terms of the GNU General Public License as published by
+    the Free Software Foundation, either version 3 of the License, or
+    (at your option) any later version.
+
+    This program is distributed in the hope that it will be useful,
+    but WITHOUT ANY WARRANTY; without even the implied warranty of
+    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+    GNU General Public License for more details.
+
+    You should have received a copy of the GNU General Public License
+    along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+Also add information on how to contact you by electronic and paper mail.
+
+  If the program does terminal interaction, make it output a short
+notice like this when it starts in an interactive mode:
+
+    <program>  Copyright (C) <year>  <name of author>
+    This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
+    This is free software, and you are welcome to redistribute it
+    under certain conditions; type `show c' for details.
+
+The hypothetical commands `show w' and `show c' should show the appropriate
+parts of the General Public License.  Of course, your program's commands
+might be different; for a GUI interface, you would use an "about box".
+
+  You should also get your employer (if you work as a programmer) or school,
+if any, to sign a "copyright disclaimer" for the program, if necessary.
+For more information on this, and how to apply and follow the GNU GPL, see
+<http://www.gnu.org/licenses/>.
+
+  The GNU General Public License does not permit incorporating your program
+into proprietary programs.  If your program is a subroutine library, you
+may consider it more useful to permit linking proprietary applications with
+the library.  If this is what you want to do, use the GNU Lesser General
+Public License instead of this License.  But first, please read
+<http://www.gnu.org/philosophy/why-not-lgpl.html>.
diff --git a/LICENSE b/LICENSE
new file mode 100644
index 0000000..65c5ca8
--- /dev/null
+++ b/LICENSE
@@ -0,0 +1,165 @@
+                   GNU LESSER GENERAL PUBLIC LICENSE
+                       Version 3, 29 June 2007
+
+ Copyright (C) 2007 Free Software Foundation, Inc. <http://fsf.org/>
+ Everyone is permitted to copy and distribute verbatim copies
+ of this license document, but changing it is not allowed.
+
+
+  This version of the GNU Lesser General Public License incorporates
+the terms and conditions of version 3 of the GNU General Public
+License, supplemented by the additional permissions listed below.
+
+  0. Additional Definitions.
+
+  As used herein, "this License" refers to version 3 of the GNU Lesser
+General Public License, and the "GNU GPL" refers to version 3 of the GNU
+General Public License.
+
+  "The Library" refers to a covered work governed by this License,
+other than an Application or a Combined Work as defined below.
+
+  An "Application" is any work that makes use of an interface provided
+by the Library, but which is not otherwise based on the Library.
+Defining a subclass of a class defined by the Library is deemed a mode +of using an interface provided by the Library. + + A "Combined Work" is a work produced by combining or linking an +Application with the Library. The particular version of the Library +with which the Combined Work was made is also called the "Linked +Version". + + The "Minimal Corresponding Source" for a Combined Work means the +Corresponding Source for the Combined Work, excluding any source code +for portions of the Combined Work that, considered in isolation, are +based on the Application, and not on the Linked Version. + + The "Corresponding Application Code" for a Combined Work means the +object code and/or source code for the Application, including any data +and utility programs needed for reproducing the Combined Work from the +Application, but excluding the System Libraries of the Combined Work. + + 1. Exception to Section 3 of the GNU GPL. + + You may convey a covered work under sections 3 and 4 of this License +without being bound by section 3 of the GNU GPL. + + 2. Conveying Modified Versions. + + If you modify a copy of the Library, and, in your modifications, a +facility refers to a function or data to be supplied by an Application +that uses the facility (other than as an argument passed when the +facility is invoked), then you may convey a copy of the modified +version: + + a) under this License, provided that you make a good faith effort to + ensure that, in the event an Application does not supply the + function or data, the facility still operates, and performs + whatever part of its purpose remains meaningful, or + + b) under the GNU GPL, with none of the additional permissions of + this License applicable to that copy. + + 3. Object Code Incorporating Material from Library Header Files. + + The object code form of an Application may incorporate material from +a header file that is part of the Library. You may convey such object +code under terms of your choice, provided that, if the incorporated +material is not limited to numerical parameters, data structure +layouts and accessors, or small macros, inline functions and templates +(ten or fewer lines in length), you do both of the following: + + a) Give prominent notice with each copy of the object code that the + Library is used in it and that the Library and its use are + covered by this License. + + b) Accompany the object code with a copy of the GNU GPL and this license + document. + + 4. Combined Works. + + You may convey a Combined Work under terms of your choice that, +taken together, effectively do not restrict modification of the +portions of the Library contained in the Combined Work and reverse +engineering for debugging such modifications, if you also do each of +the following: + + a) Give prominent notice with each copy of the Combined Work that + the Library is used in it and that the Library and its use are + covered by this License. + + b) Accompany the Combined Work with a copy of the GNU GPL and this license + document. + + c) For a Combined Work that displays copyright notices during + execution, include the copyright notice for the Library among + these notices, as well as a reference directing the user to the + copies of the GNU GPL and this license document. 
+ + d) Do one of the following: + + 0) Convey the Minimal Corresponding Source under the terms of this + License, and the Corresponding Application Code in a form + suitable for, and under terms that permit, the user to + recombine or relink the Application with a modified version of + the Linked Version to produce a modified Combined Work, in the + manner specified by section 6 of the GNU GPL for conveying + Corresponding Source. + + 1) Use a suitable shared library mechanism for linking with the + Library. A suitable mechanism is one that (a) uses at run time + a copy of the Library already present on the user's computer + system, and (b) will operate properly with a modified version + of the Library that is interface-compatible with the Linked + Version. + + e) Provide Installation Information, but only if you would otherwise + be required to provide such information under section 6 of the + GNU GPL, and only to the extent that such information is + necessary to install and execute a modified version of the + Combined Work produced by recombining or relinking the + Application with a modified version of the Linked Version. (If + you use option 4d0, the Installation Information must accompany + the Minimal Corresponding Source and Corresponding Application + Code. If you use option 4d1, you must provide the Installation + Information in the manner specified by section 6 of the GNU GPL + for conveying Corresponding Source.) + + 5. Combined Libraries. + + You may place library facilities that are a work based on the +Library side by side in a single library together with other library +facilities that are not Applications and are not covered by this +License, and convey such a combined library under terms of your +choice, if you do both of the following: + + a) Accompany the combined library with a copy of the same work based + on the Library, uncombined with any other library facilities, + conveyed under the terms of this License. + + b) Give prominent notice with the combined library that part of it + is a work based on the Library, and explaining where to find the + accompanying uncombined form of the same work. + + 6. Revised Versions of the GNU Lesser General Public License. + + The Free Software Foundation may publish revised and/or new versions +of the GNU Lesser General Public License from time to time. Such new +versions will be similar in spirit to the present version, but may +differ in detail to address new problems or concerns. + + Each version is given a distinguishing version number. If the +Library as you received it specifies that a certain numbered version +of the GNU Lesser General Public License "or any later version" +applies to it, you have the option of following the terms and +conditions either of that published version or of any later version +published by the Free Software Foundation. If the Library as you +received it does not specify a version number of the GNU Lesser +General Public License, you may choose any version of the GNU Lesser +General Public License ever published by the Free Software Foundation. + + If the Library as you received it specifies that a proxy can decide +whether future versions of the GNU Lesser General Public License shall +apply, that proxy's public statement of acceptance of any version is +permanent authorization for you to choose that version for the +Library. 
diff --git a/MANIFEST.in b/MANIFEST.in
new file mode 100644
index 0000000..e113904
--- /dev/null
+++ b/MANIFEST.in
@@ -0,0 +1,4 @@
+include *.rst
+include COPYING
+include requirements.txt
+include scripts/*
diff --git a/README.rst b/README.rst
new file mode 100644
index 0000000..73e6bd4
--- /dev/null
+++ b/README.rst
@@ -0,0 +1,37 @@
+========
+Synopsis
+========
+
+This module helps with the processing and analysis of data from the `FAAM <http://www.faam.ac.uk/>`_ aircraft.
+
+
+==========
+Motivation
+==========
+
+The faampy repository provides Python modules for working with data from the FAAM aircraft. Its main goals are to minimize code duplication and to increase the efficiency of data analysis: spend less time data wrangling and more time on analysis.
+
+
+============
+Installation
+============
+
+The module is installed in the usual way::
+
+    git clone https://github.com/ncasuk/faampy.git
+    python setup.py build
+    sudo python setup.py install
+
+
+============
+Contributors
+============
+
+* Axel Wellpott (FAAM)
+
+
+=======
+License
+=======
+
+faampy is licensed under the GNU Lesser General Public License (LGPLv3).
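A quick way to confirm that the install above worked is to import the package and print its version string. This one-liner is only a minimal check and assumes nothing beyond the package exposing ``__version__`` (and ``__status__``), the two attributes that docs/conf.py below reads::

    python -c "import faampy; print(faampy.__version__)"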
diff --git a/docs/Makefile b/docs/Makefile
new file mode 100644
index 0000000..cc65a00
--- /dev/null
+++ b/docs/Makefile
@@ -0,0 +1,225 @@
+# Makefile for Sphinx documentation
+#
+
+# You can set these variables from the command line.
+SPHINXOPTS    =
+SPHINXBUILD   = sphinx-build
+PAPER         =
+BUILDDIR      = _build
+
+# Internal variables.
+PAPEROPT_a4     = -D latex_paper_size=a4
+PAPEROPT_letter = -D latex_paper_size=letter
+ALLSPHINXOPTS   = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
+# the i18n builder cannot share the environment and doctrees with the others
+I18NSPHINXOPTS  = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
+
+.PHONY: help
+help:
+	@echo "Please use \`make <target>' where <target> is one of"
+	@echo "  html       to make standalone HTML files"
+	@echo "  dirhtml    to make HTML files named index.html in directories"
+	@echo "  singlehtml to make a single large HTML file"
+	@echo "  pickle     to make pickle files"
+	@echo "  json       to make JSON files"
+	@echo "  htmlhelp   to make HTML files and a HTML help project"
+	@echo "  qthelp     to make HTML files and a qthelp project"
+	@echo "  applehelp  to make an Apple Help Book"
+	@echo "  devhelp    to make HTML files and a Devhelp project"
+	@echo "  epub       to make an epub"
+	@echo "  epub3      to make an epub3"
+	@echo "  latex      to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
+	@echo "  latexpdf   to make LaTeX files and run them through pdflatex"
+	@echo "  latexpdfja to make LaTeX files and run them through platex/dvipdfmx"
+	@echo "  text       to make text files"
+	@echo "  man        to make manual pages"
+	@echo "  texinfo    to make Texinfo files"
+	@echo "  info       to make Texinfo files and run them through makeinfo"
+	@echo "  gettext    to make PO message catalogs"
+	@echo "  changes    to make an overview of all changed/added/deprecated items"
+	@echo "  xml        to make Docutils-native XML files"
+	@echo "  pseudoxml  to make pseudoxml-XML files for display purposes"
+	@echo "  linkcheck  to check all external links for integrity"
+	@echo "  doctest    to run all doctests embedded in the documentation (if enabled)"
+	@echo "  coverage   to run coverage check of the documentation (if enabled)"
+	@echo "  dummy      to check syntax errors of document sources"
+
+.PHONY: clean
+clean:
+	rm -rf $(BUILDDIR)/*
+
+.PHONY: html
+html:
+	$(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
+	@echo
+	@echo "Build finished. The HTML pages are in $(BUILDDIR)/html."
+
+.PHONY: dirhtml
+dirhtml:
+	$(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml
+	@echo
+	@echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml."
+
+.PHONY: singlehtml
+singlehtml:
+	$(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml
+	@echo
+	@echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml."
+
+.PHONY: pickle
+pickle:
+	$(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle
+	@echo
+	@echo "Build finished; now you can process the pickle files."
+
+.PHONY: json
+json:
+	$(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json
+	@echo
+	@echo "Build finished; now you can process the JSON files."
+
+.PHONY: htmlhelp
+htmlhelp:
+	$(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp
+	@echo
+	@echo "Build finished; now you can run HTML Help Workshop with the" \
+	      ".hhp project file in $(BUILDDIR)/htmlhelp."
+
+.PHONY: qthelp
+qthelp:
+	$(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp
+	@echo
+	@echo "Build finished; now you can run "qcollectiongenerator" with the" \
+	      ".qhcp project file in $(BUILDDIR)/qthelp, like this:"
+	@echo "# qcollectiongenerator $(BUILDDIR)/qthelp/faampy.qhcp"
+	@echo "To view the help file:"
+	@echo "# assistant -collectionFile $(BUILDDIR)/qthelp/faampy.qhc"
+
+.PHONY: applehelp
+applehelp:
+	$(SPHINXBUILD) -b applehelp $(ALLSPHINXOPTS) $(BUILDDIR)/applehelp
+	@echo
+	@echo "Build finished. The help book is in $(BUILDDIR)/applehelp."
+	@echo "N.B. You won't be able to view it unless you put it in" \
+	      "~/Library/Documentation/Help or install it in your application" \
+	      "bundle."
+
+.PHONY: devhelp
+devhelp:
+	$(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp
+	@echo
+	@echo "Build finished."
+	@echo "To view the help file:"
+	@echo "# mkdir -p $$HOME/.local/share/devhelp/faampy"
+	@echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/faampy"
+	@echo "# devhelp"
+
+.PHONY: epub
+epub:
+	$(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub
+	@echo
+	@echo "Build finished. The epub file is in $(BUILDDIR)/epub."
+
+.PHONY: epub3
+epub3:
+	$(SPHINXBUILD) -b epub3 $(ALLSPHINXOPTS) $(BUILDDIR)/epub3
+	@echo
+	@echo "Build finished. The epub3 file is in $(BUILDDIR)/epub3."
+
+.PHONY: latex
+latex:
+	$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
+	@echo
+	@echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex."
+	@echo "Run \`make' in that directory to run these through (pdf)latex" \
+	      "(use \`make latexpdf' here to do that automatically)."
+
+.PHONY: latexpdf
+latexpdf:
+	$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
+	@echo "Running LaTeX files through pdflatex..."
+	$(MAKE) -C $(BUILDDIR)/latex all-pdf
+	@echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
+
+.PHONY: latexpdfja
+latexpdfja:
+	$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
+	@echo "Running LaTeX files through platex and dvipdfmx..."
+	$(MAKE) -C $(BUILDDIR)/latex all-pdf-ja
+	@echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
+
+.PHONY: text
+text:
+	$(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text
+	@echo
+	@echo "Build finished. The text files are in $(BUILDDIR)/text."
+
+.PHONY: man
+man:
+	$(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man
+	@echo
+	@echo "Build finished. The manual pages are in $(BUILDDIR)/man."
+
+.PHONY: texinfo
+texinfo:
+	$(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
+	@echo
+	@echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo."
+	@echo "Run \`make' in that directory to run these through makeinfo" \
+	      "(use \`make info' here to do that automatically)."
+
+.PHONY: info
+info:
+	$(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
+	@echo "Running Texinfo files through makeinfo..."
+	make -C $(BUILDDIR)/texinfo info
+	@echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo."
+
+.PHONY: gettext
+gettext:
+	$(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale
+	@echo
+	@echo "Build finished. The message catalogs are in $(BUILDDIR)/locale."
+
+.PHONY: changes
+changes:
+	$(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes
+	@echo
+	@echo "The overview file is in $(BUILDDIR)/changes."
+
+.PHONY: linkcheck
+linkcheck:
+	$(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck
+	@echo
+	@echo "Link check complete; look for any errors in the above output " \
+	      "or in $(BUILDDIR)/linkcheck/output.txt."
+
+.PHONY: doctest
+doctest:
+	$(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest
+	@echo "Testing of doctests in the sources finished, look at the " \
+	      "results in $(BUILDDIR)/doctest/output.txt."
+
+.PHONY: coverage
+coverage:
+	$(SPHINXBUILD) -b coverage $(ALLSPHINXOPTS) $(BUILDDIR)/coverage
+	@echo "Testing of coverage in the sources finished, look at the " \
+	      "results in $(BUILDDIR)/coverage/python.txt."
+
+.PHONY: xml
+xml:
+	$(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml
+	@echo
+	@echo "Build finished. The XML files are in $(BUILDDIR)/xml."
+
+.PHONY: pseudoxml
+pseudoxml:
+	$(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml
+	@echo
+	@echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml."
+
+.PHONY: dummy
+dummy:
+	$(SPHINXBUILD) -b dummy $(ALLSPHINXOPTS) $(BUILDDIR)/dummy
+	@echo
+	@echo "Build finished. Dummy builder generates no files."
diff --git a/docs/cli_tools.rst b/docs/cli_tools.rst
new file mode 100644
index 0000000..1415a27
--- /dev/null
+++ b/docs/cli_tools.rst
@@ -0,0 +1,91 @@
+==================
+Command line tools
+==================
+
+The faampy module provides a number of command line tools. All of these commands are called via::
+
+    faampy SUBCOMMAND [OPTIONS] ARGUMENTS
+
+Typing ``faampy`` on the command line on its own prints a list of the available subcommands.
+
+
+nimrod_to_nc
+~~~~~~~~~~~~
+.. argparse::
+    :module: faampy.data_io.nimrod_to_nc
+    :func: _argparser
+    :prog: faampy nimrod_to_nc
+    :nodefault:
+
+nc_to_gpx
+~~~~~~~~~
+.. argparse::
+    :module: faampy.mapping.nc_to_gpx
+    :func: _argparser
+    :prog: faampy nc_to_gpx
+    :nodefault:
+
+ge_ncas_airquality
+~~~~~~~~~~~~~~~~~~
+
+.. argparse::
+    :module: faampy.mapping.ge_ncas_airquality
+    :func: _argparser
+    :prog: faampy ge_ncas_airquality
+    :nodefault:
+
+ge_nimrod_to_kmz
+~~~~~~~~~~~~~~~~
+.. argparse::
+    :module: faampy.mapping.ge_nimrod_to_kmz
+    :func: _argparser
+    :prog: faampy ge_nimrod_to_kmz
+    :nodefault:
+
+ge_photo_album
+~~~~~~~~~~~~~~
+.. argparse::
+    :module: faampy.mapping.ge_photo_album
+    :func: _argparser
+    :prog: faampy ge_photo_album
+    :nodefault:
+
+ge_ncvar_to_kml
+~~~~~~~~~~~~~~~
+.. argparse::
+    :module: faampy.mapping.ge_ncvar_to_kml
+    :func: _argparser
+    :prog: faampy ge_ncvar_to_kml
+    :nodefault:
+
+ge_was_to_kmz
+~~~~~~~~~~~~~
+.. argparse::
+    :module: faampy.mapping.ge_was_to_kmz
+    :func: _argparser
+    :prog: faampy ge_was_to_kmz
+    :nodefault:
+
+world_map
+~~~~~~~~~
+.. argparse::
+    :module: faampy.mapping.world_map
+    :func: _argparser
+    :prog: faampy world_map
+    :nodefault:
+
+sat_tracker
+~~~~~~~~~~~
+.. argparse::
+    :module: faampy.mapping.sat_tracker
+    :func: _argparser
+    :prog: faampy sat_tracker
+    :nodefault:
+
+plt_quicklooks
+~~~~~~~~~~~~~~
+.. argparse::
+    :module: faampy.plotting.quicklooks
+    :func: _argparser
+    :prog: faampy plt_quicklooks
+    :nodefault:
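All of the subcommands above are argparse-based (each module exposes an ``_argparser`` function, as the directives show), so every one of them should print its own usage and options when given the standard help flag. ``nc_to_gpx`` below is just one example; any subcommand name from the list works the same way::

    faampy nc_to_gpx --help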
diff --git a/docs/conf.py b/docs/conf.py
new file mode 100644
index 0000000..8dbedf9
--- /dev/null
+++ b/docs/conf.py
@@ -0,0 +1,375 @@
+# -*- coding: utf-8 -*-
+#
+# faampy documentation build configuration file, created by
+# sphinx-quickstart on Sat Dec  3 20:57:34 2016.
+#
+# This file is execfile()d with the current directory set to its
+# containing dir.
+#
+# Note that not all possible configuration values are present in this
+# autogenerated file.
+#
+# All configuration values have a default; values that are commented out
+# serve to show the default.
+
+# If extensions (or modules to document with autodoc) are in another directory,
+# add these directories to sys.path here. If the directory is relative to the
+# documentation root, use os.path.abspath to make it absolute, like shown here.
+#
+import os
+import sys
+sys.path.insert(0, os.path.abspath('..'))
+
+
+# The heavy third-party dependencies below are replaced with mocks so that
+# sphinx.ext.autodoc can import the faampy modules even on machines (such as
+# documentation build servers) where these libraries are not installed.
+MOCK_MODULES = ['numpy', 'scipy', 'matplotlib', 'matplotlib.pyplot', 'numpy.ma', 'matplotlib.ticker',
+                'matplotlib.image', 'matplotlib.dates', 'matplotlib.cbook', 'PIL', 'PIL.ExifTags', 'ephem',
+                'mpl_toolkits.basemap.pyproj', 'Basemap', 'json', 'urllib2', 'math', 'mpl_toolkits', 'mpl_toolkits.basemap', 'mapnik',
+                'pandas', 'iris', 'netCDF4', 'osgeo', 'haversine', 'pyspatialite', 'pyspatialite.dbapi2',
+                'osgeo.ogr', 'gdal', 'simplekml', 'rdp', 'seaborn', '_geoslib', 'proj']
+
+from mock import Mock as MagicMock
+
+class Mock(MagicMock):
+    # Any attribute lookup on a mocked module returns a further mock, so
+    # nested imports such as "from matplotlib import pyplot" keep working.
+    @classmethod
+    def __getattr__(cls, name):
+        return MagicMock()
+
+sys.modules.update((mod_name, Mock()) for mod_name in MOCK_MODULES)
+
+
+import faampy
+
+# -- General configuration ------------------------------------------------
+
+# If your documentation needs a minimal Sphinx version, state it here.
+#
+# needs_sphinx = '1.0'
+
+# Add any Sphinx extension module names here, as strings. They can be
+# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
+# ones.
+extensions = [
+    'sphinx.ext.autodoc',
+    'sphinx.ext.todo',
+    'sphinx.ext.coverage',
+    'sphinx.ext.viewcode',
+#    'sphinxcontrib.autoprogram',
+    'sphinxarg.ext',
+#    'sphinx.ext.imgmath',
+#    'sphinxcontrib.programoutput',
+]
+
+# Add any paths that contain templates here, relative to this directory.
+templates_path = ['_templates']
+
+# The suffix(es) of source filenames.
+# You can specify multiple suffixes as a list of strings:
+#
+# source_suffix = ['.rst', '.md']
+source_suffix = '.rst'
+
+# The encoding of source files.
+#
+# source_encoding = 'utf-8-sig'
+
+# The master toctree document.
+master_doc = 'index'
+
+# General information about the project.
+project = u'faampy'
+copyright = u'2017, Axel Wellpott'
+author = u'Axel Wellpott'
+
+# The version info for the project you're documenting, acts as replacement for
+# |version| and |release|, also used in various other places throughout the
+# built documents.
+#
+# The short X.Y version.
+version = faampy.__version__
+# The full version, including alpha/beta/rc tags.
+release = faampy.__version__ + " (" + faampy.__status__ + ")"
+
+
+# The language for content autogenerated by Sphinx. Refer to documentation
+# for a list of supported languages.
+# +# This is also used if you do content translation via gettext catalogs. +# Usually you set "language" from the command line for these cases. +language = None + +# There are two options for replacing |today|: either, you set today to some +# non-false value, then it is used: +# +# today = '' +# +# Else, today_fmt is used as the format for a strftime call. +# +# today_fmt = '%B %d, %Y' + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +# This patterns also effect to html_static_path and html_extra_path +exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store'] + +# The reST default role (used for this markup: `text`) to use for all +# documents. +# +# default_role = None + +# If true, '()' will be appended to :func: etc. cross-reference text. +# +# add_function_parentheses = True + +# If true, the current module name will be prepended to all description +# unit titles (such as .. function::). +# +# add_module_names = True + +# If true, sectionauthor and moduleauthor directives will be shown in the +# output. They are ignored by default. +# +# show_authors = False + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = 'sphinx' + +# A list of ignored prefixes for module index sorting. +# modindex_common_prefix = [] + +# If true, keep warnings as "system message" paragraphs in the built documents. +# keep_warnings = False + +# If true, `todo` and `todoList` produce output, else they produce nothing. +todo_include_todos = True + + +# -- Options for HTML output ---------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +# +html_theme = 'sphinx_rtd_theme' + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. +# +html_theme_options = {} + +# Add any paths that contain custom themes here, relative to this directory. +html_theme_path = ["_themes", ] + +# The name for this set of Sphinx documents. +# " v documentation" by default. +# +# html_title = u'faampy v0.1.1' + +# A shorter title for the navigation bar. Default is the same as html_title. +# +# html_short_title = None + +# The name of an image file (relative to this directory) to place at the top +# of the sidebar. +# +# html_logo = None + +# The name of an image file (relative to this directory) to use as a favicon of +# the docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 +# pixels large. +# +# html_favicon = None + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ['_static'] + +# Add any extra paths that contain custom files (such as robots.txt or +# .htaccess) here, relative to this directory. These files are copied +# directly to the root of the documentation. +# +# html_extra_path = [] + +# If not None, a 'Last updated on:' timestamp is inserted at every page +# bottom, using the given strftime format. +# The empty string is equivalent to '%b %d, %Y'. +# +# html_last_updated_fmt = None + +# If true, SmartyPants will be used to convert quotes and dashes to +# typographically correct entities. +# +# html_use_smartypants = True + +# Custom sidebar templates, maps document names to template names. 
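+# (A hypothetical illustration only, not a setting this project uses; the
+# template names are the standard ones that ship with Sphinx:
+# html_sidebars = {'**': ['globaltoc.html', 'sourcelink.html', 'searchbox.html']})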
+# +# html_sidebars = {} + +# Additional templates that should be rendered to pages, maps page names to +# template names. +# +# html_additional_pages = {} + +# If false, no module index is generated. +# +# html_domain_indices = True + +# If false, no index is generated. +# +# html_use_index = True + +# If true, the index is split into individual pages for each letter. +# +# html_split_index = False + +# If true, links to the reST sources are added to the pages. +# +# html_show_sourcelink = True + +# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. +# +# html_show_sphinx = True + +# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. +# +# html_show_copyright = True + +# If true, an OpenSearch description file will be output, and all pages will +# contain a tag referring to it. The value of this option must be the +# base URL from which the finished HTML is served. +# +# html_use_opensearch = '' + +# This is the file name suffix for HTML files (e.g. ".xhtml"). +# html_file_suffix = None + +# Language to be used for generating the HTML full-text search index. +# Sphinx supports the following languages: +# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' +# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr', 'zh' +# +# html_search_language = 'en' + +# A dictionary with options for the search language support, empty by default. +# 'ja' uses this config value. +# 'zh' user can custom change `jieba` dictionary path. +# +# html_search_options = {'type': 'default'} + +# The name of a javascript file (relative to the configuration directory) that +# implements a search results scorer. If empty, the default will be used. +# +# html_search_scorer = 'scorer.js' + +# Output file base name for HTML help builder. +htmlhelp_basename = 'faampydoc' + +# -- Options for LaTeX output --------------------------------------------- + +latex_elements = { + # The paper size ('letterpaper' or 'a4paper'). + # + # 'papersize': 'letterpaper', + + # The font size ('10pt', '11pt' or '12pt'). + # + # 'pointsize': '10pt', + + # Additional stuff for the LaTeX preamble. + # + # 'preamble': '', + + # Latex figure (float) alignment + # + # 'figure_align': 'htbp', +} + +# Grouping the document tree into LaTeX files. List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). +latex_documents = [ + (master_doc, 'faampy.tex', u'faampy Documentation', + u'Axel Wellpott', 'manual'), +] + +# The name of an image file (relative to this directory) to place at the top of +# the title page. +# +# latex_logo = None + +# For "manual" documents, if this is true, then toplevel headings are parts, +# not chapters. +# +# latex_use_parts = False + +# If true, show page references after internal links. +# +# latex_show_pagerefs = False + +# If true, show URL addresses after external links. +# +# latex_show_urls = False + +# Documents to append as an appendix to all manuals. +# +# latex_appendices = [] + +# It false, will not define \strong, \code, itleref, \crossref ... but only +# \sphinxstrong, ..., \sphinxtitleref, ... To help avoid clash with user added +# packages. +# +# latex_keep_old_macro_names = True + +# If false, no module index is generated. +# +# latex_domain_indices = True + + +# -- Options for manual page output --------------------------------------- + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). 
+man_pages = [
+    (master_doc, 'faampy', u'faampy Documentation',
+     [author], 1)
+]
+
+# If true, show URL addresses after external links.
+#
+# man_show_urls = False
+
+
+# -- Options for Texinfo output -------------------------------------------
+
+# Grouping the document tree into Texinfo files. List of tuples
+# (source start file, target name, title, author,
+#  dir menu entry, description, category)
+texinfo_documents = [
+    (master_doc, 'faampy', u'faampy Documentation',
+     author, 'faampy', 'Modules for handling FAAM data.',
+     'Miscellaneous'),
+]
+
+# Documents to append as an appendix to all manuals.
+#
+# texinfo_appendices = []
+
+# If false, no module index is generated.
+#
+# texinfo_domain_indices = True
+
+# How to display URL addresses: 'footnote', 'no', or 'inline'.
+#
+# texinfo_show_urls = 'footnote'
+
+# If true, do not generate a @detailmenu in the "Top" node's menu.
+#
+# texinfo_no_detailmenu = False
diff --git a/docs/faam_data.rst b/docs/faam_data.rst
new file mode 100644
index 0000000..fcac918
--- /dev/null
+++ b/docs/faam_data.rst
@@ -0,0 +1,17 @@
+
+============
+FAAM_Dataset
+============
+
+The FAAM_Dataset class handles the core_faam*nc files, smoothing out the
+reading process of the data and ensuring that older files are read in the
+same way as newer ones. The class mimics the behaviour of the
+netCDF4.Dataset class.
+
+A nifty method of the class is merge, which enables you to merge data from a
+separate file. The data type that can be merged is a numpy.recarray. The
+timestamp of the FAAM_Dataset is used as the index for the procedure, and
+gaps in the recarray are taken care of.
+
+Another convenient option is exporting the Dataset to a pandas DataFrame,
+which gives you access to all the amazing features of pandas. Since pandas
+cannot deal with multidimensional arrays, only the first measurement within
+a row is used for the DataFrame.
+
+
+.. automodule:: faampy.core.faam_data
+   :members: FAAM_Dataset
+
+
diff --git a/docs/full_list.rst b/docs/full_list.rst
new file mode 100644
index 0000000..08769e5
--- /dev/null
+++ b/docs/full_list.rst
@@ -0,0 +1,15 @@
+
+Full List
+=========
+
+
+faampy.utils
+------------
+
+.. automodule:: faampy.utils.file_info
+   :members: File_Info
+
+.. automodule:: faampy.utils.file_list
+   :members: File_List
+
+
diff --git a/docs/index.rst b/docs/index.rst
new file mode 100644
index 0000000..45ca8af
--- /dev/null
+++ b/docs/index.rst
@@ -0,0 +1,29 @@
+.. faampy documentation master file, created by
+   sphinx-quickstart on Sat Dec 3 20:57:34 2016.
+   You can adapt this file completely to your liking, but it should at least
+   contain the root `toctree` directive.
+
+Welcome to faampy's documentation!
+==================================
+
+Contents:
+
+.. toctree::
+   :maxdepth: 3
+   :glob:
+
+   intro
+   installation
+   faam_data
+   cli_tools
+   recipe/*
+   full_list
+
+
+Indices and tables
+==================
+
+* :ref:`genindex`
+* :ref:`modindex`
+* :ref:`search`
+
diff --git a/docs/installation.rst b/docs/installation.rst
new file mode 100644
index 0000000..4cd06e0
--- /dev/null
+++ b/docs/installation.rst
@@ -0,0 +1,27 @@
+
+Installing faampy
+=================
+
+Installation of faampy is done in the usual way using the setup script::
+
+    git clone https://github.com/ncasuk/faampy.git
+    python setup.py build
+    sudo python setup.py install
+
+So far the module has only been tested on Linux machines, and most of the
+code development has been done with python 2.7. However, the plan is to make
+faampy python3 compatible and platform independent.
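+
+As a quick check that the installation succeeded you can, for example, print
+the module version from the command line (``faampy.__version__`` is also used
+by the project's Sphinx configuration, so it is expected to exist)::
+
+    python -c "import faampy; print(faampy.__version__)"
+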
+
+
+Example flight data, databases, ...
+-----------------------------------
+
+Example data and databases of flight tracks are available for download. After
+installing the faampy module you can run::
+
+    faampy data_download ZIP_PASSWORD
+
+from the command line. This will download a zip file and copy its contents to
+a 'faampy_data' directory in your $HOME directory. For the moment the zip
+file that you download is password protected; please contact me if you think
+you need the data and I will give you the password.
+
+
+Disclaimer
+----------
+
+faampy is in its early stages and has not been thoroughly tested. More
+modules will be added in the near future. A backlog of modules exists that
+have already been written but still need to be tidied up before being added
+to the repository.
diff --git a/docs/intro.rst b/docs/intro.rst
new file mode 100644
index 0000000..afb2033
--- /dev/null
+++ b/docs/intro.rst
@@ -0,0 +1,5 @@
+==========
+Motivation
+==========
+
+The faampy repository contains a number of python modules and scripts that
+handle data from the `FAAM `_ aircraft. The goal of faampy is to provide
+useful and stable tools that deal with many common tasks. It focuses on
+general utilities rather than very specific scientific problems. From
+experience, many tasks (data merging, mapping, etc.) are common no matter
+which background the user has (Aerosol, Chemistry, Cloud Physics, ...).
+faampy is meant to be for the FAAM community **by** the FAAM community.
+Therefore users are encouraged to report/fix bugs and send in suggestions
+for improvements.
diff --git a/docs/recipe/img/b991_flight_track.png b/docs/recipe/img/b991_flight_track.png
new file mode 100644
index 0000000..74bc39b
Binary files /dev/null and b/docs/recipe/img/b991_flight_track.png differ
diff --git a/docs/recipe/recipes_01_core.rst b/docs/recipe/recipes_01_core.rst
new file mode 100644
index 0000000..85277da
--- /dev/null
+++ b/docs/recipe/recipes_01_core.rst
@@ -0,0 +1,17 @@
+
+Recipe - FAAM core
+==================
+
+To come.
+
+
+Batch processing
+----------------
+
+To come.
+
+
+Data Mining
+-----------
+
+To come.
\ No newline at end of file
diff --git a/docs/recipe/recipes_02_post_flight_analysis.rst b/docs/recipe/recipes_02_post_flight_analysis.rst
new file mode 100644
index 0000000..2c9f5b6
--- /dev/null
+++ b/docs/recipe/recipes_02_post_flight_analysis.rst
@@ -0,0 +1,185 @@
+
+
+Recipe - Post Flight Analysis
+=============================
+
+This is an example of what the post flight analysis for a typical chemistry
+FAAM flight could look like.
+
+The data we are using are from the "Into the Blue" flight b991 on the 24th
+October 2016. This flight took us up and down the west coast between
+Morecambe and Wales, sampling "plumes" that originated from the
+Manchester/Liverpool area.
+
+.. image:: ./img/b991_flight_track.png
+
+.. warning:: All the provided chemistry data are preliminary and
+   uncalibrated. Therefore the data are not suitable for scientific analysis.
+
+
+FAAM Dataset
+------------
+
+At the start we need to import a number of modules and define a few variables
+that we need in later steps.
+
+.. code-block:: python
+
+    import datetime
+    import os
+    import numpy as np
+    import pandas as pd
+    import faampy
+    from faampy.core.faam_data import FAAM_Dataset
+
+    year, month, day = 2016, 10, 24
+    FID = 'b991'
+
+Reading the data from the NOx instrument into a pandas.DataFrame.
+
+.. code-block:: python
+
+    # define the input data file
+    nox_file = os.path.join(faampy.FAAMPY_DATA_PATH,
+                            'b991',
+                            'chem_data',
+                            'NOx_161024_090507')
+
+    # define the function that calculates the timestamp; the time column
+    # holds the fraction of the day
+    nox_dateparse = lambda x: pd.datetime(year, month, day) + \
+        datetime.timedelta(seconds=int((float(x) % 1)*86400.))
+
+    df_nox = pd.read_csv(nox_file, parse_dates=[0], date_parser=nox_dateparse)
+    df_nox = df_nox.set_index('TheTime')  # Setting index
+    t = df_nox.index.values
+    df_nox['timestamp'] = t.astype('datetime64[s]')  # Converting index data type
+    df_nox = df_nox[['timestamp', 'no_conc', 'no2_conc', 'nox_conc']]
+    df_nox[df_nox < 0] = np.nan
+
+
+Now reading in the FGGA data.
+
+.. code-block:: python
+
+    fgga_file = os.path.join(faampy.FAAMPY_DATA_PATH,
+                             'b991',
+                             'chem_data',
+                             'FGGA_20161024_092223_B991.txt')
+
+    fgga_dateparse = lambda x: pd.datetime.utcfromtimestamp(int(x))
+    fgga_names = ['identifier', 'packet_length', 'timestamp', 'ptp_sync',
+                  'MFM', 'flight_num', 'CPU_Load', 'USB_disk_space', 'ch4', 'co2',
+                  'h2o', 'press_torr', 'temp_c', 'fit_flag', 'rda_usec',
+                  'rdb_usec', 'ch4_ppb', 'co2_ppm', 'MFC_1_absolute_pressure',
+                  'MFC_1_temperature', 'MFC_1volumetic_flow', 'MFC_1mass_flow',
+                  'MFC_1set_point', 'V1', 'V2', 'V3', 'V4', 'restart_FGGA',
+                  'FGGA_Pump', 'CAL_MFC_1Set_Value']
+    df_fgga = pd.read_csv(fgga_file,
+                          names=fgga_names,
+                          delimiter=',',
+                          parse_dates=[2],
+                          date_parser=fgga_dateparse,
+                          skiprows=100)    # To be sure to skip the header
+
+    # Using the Valve states for flagging out calibration periods
+    df_fgga.loc[df_fgga['V1'] != 0, 'ch4_ppb'] = np.nan
+    df_fgga.loc[df_fgga['V2'] != 0, 'co2_ppm'] = np.nan
+    df_fgga.loc[df_fgga['V2'] != 0, 'ch4_ppb'] = np.nan
+
+
+.. code-block:: python
+
+    # Reading in the FAAM core data file using the FAAM_Dataset object from
+    # the faampy module
+    core_file = os.path.join(faampy.FAAMPY_DATA_PATH,
+                             'b991',
+                             'core',
+                             'core_faam_20161024_v004_r0_b991.nc')
+    ds = FAAM_Dataset(core_file)
+
+    # merge the chemistry data with the core data set
+    ds.merge(df_nox.to_records(convert_datetime64=False), index='timestamp', delay=3)
+    ds.merge(df_fgga.to_records(convert_datetime64=False), index='timestamp', delay=4)
+
+    # define the list of variables that we would like to extract
+    var_list = ['Time', 'LAT_GIN', 'LON_GIN', 'ALT_GIN', 'HGT_RADR',
+                'CO_AERO', 'U_C', 'V_C', 'W_C', 'U_NOTURB', 'V_NOTURB',
+                'WOW_IND', 'TAT_DI_R', 'TDEW_GE', 'PS_RVSM', 'ch4_ppb', 'co2_ppm',
+                'no_conc', 'no2_conc', 'nox_conc', 'TSC_BLUU', 'TSC_GRNU',
+                'TSC_REDU', 'BSC_BLUU', 'BSC_GRNU', 'BSC_REDU', 'IAS_RVSM']
+
+    # write the netcdf out to your HOME directory
+    outfile = os.path.join(os.environ['HOME'], '%s_merged.nc' % (FID.lower()))
+    ds.write(outfile,
+             clobber=True,
+             v_name_list=var_list)
+
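+
+To check that the merged file was written correctly you can, for example,
+reopen it with the plain netCDF4 library and list the variables (a small
+added check, not part of the original recipe; ``outfile`` is the path defined
+in the snippet above).
+
+.. code-block:: python
+
+    import netCDF4
+
+    # reopen the merged file and list the variables that were copied across
+    ds_check = netCDF4.Dataset(outfile, 'r')
+    print(sorted(ds_check.variables.keys()))
+    ds_check.close()
+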
+
+Google-Earth overlays
+---------------------
+
+The commands in this section are run from the console. To keep the filenames
+short we move into the directory where the data for b991 are located::
+
+    cd ~/faampy_data/b991
+
+We create a gpx (`GPS Exchange Format `_) file::
+
+    faampy nc_to_gpx core/core_faam_20161024_v004_r0_b991.nc .
+
+We use the gpx data file to geotag a few photographs that were taken during
+the flight. The gpscorrelate utility can be installed from the Linux
+distribution's package manager::
+
+    gpscorrelate --gps b991_20161024.gpx --photooffset -3600 photos/*jpg
+
+Now that the photos are geotagged it is possible to create a photo album::
+
+    faampy ge_photo_album ./photos ./ge_photo_album_20161024_b991.kmz
+
+WAS bottle overlay::
+
+    faampy ge_was_to_kmz ./chem_data/B991.WAS ./core/core_faam_20161024_v004_r0_b991_1hz.nc .
+
+Make profiles for some of the chemicals in the created merged file::
+
+    cd ~/faampy_data/b991
+    faampy ge_ncvar_to_kml --offset -100 --scale_factor 500 \
+        --fltsumm ./core/flight-sum_faam_20161024_r0_b991.txt CO_AERO b991_merged.nc .
+
+    faampy ge_ncvar_to_kml --offset -435 --scale_factor 1500 \
+        --fltsumm ./core/flight-sum_faam_20161024_r0_b991.txt co2_ppm b991_merged.nc .
+
+    faampy ge_ncvar_to_kml --offset -2115 --scale_factor 500 \
+        --fltsumm ./core/flight-sum_faam_20161024_r0_b991.txt ch4_ppb b991_merged.nc .
+
+    faampy ge_ncvar_to_kml --scale_factor 0.4 \
+        --fltsumm ./core/flight-sum_faam_20161024_r0_b991.txt nox_conc b991_merged.nc .
+
+
+Quicklook Figures
+-----------------
+
+faampy provides a command line tool to create quicklook figures using the
+information from the flight summary. According to the event name (e.g.
+Profile, Run, ...) either a time series or a profile plot is created. Maps
+are created for every event and skew-T plots for every profile. Again, to
+keep filenames on the command line brief we move into the b991 directory::
+
+    cd ~/faampy_data/b991
+    mkdir quicklooks
+
+Create a quicklooks configuration file (quicklooks.cfg) which defines the
+figure layout and which variables should be plotted::
+
+    touch quicklooks.cfg
+
+Add the following text to the quicklooks.cfg file using a text editor::
+
+    [['TSC_BLUU', 'TSC_GRNU' , 'TSC_REDU'], ['BSC_BLUU', 'BSC_GRNU', 'BSC_REDU']]
+    [['CO_AERO'],]
+    [['ch4_ppb'], ['co2_ppm']]
+    [['no_conc'], ['no2_conc'], ['nox_conc']]
+
+Every line defines one figure and its subplots. For example, the first line
+([['TSC_BLUU', 'TSC_GRNU' , 'TSC_REDU'], ['BSC_BLUU', 'BSC_GRNU', 'BSC_REDU']])
+will create a figure with two subplots: the total scatter values from the
+Nephelometer are plotted in the first and the backscatter values in the
+second.
+
+We will use the merged data file, which we produced in the previous section.
+This file contains the NOx and FGGA data. The command for creating the
+quicklooks is::
+
+    faampy plt_quicklooks --config_file quicklooks.cfg b991_merged.nc \
+        ./core/flight-sum_faam_20161024_r0_b991.txt ./quicklooks/
+
+If the above command was successful the figures should have been created in
+the quicklooks directory.
diff --git a/docs/recipe/recipes_03_spatial.rst b/docs/recipe/recipes_03_spatial.rst
new file mode 100644
index 0000000..f3da146
--- /dev/null
+++ b/docs/recipe/recipes_03_spatial.rst
@@ -0,0 +1,270 @@
+
+
+Recipe - Spatial Analysis
+=========================
+
+FAAM core data are stored as netCDF and come with a *Time* dimension.
+However, since the FAAM aircraft is a moving platform, location is obviously
+also an important dimension, and spatial queries of the FAAM data can add
+useful functionality. To provide this feature the FAAM flight tracks are
+inserted as *linestring* into a database with spatial capabilities.
+Such a database allows queries like:
+
+  * Give me all the flights that have crossed Scotland
+  * On which flights did we fly closer than 10 nautical miles past the Queen's palace
+  * What length was the flight track
+
+
+DB description
+--------------
+
+The spatialite DB is stored in one single file, which is very convenient and
+does not require the setup of an advanced database server, which can come
+with a lot of obstacles. In direct comparison spatialite is less powerful,
+but it has all the features that we need. For more information see:
+
+ | https://www.gaia-gis.it/fossil/libspatialite/index
+ | http://www.gaia-gis.it/gaia-sins/spatialite-sql-4.4.0.html
+ | https://www.gaia-gis.it/gaia-sins/spatialite-tutorial-2.3.1.html
+ | http://postgis.net/docs/
+
+The example database currently has only one table with three columns:
+
+ | fid - Flight id
+ | date - Start date of flight
+ | the_geom - Holds the linestring geometry
+
+
+For the examples below python is our tool of choice, which has all the
+necessary modules to interact with the DB. The code is from the
+*faampy_spatial_examples.py* file.
+
+
+Imports and DB connection
+-------------------------
+
+For the examples below to work we need to import some common modules and
+connect to the database.
+
+.. code-block:: python
+
+    import json
+    import numpy as np
+    import os
+    import osgeo.ogr
+    import simplekml
+
+    import faampy
+    try:
+        from faampy.core.faam_spatial import FAAM_Spatial_DB
+    except ImportError:
+        import faam_spatial
+
+    LATEST_FID = 'b991'
+
+    DB_FILE = os.path.join(faampy.FAAMPY_DATA_PATH, 'db', 'faam_spatial_db.sqlite')
+
+    # connecting to the database
+    db = FAAM_Spatial_DB(DB_FILE)
+
+
+Example: Get some db info
+-------------------------
+
+Just get some basic information from the database.
+
+.. code-block:: python
+
+    print('Some DB info')
+
+    # Count how many records are in the DB
+    sql = """SELECT COUNT(*) FROM flight_tracks;"""
+    cur = db.conn.cursor()
+    cur.execute(sql)
+    cnt = cur.fetchone()[0]
+
+    print('Number of flights in the DB: %i' % (cnt,))
+
+    # Get all fids that are in the DB
+    sql = """SELECT fid FROM flight_tracks ORDER BY fid;"""
+    cur = db.conn.cursor()
+    cur.execute(sql)
+    fids = [i[0] for i in cur.fetchall()]
+    # By today FAAM has flown a total of 991 missions
+    all_fids = set(['b%0.3i' % i for i in range(int(LATEST_FID[1:]))])
+    missing_fids = sorted(all_fids.difference(fids))
+
+    print('Number Missing flights: %i' % (len(missing_fids),))
+    print('Missing flights ids: %s' % (','.join(missing_fids),))
+
+
+Example: Find flights that go over the North Sea
+------------------------------------------------
+
+The goal is to find all FAAM flights that go over the North Sea. To do this
+we need the boundaries for the North Sea. A shapefile with the Polygon can be
+downloaded from the web:
+http://www.marineregions.org/gazetteer.php?p=details&id=2350
+
+To read it in we use the osgeo module.
+
+.. code-block:: python
+
+    sf = osgeo.ogr.Open('north_sea.shp')
+    layer = sf.GetLayer()
+    ns = layer.GetFeature(0)    # there is only one feature in the layer
+    geometry = ns.GetGeometryRef()
+    ns_wkt = geometry.ExportToWkt()    # Getting Well-known text representation
+
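+
+As a small added aside (not part of the original example) you can check that
+the shapefile was read correctly by asking the ogr geometry for its type:
+
+.. code-block:: python
+
+    # should print 'POLYGON' for the North Sea boundary
+    print(geometry.GetGeometryName())
+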
+Give me all flights where the track intersects the North Sea Polygon. Now
+that we have the geometry in wkt format we can use it to create a sql query
+that we can send to the spatialite DB.
+
+.. code-block:: python
+
+    sql = "SELECT FT.fid FROM flight_tracks FT where "
+    sql += "ST_Intersects( GeomFromText('%s'), FT.the_geom) " % (ns_wkt,)
+    sql += "ORDER BY FT.fid;"
+    cur = db.conn.cursor()                 # connect
+    cur.execute(sql)                       # execute
+    fids = [i[0] for i in cur.fetchall()]  # flatten the result
+
+    print('Number of flights that intersect the North Sea: %i' % (len(fids),))
+    print('List flights that intersect the North Sea: %s\n' % (','.join(fids),))
+
+Now that we have all the fids that intersected the North Sea, we want to look
+at them using google-earth. Spatialite has the capability of formatting the
+geometries into kml format (askml).
+
+.. code-block:: python
+
+    sql = "SELECT askml(Simplify(FT.the_geom, 0.01)) FROM flight_tracks FT WHERE"
+    sql += " FT.fid IN (%s)" % (str(','.join(["'%s'" % fid for fid in fids])))
+    cur.execute(sql)
+    flight_tracks_kml = cur.fetchall()
+
+    # Create a new kml file
+    kml = simplekml.Kml()
+    folder = kml.newfolder(name='Spatialite result')
+    lines = kml.kml().split('\n')
+    lines.insert(-4, '<Placemark>')
+    lines.insert(-4, '<name>North Sea</name>')
+    lines.insert(-4, geometry.ExportToKML())
+    lines.insert(-4, '</Placemark>')
+    for i, flight_track in enumerate(flight_tracks_kml):
+        lines.insert(-4, '<Placemark>')
+        lines.insert(-4, '<name>%s</name>' % (fids[i],))
+        lines.insert(-4, flight_track[0])
+        lines.insert(-4, '</Placemark>')
+
+    ofilename = os.path.join(os.environ['HOME'], 'fids_crossing_ns.kml')
+    print('Writing North Sea Polygon and the flight track linestrings as kml')
+    print('kml written to: %s' % (ofilename,))
+    ofile = open(ofilename, 'w')
+    ofile.write('\n'.join(lines))
+    ofile.close()
+
+
+Example: Get the length of a flight track
+------------------------------------------
+
+Get the length of a flight track. The database can do this for us using the
+*GreatCircleLength* function.
+
+.. code-block:: python
+
+    fid = 'b659'
+    sql = "SELECT GreatCircleLength(the_geom) from "
+    sql += "flight_tracks where fid = '%s';" % (fid, )
+    cur = db.conn.cursor()    # connect
+    cur.execute(sql)          # execute
+    length = cur.fetchone()[0]/1000.
+    print('Flight %s was %.2f km long.' % (fid, length))
+
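+
+A small variation on this (an addition, not one of the original examples)
+ranks all flights in the DB by track length in a single query:
+
+.. code-block:: python
+
+    # the five longest flight tracks, in km
+    sql = "SELECT fid, GreatCircleLength(the_geom)/1000. FROM flight_tracks "
+    sql += "ORDER BY 2 DESC LIMIT 5;"
+    cur = db.conn.cursor()
+    cur.execute(sql)
+    for fid, km in cur.fetchall():
+        print('%s: %.1f km' % (fid, km))
+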
+
+Example: Get all flights when the ARA climbed above a certain altitude
+----------------------------------------------------------------------
+
+We are trying to find all the flights where we climbed above a certain gps
+altitude. For this we loop over all individual flight tracks. The steps are:
+
+1. Get the flight track from the DB in json format
+2. Use the 'coordinates' key from the json and extract the z-coordinate
+3. Check if the maximum z-value is greater than MAX_ALT and store the fid
+   in the result list if that is the case
+
+.. code-block:: python
+
+    MAX_ALT = 11000
+    print('TASK: Finding flights exceeding %i m altitude' % (int(MAX_ALT),))
+    sql = """SELECT fid, AsGeoJSON(the_geom) from flight_tracks;"""
+    cur = db.conn.cursor()    # connect
+    cur.execute(sql)          # execute
+    result = cur.fetchall()
+    fid_max_alt_list = []
+    for r in result:
+        fid = r[0]
+        # get the coordinates from the geojson
+        coords = np.array(json.loads(r[1])['coordinates'])
+        # the alt coordinate is the 3rd column
+        alt_max = np.nanmax(coords[:, 2])
+        fid_max_alt_list.append((fid, alt_max))
+
+    fids = sorted([i[0] for i in fid_max_alt_list if i[1] > MAX_ALT])
+    print('N fids with gps altitude > %i: %i' % (int(MAX_ALT), len(fids),))
+    print('List of flight ids: %s\n' % (','.join(fids),))
+
+
+Example: Get all flights that took off from Cranfield
+-----------------------------------------------------------------
+
+Some code is needed to calculate the distance between two WGS84 coordinates.
+
+.. code-block:: python
+
+    # http://stackoverflow.com/questions/19412462/getting-distance-between-two-points-based-on-latitude-longitude-python
+    def calc_distance(lat1, lon1, lat2, lon2):
+        from math import sin, cos, sqrt, atan2, radians
+        # approximate radius of earth in m
+        R = 6373000.0
+        lat1 = radians(lat1)
+        lon1 = radians(lon1)
+        lat2 = radians(lat2)
+        lon2 = radians(lon2)
+        dlon = lon2 - lon1
+        dlat = lat2 - lat1
+        a = sin(dlat / 2)**2 + cos(lat1) * cos(lat2) * sin(dlon / 2)**2
+        c = 2 * atan2(sqrt(a), sqrt(1 - a))
+
+        distance = R * c
+        return distance
+
+This is the code to get all the flights from the database.
+
+.. code-block:: python
+
+    print('TASK: Finding flights that took off from Cranfield in every year')
+    Cranfield_Coords = (52.072222, -0.616667)    # Cranfield Airport coordinates
+    # in m; the distance is rather large to cover flights
+    # when the GIN didn't work straight away
+    MAX_DISTANCE = 15000
+    sql = """SELECT fid, date, AsGeoJSON(the_geom) from flight_tracks order by date;"""
+    cur = db.conn.cursor()    # connect
+    cur.execute(sql)          # execute
+    result = cur.fetchall()
+    # get a list of all years for which we do the analysis
+    years = list(set([r[1].split('-')[0] for r in result]))
+    dist_dict = {}
+    for y in years:
+        dist_dict[y] = []
+
+    for r in result:
+        fid = r[0]
+        # get the coordinates from the geojson
+        coords = np.array(json.loads(r[2])['coordinates'])
+        # extract year string from sql result
+        year = r[1].split('-')[0]
+        lat1, lon1 = Cranfield_Coords
+        # pull the coordinates from the very first array
+        lon2 = coords[0, 0]
+        lat2 = coords[0, 1]
+        dist = calc_distance(lat1, lon1, lat2, lon2)
+        if dist < MAX_DISTANCE:
+            dist_dict[year].append((fid, dist))
+
+    # print summary
+    total = 0
+    # print the number for every year
+    for year in sorted(dist_dict.keys()):
+        n = len(dist_dict[year])
+        total += n
+        print('%7s: %3s' % (year, n))
+    print('%7s: %3s' % ('total', total))
+
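+
+As a quick sanity check of the calc_distance function above (an added aside,
+not part of the original recipe), the great-circle distance from Cranfield
+Airport to central London should come out at roughly 70 km:
+
+.. code-block:: python
+
+    # Cranfield Airport to central London; expect a value of about 71 km
+    dist = calc_distance(52.072222, -0.616667, 51.5074, -0.1278)
+    print('%.1f km' % (dist / 1000.,))
+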
diff --git a/docs/recipe/recipes_04_cis.rst b/docs/recipe/recipes_04_cis.rst
new file mode 100644
index 0000000..400f595
--- /dev/null
+++ b/docs/recipe/recipes_04_cis.rst
@@ -0,0 +1,122 @@
+
+Recipe - FAAM meets cis
+=======================
+
+
+CIS Installation
+----------------
+
+Information about installing anaconda2 and cis:
+
+ | anaconda2: https://www.continuum.io/downloads
+ | cis: http://cistools.net/get-started#installation
+
+Please note that the cis instructions say that you should install python 2.7
+and **not** 3.x. If you are new to python you might wonder why you would not
+install the very latest version. In brief: the two versions are not fully
+compatible and many people have decided to stick with 2.7.
+
+FAAM netCDF preparation
+-----------------------
+
+The FAAM core data do not work with the cis tool straight away. The netCDF
+files need a little tweaking to make them fully CF compliant, so that cis
+interprets the data correctly. The global attributes "Conventions" and
+"Coordinates" need to be added. A small bash script (faam_edit.sh) is
+provided that makes the changes using the nc utilities.
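+
+The faam_edit.sh script itself is not reproduced here, but adding a global
+attribute with ncatted (one of the NCO "nc utilities") looks, in principle,
+like this (the attribute value below is only an illustration; use the values
+from faam_edit.sh for real data)::
+
+    ncatted -a Conventions,global,c,c,"CF-1.6" core_faam_20161024_v004_r0_b991_1hz.nc
+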
+The example netCDF has already been edited and works with cis.
+
+The example data (core_faam_20161024_v004_r0_b991_1hz_editted.nc) are for
+flight b991 (24-10-2016), when the aircraft was flying downwind of Manchester
+and Liverpool measuring the emissions from the two cities ahead of the
+*Into the Blue* event.
+
+
+Starting cis
+------------
+
+The next thing to do is to start the cis environment that we installed
+earlier. Go to the bin directory of your conda installation::
+
+    cd ~/anaconda2/bin/
+
+and activate the environment::
+
+    source activate cis_env
+
+From now on the shell should have the string '(cis_env)' at the front of the
+prompt, indicating that we are working in the cis environment.
+
+As a next step we need to tell cis where to look for the FAAM_NetCDF plugin,
+which is needed to read in the FAAM core data. The environment variable
+CIS_PLUGIN_HOME has to be set to the directory that contains the
+FAAM_NetCDF.py file. For me the command looks like this::
+
+    export CIS_PLUGIN_HOME=/home/axel/cis_plugin_development
+
+Go to the directory where the edited faam core netcdf is stored to keep the
+file path short in the cis commands::
+
+    cd ~/cis_plugin_development/data
+
+
+Working with cis and FAAM data
+------------------------------
+
+Below are several one line examples that show the functionality of the cis
+tools. Most of the examples have been taken and adapted from the cis online
+documentation.
+
+.. note::
+   All the commands below go on **one** line in your shell. The page is just
+   too small to get it all printed on one line.
+
+.. note::
+   If you get an Error message similar to:
+
+       ERROR - 'DimCoord' object has no attribute 'data' - check cis.log for details
+
+   cis cannot find the FAAM_NetCDF plugin; most likely the CIS_PLUGIN_HOME
+   variable was not defined correctly.
+
+Get information about the netCDF::
+
+    cis info TAT_ND_R:core_faam_20161024_v004_r0_b991_1hz_editted.nc
+
+Create a scatter plot to compare the deiced (TAT_DI_R) and non-deiced
+(TAT_ND_R) temperature measurements on the ARA::
+
+    cis plot TAT_ND_R:core_faam_20161024_v004_r0_b991_1hz_editted.nc \
+        TAT_DI_R:core_faam_20161024_v004_r0_b991_1hz_editted.nc \
+        --type comparativescatter --grid \
+        --title "True air temperature comparison" \
+        --xlabel "non deiced sensor (K)" --ylabel "deiced sensor (K)"
+
+And print some statistics about the two temperature variables::
+
+    cis stats TAT_ND_R:core_faam_20161024_v004_r0_b991_1hz_editted.nc \
+        TAT_DI_R:core_faam_20161024_v004_r0_b991_1hz_editted.nc
+
+Make a coloured line plot, showing the CO concentration on a map::
+
+    cis plot CO_AERO:core_faam_20161024_v004_r0_b991_1hz_editted.nc \
+        --xaxis longitude --yaxis latitude --xmin -5 --xmax -2 --ymin 52.2 --ymax 55
+
+Calculate mean, min and max for 1 min time intervals for the CO_AERO data
+for the time interval 11:45 to 14:45. The results are written to a netCDF::
+
+    cis aggregate CO_AERO:core_faam_20161024_v004_r0_b991_1hz_editted.nc:kernel=mean \
+        t=[2016-10-24T11:45,2016-10-24T14:45,PT1M] -o b991_co_aero_1min_mean.nc
+
+    cis aggregate CO_AERO:core_faam_20161024_v004_r0_b991_1hz_editted.nc:kernel=max \
+        t=[2016-10-24T11:45,2016-10-24T14:45,PT1M] -o b991_co_aero_1min_max.nc
+
+    cis aggregate CO_AERO:core_faam_20161024_v004_r0_b991_1hz_editted.nc:kernel=min \
+        t=[2016-10-24T11:45,2016-10-24T14:45,PT1M] -o b991_co_aero_1min_min.nc
+
+Plot the three lines in one figure::
+
+    cis plot CO_AERO:b991_co_aero_1min_max.nc \
+        CO_AERO:b991_co_aero_1min_mean.nc \
+        CO_AERO:b991_co_aero_1min_min.nc
+
+Reproducing an aggregation example from the documentation:
+http://cis.readthedocs.io/en/stable/aggregation.html#aircraft-track
+
+The results from the aggregation will be saved to a netCDF (option -o). The
+following line aggregates over 5 minutes and over altitude in 200 metre steps
+in the range of 0 to 1000 m::
+
+    cis aggregate CO_AERO:core_faam_20161024_v004_r0_b991_1hz_editted.nc \
+        t=[2016-10-24T11:45,2016-10-24T14:45,PT5M],z=[0,1000,200] \
+        -o b991_co_aero_alt_time.nc
+
+Plot a curtain using the netCDF that we just created::
+
+    cis plot CO_AERO:b991_co_aero_alt_time.nc --xaxis time --yaxis altitude
+
+Make a grid plot from the mean, where each grid cell is 0.2 degrees in size.
+The results are written to a netCDF::
+
+    cis aggregate CO_AERO:core_faam_20161024_v004_r0_b991_1hz_editted.nc:kernel=mean \
+        x=[-5,0,0.2],y=[52,55,0.2] -o b991_co_aero_grid_mean.nc
+
+Now plot the grid on a map using the netcdf that we just created::
+
+    cis plot CO_AERO:b991_co_aero_grid_mean.nc
diff --git a/examples/faampy_spatial_examples.py b/examples/faampy_spatial_examples.py
new file mode 100644
index 0000000..8da7377
--- /dev/null
+++ b/examples/faampy_spatial_examples.py
@@ -0,0 +1,191 @@
+# -*- coding: utf-8 -*-
+
+"""
+This script contains examples that work with an example spatialite DB, which
+stores (almost) all FAAM flight tracks. An entry in this database can be
+accompanied by a geometry (e.g. point, line, polygon) and it is possible to
+make spatial queries that a normal database is not able to perform, for
+example distance measurements.
+
+The spatialite db is one single file, which is very convenient and does not
+require the setup of an advanced database server, which can come with a lot
+of obstacles. In direct comparison spatialite is less powerful, but has all
+the features that we need.
For more information see: + + https://www.gaia-gis.it/fossil/libspatialite/index + + http://www.gaia-gis.it/gaia-sins/spatialite-sql-4.4.0.html + + https://www.gaia-gis.it/gaia-sins/spatialite-tutorial-2.3.1.html + + http://postgis.net/docs/ + +""" + + +import json +import numpy as np +import os +import osgeo.ogr +import simplekml + +from faam_spatial import FAAM_Spatial_DB + +LATEST_FID='b991' + +DB_FILE = './faam_spatial_db.sqlite' + +db = FAAM_Spatial_DB(DB_FILE) + +#============================================================================= + +print '\n'*3 +print '=' * 40 +print '\n'*3 + +print 'Some DB info' + +# Count how many records are in the DB +sql="""SELECT COUNT(*) FROM flight_tracks;""" +cur = db.conn.cursor() +cur.execute(sql) +cnt = cur.fetchone()[0] + +print 'Number of flights in the DB: %i' % (cnt,) +print '' + +sql="""SELECT fid FROM flight_tracks ORDER BY fid;""" +cur = db.conn.cursor() +cur.execute(sql) +fids = [i[0] for i in cur.fetchall()] +#By today FAAM has flown a total of 991 missions +all_fids = set(['b%0.3i' % i for i in range(int(LATEST_FID[1:]))]) +missing_fids = sorted(all_fids.difference(fids)) + +print 'Number Missing flights: %i' % (len(missing_fids),) +print 'Missing flights ids: %s' % (','.join(missing_fids),) + + +#============================================================================= + + +print '\n'*3 +print '=' * 40 +print '\n'*3 +print 'TASK: Finding all flights that go over the North Sea' +print '\n' +# The shape (Polygon from the North Sea was downloaded from the web +# http://www.marineregions.org/gazetteer.php?p=details&id=2350 +print 'Reading in the shape file for the North Sea' +sf = osgeo.ogr.Open('north_sea.shp') +layer = sf.GetLayer() +ns = layer.GetFeature(0) # there is only one feature in the layer +geometry = ns.GetGeometryRef() +ns_wkt = geometry.ExportToWkt() # Getting a Well-known text representation + +print 'Give me all flights where the track intersects the North Sea Polygon' + +# Now that we have the Geometry in a wkt format we can use +# that to create a sql query that we can send to the spatialite DB +sql = "SELECT FT.fid FROM flight_tracks FT where " +sql += "ST_Intersects( GeomFromText('%s'), FT.the_geom) " % (ns_wkt,) +sql += "ORDER BY FT.fid;" +cur = db.conn.cursor() # connect +cur.execute(sql) # execute +fids = [i[0] for i in cur.fetchall()] # flatten the result + +print '' +print 'Number of flights that intersect the North Sea: %i' % (len(fids),) +print '' +print 'List flights that intersect the North Sea: %s\n' % (','.join(fids),) + + +# Now that we have all the fids that intersected the North Sea, we want +# to look at them using google-earth. 
+# Spatialite has the capability of formatting the geometries into a kml
+# string (askml)
+
+sql = "SELECT askml(Simplify(FT.the_geom, 0.01)) FROM flight_tracks FT WHERE"
+sql += " FT.fid IN (%s)" % (str(','.join(["'%s'" % fid for fid in fids])))
+cur.execute(sql)
+flight_tracks_kml = cur.fetchall()
+
+
+# Create a new kml file
+kml = simplekml.Kml()
+folder = kml.newfolder(name='Spatialite result')
+lines = kml.kml().split('\n')
+lines.insert(-4, '<Placemark>')
+lines.insert(-4, '<name>North Sea</name>')
+lines.insert(-4, geometry.ExportToKML())
+lines.insert(-4, '</Placemark>')
+for i, flight_track in enumerate(flight_tracks_kml):
+    lines.insert(-4, '<Placemark>')
+    lines.insert(-4, '<name>%s</name>' % (fids[i],))
+    lines.insert(-4, flight_track[0])
+    lines.insert(-4, '</Placemark>')
+
+
+ofilename = os.path.join(os.environ['HOME'], 'fids_crossing_ns.kml')
+print 'Writing North Sea Polygon and the flight track linestrings as kml'
+print 'kml written to: %s' % (ofilename,)
+ofile = open(ofilename, 'w')
+ofile.write('\n'.join(lines))
+ofile.close()
+
+
+#=============================================================================
+
+
+print '\n'*3
+print '=' * 40
+print '\n'*3
+# Find the length of the flight track for a specific flight
+fid = 'b659'
+print 'TASK: Give me the length of %s' % (fid,)
+print '\n'
+sql = "SELECT GreatCircleLength(the_geom) from "
+sql += "flight_tracks where fid = '%s';" % (fid, )
+cur = db.conn.cursor()    # connect
+cur.execute(sql)          # execute
+length = cur.fetchone()[0]/1000.
+print 'Flight %s was %.2f km long.' % (fid, length)
+
+
+#=============================================================================
+
+
+print '\n'*3
+print '=' * 40
+print '\n'*3
+# Give me all flights when we climbed above 11000 m.
+# There does not seem to be a way to do this directly in spatialite, so we
+# do some simple data crunching in python.
+#
+# To do this we need to get the xyz coordinates for each flight first and
+# check those. I did not find a way to query the linestring directly.
+# Spatialite can return the geometry in json format, which can then easily
+# be converted into a dictionary with 'coordinates' being one of the keys.
+MAX_HEIGHT = 11000
+print 'TASK: Finding flights exceeding %i m altitude' % (int(MAX_HEIGHT),)
+sql = """SELECT fid, AsGeoJSON(the_geom) from flight_tracks;"""
+cur = db.conn.cursor()    # connect
+cur.execute(sql)          # execute
+result = cur.fetchall()
+fid_max_alt_list = []
+for r in result:
+    fid = r[0]
+    # get the coordinates from the geojson
+    coords = np.array(json.loads(r[1])['coordinates'])
+    # the alt coordinate is the 3rd column
+    alt_max = np.nanmax(coords[:, 2])
+    fid_max_alt_list.append((fid, alt_max))
+
+fids = sorted([i[0] for i in fid_max_alt_list if i[1] > MAX_HEIGHT])
+print 'N fids with gps height > %i: %i' % (int(MAX_HEIGHT), len(fids),)
+print ''
+print 'List of flight ids: %s\n' % (','.join(fids),)
+
+
+#=============================================================================
+
+
diff --git a/faampy/_3rdparty/__init__.py b/faampy/_3rdparty/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/faampy/_3rdparty/rdp/__init__.py b/faampy/_3rdparty/rdp/__init__.py
new file mode 100644
index 0000000..94d5d1d
--- /dev/null
+++ b/faampy/_3rdparty/rdp/__init__.py
@@ -0,0 +1,182 @@
+"""
+rdp
+~~~
+
+Python implementation of the Ramer-Douglas-Peucker algorithm.
+
+:copyright: 2014-2016 Fabian Hirschmann
+:license: MIT, see LICENSE.txt for more details.
+ +""" +from math import sqrt +from functools import partial +import numpy as np +import sys + +if sys.version_info[0] >= 3: + xrange = range + + +def pldist(point, start, end): + """ + Calculates the distance from ``point`` to the line given + by the points ``start`` and ``end``. + + :param point: a point + :type point: numpy array + :param start: a point of the line + :type start: numpy array + :param end: another point of the line + :type end: numpy array + """ + if np.all(np.equal(start, end)): + return np.linalg.norm(point, start) + + return np.divide( + np.abs(np.linalg.norm(np.cross(end - start, start - point))), + np.linalg.norm(end - start)) + + +def rdp_rec(M, epsilon, dist=pldist): + """ + Simplifies a given array of points. + + Recursive version. + + :param M: an array + :type M: numpy array + :param epsilon: epsilon in the rdp algorithm + :type epsilon: float + :param dist: distance function + :type dist: function with signature ``f(point, start, end)`` -- see :func:`rdp.pldist` + """ + dmax = 0.0 + index = -1 + + for i in xrange(1, M.shape[0]): + d = dist(M[i], M[0], M[-1]) + + if d > dmax: + index = i + dmax = d + + if dmax > epsilon: + r1 = rdp_rec(M[:index + 1], epsilon, dist) + r2 = rdp_rec(M[index:], epsilon, dist) + + return np.vstack((r1[:-1], r2)) + else: + return np.vstack((M[0], M[-1])) + + +def _rdp_iter(M, start_index, last_index, epsilon, dist=pldist): + stk = [] + stk.append([start_index, last_index]) + global_start_index = start_index + indices = np.ones(last_index - start_index + 1, dtype=bool) + + while stk: + start_index, last_index = stk.pop() + + dmax = 0.0 + index = start_index + + for i in xrange(index + 1, last_index): + if indices[i - global_start_index]: + d = dist(M[i], M[start_index], M[last_index]) + if d > dmax: + index = i + dmax = d + + if dmax > epsilon: + stk.append([start_index, index]) + stk.append([index, last_index]) + else: + for i in xrange(start_index + 1, last_index): + indices[i - global_start_index] = False + + return indices + + +def rdp_iter(M, epsilon, dist=pldist, return_mask=False): + """ + Simplifies a given array of points. + + Iterative version. + + :param M: an array + :type M: numpy array + :param epsilon: epsilon in the rdp algorithm + :type epsilon: float + :param dist: distance function + :type dist: function with signature ``f(point, start, end)`` -- see :func:`rdp.pldist` + :param return_mask: return the mask of points to keep instead + :type return_mask: bool + """ + mask = _rdp_iter(M, 0, len(M) - 1, epsilon, dist) + + if return_mask: + return mask + + return M[mask] + + +def rdp(M, epsilon=0, dist=pldist, algo="iter", return_mask=False): + """ + Simplifies a given array of points using the Ramer-Douglas-Peucker + algorithm. + + Example: + + >>> from rdp import rdp + >>> rdp([[1, 1], [2, 2], [3, 3], [4, 4]]) + [[1, 1], [4, 4]] + + This is a convenience wrapper around both :func:`rdp.rdp_iter` + and :func:`rdp.rdp_rec` that detects if the input is a numpy array + in order to adapt the output accordingly. This means that + when it is called using a Python list as argument, a Python + list is returned, and in case of an invocation using a numpy + array, a NumPy array is returned. + + The parameter ``return_mask=True`` can be used in conjunction + with ``algo="iter"`` to return only the mask of points to keep. 
Example: + + >>> from rdp import rdp + >>> import numpy as np + >>> arr = np.array([1, 1, 2, 2, 3, 3, 4, 4]).reshape(4, 2) + >>> arr + array([[1, 1], + [2, 2], + [3, 3], + [4, 4]]) + >>> mask = rdp(arr, algo="iter", return_mask=True) + >>> mask + array([ True, False, False, True], dtype=bool) + >>> arr[mask] + array([[1, 1], + [4, 4]]) + + :param M: a series of points + :type M: numpy array with shape ``(n,d)`` where ``n`` is the number of points and ``d`` their dimension + :param epsilon: epsilon in the rdp algorithm + :type epsilon: float + :param dist: distance function + :type dist: function with signature ``f(point, start, end)`` -- see :func:`rdp.pldist` + :param algo: either ``iter`` for an iterative algorithm or ``rec`` for a recursive algorithm + :type algo: string + :param return_mask: return mask instead of simplified array + :type return_mask: bool + """ + + if algo == "iter": + algo = partial(rdp_iter, return_mask=return_mask) + elif algo == "rec": + if return_mask: + raise NotImplementedError("return_mask=True not supported with algo=\"rec\"") + algo = rdp_rec + + if "numpy" in str(type(M)): + return algo(M, epsilon, dist) + + return algo(np.array(M), epsilon, dist).tolist() diff --git a/faampy/_3rdparty/tephi/__init__.py b/faampy/_3rdparty/tephi/__init__.py new file mode 100644 index 0000000..700ca9c --- /dev/null +++ b/faampy/_3rdparty/tephi/__init__.py @@ -0,0 +1,760 @@ +# (C) British Crown Copyright 2014, Met Office +# +# This file is part of tephi. +# +# Tephi is free software: you can redistribute it and/or modify it under +# the terms of the GNU Lesser General Public License as published by the +# Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Tephi is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with tephi. If not, see . +""" +The tephi module provides tephigram plotting of pressure, temperature and wind +barb data. + +.. warning:: + This is a beta release module and is liable to change. + +""" + +from collections import Iterable, namedtuple +from functools import partial +from matplotlib.font_manager import FontProperties +import matplotlib.pyplot as plt +from mpl_toolkits.axisartist.grid_helper_curvelinear import GridHelperCurveLinear +from mpl_toolkits.axisartist import Subplot +import numbers +import numpy as np +import os.path + +import isopleths +import transforms + + +__version__ = '0.1.0' + + +# +# Miscellaneous constants. 
+# +DEFAULT_WIDTH = 700 # in pixels + +ISOBAR_SPEC = [(25, .03), (50, .10), (100, .25), (200, 1.5)] +ISOBAR_LINE = {'color':'blue', 'linewidth':0.5, 'clip_on':True} +ISOBAR_TEXT = {'size':8, 'color':'blue', 'clip_on':True, 'va':'bottom', 'ha':'right'} +ISOBAR_FIXED = [50, 1000] + +WET_ADIABAT_SPEC = [(1, .05), (2, .15), (4, 1.5)] +WET_ADIABAT_LINE = {'color':'orange', 'linewidth':0.5, 'clip_on':True} +WET_ADIABAT_TEXT = {'size':8, 'color':'orange', 'clip_on':True, 'va':'bottom', 'ha':'left'} +WET_ADIABAT_FIXED = None + +MIXING_RATIO_SPEC = [(1, .05), (2, .18), (4, .3), (8, 1.5)] +MIXING_RATIO_LINE = {'color':'green', 'linewidth':0.5, 'clip_on':True} +MIXING_RATIO_TEXT = {'size':8, 'color':'green', 'clip_on':True, 'va':'bottom', 'ha':'right'} +MIXING_RATIOS = [.001, .002, .005, .01, .02, .03, .05, .1, .15, .2, .3, .4, .5, .6, .8, + 1.0, 1.5, 2.0, 2.5, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0, 10.0, 12.0, 14.0, 16.0, + 18.0, 20.0, 24.0, 28.0, 32.0, 36.0, 40.0, 44.0, 48.0, 52.0, 56.0, 60.0, 68.0, 80.0] +MIXING_RATIO_FIXED = None + +MIN_PRESSURE = 50 # mb = hPa +MAX_PRESSURE = 1000 # mb = hPa +MIN_THETA = 0 # degC +MAX_THETA = 250 # degC +MIN_WET_ADIABAT = 1 # degC +MAX_WET_ADIABAT = 60 # degC +MIN_TEMPERATURE = -50 # degC + + +RESOURCES_DIR = os.path.join(os.path.abspath(os.path.dirname(__file__)), 'etc') +DATA_DIR = os.path.join(RESOURCES_DIR, 'test_data') +RESULTS_DIR = os.path.join(RESOURCES_DIR, 'test_results') + + +def loadtxt(*filenames, **kwargs): + """ + Load one or more text files of pressure, temperature, wind speed and wind + direction value sets. + + Each line should contain, at minimum, a single pressure value (mb or hPa), + and a single temperature value (degC), but may also contain a dewpoint + value (degC), wind speed (knots) and wind direction value (degrees from + north). + + Note that blank lines and comment lines beginning with a '#' are ignored. + + For example: + + >>> import os.path + >>> import tephi + + >>> winds = os.path.join(tephi.DATA_DIR, 'barbs.txt') + >>> columns = ('pressure', 'dewpoint', 'wind_speed', 'wind_direction') + >>> data = tephi.loadtxt(winds, column_titles=columns) + >>> pressure = data.pressure + >>> dews = data.dewpoint + >>> wind_speed = data.wind_speed + >>> wind_direction = data.wind_direction + + .. seealso:: :func:`numpy.loadtxt`. + + Args: + + * filenames: one or more filenames. + + Kwargs: + + * column_titles: + List of iterables, or None. If specified, should contain one title + string for each column of data per specified file. If all of multiple + files loaded have the same column titles, then only one tuple of column + titles need be specified. + + * delimiter: + The string used to separate values. This is passed directly to + :func:`np.loadtxt`, which defaults to using any whitespace as delimiter + if this keyword is not specified. + + * dtype: + The datatype to cast the data in the text file to. Passed directly to + :func:`np.loadtxt`. + + Returns: + A :func:`collections.namedtuple` instance containing one tuple, named + with the relevant column title if specified, for each column of data + in the text file loaded. If more than one file is loaded, a sequence + of namedtuples is returned. 
+ + """ + def _repr(nt): + """An improved representation of namedtuples over the default.""" + + typename = nt.__class__.__name__ + fields = nt._fields + n_fields = len(fields) + return_str = '{}(\n'.format(typename) + for i, t in enumerate(fields): + gap = ' ' * 4 + if i == n_fields - 1: + ender = '' + else: + ender = '\n' + return_str += '{}{}={!r}{}'.format(gap, t, getattr(nt, t), ender) + return_str += ')' + return return_str + + column_titles = kwargs.pop('column_titles', None) + delimiter = kwargs.pop('delimiter', None) + dtype = kwargs.pop('dtype', 'f4') + + if column_titles is not None: + fields = column_titles[0] + if not isinstance(column_titles, basestring): + if isinstance(fields, Iterable) and \ + not isinstance(fields, basestring): + # We've an iterable of iterables - multiple titles is True. + multiple_titles = True + if len(column_titles) > len(filenames): + msg = 'Received {} files but {} sets of column titles.' + raise ValueError(msg.format(len(column_titles), + len(filenames))) + elif isinstance(fields, basestring): + # We've an iterable of title strings - use for namedtuple. + tephidata = namedtuple('tephidata', column_titles) + multiple_titles = False + else: + # Whatever we've got it isn't iterable, so raise TypeError. + msg = 'Expected title to be string, got {!r}.' + raise TypeError(msg.format(type(column_titles))) + else: + msg = 'Expected column_titles to be iterable, got {!r}.' + raise TypeError(msg.format(type(column_titles))) + + else: + tephidata = namedtuple('tephidata', ('pressure', 'temperature')) + multiple_titles = False + + data = [] + for ct, arg in enumerate(filenames): + if isinstance(arg, basestring): + if os.path.isfile(arg): + if multiple_titles: + tephidata = namedtuple('tephidata', column_titles[ct]) + tephidata.__repr__ = _repr + payload = np.loadtxt(arg, dtype=dtype, delimiter=delimiter) + item = tephidata(*payload.T) + data.append(item) + else: + msg = 'Item {} is either not a file or does not exist.' + raise OSError(msg.format(arg)) + + if len(data) == 1: + data = data[0] + + return data + + +class _FormatterTheta(object): + """Dry adiabats potential temperature axis tick formatter.""" + + def __call__(self, direction, factor, values): + return [r"$\theta=%s$" % str(value) for value in values] + + +class _FormatterIsotherm(object): + """Isotherms temperature axis tick formatter.""" + + def __call__(self, direction, factor, values): + return [r" $T=%s$" % str(value) for value in values] + + +class Locator(object): + """Determine the fixed step axis tick locations when called with a tick range.""" + + def __init__(self, step): + """ + Set the fixed step value for the axis tick locations. + + Generate tick location specification when called with a tick range. + + For example: + + >>> from tephi import Locator + >>> locator = Locator(10) + >>> locator(-45, 23) + (array([-50, -40, -30, -20, -10, 0, 10, 20]), 8, 1) + + Args: + + * step: the step value for each axis tick. + + """ + self.step = int(step) + + def __call__(self, start, stop): + """Calculate the axis ticks given the provided tick range.""" + + step = self.step + start = (int(start) / step) * step + stop = (int(stop) / step) * step + ticks = np.arange(start, stop + step, step) + + return ticks, len(ticks), 1 + + +def _refresh_isopleths(axes): + """ + Refresh the plot isobars, wet adiabats and mixing ratios and associated + text labels. + + Args: + + * axes: + Tephigram plotting :class:`matplotlib.axes.AxesSubplot` instance. + + Returns: + Boolean, whether the plot has changed. 
+ + """ + changed = False + + # Determine the current zoom level. + xlim = axes.get_xlim() + delta_xlim = xlim[1] - xlim[0] + ylim = axes.get_ylim() + zoom = delta_xlim / axes.tephigram_original_delta_xlim + + # Determine the display mid-point. + x_point = xlim[0] + delta_xlim * 0.5 + y_point = ylim[0] + (ylim[1] - ylim[0]) * 0.5 + xy_point = axes.tephigram_inverse.transform(np.array([[x_point, y_point]]))[0] + + for profile in axes.tephigram_profiles: + profile.refresh() + + for isopleth in axes.tephigram_isopleths: + changed = isopleth.refresh(zoom, xy_point) or changed + + return changed + + +def _handler(event): + """Matplotlib event handler.""" + + for axes in event.canvas.figure.axes: + if hasattr(axes, 'tephigram'): + if _refresh_isopleths(axes): + event.canvas.figure.show() + + +class _PlotGroup(dict): + """ + Container for a related group of tephigram isopleths. + + Manages the creation and plotting of all isopleths within the group. + + """ + def __init__(self, axes, plot_func, text_kwargs, step, zoom, tags, fixed=None, xfocus=None): + self.axes = axes + self.text_kwargs = text_kwargs + self.step = step + self.zoom = zoom + + pairs = [] + for tag in tags: + text = plt.text(0, 0, str(tag), **text_kwargs) + text.set_bbox(dict(boxstyle='Round,pad=0.3', facecolor='white', + edgecolor='white', alpha=0.5, clip_on=True, + clip_box=self.axes.bbox)) + pairs.append((tag, [plot_func(tag), text])) + + dict.__init__(self, pairs) + for line, text in self.itervalues(): + line.set_visible(True) + text.set_visible(True) + self._visible = True + + if fixed is None: + fixed = [] + + if not isinstance(fixed, Iterable): + fixed = [fixed] + + if zoom is None: + self.fixed = set(tags) + else: + self.fixed = set(tags) & set(fixed) + + self.xfocus = xfocus + + def __setitem__(self, tag, item): + raise ValueError('Cannot add or set an item into the plot group %r' % self.step) + + def __getitem__(self, tag): + if tag not in self.keys(): + raise KeyError('Tag item %r is not a member of the plot group %r' % (tag, self.step)) + return dict.__getitem__(self, tag) + + def refresh(self, zoom, xy_point): + """ + Refresh all isopleths within the plot group. + + Args: + + * zoom: + Zoom level of the current plot, relative to the initial plot. + * xy_point: + The center point of the current point, transformed into + temperature and potential temperature. + + Returns: + Boolean, whether the plot group has changed. 
+ + """ + if self.zoom is None or zoom <= self.zoom: + changed = self._item_on() + else: + changed = self._item_off() + self._refresh_text(xy_point) + return changed + + def _item_on(self, zoom=None): + changed = False + if zoom is None or self.zoom is None or zoom <= self.zoom: + if not self._visible: + for line, text in self.itervalues(): + line.set_visible(True) + text.set_visible(True) + changed = True + self._visible = True + return changed + + def _item_off(self, zoom=None): + changed = False + if self.zoom is not None and (zoom is None or zoom > self.zoom): + if self._visible: + for tag, (line, text) in self.iteritems(): + if tag not in self.fixed: + line.set_visible(False) + text.set_visible(False) + changed = True + self._visible = False + return changed + + def _generate_text(self, tag, xy_point): + line, text = self[tag] + x_data = line.get_xdata() + y_data = line.get_ydata() + + if self.xfocus: + delta = np.power(x_data - xy_point[0], 2) + else: + delta = np.power(x_data - xy_point[0], 2) + np.power(y_data - xy_point[1], 2) + index = np.argmin(delta) + text.set_position((x_data[index], y_data[index])) + + def _refresh_text(self, xy_point): + if self._visible: + for tag in self: + self._generate_text(tag, xy_point) + elif self.fixed: + for tag in self.fixed: + self._generate_text(tag, xy_point) + + +class _PlotCollection(object): + """ + Container for tephigram isopleths. + + Manages the creation and plotting of all tephigram isobars, mixing ratio + lines and pseudo saturated wet adiabats. + + """ + def __init__(self, axes, spec, stop, plot_func, text_kwargs, fixed=None, minimum=None, xfocus=None): + if isinstance(stop, Iterable): + if minimum and minimum > max(stop): + raise ValueError('Minimum value of %r exceeds all other values' % minimum) + + items = [[step, zoom, set(stop[step - 1::step])] for step, zoom in sorted(spec, reverse=True)] + else: + if minimum and minimum > stop: + raise ValueError('Minimum value of %r exceeds maximum threshold %r' % (minimum, stop)) + + items = [[step, zoom, set(range(step, stop + step, step))] for step, zoom in sorted(spec, reverse=True)] + + for index, item in enumerate(items): + if minimum: + item[2] = set([value for value in item[2] if value >= minimum]) + + for subitem in items[index + 1:]: + subitem[2] -= item[2] + + self.groups = {item[0]: + _PlotGroup(axes, plot_func, text_kwargs, *item, fixed=fixed, xfocus=xfocus) for item in items if item[2]} + + if not self.groups: + raise ValueError('The plot collection failed to generate any plot groups') + + def refresh(self, zoom, xy_point): + """ + Refresh all isopleth groups within the plot collection. + + Args: + + * zoom: + Zoom level of the current plot, relative to the initial plot. + * xy_point: + The center point of the current plot, transformed into + temperature and potential temperature. + + Returns: + Boolean, whether any plot group has changed. + + """ + changed = False + + for group in self.groups.itervalues(): + changed = group.refresh(zoom, xy_point) or changed + + return changed + + +class Tephigram(object): + """ + Generate a tephigram of one or more pressure and temperature data sets. + + """ + + def __init__(self, figure=None, isotherm_locator=None, + dry_adiabat_locator=None, anchor=None): + """ + Initialise the tephigram transformation and plot axes. + + Kwargs: + + * figure: + An existing :class:`matplotlib.figure.Figure` instance for the + tephigram plot. If a figure is not provided, a new figure will + be created by default. 
+ * isotherm_locator: + A :class:`tephi.Locator` instance or a numeric step size + for the isotherm lines. + * dry_adiabat_locator: + A :class:`tephi.Locator` instance or a numeric step size + for the dry adiabat lines. + * anchor: + A sequence of two pressure, temperature pairs specifying the extent + of the tephigram plot in terms of the bottom left hand corner and + the top right hand corner. Pressure data points must be in units of + mb or hPa, and temperature data points must be in units of degC. + + For example: + + .. plot:: + :include-source: + + import matplotlib.pyplot as plt + import os.path + import tephi + from tephi import Tephigram + + dew_point = os.path.join(tephi.DATA_DIR, 'dews.txt') + dry_bulb = os.path.join(tephi.DATA_DIR, 'temps.txt') + dew_data, temp_data = tephi.loadtxt(dew_point, dry_bulb) + dews = zip(dew_data.pressure, dew_data.temperature) + temps = zip(temp_data.pressure, temp_data.temperature) + tpg = Tephigram() + tpg.plot(dews, label='Dew-point', color='blue', linewidth=2, marker='s') + tpg.plot(temps, label='Dry-bulb', color='red', linewidth=2, marker='o') + plt.show() + + """ + if not figure: + # Create a default figure. + self.figure = plt.figure(0, figsize=(9, 9)) + else: + self.figure = figure + + # Configure the locators. + if isotherm_locator and not isinstance(isotherm_locator, Locator): + if not isinstance(isotherm_locator, numbers.Number): + raise ValueError('Invalid isotherm locator') + locator_isotherm = Locator(isotherm_locator) + else: + locator_isotherm = isotherm_locator + + if dry_adiabat_locator and not isinstance(dry_adiabat_locator, Locator): + if not isinstance(dry_adiabat_locator, numbers.Number): + raise ValueError('Invalid dry adiabat locator') + locator_theta = Locator(dry_adiabat_locator) + else: + locator_theta = dry_adiabat_locator + + # Define the tephigram coordinate-system transformation. + self.tephi_transform = transforms.TephiTransform() + grid_helper1 = GridHelperCurveLinear(self.tephi_transform, + tick_formatter1=_FormatterIsotherm(), + grid_locator1=locator_isotherm, + tick_formatter2=_FormatterTheta(), + grid_locator2=locator_theta) + self.axes = Subplot(self.figure, 1, 1, 1, grid_helper=grid_helper1) + self.transform = self.tephi_transform + self.axes.transData + self.axes.axis['isotherm'] = self.axes.new_floating_axis(1, 0) + self.axes.axis['theta'] = self.axes.new_floating_axis(0, 0) + self.axes.axis['left'].get_helper().nth_coord_ticks = 0 + self.axes.axis['left'].toggle(all=True) + self.axes.axis['bottom'].get_helper().nth_coord_ticks = 1 + self.axes.axis['bottom'].toggle(all=True) + self.axes.axis['top'].get_helper().nth_coord_ticks = 0 + self.axes.axis['top'].toggle(all=False) + self.axes.axis['right'].get_helper().nth_coord_ticks = 1 + self.axes.axis['right'].toggle(all=True) + self.axes.gridlines.set_linestyle('solid') + + self.figure.add_subplot(self.axes) + + # Configure default axes. + axis = self.axes.axis['left'] + axis.major_ticklabels.set_fontsize(10) + axis.major_ticklabels.set_va('baseline') + axis.major_ticklabels.set_rotation(135) + axis = self.axes.axis['right'] + axis.major_ticklabels.set_fontsize(10) + axis.major_ticklabels.set_va('baseline') + axis.major_ticklabels.set_rotation(-135) + self.axes.axis['top'].major_ticklabels.set_fontsize(10) + axis = self.axes.axis['bottom'] + axis.major_ticklabels.set_fontsize(10) + axis.major_ticklabels.set_ha('left') + axis.major_ticklabels.set_va('top') + axis.major_ticklabels.set_rotation(-45) + + # Isotherms: lines of constant temperature (degC). 
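+        # Style the floating isotherm axis; its tick labels are configured
+        # here but kept hidden by default (see the set_visible(False) calls
+        # below).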
+ axis = self.axes.axis['isotherm'] + axis.set_axis_direction('right') + axis.set_axislabel_direction('-') + axis.major_ticklabels.set_rotation(90) + axis.major_ticklabels.set_fontsize(10) + axis.major_ticklabels.set_va('bottom') + axis.major_ticklabels.set_color('grey') + axis.major_ticklabels.set_visible(False) # turned-off + + # Dry adiabats: lines of constant potential temperature (degC). + axis = self.axes.axis['theta'] + axis.set_axis_direction('right') + axis.set_axislabel_direction('+') + axis.major_ticklabels.set_fontsize(10) + axis.major_ticklabels.set_va('bottom') + axis.major_ticklabels.set_color('grey') + axis.major_ticklabels.set_visible(False) # turned-off + axis.line.set_linewidth(3) + axis.line.set_linestyle('--') + + # Lock down the aspect ratio. + self.axes.set_aspect(1.) + self.axes.grid(True) + + # Initialise the text formatter for the navigation status bar. + self.axes.format_coord = self._status_bar + + # Factor in the tephigram transform. + ISOBAR_TEXT['transform'] = self.transform + WET_ADIABAT_TEXT['transform'] = self.transform + MIXING_RATIO_TEXT['transform'] = self.transform + + # Create plot collections for the tephigram isopleths. + func = partial(isopleths.isobar, MIN_THETA, MAX_THETA, self.axes, self.transform, ISOBAR_LINE) + self._isobars = _PlotCollection(self.axes, ISOBAR_SPEC, MAX_PRESSURE, func, ISOBAR_TEXT, + fixed=ISOBAR_FIXED, minimum=MIN_PRESSURE) + + func = partial(isopleths.wet_adiabat, MAX_PRESSURE, MIN_TEMPERATURE, self.axes, self.transform, WET_ADIABAT_LINE) + self._wet_adiabats = _PlotCollection(self.axes, WET_ADIABAT_SPEC, MAX_WET_ADIABAT, func, WET_ADIABAT_TEXT, + fixed=WET_ADIABAT_FIXED, minimum=MIN_WET_ADIABAT, xfocus=True) + + func = partial(isopleths.mixing_ratio, MIN_PRESSURE, MAX_PRESSURE, self.axes, self.transform, MIXING_RATIO_LINE) + self._mixing_ratios = _PlotCollection(self.axes, MIXING_RATIO_SPEC, MIXING_RATIOS, func, MIXING_RATIO_TEXT, + fixed=MIXING_RATIO_FIXED) + + # Initialise for the tephigram plot event handler. + plt.connect('motion_notify_event', _handler) + self.axes.tephigram = True + self.axes.tephigram_original_delta_xlim = self.original_delta_xlim = DEFAULT_WIDTH + self.axes.tephigram_transform = self.tephi_transform + self.axes.tephigram_inverse = self.tephi_transform.inverted() + self.axes.tephigram_isopleths = [self._isobars, self._wet_adiabats, self._mixing_ratios] + + # The tephigram profiles. + self._profiles = [] + self.axes.tephigram_profiles = self._profiles + + # Center the plot around the anchor extent. + self._anchor = anchor + if self._anchor is not None: + self._anchor = np.asarray(anchor) + if self._anchor.ndim != 2 or self._anchor.shape[-1] != 2 or \ + len(self._anchor) != 2: + msg = 'Invalid anchor, expecting [(bottom-left-pressure, ' \ + 'bottom-left-temperature), (top-right-pressure, ' \ + 'top-right-temperature)]' + raise ValueError(msg) + (bottom_pressure, bottom_temp), \ + (top_pressure, top_temp) = self._anchor + + if (bottom_pressure - top_pressure) < 0: + raise ValueError('Invalid anchor pressure range') + if (bottom_temp - top_temp) < 0: + raise ValueError('Invalid anchor temperature range') + + self._anchor = isopleths.Profile(anchor, self.axes) + self._anchor.plot(visible=False) + xlim, ylim = self._calculate_extents() + self.axes.set_xlim(xlim) + self.axes.set_ylim(ylim) + + def plot(self, data, **kwargs): + """ + Plot the environmental lapse rate profile of the pressure and + temperature data points. 
+
+        The pressure and temperature data points are transformed into
+        potential temperature and temperature data points before plotting.
+
+        By default, the tephigram will automatically center the plot around
+        all profiles.
+
+        .. warning::
+            Pressure data points must be in units of mb or hPa, and temperature
+            data points must be in units of degC.
+
+        Args:
+
+        * data: pressure and temperature pair data points.
+
+        .. note::
+            All keyword arguments are passed through to
+            :func:`matplotlib.pyplot.plot`.
+
+        For example:
+
+        .. plot::
+            :include-source:
+
+            import matplotlib.pyplot as plt
+            from tephi import Tephigram
+
+            tpg = Tephigram()
+            data = [[1006, 26.4], [924, 20.3], [900, 19.8],
+                    [850, 14.5], [800, 12.9], [755, 8.3]]
+            profile = tpg.plot(data, color='red', linestyle='--',
+                               linewidth=2, marker='o')
+            barbs = [(10, 45, 900), (20, 60, 850), (25, 90, 800)]
+            profile.barbs(barbs)
+            plt.show()
+
+        For associating wind barbs with an environmental lapse rate profile,
+        see :meth:`~tephi.isopleths.Profile.barbs`.
+
+        """
+        profile = isopleths.Profile(data, self.axes)
+        profile.plot(**kwargs)
+        self._profiles.append(profile)
+
+        # Center the tephigram plot around all the profiles.
+        if self._anchor is None:
+            xlim, ylim = self._calculate_extents(xfactor=.25, yfactor=.05)
+            self.axes.set_xlim(xlim)
+            self.axes.set_ylim(ylim)
+
+        # Refresh the tephigram plot isopleths.
+        _refresh_isopleths(self.axes)
+
+        # Show the plot legend.
+        if 'label' in kwargs:
+            font_properties = FontProperties(size='x-small')
+            plt.legend(loc='upper left', fancybox=True, shadow=True, prop=font_properties)
+
+        return profile
+
+    def _status_bar(self, x_point, y_point):
+        """Generate text for the interactive backend navigation status bar."""
+
+        temperature, theta = transforms.xy_to_temperature_theta(x_point, y_point)
+        pressure, _ = transforms.temperature_theta_to_pressure_temperature(temperature, theta)
+        xlim = self.axes.get_xlim()
+        zoom = (xlim[1] - xlim[0]) / self.original_delta_xlim
+        text = "T:%.2f, theta:%.2f, phi:%.2f (zoom:%.3f)" % (float(temperature), float(theta), float(pressure), zoom)
+
+        return text
+
+    def _calculate_extents(self, xfactor=None, yfactor=None):
+        min_x = min_y = 1e10
+        max_x = max_y = -1e10
+        profiles = self._profiles
+
+        if self._anchor is not None:
+            profiles = [self._anchor]
+
+        for profile in profiles:
+            xy_points = self.tephi_transform.transform(np.concatenate((profile.temperature.reshape(-1, 1),
+                                                                       profile.theta.reshape(-1, 1)),
+                                                                      axis=1))
+            x_points = xy_points[:, 0]
+            y_points = xy_points[:, 1]
+            min_x, min_y = np.min([min_x, np.min(x_points)]), np.min([min_y, np.min(y_points)])
+            max_x, max_y = np.max([max_x, np.max(x_points)]), np.max([max_y, np.max(y_points)])
+
+        if xfactor is not None:
+            delta_x = max_x - min_x
+            min_x, max_x = min_x - xfactor * delta_x, max_x + xfactor * delta_x
+
+        if yfactor is not None:
+            delta_y = max_y - min_y
+            min_y, max_y = min_y - yfactor * delta_y, max_y + yfactor * delta_y
+
+        return ([min_x, max_x], [min_y, max_y])
diff --git a/faampy/_3rdparty/tephi/_constants.py b/faampy/_3rdparty/tephi/_constants.py
new file mode 100644
index 0000000..b9e394e
--- /dev/null
+++ b/faampy/_3rdparty/tephi/_constants.py
@@ -0,0 +1,27 @@
+# (C) British Crown Copyright 2014, Met Office
+#
+# This file is part of tephi.
+#
+# Tephi is free software: you can redistribute it and/or modify it under
+# the terms of the GNU Lesser General Public License as published by the
+# Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Tephi is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with tephi. If not, see <http://www.gnu.org/licenses/>.
+"""Tephigram transform and isopleth constants."""
+
+# TODO: Discover the meaning of the magic constant numbers.
+
+CONST_CP = 1.01e3
+CONST_K = 0.286
+CONST_KELVIN = 273.15  # celsius to kelvin offset.
+CONST_L = 2.5e6
+CONST_MA = 300.0
+CONST_RD = 287.0
+CONST_RV = 461.0
diff --git a/faampy/_3rdparty/tephi/isopleths.py b/faampy/_3rdparty/tephi/isopleths.py
new file mode 100644
index 0000000..aa4e8a7
--- /dev/null
+++ b/faampy/_3rdparty/tephi/isopleths.py
@@ -0,0 +1,456 @@
+# (C) British Crown Copyright 2014, Met Office
+#
+# This file is part of tephi.
+#
+# Tephi is free software: you can redistribute it and/or modify it under
+# the terms of the GNU Lesser General Public License as published by the
+# Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Tephi is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with tephi. If not, see <http://www.gnu.org/licenses/>.
+"""
+Tephigram isopleth support for generating and plotting tephigram lines,
+environment profiles and barbs.
+
+"""
+
+import math
+from matplotlib.collections import PathCollection
+import matplotlib.pyplot as plt
+import matplotlib.transforms as mtransforms
+from matplotlib.path import Path
+import numpy as np
+from scipy.interpolate import interp1d
+
+from _constants import CONST_CP, CONST_L, CONST_KELVIN, CONST_RD, CONST_RV
+import transforms
+
+
+# Wind barb speed (knots) ranges used since 1 January 1955.
+_BARB_BINS = np.arange(20) * 5 + 3
+_BARB_GUTTER = 0.1
+_BARB_DTYPE = np.dtype(dict(names=('speed', 'angle', 'pressure', 'barb'),
+                            formats=('f4', 'f4', 'f4', np.object)))
+
+#
+# Reference: http://www-nwp/~hadaa/tephigram/tephi_plot.html
+#
+
+
+def mixing_ratio(min_pressure, max_pressure, axes,
+                 transform, kwargs, mixing_ratio_value):
+    """
+    Generate and plot a humidity mixing ratio line.
+
+    A line of constant saturation mixing ratio with respect to a
+    plane water surface (g kg-1).
+
+    Args:
+
+    * min_pressure:
+        Minimum pressure, in mb or hPa, for the mixing ratio line extent.
+
+    * max_pressure:
+        Maximum pressure, in mb or hPa, for the mixing ratio line extent.
+
+    * axes:
+        Tephigram plotting :class:`matplotlib.axes.AxesSubplot` instance.
+
+    * transform:
+        Tephigram plotting transformation
+        :class:`matplotlib.transforms.CompositeGenericTransform` instance.
+
+    * kwargs:
+        Keyword arguments for the mixing ratio :class:`matplotlib.lines.Line2D`
+        instance.
+
+    * mixing_ratio_value:
+        The mixing ratio value to be plotted.
+
+    Returns:
+        The mixing ratio :class:`matplotlib.lines.Line2D` instance.
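+
+    For example (an illustrative sketch only; ``tpg`` is assumed to be an
+    existing :class:`tephi.Tephigram` instance):
+
+        line = mixing_ratio(50, 1000, tpg.axes, tpg.transform,
+                            {'color': 'green', 'linewidth': 1}, 6.0)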
+ + """ + pressures = np.linspace(min_pressure, max_pressure, 100) + temps = transforms.pressure_mixing_ratio_to_temperature(pressures, + mixing_ratio_value) + _, thetas = transforms.pressure_temperature_to_temperature_theta(pressures, + temps) + line, = axes.plot(temps, thetas, transform=transform, **kwargs) + + return line + + +def isobar(min_theta, max_theta, axes, transform, kwargs, pressure): + """ + Generate and plot an isobar line. + + A line of constant pressure (mb). + + Args: + + * min_theta: + Minimum potential temperature, in degC, for the isobar extent. + + * max_theta: + Maximum potential temperature, in degC, for the isobar extent. + + * axes: + Tephigram plotting :class:`matplotlib.axes.AxesSubplot` instance. + + * transform: + Tephigram plotting transformation :class:`matplotlib.transforms.CompositeGenericTransform` + instance. + + * kwargs: + Keyword arguments for the isobar :class:`matplotlib.lines.Line2D` instance. + + * pressure: + The isobar pressure value, in mb or hPa, to be plotted. + + Returns: + The isobar :class:`matplotlib.lines.Line2D` instance. + + """ + steps = 100 + thetas = np.linspace(min_theta, max_theta, steps) + _, temps = transforms.pressure_theta_to_pressure_temperature([pressure] * steps, thetas) + line, = axes.plot(temps, thetas, transform=transform, **kwargs) + + return line + + +def _wet_adiabat_gradient(min_temperature, pressure, temperature, dp): + """ + Calculate the wet adiabat change in pressure and temperature. + + Args: + + * min_temperature: + Minimum potential temperature, in degC, for the wet adiabat line extent. + + * pressure: + Pressure point value, in mb or hPa, from which to calculate the gradient + difference. + + * temperature: + Potential temperature point value, in degC, from which to calculate + the gradient difference. + + * dp: + The wet adiabat change in pressure, in mb or hPa, from which to calculate + the gradient difference. + + Returns: + The gradient change as a pressure, potential temperature value pair. + + """ + + # TODO: Discover the meaning of the magic numbers. + + kelvin = temperature + CONST_KELVIN + lsbc = (CONST_L / CONST_RV) * ((1.0 / CONST_KELVIN) - (1.0 / kelvin)) + rw = 6.11 * np.exp(lsbc) * (0.622 / pressure) + lrwbt = (CONST_L * rw) / (CONST_RD * kelvin) + nume = ((CONST_RD * kelvin) / (CONST_CP * pressure)) * (1.0 + lrwbt) + deno = 1.0 + (lrwbt * ((0.622 * CONST_L) / (CONST_CP * kelvin))) + gradi = nume / deno + dt = dp * gradi + + if (temperature + dt) < min_temperature: + dt = min_temperature - temperature + dp = dt / gradi + + return dp, dt + + +def wet_adiabat(max_pressure, min_temperature, axes, + transform, kwargs, temperature): + """ + Generate and plot a pseudo saturated wet adiabat line. + + A line of constant equivalent potential temperature for saturated + air parcels (degC). + + Args: + + * max_pressure: + Maximum pressure, in mb or hPa, for the wet adiabat line extent. + + * min_temperature: + Minimum potential temperature, in degC, for the wet adiabat line extent. + + * axes: + Tephigram plotting :class:`matplotlib.axes.AxesSubplot` instance. + + * transform: + Tephigram plotting transformation + :class:`matplotlib.transforms.CompositeGenericTransform` instance. + + * kwargs: + Keyword arguments for the mixing ratio :class:`matplotlib.lines.Line2D` + instance. + + * temperature: + The wet adiabat value, in degC, to be plotted. + + Returns: + The wet adiabat :class:`matplotlib.lines.Line2D` instance. 
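+
+    For example (an illustrative sketch only; ``tpg`` is assumed to be an
+    existing :class:`tephi.Tephigram` instance):
+
+        line = wet_adiabat(1000, -50, tpg.axes, tpg.transform,
+                           {'color': 'orange'}, 20)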
+ + """ + temps = [temperature] + pressures = [max_pressure] + dp = -5.0 + + for i in xrange(200): + dp, dt = _wet_adiabat_gradient(min_temperature, pressures[i], + temps[i], dp) + temps.append(temps[i] + dt) + pressures.append(pressures[i] + dp) + + _, thetas = transforms.pressure_temperature_to_temperature_theta(pressures, + temps) + line, = axes.plot(temps, thetas, transform=transform, **kwargs) + + return line + + +class Barbs(object): + """Generate a wind arrow barb.""" + + def __init__(self, axes): + """ + Create a wind arrow barb for the given axes. + + Args: + + * axes: + A :class:`matplotlib.axes.AxesSubplot` instance. + + """ + self.axes = axes + self.barbs = None + self._gutter = None + self._transform = axes.tephigram_transform + axes.transData + self._kwargs = None + self._custom_kwargs = None + self._custom = dict(color=['barbcolor', 'color', 'edgecolor', 'facecolor'], + linewidth=['lw', 'linewidth'], + linestyle=['ls', 'linestyle']) + + @staticmethod + def _uv(magnitude, angle): + """ + Convert magnitude and angle measured in degrees to u and v components, + where u is -x and v is -y. + + """ + angle = angle % 360 + u = v = 0 + # Snap the magnitude of the barb vector to fall into one of the + # _BARB_BINS ensuring it's a multiple of five. Five is the increment + # step size for decorating with barb with flags. + magnitude = np.searchsorted(_BARB_BINS, magnitude, side='right') * 5 + modulus = angle % 90 + if modulus: + quadrant = int(angle / 90) + radians = math.radians(modulus) + y = math.cos(radians) * magnitude + x = math.sin(radians) * magnitude + if quadrant == 0: + u, v = -x, -y + elif quadrant == 1: + u, v = -y, x + elif quadrant == 2: + u, v = x, y + else: + u, v = y, -x + else: + angle = int(angle) + if angle == 0: + v = -magnitude + elif angle == 90: + u = -magnitude + elif angle == 180: + v = magnitude + else: + u = magnitude + return u, v + + def _make_barb(self, temperature, theta, speed, angle): + """Add the barb to the plot at the specified location.""" + u, v = self._uv(speed, angle) + if 0 < speed < _BARB_BINS[0]: + # Plot the missing barbless 1-2 knots line. + length = self._kwargs['length'] + pivot_points = dict(tip=0.0, middle=-length / 2.) 
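+            # Mirror the matplotlib barb 'pivot' behaviour: 'tip' anchors the
+            # stem at the data point, while 'middle' shifts it back by half
+            # its length before the rotation below is applied.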
+ pivot = self._kwargs.get('pivot', 'tip') + offset = pivot_points[pivot] + verts = [(0.0, offset), (0.0, length + offset)] + verts = mtransforms.Affine2D().rotate(math.radians(-angle)).transform(verts) + codes = [Path.MOVETO, Path.LINETO] + path = Path(verts, codes) + size = length ** 2 / 4 + xy = np.array([[temperature, theta]]) + barb = PathCollection([path], (size,), offsets=xy, + transOffset=self._transform, **self._custom_kwargs) + barb.set_transform(mtransforms.IdentityTransform()) + self.axes.add_collection(barb) + else: + barb = plt.barbs(temperature, theta, u, v, + transform=self._transform, **self._kwargs) + return barb + + def refresh(self): + """Refresh the plot with the barbs.""" + if self.barbs is not None: + xlim = self.axes.get_xlim() + ylim = self.axes.get_ylim() + y = np.linspace(*ylim)[::-1] + xdelta = xlim[1] - xlim[0] + x = np.asarray([xlim[1] - (xdelta * self._gutter)] * y.size) + points = self.axes.tephigram_inverse.transform(np.asarray(zip(x, y))) + temperature, theta = points[:, 0], points[:, 1] + pressure, _ = transforms.temperature_theta_to_pressure_temperature(temperature, + theta) + min_pressure, max_pressure = np.min(pressure), np.max(pressure) + func = interp1d(pressure, temperature) + for i, (speed, angle, pressure, barb) in enumerate(self.barbs): + if min_pressure < pressure < max_pressure: + temperature, theta = transforms.pressure_temperature_to_temperature_theta(pressure, + func(pressure)) + if barb is None: + self.barbs[i]['barb'] = self._make_barb(temperature, theta, speed, angle) + else: + barb.set_offsets(np.array([[temperature, theta]])) + barb.set_visible(True) + else: + if barb is not None: + barb.set_visible(False) + + def plot(self, barbs, **kwargs): + """ + Plot the sequence of barbs. + + Args: + + * barbs: + Sequence of speed, direction and pressure value triples for + each barb. Where speed is measured in units of knots, direction + in units of degrees (clockwise from north), and pressure must + be in units of mb or hPa. + + Kwargs: + + * gutter: + Proportion offset from the right hand side axis to plot the + barbs. Defaults to 0.1 + + Also see :func:`matplotlib.pyplot.barbs` + + """ + self._gutter = kwargs.pop('gutter', _BARB_GUTTER) + self._kwargs = dict(length=7, zorder=10) + self._kwargs.update(kwargs) + self._custom_kwargs = dict(color=None, linewidth=1.5, + zorder=self._kwargs['zorder']) + for key, values in self._custom.iteritems(): + common = set(values).intersection(kwargs) + if common: + self._custom_kwargs[key] = kwargs[sorted(common)[0]] + barbs = np.asarray(barbs) + if barbs.ndim != 2 or barbs.shape[-1] != 3: + msg = 'The barbs require to be a sequence of wind speed, ' \ + 'wind direction and pressure value triples.' + raise ValueError(msg) + self.barbs = np.empty(barbs.shape[0], dtype=_BARB_DTYPE) + for i, barb in enumerate(barbs): + self.barbs[i] = tuple(barb) + (None,) + self.refresh() + + +class Profile(object): + """Generate an environmental lapse rate profile.""" + + def __init__(self, data, axes): + """ + Create an environmental lapse rate profile from the sequence of + pressure and temperature point data. + + Args: + + * data: + Sequence of pressure and temperature points defining the + environmental lapse rate. + + * axes: + The axes on which to plot the profile. + + """ + self.data = np.asarray(data) + if self.data.ndim != 2 or self.data.shape[-1] != 2: + msg = 'The environment profile data requires to be a sequence ' \ + 'of pressure, temperature value pairs.' 
+            raise ValueError(msg)
+        self.axes = axes
+        self._transform = axes.tephigram_transform + axes.transData
+        self.pressure = self.data[:, 0]
+        self.temperature = self.data[:, 1]
+        _, self.theta = transforms.pressure_temperature_to_temperature_theta(self.pressure,
+                                                                             self.temperature)
+        self.line = None
+        self._barbs = Barbs(axes)
+
+    def plot(self, **kwargs):
+        """
+        Plot the environmental lapse rate profile.
+
+        Kwargs:
+
+        See :func:`matplotlib.pyplot.plot`.
+
+        Returns:
+            The profile :class:`matplotlib.lines.Line2D` instance.
+
+        """
+        if self.line is not None and self.line in self.axes.lines:
+            self.axes.lines.remove(self.line)
+
+        if 'zorder' not in kwargs:
+            kwargs['zorder'] = 10
+
+        self.line, = self.axes.plot(self.temperature, self.theta,
+                                    transform=self._transform, **kwargs)
+        return self.line
+
+    def refresh(self):
+        """Refresh the plot with the profile and any associated barbs."""
+        self._barbs.refresh()
+
+    def barbs(self, barbs, **kwargs):
+        """
+        Plot the sequence of barbs associated with this profile.
+
+        Args:
+
+        * barbs:
+            Sequence of speed, direction and pressure value triples for
+            each barb. Where speed is measured in units of knots, direction
+            in units of degrees (clockwise from north), and pressure must
+            be in units of mb or hPa.
+
+        Kwargs:
+
+        See :func:`matplotlib.pyplot.barbs`
+
+        """
+        colors = ['color', 'barbcolor', 'edgecolor', 'facecolor']
+        if not set(colors).intersection(kwargs):
+            kwargs['color'] = self.line.get_color()
+        self._barbs.plot(barbs, **kwargs)
diff --git a/faampy/_3rdparty/tephi/tests/__init__.py b/faampy/_3rdparty/tephi/tests/__init__.py
new file mode 100644
index 0000000..6f6579d
--- /dev/null
+++ b/faampy/_3rdparty/tephi/tests/__init__.py
@@ -0,0 +1,275 @@
+# (C) British Crown Copyright 2014, Met Office
+#
+# This file is part of tephi.
+#
+# Tephi is free software: you can redistribute it and/or modify it under
+# the terms of the GNU Lesser General Public License as published by the
+# Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Tephi is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with tephi. If not, see <http://www.gnu.org/licenses/>.
+"""
+Provides enhanced testing capabilities.
+
+The primary class for this module is :class:`TephiTest`.
+
+When importing this module, sys.argv is inspected to identify the flags
+``-d`` and ``-sf`` which toggle displaying and saving image tests respectively.
+
+.. note:: The ``-d`` option sets the matplotlib backend to either agg or
+          tkagg. For this reason ``tephi.tests`` **must** be imported before
+          ``matplotlib.pyplot``
+
+"""
+
+import collections
+import contextlib
+import difflib
+import logging
+import os
+import os.path
+import platform
+import StringIO
+import sys
+import tempfile
+import unittest
+import zlib
+
+import matplotlib
+# NB pyplot is imported after main() so that a backend can be defined.
+# import matplotlib.pyplot as plt
+import numpy
+
+from tephi import DATA_DIR, RESULTS_DIR
+
+
+_DATA_PATH = DATA_DIR
+"""Basepath for test data."""
+
+_RESULT_PATH = RESULTS_DIR
+"""Basepath for test results."""
+
+
+# A shared logger for use by unit tests
+logging.basicConfig()
+logger = logging.getLogger('tests')
+
+
+# Whether to display matplotlib output to the screen.
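+# (Toggled by the '-d' command-line flag handled below.)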
+_DISPLAY_FIGURES = False + +# Whether to save matplotlib output to files. +_SAVE_FIGURES = True + +if '-d' in sys.argv: + sys.argv.remove('-d') + matplotlib.use('tkagg') + _DISPLAY_FIGURES = True +else: + matplotlib.use('agg') + +# Imported now so that matplotlib.use can work +import matplotlib.pyplot as plt + +if '-sf' in sys.argv or os.environ.get('TEPHI_TEST_SAVE_FIGURES', '') == '1': + if '-sf' in sys.argv: + sys.argv.remove('-sf') + _SAVE_FIGURES = True + + +_PLATFORM = '%s_%s' % (''.join(platform.dist()[:2]), platform.architecture()[0]) + + +def main(): + """ + A wrapper for unittest.main() which adds customised options to the + help (-h) output. + + """ + if '-h' in sys.argv or '--help' in sys.argv: + stdout = sys.stdout + buff = StringIO.StringIO() + # NB. unittest.main() raises an exception after it's shown the help text + try: + sys.stdout = buff + unittest.main() + finally: + sys.stdout = stdout + lines = buff.getvalue().split('\n') + lines.insert(9, 'Tephi-specific options:') + lines.insert(10, ' -d Display matplotlib figures (uses tkagg)') + lines.insert(11, ' -sf Save matplotlib figures to subfolder "image_results"') + print '\n'.join(lines) + else: + unittest.main() + + +def get_data_path(relative_path): + """ + Returns the absolute path to a data file when given the relative path + as a string, or sequence of strings. + + """ + if not isinstance(relative_path, basestring): + relative_path = os.path.join(*relative_path) + return os.path.abspath(os.path.join(_DATA_PATH, relative_path)) + + +def get_result_path(relative_path): + """ + Returns the absolute path to a result file when given the relative path + as a string, or sequence of strings. + + """ + if not isinstance(relative_path, basestring): + relative_path = os.path.join(*relative_path) + return os.path.abspath(os.path.join(_RESULT_PATH, relative_path)) + + +class TephiTest(unittest.TestCase): + """ + A subclass of unittest.TestCase which provides testing functionality + specific to tephi. + + """ + + _assertion_counts = collections.defaultdict(int) + + def file_checksum(self, file_path): + """ + Generate checksum from file. + """ + in_file = open(file_path, "rb") + return zlib.crc32(in_file.read()) + + def _unique_id(self): + """ + Returns the unique ID for the current assertion. + + The ID is composed of two parts: a unique ID for the current test + (which is itself composed of the module, class, and test names), and + a sequential counter (specific to the current test) that is incremented + on each call. + + For example, calls from a "test_tx" routine followed by a "test_ty" + routine might result in:: + test_plot.TestContourf.test_tx.0 + test_plot.TestContourf.test_tx.1 + test_plot.TestContourf.test_tx.2 + test_plot.TestContourf.test_ty.0 + + """ + # Obtain a consistent ID for the current test. + + # NB. unittest.TestCase.id() returns different values depending on + # whether the test has been run explicitly, or via test discovery. + # For example: + # python tests/test_brand.py + # => '__main__.TestBranding.test_combo' + # python -m unittest discover + # => 'tephi.tests.test_brand.TestBranding.test_combo' + bits = self.id().split('.')[-3:] + if bits[0] == '__main__': + file_name = os.path.basename(sys.modules['__main__'].__file__) + bits[0] = os.path.splitext(file_name)[0] + test_id = '.'.join(bits) + + # Derive the sequential assertion ID within the test + assertion_id = self._assertion_counts[test_id] + self._assertion_counts[test_id] += 1 + + return test_id + '.' 
+ str(assertion_id) + + def _ensure_folder(self, path): + dir_path = os.path.dirname(path) + if not os.path.exists(dir_path): + logger.warning('Creating folder: %s', dir_path) + os.makedirs(dir_path) + + def create_temp_filename(self, suffix=''): + """ + Return a temporary file name. + + Args: + + * suffix - Optional filename extension. + + """ + temp_file = tempfile.mkstemp(suffix) + os.close(temp_file[0]) + return temp_file[1] + + @contextlib.contextmanager + def temp_filename(self, suffix=''): + filename = self.create_temp_filename(suffix) + yield filename + os.remove(filename) + + def assertArrayEqual(self, a, b): + return numpy.testing.assert_array_equal(a, b) + + def assertArrayAlmostEqual(self, a, b, *args, **kwargs): + return numpy.testing.assert_array_almost_equal(a, b, *args, **kwargs) + + +class GraphicsTest(TephiTest): + def tearDown(self): + # If a plotting test bombs out it can leave the current figure in an + # odd state, so we make sure it's been disposed of. + plt.close() + + def _get_image_checksum(self, unique_id, resultant_checksum): + checksum_result_path = get_result_path(('image_checksums', _PLATFORM, unique_id + '.txt')) + if os.path.isfile(checksum_result_path): + with open(checksum_result_path, 'r') as checksum_file: + checksum = int(checksum_file.readline().strip()) + else: + self._ensure_folder(checksum_result_path) + logger.warning('Creating image checksum result file: %s', checksum_result_path) + checksum = resultant_checksum + open(checksum_result_path, 'w').writelines(str(checksum)) + return checksum + + def check_graphic(self): + """ + Checks the CRC matches for the current matplotlib.pyplot figure, and + closes the figure. + + """ + unique_id = self._unique_id() + + figure = plt.gcf() + + try: + suffix = '.png' + if _SAVE_FIGURES: + file_path = get_result_path(('image_results', + unique_id + suffix)) + dir_path = os.path.dirname(file_path) + if not os.path.isdir(dir_path): + os.makedirs(dir_path) + else: + file_path = self.create_temp_filename(suffix) + + figure.savefig(file_path) + resultant_checksum = self.file_checksum(file_path) + + if not _SAVE_FIGURES: + os.remove(file_path) + + checksum = self._get_image_checksum(unique_id, resultant_checksum) + + if _DISPLAY_FIGURES: + if resultant_checksum != checksum: + print 'Test would have failed (new checksum: %s ; old checksum: %s)' % (resultant_checksum, checksum) + plt.show() + else: + self.assertEqual(resultant_checksum, checksum, 'Image checksums not equal for %s' % unique_id) + finally: + plt.close() diff --git a/faampy/_3rdparty/tephi/tests/test_tephigram.py b/faampy/_3rdparty/tephi/tests/test_tephigram.py new file mode 100644 index 0000000..a95ea13 --- /dev/null +++ b/faampy/_3rdparty/tephi/tests/test_tephigram.py @@ -0,0 +1,269 @@ +# (C) British Crown Copyright 2014, Met Office +# +# This file is part of tephi. +# +# Tephi is free software: you can redistribute it and/or modify it under +# the terms of the GNU Lesser General Public License as published by the +# Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Tephi is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with tephi. If not, see . +""" +Tests the tephigram plotting capability provided by tephi. 
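+
+The tests can be run directly, for example (the ``-d`` and ``-sf`` flags
+are handled by ``tephi.tests``):
+
+    python test_tephigram.py -sf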
+ +""" +# Import tephi test package first so that some things can be initialised +# before importing anything else. +import tephi.tests as tests + +import cPickle + +import matplotlib.pyplot as plt +import numpy as np + +import tephi +from tephi import Tephigram + + +def _load_result(filename): + with open(tephi.tests.get_result_path(filename)) as f: + result = cPickle.load(f) + return result + + +_expected_dews = _load_result('dews.pkl') +_expected_temps = _load_result('temps.pkl') +_expected_barbs = _load_result('barbs.pkl') + + +class TestTephigramLoadTxt(tests.TephiTest): + def setUp(self): + self.filename_dews = tephi.tests.get_data_path('dews.txt') + self.filename_temps = tephi.tests.get_data_path('temps.txt') + self.filename_barbs = tephi.tests.get_data_path('barbs.txt') + self.filename_comma = tephi.tests.get_data_path('comma_sep.txt') + + def test_is_not_file(self): + with self.assertRaises(OSError): + tephi.loadtxt('wibble') + + def test_load_data_no_column_names(self): + dews = tephi.loadtxt(self.filename_dews) + self.assertEqual(dews._fields, ('pressure', 'temperature')) + self.assertArrayEqual(dews.pressure, _expected_dews[0]) + self.assertArrayEqual(dews, _expected_dews) + + def test_load_data_with_column_names(self): + # Column titles test all valid namedtuple characters (alphanumeric, _). + columns = ('pressure', 'dewpoint2', 'wind_speed', 'WindDirection') + barbs = tephi.loadtxt(self.filename_barbs, column_titles=columns) + self.assertEqual(barbs._fields, columns) + self.assertArrayEqual(barbs.wind_speed, _expected_barbs[2]) + self.assertArrayEqual(barbs, _expected_barbs) + + def test_load_multiple_files_same_column_names(self): + columns = ('foo', 'bar') + dews, temps = tephi.loadtxt(self.filename_dews, + self.filename_temps, + column_titles=columns) + self.assertEqual(dews._fields, columns) + self.assertEqual(temps._fields, columns) + + def test_load_data_too_many_column_iterables(self): + columns = [('pressure', 'dewpoint'), + ('pressure', 'wind_speed', 'wind_direction')] + with self.assertRaises(ValueError): + dews = tephi.loadtxt(self.filename_dews, column_titles=columns) + + def test_number_of_columns_and_titles_not_equal(self): + columns = ('pressure', 'dewpoint', 'wind_speed') + with self.assertRaises(TypeError): + barbs = tephi.loadtxt(self.filename_barbs, + column_titles=columns) + + def test_invalid_column_titles(self): + columns = ('pres-sure', 'dew+point', 5) + with self.assertRaises(ValueError): + dews = tephi.loadtxt(self.filename_dews, + column_titles=columns) + + def test_non_iterable_column_title(self): + # For the case of column titles, strings are considered non-iterable. 
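+        # A bare string is therefore rejected with a TypeError, rather than
+        # being treated as an iterable of single characters.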
+        columns = 'pressure'
+        with self.assertRaises(TypeError):
+            dews = tephi.loadtxt(self.filename_dews,
+                                 column_titles=columns)
+
+    def test_delimiter(self):
+        columns = ('pressure', 'temperature', 'wind_direction', 'wind_speed')
+        data = tephi.loadtxt(self.filename_comma, column_titles=columns,
+                             delimiter=',')
+        self.assertEqual(data.pressure.shape, (2,))
+
+    def test_dtype(self):
+        dews = tephi.loadtxt(self.filename_dews, dtype='i4')
+        self.assertIsInstance(dews.pressure[0], np.int32)
+        self.assertIsInstance(dews.temperature[0], np.int32)
+
+
+class TestTephigramPlot(tests.GraphicsTest):
+    def setUp(self):
+        dew_data = _expected_dews
+        self.dews = zip(dew_data[0], dew_data[1])
+        temp_data = _expected_temps
+        self.temps = zip(temp_data[0], temp_data[1])
+
+    def test_plot_dews(self):
+        tpg = Tephigram()
+        tpg.plot(self.dews)
+        self.check_graphic()
+
+    def test_plot_temps(self):
+        tpg = Tephigram()
+        tpg.plot(self.temps)
+        self.check_graphic()
+
+    def test_plot_dews_temps(self):
+        tpg = Tephigram()
+        tpg.plot(self.dews)
+        tpg.plot(self.temps)
+        self.check_graphic()
+
+    def test_plot_dews_label(self):
+        tpg = Tephigram()
+        tpg.plot(self.dews, label='Dew-point temperature')
+        self.check_graphic()
+
+    def test_plot_temps_label(self):
+        tpg = Tephigram()
+        tpg.plot(self.temps, label='Dry-bulb temperature')
+        self.check_graphic()
+
+    def test_plot_dews_custom(self):
+        tpg = Tephigram()
+        tpg.plot(self.dews, label='Dew-point temperature', linewidth=2, color='blue', marker='s')
+        self.check_graphic()
+
+    def test_plot_temps_custom(self):
+        tpg = Tephigram()
+        tpg.plot(self.temps, label='Dry-bulb temperature', linewidth=2, color='red', marker='o')
+        self.check_graphic()
+
+    def test_plot_dews_temps_custom(self):
+        tpg = Tephigram()
+        tpg.plot(self.dews, label='Dew-point temperature', linewidth=2, color='blue', marker='s')
+        tpg.plot(self.temps, label='Dry-bulb temperature', linewidth=2, color='red', marker='o')
+        self.check_graphic()
+
+    def test_plot_dews_locator_isotherm_numeric(self):
+        tpg = Tephigram(isotherm_locator=10)
+        tpg.plot(self.dews)
+        self.check_graphic()
+
+    def test_plot_dews_locator_isotherm_object(self):
+        tpg = Tephigram(isotherm_locator=tephi.Locator(10))
+        tpg.plot(self.dews)
+        self.check_graphic()
+
+    def test_plot_dews_locator_adiabat_numeric(self):
+        tpg = Tephigram(dry_adiabat_locator=10)
+        tpg.plot(self.dews)
+        self.check_graphic()
+
+    def test_plot_dews_locator_adiabat_object(self):
+        tpg = Tephigram(dry_adiabat_locator=tephi.Locator(10))
+        tpg.plot(self.dews)
+        self.check_graphic()
+
+    def test_plot_dews_locator_numeric(self):
+        tpg = Tephigram(isotherm_locator=10, dry_adiabat_locator=10)
+        tpg.plot(self.dews)
+        self.check_graphic()
+
+    def test_plot_dews_locator_object(self):
+        locator = tephi.Locator(10)
+        tpg = Tephigram(isotherm_locator=locator, dry_adiabat_locator=locator)
+        tpg.plot(self.dews)
+        self.check_graphic()
+
+    def test_plot_anchor(self):
+        tpg = Tephigram(anchor=[(1000, 0), (300, 0)])
+        tpg.plot(self.dews)
+        self.check_graphic()
+
+
+class TestTephigramBarbs(tests.GraphicsTest):
+    def setUp(self):
+        self.dews = zip(_expected_dews[0], _expected_dews[1])
+        temp_data = _expected_temps
+        self.temps = zip(_expected_temps[0], _expected_temps[1])
+        magnitude = np.hstack(([0], np.arange(20) * 5 + 2, [102]))
+        self.barbs = [(m, 45, 1000 - i*35) for i, m in enumerate(magnitude)]
+
+    def test_rotate(self):
+        tpg = Tephigram()
+        profile = tpg.plot(self.temps)
+        profile.barbs([(0, 0, 900),
+                       (1, 30, 850),
+                       (5, 60, 800),
+                       (10, 90, 750),
+                       (15, 120, 700),
+                       (20, 150,
650), + (25, 180, 600), + (30, 210, 550), + (35, 240, 500), + (40, 270, 450), + (45, 300, 400), + (50, 330, 350), + (55, 360, 300)], zorder=10) + self.check_graphic() + + def test_barbs(self): + tpg = Tephigram() + profile = tpg.plot(self.temps) + profile.barbs(self.barbs, zorder=10) + self.check_graphic() + + def test_barbs_from_file(self): + tpg = Tephigram() + dews = zip(_expected_barbs[0], _expected_barbs[1]) + barbs = zip(_expected_barbs[2], _expected_barbs[3], _expected_barbs[0]) + profile = tpg.plot(dews) + profile.barbs(barbs, zorder=10) + self.check_graphic() + + def test_gutter(self): + tpg = Tephigram() + profile = tpg.plot(self.temps) + profile.barbs(self.barbs, gutter=0.5, zorder=10) + self.check_graphic() + + def test_length(self): + tpg = Tephigram() + profile = tpg.plot(self.temps) + profile.barbs(self.barbs, gutter=0.9, length=10, zorder=10) + self.check_graphic() + + def test_color(self): + tpg = Tephigram() + profile = tpg.plot(self.temps) + profile.barbs(self.barbs, color='green', zorder=10) + self.check_graphic() + + def test_pivot(self): + tpg = Tephigram() + tprofile = tpg.plot(self.temps) + tprofile.barbs(self.barbs, gutter=0.2, pivot='tip', length=8) + dprofile = tpg.plot(self.dews) + dprofile.barbs(self.barbs, gutter=0.3, pivot='middle', length=8) + self.check_graphic() + + +if __name__ == '__main__': + tests.main() diff --git a/faampy/_3rdparty/tephi/transforms.py b/faampy/_3rdparty/tephi/transforms.py new file mode 100644 index 0000000..68ec461 --- /dev/null +++ b/faampy/_3rdparty/tephi/transforms.py @@ -0,0 +1,262 @@ +# (C) British Crown Copyright 2014, Met Office +# +# This file is part of tephi. +# +# Tephi is free software: you can redistribute it and/or modify it under +# the terms of the GNU Lesser General Public License as published by the +# Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Tephi is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with tephi. If not, see . +""" +Tephigram transform support. + +""" + +import matplotlib as mpl +from matplotlib.transforms import Transform +import numpy as np +import types + +from _constants import CONST_K, CONST_KELVIN, CONST_L, CONST_MA, CONST_RV + + +# +# Reference: http://www-nwp/~hadaa/tephigram/tephi_plot.html +# + + +def temperature_theta_to_pressure_temperature(temperature, theta): + """ + Transform temperature and potential temperature into + pressure and temperature. + + Args: + + * temperature: + Temperature in degC. + + * theta: + Potential temperature in degC. + + Returns: + Tuple of pressure, in mb or hPa, and temperature, in degC. + + """ + temperature, theta = np.asarray(temperature), np.asarray(theta) + + # Convert temperature and theta from degC to kelvin. + kelvin = temperature + CONST_KELVIN + theta = theta + CONST_KELVIN + + # Calculate the associated pressure given the temperature and + # potential temperature. + pressure = 1000.0 * np.power(kelvin / theta, 1 / CONST_K) + + return pressure, temperature + + +def pressure_temperature_to_temperature_theta(pressure, temperature): + """ + Transform pressure and temperature into temperature and + potential temperature. + + Args: + + * pressure: + Pressure in mb or hPa. + + * temperature: + Temperature in degC. 
+
+    Returns:
+        Tuple of temperature, in degC, and potential temperature, in degC.
+
+    """
+    pressure, temperature = np.asarray(pressure), np.asarray(temperature)
+
+    # Convert temperature from degC to kelvin.
+    kelvin = temperature + CONST_KELVIN
+
+    # Calculate the potential temperature given the pressure and temperature.
+    theta = kelvin * ((1000.0 / pressure) ** CONST_K)
+
+    # Convert potential temperature from kelvin to degC.
+    return temperature, theta - CONST_KELVIN
+
+
+def pressure_theta_to_pressure_temperature(pressure, theta):
+    """
+    Transform pressure and potential temperature into pressure and temperature.
+
+    Args:
+
+    * pressure:
+        Pressure in mb or hPa.
+
+    * theta:
+        Potential temperature in degC.
+
+    Returns:
+        Tuple of pressure, in mb or hPa, and temperature, in degC.
+
+    """
+    pressure, theta = np.asarray(pressure), np.asarray(theta)
+
+    # Convert potential temperature from degC to kelvin.
+    theta = theta + CONST_KELVIN
+
+    # Calculate the temperature given the pressure and
+    # potential temperature.
+    kelvin = theta * (pressure ** CONST_K) / (1000.0 ** CONST_K)
+
+    # Convert temperature from kelvin to degC.
+    return pressure, kelvin - CONST_KELVIN
+
+
+def temperature_theta_to_xy(temperature, theta):
+    """
+    Transform temperature and potential temperature to native display
+    coordinates.
+
+    Args:
+
+    * temperature:
+        Temperature in degC.
+
+    * theta:
+        Potential temperature in degC.
+
+    Returns:
+        Native display x and y coordinates.
+
+    """
+    temperature, theta = np.asarray(temperature), np.asarray(theta)
+
+    # Convert potential temperature from degC to kelvin.
+    theta = theta + CONST_KELVIN
+    theta = np.clip(theta, 1, 1e10)
+
+    phi = np.log(theta)
+
+    x_data = phi * CONST_MA + temperature
+    y_data = phi * CONST_MA - temperature
+
+    return x_data, y_data
+
+
+def xy_to_temperature_theta(x_data, y_data):
+    """
+    Transform native display coordinates to temperature and
+    potential temperature.
+
+    Args:
+
+    * x_data:
+        Native display x-coordinate(s).
+
+    * y_data:
+        Native display y-coordinate(s).
+
+    Returns:
+        Temperature, in degC, and potential temperature, in degC.
+
+    """
+    x_data, y_data = np.asarray(x_data), np.asarray(y_data)
+
+    phi = (x_data + y_data) / (2 * CONST_MA)
+    temperature = (x_data - y_data) / 2.
+
+    theta = np.exp(phi) - CONST_KELVIN
+
+    return temperature, theta
+
+
+def pressure_mixing_ratio_to_temperature(pressure, mixing_ratio):
+    """
+    Transform pressure and mixing ratio to temperature.
+
+    Args:
+
+    * pressure:
+        Pressure in mb or hPa.
+
+    * mixing_ratio:
+        Mixing ratio in g kg-1.
+
+    Returns:
+        Temperature in degC.
+
+    """
+    pressure = np.array(pressure)
+
+    # Calculate the dew-point.
+    vapp = pressure * (8.0 / 5.0) * (mixing_ratio / 1000.0)
+    temp = 1.0 / ((1.0 / CONST_KELVIN) - ((CONST_RV / CONST_L) * np.log(vapp / 6.11)))
+
+    return temp - CONST_KELVIN
+
+
+class TephiTransform(Transform):
+    """
+    Tephigram transformation to convert from temperature and
+    potential temperature to native plotting device coordinates.
+
+    """
+    input_dims = 2
+    output_dims = 2
+    is_separable = False
+    has_inverse = True
+
+    def transform_non_affine(self, values):
+        """
+        Transform from tephigram temperature and potential temperature
+        to native plotting device coordinates.
+
+        Args:
+
+        * values:
+            Values to be transformed, with shape (N, 2).
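+            Each row holds a (temperature, potential temperature) pair,
+            in degC, as consumed by :func:`temperature_theta_to_xy`.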
+
+        """
+        return np.concatenate(temperature_theta_to_xy(values[:, 0:1], values[:, 1:2]), axis=1)
+
+    def inverted(self):
+        """Return the inverse transformation."""
+        return TephiTransformInverted()
+
+
+class TephiTransformInverted(Transform):
+    """
+    Tephigram inverse transformation to convert from native
+    plotting device coordinates to tephigram temperature and
+    potential temperature.
+
+    """
+    input_dims = 2
+    output_dims = 2
+    is_separable = False
+    has_inverse = True
+
+    def transform_non_affine(self, values):
+        """
+        Transform from native plotting display coordinates to tephigram
+        temperature and potential temperature.
+
+        Args:
+
+        * values:
+            Values to be transformed, with shape (N, 2).
+
+        """
+        return np.concatenate(xy_to_temperature_theta(values[:, 0:1], values[:, 1:2]), axis=1)
+
+    def inverted(self):
+        """Return the inverse transformation."""
+        return TephiTransform()
diff --git a/faampy/__init__.py b/faampy/__init__.py
new file mode 100644
index 0000000..8ec53ad
--- /dev/null
+++ b/faampy/__init__.py
@@ -0,0 +1,81 @@
+"""
+faampy main command which calls subcommands.
+
+The usage is, for example:
+
+    faampy nc_to_gpx ....
+
+"""
+
+
+import argparse
+import importlib
+import os
+import sys
+
+
+__version__ = '0.1.2'
+__status__ = 'a'
+
+
+def command_line():
+    # list of subcommands and associated python scripts;
+    # this has to be maintained manually and has to be
+    # checked/updated with every release
+
+    MODULES = [('data_download', 'faampy.utils.data_download'),
+               ('nimrod_to_nc', 'faampy.data_io.nimrod_to_nc'),
+               ('ge_ncas_airquality', 'faampy.mapping.ge_ncas_airquality'),
+               ('ge_nimrod_to_kmz', 'faampy.mapping.ge_nimrod_to_kmz'),
+               ('ge_photo_album', 'faampy.mapping.ge_photo_album'),
+               ('ge_was_to_kmz', 'faampy.mapping.ge_was_to_kmz'),
+               ('nc_to_gpx', 'faampy.mapping.nc_to_gpx'),
+               ('ge_ncvar_to_kml', 'faampy.mapping.ge_ncvar_to_kml'),
+               ('world_map', 'faampy.mapping.world_map'),
+               ('sat_tracker', 'faampy.mapping.sat_tracker'),
+               ('flight_summary', 'faampy.core.flight_summary'),
+               ('plt_quicklooks', 'faampy.plotting.quicklooks')]
+
+    parser = argparse.ArgumentParser(description="faampy",
+                                     usage="""faampy <command> [<args>]
+
+%s
+""" % (' \n'.join([' '+m[0] for m in MODULES])))
+    parser.add_argument('command', help='Subcommand to run')
+    # parse_args defaults to [1:] for args, but you need to
+    # exclude the rest of the args too, or validation will fail
+    args = parser.parse_args(sys.argv[1:2])
+    # check if subcommand is available.
If not, print help and exit.
+    if args.command not in zip(*MODULES)[0]:
+        sys.stdout.write('Unrecognized command\n')
+        parser.print_help()
+        sys.exit(1)
+    else:
+        sys.argv = sys.argv[2:]
+        # use dispatch pattern to invoke method with same name
+        for MOD in MODULES:
+            if args.command == MOD[0]:
+                mod = importlib.import_module(MOD[1])
+                mod.main()  # the main method is called
+
+FAAMPY_DATA_PATH = os.path.join(os.environ['HOME'], 'faampy_data')
+SRTM_DATA_PATH = os.path.join(FAAMPY_DATA_PATH, 'srtm')
+
+FAAM_DATA = ['icons/camera_icon_32x32.png',
+             'icons/dot_blue_32x32.png',
+             'icons/dot_green_32x32.png',
+             'icons/dot_orange_32x32.png',
+             'icons/dot_red_32x32.png',
+             'icons/dropsonde_32x32.png',
+             'icons/was_bottle_32x32.png',
+             'img/world_osm_54030_6804x3450.png',
+             'img/uk_osm_background_2244x2642.png',
+             'img/world_osm_54030_27234x13812.png',
+             'img/world_osm_54030_4806x2437.png',
+             'img/world_osm_54030_13617x6906.png',
+             'img/uk_osm_background_3366x3963.png',
+             'img/world_osm_54030_9633x4885.png',
+             'img/world_osm_54030_3402x1725.png',
+             'img/uk_osm_background_9507x11195.png',
+             'img/uk_osm_background_6732x7927.png',
+             'img/uk_osm_background_19014x22390.png',
+             'img/world_osm_54030_1134x575.png', ]
diff --git a/faampy/aimms/aimms_process.py b/faampy/aimms/aimms_process.py
new file mode 100644
index 0000000..bc70a84
--- /dev/null
+++ b/faampy/aimms/aimms_process.py
@@ -0,0 +1,147 @@
+# -*- coding: utf-8 -*-
+"""
+The processing of the AIMMS data is done in several steps using win32
+scripts that are called from the command line. This python script is only a
+wrapper for those steps, calling the scripts using wine, which allows the
+processing to be done on a linux machine.
+
+The final step is the conversion into a netCDF, which is currently done by
+an IDL routine. Therefore an IDL installation is also required on the
+machine where the script runs.
+
+The script is called as:
+
+    python aimms_process.py FID
+
+
+"""
+
+import os
+import re
+import shutil
+import sys
+import tempfile
+
+from file_info import *
+from file_list import *
+
+# AIMMS_ROOT_PATH = '/mnt/faamarchive/Data/cloudphysics'
+# CORE_ROOT_PATH = '/mnt/faamarchive/badcMirror/'
+
+AIMMS_ROOT_PATH = '/home/tardis/FAAMarchive/Data/cloudphysics'
+CORE_ROOT_PATH = '/home/tardis/FAAMarchive/badcMirror/'
+
+TEMPPATH = tempfile.mkdtemp()
+OUTPATH = '/home/tardis/'
+CALFILE = 'AIMMScalB884.dat'
+
+_VERSION_ = 'v003'
+
+
+def do_checks():
+    # checking if wine and idl are installed
+    return True
+
+
+def get_aimms_raw_files(AIMMS_ROOT_PATH, fid):
+    """The AIMMS probe produces several raw data files for
+    a single flight. Those files need to be stitched together.
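+
+    Raw file names are matched case-insensitively against the pattern
+    ``.*FID.*odms.*YYYYMMDD.*rNN``.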
+    """
+    pattern = '.*%s.*odms.*\d{8}.*r\d{2}' % (fid,)
+    file_list = []
+    for root, subFolders, files in os.walk(AIMMS_ROOT_PATH):
+        for f in files:
+            filename = os.path.join(root, f)
+            if re.match(pattern, filename.lower()):
+                file_list.append(filename)
+    return file_list
+
+
+def get_core_file(CORE_ROOT_PATH, fid):
+    fl = File_List(CORE_ROOT_PATH)
+    fl.filter_by_data_type('core-lowres')
+    fl.filter_latest_revision()
+    for f in fl:
+        if f.fid == fid:
+            return os.path.join(os.path.join(f.path, f.filename))
+
+
+def process(fid, revision, outpath):
+    dir_path = os.path.dirname(os.path.realpath(__file__))
+
+    # get the flight number
+    # fid = sys.argv[1]
+
+    aimms_raw_files = get_aimms_raw_files(AIMMS_ROOT_PATH, fid)
+    core_file = get_core_file(CORE_ROOT_PATH, fid)
+
+    sys.stdout.write('AIMMS ODMS Files:\n%s\n' % ' \n'.join(['%s' % f for f in aimms_raw_files]))
+    sys.stdout.write('FAAM Core: %s\n' % core_file)
+    sys.stdout.write('Temp dir: %s\n' % TEMPPATH)
+
+    date_string = os.path.basename(core_file).split('_')[2]
+
+    # copy all the files that are needed for processing
+    # into a temporary directory
+    for f in aimms_raw_files:
+        shutil.copy(f, os.path.join(TEMPPATH, os.path.basename(f)))
+    shutil.copy(core_file, os.path.join(TEMPPATH, os.path.basename(core_file)))
+
+    # all the files that are needed for the processing will be copied
+    # to the TEMPPATH to keep things simple
+    for f in ['asmbl.exe',
+              'canextr4.exe',
+              'ekf556.exe',
+              'convert_aimms_netcdf.pro',
+              'findcorefile.pro',
+              'cal_files/'+CALFILE]:
+        shutil.copy(os.path.join(dir_path, f), os.path.join(TEMPPATH, f))
+
+    # move into the temporary directory
+    os.chdir(TEMPPATH)
+
+    # get the identifiers from the raw files
+    idents = list(set([os.path.splitext(os.path.basename(f))[0] for f in aimms_raw_files]))
+
+    for i in idents:
+        # stick the odms files together
+        cmd = "wine asmbl.exe %s 1" % (i,)
+        os.system(cmd)
+        #cmd = "wine canextr4.exe %s.raw"
+        aimms_out_file = '%s_%s_aimms.out' % (fid, date_string)
+        cmd = "wine ekf556.exe %s %s.raw -c on -o %s" % (CALFILE, i, aimms_out_file)
+        os.system(cmd)
+        aimms_ncfile = os.path.join(outpath, 'metoffice-aimms_faam_%s_%s_r%i_%s.nc' % (date_string, _VERSION_, revision, fid))
+        # call the idl script from the command line
+        cmd = """idl -e "convert_aimms_netcdf, '%s/', '%s', '%s', '%s'" """ % (os.path.join(TEMPPATH), os.path.join(TEMPPATH, os.path.basename(core_file)), aimms_ncfile, fid)
+        os.system(cmd)
+
+
+def main():
+    import argparse
+    from argparse import RawTextHelpFormatter
+
+    parser = argparse.ArgumentParser(description=__doc__,
+                                     version=__version__,
+                                     epilog="Report bugs to %s." % __email__,
+                                     formatter_class=RawTextHelpFormatter)
+    parser.add_argument('fid', action="store", type=str, help='Flight Number')
+    parser.add_argument('-r', '--revision', action="store", type=int,
+                        required=False, default=0,
+                        help='revision number of the AIMMS netCDF.')
+    parser.add_argument('-o', '--outpath', action="store", type=str, required=False,
+                        default=os.environ['HOME'],
+                        help='Outpath for the netCDF. Default: $HOME.')
+    parser.add_argument('-k', '--keep-folder', action="store_true", required=False, default=False,
+                        help='If option is set the temporary directory will *not* be deleted. Default: False')
+    args = parser.parse_args()
+
+    process(args.fid, args.revision, args.outpath)
+    # clean up behind ourselves
+    if not args.keep_folder:
+        sys.stdout.write('Deleting temporary folder %s ... 
\n' % (TEMPPATH)) + shutil.rmtree(TEMPPATH) + + +if __name__ == '__main__': + main() diff --git a/faampy/aimms/asmbl.exe b/faampy/aimms/asmbl.exe new file mode 100644 index 0000000..e2b37b7 Binary files /dev/null and b/faampy/aimms/asmbl.exe differ diff --git a/faampy/aimms/cal_files/AIMMScalB863.dat b/faampy/aimms/cal_files/AIMMScalB863.dat new file mode 100644 index 0000000..d603ac4 --- /dev/null +++ b/faampy/aimms/cal_files/AIMMScalB863.dat @@ -0,0 +1,33 @@ +-1.60 +-2.60 ++0.05 +-2.45 +-0.95 +-1.75 +-1.15 ++0.85 ++0.00 ++0.00 ++0.00 ++0.00 ++0.00 ++0.00 +3.953 +-8.275 +-1.29 +2 ++0.18179 +-0.18999 +-0.09484 ++0.00000 ++0.00000 +-4.542 ++2.224 ++15.080 +-4.705 ++8.555 ++2.015 +0.70 ++0.98 ++2.08 +-0.41 \ No newline at end of file diff --git a/faampy/aimms/cal_files/AIMMScalB884.dat b/faampy/aimms/cal_files/AIMMScalB884.dat new file mode 100644 index 0000000..224faaa --- /dev/null +++ b/faampy/aimms/cal_files/AIMMScalB884.dat @@ -0,0 +1,33 @@ +-1.70 +-1.55 +-0.20 +-0.60 +-1.10 +-0.90 +-1.10 ++0.65 ++0.00 ++0.00 ++0.00 ++0.00 ++0.00 ++0.00 +3.953 +-8.275 +-1.29 +2 ++0.18188 +-0.22580 +-0.09083 ++0.00000 ++0.00000 +-3.988 ++0.096 ++15.682 +-4.446 ++8.445 ++1.064 +0.70 ++0.98 ++2.08 ++0.41 \ No newline at end of file diff --git a/faampy/aimms/canextr4.exe b/faampy/aimms/canextr4.exe new file mode 100644 index 0000000..635bf10 Binary files /dev/null and b/faampy/aimms/canextr4.exe differ diff --git a/faampy/aimms/convert_aimms_netcdf.pro b/faampy/aimms/convert_aimms_netcdf.pro new file mode 100644 index 0000000..665749c --- /dev/null +++ b/faampy/aimms/convert_aimms_netcdf.pro @@ -0,0 +1,619 @@ +PRO convert_aimms_netcdf, basedir, corefile, ofile, fnum, $ + time,atime,btime,ctime,dtime, $ + average1hz=make_average, $ + release_data=datarelease +; +; PROCEDURE: CONVERT_AIMMS_NETCDF +; +; PURPOSE: To read post-processed data from an AIMMS ascii file and output to NetCDF format. +; Assumes that the ascii file has 20 data values per second. Time array is recreated +; to overcome apparent AIMMS clock-rate error. +; +; ARGUMENTS: BASEDIR - directory in which AIMMS ascii file is located and NetCDF written, assumed +; to point to a project directory such as '/project/obr/COPE' +; FNUM - the flight number, bnnn +; AVERAGE1HZ = MAKE_AVERAGE - If set and non-zero, output dataset is 1Hz average +; RELEASE_DATA = DATARELEASE - If set, specifies an alternative data release number (default = 0) +; +; DATE: 12/11/2013 +; AUTHOR: Steve Abel, with modifications by Phil Brown +; +; VERSION: v001 - 06/12/2013 Initial version set up to idenify and process all AIMMS 20Hz ascii files found +; in the target directory. Options to match output time array to Core data file and to +; generate 1Hz average output. Contains a fix to identify and correct time jumps in the input +; ascii data. Ascii file post-processing using "ekf553_oemv.exe". 
+; v002 - 05/02/2014 Update to NetCDF attributes:
+;        - additional GLOBAL attributes specified for similarity with FAAM Core files,
+;        - additional CF-1.6 compliant standard_name attributes supplied for variables, where possible
+;        - applies adjustment to AIMMS date/time where Core data starts before midnight on the previous day
+; v003 - 18/02/2014 Further updates to NetCDF attributes:
+;        - units for dimensionless variables = 1
+;        - standard names of latitude and longitude parameters
+;        - time units
+;        - status variable excluded
+;        - correct long_name now given for AOSS and AOA
+;        - 28/02/2014 FILE_SEARCH replaces FINDFILE (line 50)
+;        - 07/03/2014 minor correction to standard_name attribute for latitude
+;        - 21/05/2014 minor change to dealing with skip/jump times - jumps preceding the first skip are ignored
+;        - 23/05/2014 fudge skip times for B800 and b807
+;        - 04/12/2014 Test for existence of Core and Core1hz files before trying to open them. Allows processing
+;          when only the Core1hz is available
+;        - 03/08/2015 Changes to allow running in garden-variety IDL (using IDL standard functions rather
+;          than Met Office specific ones).
+;
+on_error, 0
+
+; COMMON to hold NetCDF file and variable id's
+common varids, id, LTIME, LCAL, cp0_id,cpa_id,cpb_id,B0_id,Ba_id,Bb_id,A0_id,Aa_id,Ab_id, $
+       time_id,tk_id,rh_id,p_id,u_id,v_id,w_id,lat_id,lon_id,alt_id,vn_id,ve_id,vz_id, $
+       roll_id,ptch_id,hdg_id,tas_id,aoss_id,dpaoa_id,dpaoss_id,aoa_id
+
+version = 'v003'
+if not(keyword_set(datarelease)) then datarelease=0
+rstring = 'r'+STRTRIM(STRING(datarelease),1)
+
+;fdir = STRUPCASE(fnum)+'/' ; find ascii input files under base directory
+;daq_file = basedir+'/AIMMS/'+fdir+'*'+fnum+'*.out'
+daq_file = basedir+fnum+'*.out'
+daq_files = file_search(daq_file)
+;daq_file = aimmsfile
+nfiles = n_elements(daq_files)
+;if daq_files(0) eq '' then begin
+;  print,'Input data not found: ', daq_file
+;  return
+;endif
+;print,nfiles,' data files for reading.',daq_files
+
+; identify Core file and read time data from it. Time data is the same in Core and Core1hz files. If you don't find the Core file
+; then look for the Core1hz. If you still don't find that, then exit.
+;
+;corefile = findcorefile(basedir,fnum,core1hz)
+;print, '***', corefile
+;if strlen(corefile) gt 0 then begin
+cid = NCDF_OPEN(corefile,/NOWRITE)
+;endif else begin
+;  if strlen(core1hz) gt 0 then begin
+;    cid = NCDF_OPEN(core1hz)
+;  endif else begin
+;    print,'No core files could be found. Exiting.....'
+; return +; endelse +;endelse + +NCDF_VARGET,cid,'Time',core_time ; Core data time in seconds after midnight +NCDF_ATTGET,cid,'Time','units',coretimeunits ; units of Core data time (specifies which day) +print,string(coretimeunits) +print,'Range of core time = ',gmt(core_time(0)),gmt(core_time(n_elements(core_time)-1)) +; +cyear = fix(string(coretimeunits(14:17))) ; year, month, day of Core data +cmonth= fix(string(coretimeunits(19:20))) +cday = fix(string(coretimeunits(22:23))) + +start_time = core_time(0) +if not(keyword_set(make_average)) then begin ; create 20Hz time array to span range of Core time + core_time = findgen(n_elements(core_time)*20)*0.05 + start_time +endif + +; ********************************************************************************************************** +; start of loop over the number of ascii input file +; ********************************************************************************************************** + +FOR jfile=0,nfiles-1 DO BEGIN +print,'Reading file ',jfile + +; first read aerodynamic calibration coefficients from top of ascii file +cal_coeff = READ_ASCII(daq_file,DATA_START=1,NUM_RECORDS=1) +Cp_0 = cal_coeff.field01(0) +Cp_alpha = cal_coeff.field01(1) +Cp_beta = cal_coeff.field01(2) +B_0 = cal_coeff.field01(5) +B_alpha = cal_coeff.field01(6) +B_beta = cal_coeff.field01(7) +A_0 = cal_coeff.field01(8) +A_alpha = cal_coeff.field01(9) +A_beta = cal_coeff.field01(10) + +; now read data from the ascii file + +DATA = READ_ASCII(DAQ_FILES(JFILE),COUNT=NT,DATA_START=2,HEADER=HEADER) +Time = REFORM(data.FIELD01(0,*)) +Tc = REFORM(data.FIELD01(1,*)) +RH = REFORM(data.FIELD01(2,*)) +Pres = REFORM(data.FIELD01(3,*)) +V = REFORM(data.FIELD01(4,*)) +U = REFORM(data.FIELD01(5,*)) +Lat = REFORM(data.FIELD01(6,*)) +Lon = REFORM(data.FIELD01(7,*)) +alt = REFORM(data.FIELD01(8,*)) +Vn = REFORM(data.FIELD01(9,*)) +Ve = REFORM(data.FIELD01(10,*)) +Vz = REFORM(data.FIELD01(11,*)) +Roll = REFORM(data.FIELD01(12,*)) +Pitch= REFORM(data.FIELD01(13,*)) +Hdg = REFORM(data.FIELD01(14,*)) +Tas = REFORM(data.FIELD01(15,*)) +W = REFORM(data.FIELD01(16,*)) +Aoss = REFORM(data.FIELD01(17,*)) +Dpaoa= REFORM(data.FIELD01(18,*)) +Dpaoss= REFORM(data.FIELD01(19,*)) +; Status= REFORM(data.FIELD01(20,*)) + +Time = Time*3600. ; seconds after midnight +print,'Raw time converted to seconds.' + +; ********************************************************************************************************** +; First, detect any time data where the clock has wrapped around midnight. In this case, times will be less than +; the initial time in the data so add 24 hours to them. +initial_time = Time(0) +next_day = where(Time lt initial_time) +if next_day(0) ne -1 then begin + Time(next_day) = Time(next_day) + 86400. + print,'Time adjustment made where AIMMS clock has crossed midnight.' +endif + +; ********************************************************************************************************** +; now search for periods when time skips due to purge event. These events take the form of about 1 second's worth +; of data points that skip back in time by about 0.5 sec. At the end of this event, the time jumps ahead by about +; the same amount to resume the expected sequence. 
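A minimal Python/numpy sketch of this skip/jump detection, for readers following the logic outside IDL; the array t holds 20 Hz timestamps in seconds, and the function and variable names are illustrative, not part of faampy:

    import numpy as np

    def find_purge_events(t, jump_threshold=0.06):
        # A purge event starts where the clock steps backwards and ends at
        # the last sample before it jumps forward again, as described above.
        dt = np.diff(t)
        skips = np.where(dt < 0.0)[0] + 1          # first sample after a backward step
        jumps = np.where(dt > jump_threshold)[0]   # last sample before a forward jump
        if skips.size > 0:                         # ignore jumps preceding the first skip
            jumps = jumps[jumps >= skips[0]]
        return skips, jumps

    # example: a 20 Hz clock that steps back by 0.5 s for one second
    t = np.arange(0.0, 2.0, 0.05)
    t[10:20] -= 0.5
    skips, jumps = find_purge_events(t)            # -> array([10]), array([19])

Each detected group of samples can then be shifted forward by the size of its backward step, which is what the IDL code below does with deltat.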
+ +stime = time +index = lindgen(n_elements(time)) + +if (fnum eq 'b807') then begin ; fudge time skips for b807 only + time(231691:231700) = time(231691:231700) + 0.45 + time(231701:231710) = time(231701:231710) + 0.90 + time(231711:231719) = time(231711:231719) + 1.35 +endif + +skip = where(time(index)-time(index-1) lt 0.0) ; first element where time has skipped back +jump = where(time(index+1)-time(index) gt 0.06) ; last element before time skips forward again +help,skip,jump + +nfirst = min(where(jump ge skip(0))) +njump = n_elements(jump) +jump = jump(nfirst:njump-1) ; trims any jumps before the first skip + +; if (jump(0) lt skip(0) and n_elements(jump) gt n_elements(skip)) then begin +; njump = n_elements(jump) +; jump = jump(1:njump-1) +; help,skip,jump +; endif +; +; Here are some ad-hoc adjustments to cope with individual flights +; +if (fnum eq 'b765') then skip = skip([0,2,3,4]) +if (fnum eq 'b800') then skip = skip(0:6) +;if (fnum eq 'b882') then begin +; skip = skip(0:2) +; jump = [jump(0:1),jump(3:(n_elements(jump)-1))] +;endif +if (fnum eq 'b884') then begin + jump = [jump(0),jump(2:(n_elements(jump)-1))] +endif + +if skip(0) gt -1 then begin + print,'Skip times: ',gmt(time(skip)) + print,'Jump times: ',gmt(time(jump)) + nevent = n_elements(skip) ; count of number of events + for j=0,nevent-1 do begin + deltat = time(skip(j)-1) - time(skip(j)) +0.05 ; amount to shift times forward + stime(skip(j):jump(j)) = stime(skip(j):jump(j)) + deltat ; move this group of times forward + endfor +endif + +; ********************************************************************************************************** +; now re-create a 20Hz time array starting at the same time and find the start of the first FULL second + +atime = stime ; atime holds original time values read from data corrected for jumps + +btime = round(atime*100) ; using ROUND for IDL compatibility in place of NINT +ctime = round(atime)*100 +diff = btime - ctime +nsec = max(btime)/100 - min(btime)/100 - 2 ; remove the first and last second that has data because + ; it will generally not be full +help,nsec +nvals20 = long(nsec*20) ; the number of 20Hz values in the full seconds +help,nvals20 +nstart = min(where(diff eq 0)) ; start index of the first full second of data +help,nstart + +dtime = dindgen(nvals20)*0.05+double(btime(nstart))/100. ; regular 20Hz time array spanning same interval +help,dtime + +; ********************************************************************************************************** +; now spline interpolate data onto the regular time array +print,'Interpolate data onto regular time array.' + +help,atime,tc +Tc = spline(atime, tc, dtime, 1.0) +help,dtime,tc +RH = spline(atime, rh, dtime, 1.0) +Pres = spline(atime, pres, dtime, 1.0) +V = spline(atime, v, dtime, 1.0) +U = spline(atime, u, dtime, 1.0) +W = spline(atime, w, dtime, 1.0) +Lat = spline(atime, lat, dtime, 1.0) +Lon = spline(atime, lon, dtime, 1.0) +alt = spline(atime, alt, dtime, 1.0) +Vn = spline(atime, vn, dtime, 1.0) +Ve = spline(atime, ve, dtime, 1.0) +Vz = spline(atime, vz, dtime, 1.0) +Roll = spline(atime, roll, dtime, 1.0) +Pitch= spline(atime, pitch, dtime, 1.0) +Hdg = spline(atime, hdg, dtime, 1.0) +Tas = spline(atime, tas, dtime, 1.0) +Aoss = spline(atime, aoss, dtime, 1.0) +Dpaoa= spline(atime, dpaoa, dtime, 1.0) +Dpaoss= spline(atime, dpaoss, dtime, 1.0) +;Status= spline(atime, status, dtime, 1.0) + +print,'Interpolation complete.' 
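The interpolation step above has a compact Python equivalent; as a sketch, scipy's CubicSpline stands in for IDL's SPLINE, and t/x are illustrative names for a strictly increasing timestamp array and one data channel:

    import numpy as np
    from scipy.interpolate import CubicSpline

    def resample_to_20hz(t, x):
        # Build a regular 20 Hz grid spanning the full seconds covered by t
        # and spline-interpolate the channel onto it; t must be strictly
        # increasing (i.e. already corrected for skips and jumps).
        t0 = np.ceil(t[0] * 20.0) / 20.0
        t1 = np.floor(t[-1] * 20.0) / 20.0
        t_reg = np.arange(t0, t1 + 0.025, 0.05)
        return t_reg, CubicSpline(t, x)(t_reg)

    # example: slightly irregular ~20 Hz samples of a sine wave
    t = np.cumsum(np.random.uniform(0.04, 0.06, 500))
    t_reg, x_reg = resample_to_20hz(t, np.sin(t))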
+; tplot,dtime,lat,psym=1 + +; ********************************************************************************************************** +; create additional data arrays, to be calculated from input data + +Aoa = -1.0*(A_0 +(A_alpha*dpaoa) +(A_beta*dpaoss)) ; calculate AOA from existing variables + +; ********************************************************************************************************** +; if output is to be 1hz averages then first create these +if keyword_set(make_average) then begin + dtime = REFORM(dtime,20,nsec) + dtime = REFORM(dtime(0,*)) + print,dtime(0) + Tc = MEAN(REFORM(tc,20,nsec),DIMENSION=1,/NAN) + RH = MEAN(REFORM(rh,20,nsec),DIMENSION=1,/NAN) + Pres = MEAN(REFORM(pres,20,nsec),DIMENSION=1,/NAN) + V = MEAN(REFORM(v,20,nsec),DIMENSION=1,/NAN) + U = MEAN(REFORM(u,20,nsec),DIMENSION=1,/NAN) + W = MEAN(REFORM(w,20,nsec),DIMENSION=1,/NAN) + Lat = MEAN(REFORM(lat,20,nsec),DIMENSION=1,/NAN) + Lon = MEAN(REFORM(lon,20,nsec),DIMENSION=1,/NAN) + alt = MEAN(REFORM(alt,20,nsec),DIMENSION=1,/NAN) + Vn = MEAN(REFORM(vn,20,nsec),DIMENSION=1,/NAN) + Ve = MEAN(REFORM(ve,20,nsec),DIMENSION=1,/NAN) + Vz = MEAN(REFORM(vz,20,nsec),DIMENSION=1,/NAN) + Roll = MEAN(REFORM(roll,20,nsec),DIMENSION=1,/NAN) + Pitch= MEAN(REFORM(pitch,20,nsec),DIMENSION=1,/NAN) + Hdg = MEAN(REFORM(hdg,20,nsec),DIMENSION=1,/NAN) + Tas = MEAN(REFORM(tas,20,nsec),DIMENSION=1,/NAN) + Aoss = MEAN(REFORM(aoss,20,nsec),DIMENSION=1,/NAN) + Aoa = MEAN(REFORM(aoa,20,nsec),DIMENSION=1,/NAN) + Dpaoa= MEAN(REFORM(dpaoa,20,nsec),DIMENSION=1,/NAN) + Dpaoss= MEAN(REFORM(dpaoss,20,nsec),DIMENSION=1,/NAN) +; Status= MEAN(REFORM(status,20,nsec),DIMENSION=1,/NAN) + help,dtime,tc +endif + +start_time = dtime(0) +date = READ_ASCII(daq_files(jfile),DATA_START=0,NUM_RECORDS=1) ; read date information from ascii file +day = FLOOR(date.field1(0)) +month = FLOOR(date.field1(1)) +year = FLOOR(date.field1(2)) + +; ********************************************************************************************************** +; Test AIMMS date information against Core. If Core data started on the previous day, then adjust accordingly. +; The only plausible circumstance is that Core data recording commence before midnight and AIMMS data after midnight, +; so set AIMMS date information equal to Core and increment AIMMS time by 24*60*60 = 86400. +; ********************************************************************************************************** + +if (day ne cday) or (month ne cmonth) or (year ne cyear) then begin + day = cday + month = cmonth + year = cyear + start_time = start_time + 86400. + print,'AIMMS date and start time re-alligned with Core.' 
+endif + +print,'Start time =',start_time + +; ********************************************************************************************************** +; if this is the first ascii file, read date information, create NetCDF file name, open it and write other required +; creation information - +; ********************************************************************************************************** + +if jfile eq 0 then begin + date_str = '00000000' ; create date string for NC file - yyyymmdd + STRPUT,date_str,STRTRIM(year,1),0 + if month lt 10 then pos=5 else pos=4 + STRPUT,date_str,STRTRIM(month,1),pos + if day lt 10 then pos=7 else pos=6 + STRPUT,date_str,STRTRIM(day,1),pos + +; create NC filename + ;nc_file = basedir+'/AIMMS/'+fdir+'metoffice-aimms_faam_'+date_str+'_'+version+'_'+rstring+'_'+fnum + ;if keyword_set(make_average) then nc_file=nc_file+'_1hz' + ;nc_file = nc_file+'.nc' + nc_file = ofile + print,'Input: ', daq_files(jfile) + Print,'Output: ', nc_file + +; open the NetCDF output file and create global attributes and variable information +; id=NCDF_CREATE(nc_file,/CLOBBER) + + id=NCDF_CREATE(ofile, /CLOBBER) + s_start = strtrim(string(gmt(dtime(0))),2) + if strlen(s_start) lt 6 then s_start='0'+s_start + s_end = strtrim(string(gmt(max(dtime))),2) + if strlen(s_end) lt 6 then s_end='0'+s_end + + NCDF_ATTPUT,id,/GLOBAL,'title','Standard AIMMS-20 data from '+fnum+' on '+date_str + NCDF_ATTPUT,id,/GLOBAL,'AIMMS_files',daq_file + NCDF_ATTPUT,id,/GLOBAL,'Date',date_str + NCDF_ATTPUT,id,/GLOBAL,'TimeInterval',s_start+'-'+s_end + NCDF_ATTPUT,id,/GLOBAL,'Conventions','CF-1.6' + NCDF_ATTPUT,id,/GLOBAL,'INSTITUTION','FAAM' + NCDF_ATTPUT,id,/GLOBAL,'SOURCE','FAAM BAe146 aircraft data' + NCDF_ATTPUT,id,/GLOBAL,'REFERENCES','http://www.faam.ac.uk' + ; + descriptor = 'Post-processed 20Hz AIMMS data converted to NetCDF' + if keyword_set(make_average) then descriptor=descriptor+' and 1hz averages' + NCDF_ATTPUT,id,/GLOBAL,'Description',descriptor + NCDF_ATTPUT,id,/GLOBAL,'Post_processor_executable','ekf556' + NCDF_ATTPUT,id,/GLOBAL,'Software_version',version + +; ********************************************************************************************************** +; Define output variables and attributes +; ********************************************************************************************************** + + LTIME = NCDF_DIMDEF(id,'TIME',/UNLIMITED) ; TIME dimension created unlimited so as to be extensible + LCAL = NCDF_DIMDEF(id,'CONST',1) + + cp0_id = NCDF_VARDEF(id,'CP_0',[LCAL],/FLOAT) + NCDF_ATTPUT,id,cp0_id,'units','1' + NCDF_ATTPUT,id,cp0_id,'long_name','CP_0 calibration coefficient' + + cpa_id = NCDF_VARDEF(id,'CP_alpha',[LCAL],/FLOAT) + NCDF_ATTPUT,id,cpa_id,'units','1' + NCDF_ATTPUT,id,cpa_id,'long_name','CP_alpha calibration coefficient' + + cpb_id = NCDF_VARDEF(id,'CP_beta',[LCAL],/FLOAT) + NCDF_ATTPUT,id,cpb_id,'units','1' + NCDF_ATTPUT,id,cpb_id,'long_name','CP_beta calibration coefficient' + + B0_id = NCDF_VARDEF(id,'B_0',[LCAL],/FLOAT) + NCDF_ATTPUT,id,B0_id,'units','degree' + NCDF_ATTPUT,id,B0_id,'long_name','B_0 calibration coefficient' + + Ba_id = NCDF_VARDEF(id,'B_alpha',[LCAL],/FLOAT) + NCDF_ATTPUT,id,Ba_id,'units','degree' + NCDF_ATTPUT,id,Ba_id,'long_name','B_alpha calibration coefficient' + + Bb_id = NCDF_VARDEF(id,'B_beta',[LCAL],/FLOAT) + NCDF_ATTPUT,id,Bb_id,'units','degree' + NCDF_ATTPUT,id,Bb_id,'long_name','B_beta calibration coefficient' + + A0_id = NCDF_VARDEF(id,'A_0',[LCAL],/FLOAT) + NCDF_ATTPUT,id,A0_id,'units','degree' + 
NCDF_ATTPUT,id,A0_id,'long_name','A_0 calibration coefficient' + + Aa_id = NCDF_VARDEF(id,'A_alpha',[LCAL],/FLOAT) + NCDF_ATTPUT,id,Aa_id,'units','degree' + NCDF_ATTPUT,id,Aa_id,'long_name','A_alpha calibration coefficient' + + Ab_id = NCDF_VARDEF(id,'A_beta',[LCAL],/FLOAT) + NCDF_ATTPUT,id,Ab_id,'units','degree' + NCDF_ATTPUT,id,Ab_id,'long_name','A_beta calibration coefficient' + +; ********************************************************************************************************** + + time_units = 'seconds since '+strmid(date_str,0,4)+'-'+strmid(date_str,4,2)+'-'+strmid(date_str,6,2)+' 00:00:00 +0000' + time_id = NCDF_VARDEF(id,'TIME',[LTIME],/FLOAT) + NCDF_ATTPUT,id,time_id,'units',time_units + NCDF_ATTPUT,id,time_id,'long_name','time of measurement' + NCDF_ATTPUT,id,time_id,'_FillValue',-9999.0 + NCDF_ATTPUT,id,time_id,'standard_name','time' + + tk_id = NCDF_VARDEF(id,'TK',[LTIME],/FLOAT) + NCDF_ATTPUT,id,tk_id,'units','K' + NCDF_ATTPUT,id,tk_id,'long_name','AIMMS true air temperature' + NCDF_ATTPUT,id,tk_id,'_FillValue',-9999.0 + NCDF_ATTPUT,id,tk_id,'standard_name','air_temperature' + + rh_id = NCDF_VARDEF(id,'RH',[LTIME],/FLOAT) + NCDF_ATTPUT,id,rh_id,'units','percent' + NCDF_ATTPUT,id,rh_id,'long_name','AIMMS Relative humidity wrt water' + NCDF_ATTPUT,id,rh_id,'_FillValue',-9999.0 + NCDF_ATTPUT,id,rh_id,'standard_name','relative_humidity' + + p_id = NCDF_VARDEF(id,'PRES',[LTIME],/FLOAT) + NCDF_ATTPUT,id,p_id,'units','hPa' + NCDF_ATTPUT,id,p_id,'long_name','AIMMS Static pressure' + NCDF_ATTPUT,id,p_id,'_FillValue',-9999.0 + NCDF_ATTPUT,id,p_id,'standard_name','air_pressure' + + u_id = NCDF_VARDEF(id,'U',[LTIME],/FLOAT) + NCDF_ATTPUT,id,u_id,'units','m s-1' + NCDF_ATTPUT,id,u_id,'long_name','AIMMS eastwards wind component' + NCDF_ATTPUT,id,u_id,'_FillValue',-9999.0 + NCDF_ATTPUT,id,u_id,'standard_name','eastward_wind' + + v_id = NCDF_VARDEF(id,'V',[LTIME],/FLOAT) + NCDF_ATTPUT,id,v_id,'units','m s-1' + NCDF_ATTPUT,id,v_id,'long_name','AIMMS northwards wind component' + NCDF_ATTPUT,id,v_id,'_FillValue',-9999.0 + NCDF_ATTPUT,id,v_id,'standard_name','northward_wind' + + w_id = NCDF_VARDEF(id,'W',[LTIME],/FLOAT) + NCDF_ATTPUT,id,w_id,'units','m s-1' + NCDF_ATTPUT,id,w_id,'long_name','AIMMS vertical wind component' + NCDF_ATTPUT,id,w_id,'_FillValue',-9999.0 + NCDF_ATTPUT,id,w_id,'standard_name','upward_air_velocity' + + lat_id = NCDF_VARDEF(id,'LAT',[LTIME],/FLOAT) + NCDF_ATTPUT,id,lat_id,'units','degree_north' + NCDF_ATTPUT,id,lat_id,'long_name','AIMMS GPS latitude' + NCDF_ATTPUT,id,lat_id,'_FillValue',-9999.0 + NCDF_ATTPUT,id,lat_id,'standard_name','latitude' + + lon_id = NCDF_VARDEF(id,'LON',[LTIME],/FLOAT) + NCDF_ATTPUT,id,lon_id,'units','degree_east' + NCDF_ATTPUT,id,lon_id,'long_name','AIMMS GPS longitude' + NCDF_ATTPUT,id,lon_id,'_FillValue',-9999.0 + NCDF_ATTPUT,id,lon_id,'standard_name','longitude' + + alt_id = NCDF_VARDEF(id,'ALT',[LTIME],/FLOAT) + NCDF_ATTPUT,id,alt_id,'units','m' + NCDF_ATTPUT,id,alt_id,'long_name','AIMMS GPS altitude' + NCDF_ATTPUT,id,alt_id,'_FillValue',-9999.0 + NCDF_ATTPUT,id,alt_id,'standard_name','altitude' + + vn_id = NCDF_VARDEF(id,'VN',[LTIME],/FLOAT) + NCDF_ATTPUT,id,vn_id,'units','m s-1' + NCDF_ATTPUT,id,vn_id,'long_name','AIMMS Northwards ground speed' + NCDF_ATTPUT,id,vn_id,'_FillValue',-9999.0 + NCDF_ATTPUT,id,vn_id,'standard_name','platform_speed_wrt_ground' + + + ve_id = NCDF_VARDEF(id,'VE',[LTIME],/FLOAT); now read data from the ascii file + NCDF_ATTPUT,id,ve_id,'units','m s-1' + NCDF_ATTPUT,id,ve_id,'long_name','AIMMS Eastwards ground 
speed' + NCDF_ATTPUT,id,ve_id,'_FillValue',-9999.0 + NCDF_ATTPUT,id,ve_id,'standard_name','platform_speed_wrt_ground' + + vz_id = NCDF_VARDEF(id,'VZ',[LTIME],/FLOAT) + NCDF_ATTPUT,id,vz_id,'units','m s-1' + NCDF_ATTPUT,id,vz_id,'long_name','AIMMS vertical speed' + NCDF_ATTPUT,id,vz_id,'_FillValue',-9999.0 +; no standard name + + roll_id = NCDF_VARDEF(id,'ROLL',[LTIME],/FLOAT) + NCDF_ATTPUT,id,roll_id,'units','degree' + NCDF_ATTPUT,id,roll_id,'long_name','AIMMS roll angle' + NCDF_ATTPUT,id,roll_id,'_FillValue',-9999.0 + NCDF_ATTPUT,id,roll_id,'standard_name','platform_roll_angle' + + ptch_id = NCDF_VARDEF(id,'PITCH',[LTIME],/FLOAT) + NCDF_ATTPUT,id,ptch_id,'units','degree' + NCDF_ATTPUT,id,ptch_id,'long_name','AIMMS pitch angle' + NCDF_ATTPUT,id,ptch_id,'_FillValue',-9999.0 + NCDF_ATTPUT,id,ptch_id,'standard_name','platform_pitch_angle' + + hdg_id = NCDF_VARDEF(id,'HDG',[LTIME],/FLOAT) + NCDF_ATTPUT,id,hdg_id,'units','degree' + NCDF_ATTPUT,id,hdg_id,'long_name','AIMMS Heading angle' + NCDF_ATTPUT,id,hdg_id,'_FillValue',-9999.0 + NCDF_ATTPUT,id,hdg_id,'standard_name','platform_yaw_angle' + + tas_id = NCDF_VARDEF(id,'TAS',[LTIME],/FLOAT) + NCDF_ATTPUT,id,tas_id,'units','m s-1' + NCDF_ATTPUT,id,tas_id,'long_name','AIMMS True air speed' + NCDF_ATTPUT,id,tas_id,'_FillValue',-9999.0 + NCDF_ATTPUT,id,tas_id,'standard_name','platform_speed_wrt_air' + + aoss_id = NCDF_VARDEF(id,'AOSS',[LTIME],/FLOAT) + NCDF_ATTPUT,id,aoss_id,'units','degree' + NCDF_ATTPUT,id,aoss_id,'long_name','AIMMS angle of sideslip (positive, flow from left)' + NCDF_ATTPUT,id,aoss_id,'_FillValue',-9999.0 +; no standard name + + dpaoa_id = NCDF_VARDEF(id,'DPAOA',[LTIME],/FLOAT) + NCDF_ATTPUT,id,dpaoa_id,'units','1' + NCDF_ATTPUT,id,dpaoa_id,'long_name','AIMMS non-dimensional angle of attack differential pressure' + NCDF_ATTPUT,id,dpaoa_id,'_FillValue',-9999.0 +; no standard name + + dpaoss_id = NCDF_VARDEF(id,'DPAOSS',[LTIME],/FLOAT) + NCDF_ATTPUT,id,dpaoss_id,'units','1' + NCDF_ATTPUT,id,dpaoss_id,'long_name','AIMMS non-dimensional angle of sideslip differential pressure' + NCDF_ATTPUT,id,dpaoss_id,'_FillValue',-9999.0 +; no standard name + +; status_id = NCDF_VARDEF(id,'STATUS',[LTIME],/FLOAT) +; NCDF_ATTPUT,id,status_id,'units',' ' +; NCDF_ATTPUT,id,status_id,'long_name','AIMMS Status flag (0 - solution invalid)' +; NCDF_ATTPUT,id,status_id,'_FillValue',-9999.0 +; no standard name + +; additional variables + aoa_id = NCDF_VARDEF(id,'AOA',[LTIME],/FLOAT) + NCDF_ATTPUT,id,aoa_id,'units','degree' + NCDF_ATTPUT,id,aoa_id,'long_name','AIMMS angle of attack (positive, flow from below aircraft)' + NCDF_ATTPUT,id,aoa_id,'_FillValue',-9999.0 +; no standard name + + NCDF_CONTROL,id,/ENDEF ; end of file definition stage + print,'NetCDF file definition completed.' + +; first file so write the aerodynamic calibration coeffs to output + + NCDF_VARPUT,id,cp0_id,cp_0 + NCDF_VARPUT,id,cpa_id,cp_alpha + NCDF_VARPUT,id,cpb_id,cp_beta + NCDF_VARPUT,id,A0_id,A_0 + NCDF_VARPUT,id,Aa_id,A_alpha + NCDF_VARPUT,id,Ab_id,A_beta + NCDF_VARPUT,id,B0_id,B_0 + NCDF_VARPUT,id,Ba_id,B_alpha + NCDF_VARPUT,id,Bb_id,B_beta + print,'NetCDF aerodynamic constants written.' 
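For reference, the same define-then-pre-fill pattern can be expressed with the netCDF4-python library; the file name and record count below are made up for illustration, and this mirrors rather than reproduces the IDL calls:

    import netCDF4
    import numpy as np

    ds = netCDF4.Dataset('aimms_example.nc', 'w')   # illustrative file name
    ds.createDimension('TIME', None)                # unlimited, like LTIME
    tk = ds.createVariable('TK', 'f4', ('TIME',), fill_value=-9999.0)
    tk.units = 'K'
    tk.long_name = 'AIMMS true air temperature'
    tk.standard_name = 'air_temperature'
    tk[:] = np.full(3600, -9999.0, dtype='f4')      # pre-fill, as done below
    ds.close()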
+ +; First file, so write out the entire Core time array to the time variable and then pad all variables with NaN + NCDF_VARPUT,id,time_id,core_time,OFFSET=0L + pad_value = -9999.0 + padval = fltarr(n_elements(core_time))+pad_value ; set array of fill-in values + + NCDF_VARPUT, id, tk_id, padval, OFFSET=0L + NCDF_VARPUT, id, p_id, padval, OFFSET=0L + NCDF_VARPUT, id, lat_id, padval, OFFSET=0L + NCDF_VARPUT, id, lon_id, padval, OFFSET=0L + NCDF_VARPUT, id, rh_id, padval, OFFSET=0L + NCDF_VARPUT, id, u_id, padval, OFFSET=0L + NCDF_VARPUT, id, v_id, padval, OFFSET=0L + NCDF_VARPUT, id, w_id, padval, OFFSET=0L + NCDF_VARPUT, id, alt_id, padval, OFFSET=0L + NCDF_VARPUT, id, vn_id, padval, OFFSET=0L + NCDF_VARPUT, id, ve_id, padval, OFFSET=0L + NCDF_VARPUT, id, vz_id, padval, OFFSET=0L + NCDF_VARPUT, id, roll_id, padval, OFFSET=0L + NCDF_VARPUT, id, ptch_id, padval, OFFSET=0L + NCDF_VARPUT, id, hdg_id, padval, OFFSET=0L + NCDF_VARPUT, id, tas_id, padval, OFFSET=0L + NCDF_VARPUT, id, aoss_id, padval, OFFSET=0L + NCDF_VARPUT, id, aoa_id, padval, OFFSET=0L + NCDF_VARPUT, id, dpaoa_id, padval, OFFSET=0L + NCDF_VARPUT, id, dpaoss_id, padval, OFFSET=0L +; NCDF_VARPUT, id, status_id, padval, OFFSET=0L + +endif ; end of items required when processing first file + +; output only the data for full seconds and on adjusted time +; if not(keyword_set(extendtime)) then NCDF_VARPUT,id,time_id, dtime, OFFSET=data_offset +; TIME variable already pre-filled with core_time + +data_offset = where(core_time eq start_time) +print,'Data output offset =',data_offset + +NCDF_VARPUT,id,tk_id,tc + 273.15, OFFSET=data_offset ; convert to K +NCDF_VARPUT,id,rh_id,rh * 100.0, OFFSET=data_offset ; convert to percent +NCDF_VARPUT,id,p_id,pres* 0.01, OFFSET=data_offset ; convert to hPa +NCDF_VARPUT,id,u_id,u, OFFSET=data_offset +NCDF_VARPUT,id,v_id,v, OFFSET=data_offset +NCDF_VARPUT,id,w_id,-1.0 * w, OFFSET=data_offset ; change sign +NCDF_VARPUT,id,lat_id,lat, OFFSET=data_offset +NCDF_VARPUT,id,lon_id,lon, OFFSET=data_offset +NCDF_VARPUT,id,alt_id,alt, OFFSET=data_offset +NCDF_VARPUT,id,vn_id,vn, OFFSET=data_offset +NCDF_VARPUT,id,ve_id,ve, OFFSET=data_offset +NCDF_VARPUT,id,vz_id,vz, OFFSET=data_offset ; not multiplied by -1.0 +NCDF_VARPUT,id,roll_id,roll, OFFSET=data_offset +NCDF_VARPUT,id,ptch_id,pitch, OFFSET=data_offset +NCDF_VARPUT,id,hdg_id,hdg, OFFSET=data_offset +NCDF_VARPUT,id,tas_id,tas, OFFSET=data_offset +NCDF_VARPUT,id,aoss_id,-1.0*aoss, OFFSET=data_offset ; change sign +NCDF_VARPUT,id,aoa_id,Aoa, OFFSET=data_offset +NCDF_VARPUT,id,dpaoa_id,dpaoa, OFFSET=data_offset ;x -1.0? 
+NCDF_VARPUT,id,dpaoss_id,dpaoss, OFFSET=data_offset +;NCDF_VARPUT,id,status_id,status, OFFSET=data_offset + +last_time = max(dtime) ; save the end of data from this ascii file + +ENDFOR +; ****************************************************************************************************************** +; end of loop over the number of ascii input files +; ****************************************************************************************************************** + +NCDF_CLOSE,id ; close the NetCDF output + + +END \ No newline at end of file diff --git a/faampy/aimms/ekf556.exe b/faampy/aimms/ekf556.exe new file mode 100644 index 0000000..f2b5e07 Binary files /dev/null and b/faampy/aimms/ekf556.exe differ diff --git a/faampy/aimms/file_info.py b/faampy/aimms/file_info.py new file mode 100644 index 0000000..be477e5 --- /dev/null +++ b/faampy/aimms/file_info.py @@ -0,0 +1,151 @@ +import os +import re + + +DATA_TYPES = {'core-hires': 'core_faam_20[0-9][0-9][0-1][0-9][0-3][0-9].*_r.*_[bBcC][0-9][0-9][0-9].nc$', + 'core-lowres': 'core_faam_20[0-9][0-9][0-1][0-9][0-3][0-9].*_r.*_[bBcC][0-9][0-9][0-9]_1[Hh]z.nc$', + 'core-descrip': 'core_faam_20[0-9][0-9][0-1][0-9][0-3][0-9].*_r.*_[bBcC][0-9][0-9][0-9]_descrip.txt$', + 'core-quality': 'core_faam_20[0-9][0-9][0-1][0-9][0-3][0-9].*_r.*_[bBcC][0-9][0-9][0-9]_quality.txt$', + 'dropsonde-proc': '.*dropsonde_faam_.*_r.*_[bBcC][0-9][0-9][0-9]_proc.nc$', + 'dropsonde-raw': '.*dropsonde_faam_.*_r.*_[bBcC][0-9][0-9][0-9]_raw.nc$', + 'dropsonde-descrip': '.*dropsonde_faam_.*_r.*_[bBcC][0-9][0-9][0-9]_descrip.txt$', + 'flight-cst': 'flight-cst_faam_20[0-9][0-9][0-1][0-9][0-3][0-9]_r.*_[bBcC][0-9][0-9][0-9].txt$', + 'flight-log': 'flight-log_faam_20[0-9][0-9][0-1][0-9][0-3][0-9].*_r.*_[bBcC][0-9][0-9][0-9].pdf$', + 'flight-sum': 'flight-sum_faam_20[0-9][0-9][0-1][0-9][0-3][0-9].*_r.*_[bBcC][0-9][0-9][0-9].txt$', + 'rawdrs': 'core_faam_20[0-9][0-9][0-1][0-9][0-3][0-9].*_r.*_[bB][0-9][0-9][0-9]_rawdrs.zip$', + 'rawgin': 'core_faam_20[0-9][0-9][0-1][0-9][0-3][0-9].*_r.*_[bB][0-9][0-9][0-9]_rawgin.zip$', + 'rawgps': 'core_faam_20[0-9][0-9][0-1][0-9][0-3][0-9].*_r.*_[bB][0-9][0-9][0-9]_rawgps.zip$', + 'rawdlu': 'core_faam_20[0-9][0-9][0-1][0-9][0-3][0-9].*_r.*_[bB][0-9][0-9][0-9]_rawdlu.zip$'} + + +def get_revision_from_filename(filename): + """ + Extracts the revision number from the netCDF core filename + + Example: + >>> file = 'core_faam_20090529_v004_r1_b450.nc' + >>> getRevisionFromFilename(file) + 1 + >>> + """ + fn = os.path.basename(filename) + fn = fn.split('.')[0] + parts = fn.split('_') + for p in parts: + if re.match('r\d', p): + result = int(p[1:]) + return result + return + + +def get_data_type_from_filename(filename): + """ + returns the datatype for the input filename determined using the DATA_TYPES + dictionary + """ + for key in DATA_TYPES.keys(): + if re.match(DATA_TYPES[key], os.path.basename(filename)): + return key + return + + +def get_fid_from_filename(filename): + """ + Extracts the flight number from the netCDF core filename + + Example: + >>> ncfile = 'core_faam_20090529_v004_r1_b450.nc' + >>> getFlightNumbserFromFilename(ncfile) + b450 + >>> + """ + fn = os.path.basename(filename) + fn = fn.split('.')[0] + parts = fn.split('_') + for p in parts: + if re.match('[bBcC][0-9][0-9][0-9]', p): + return p.lower() + return + + +def get_date_from_filename(filename): + """ + Extracts the flight date from the netCDF core filename + + Example: + >>> ncfile = 'core_faam_20090529_v004_r1_b450.nc' + >>> getDateFromFilename(ncfile) + 20090529 + 
>>> + """ + fn = os.path.basename(filename) + fn = fn.split('.')[0] + parts = fn.split('_') + for p in parts: + if re.match('20\d{6}', p): + return p + elif re.match('20\d{12}', p): + return p + else: + pass + return + + +class File_Info(object): + """ + Holds all file specific information for a FAAM data file: + * filename + * path + * Flight Number (fid) + * date + * revision + * datatype + """ + def __init__(self, filename): + self.filename = os.path.basename(filename) + self.path = os.path.dirname(filename) + self.fid = get_fid_from_filename(filename) + self.date = get_date_from_filename(filename) + self.rev = get_revision_from_filename(filename) + self.data_type = get_data_type_from_filename(filename) + + def __str__(self): + output = '\n' + labels = ['Filename', 'Path', 'FID', 'Date', 'Revision', 'Data Type'] + values = [self.filename, + self.path, + self.fid, + self.date, + str(self.rev), + self.data_type] + for s in zip(labels, values): + output += '%9s: %s\n' % s + return output + +# def __cmp__(self, obj): +# cmp_key = '%4s_%0.3i_%s' % (self.fid, self.rev, self.datatype) +# cmp_key_other = '%4s_%0.3i_%s' % (obj.fid, obj.rev, obj.datatype) +# if cmp_key < cmp_key_other: +# return -1 +# elif cmp_key == cmp_key_other: +# return 0 +# elif cmp_key > cmp_key_other: +# return 1 +# else: +# pass + + def __eq__(self, other): + return ((self.fid, self.rev, self.data_type) == + (other.fid, other.rev, other.data_type)) + def __ne__(self, other): + return not self == other + def __gt__(self, other): + return (self.fid, self.rev) > (other.fid, other.rev) + def __lt__(self, other): + return (self.fid, self.rev) < (other.fid, other.rev) + def __ge__(self, other): + return (self > other) or (self == other) + def __le__(self, other): + return (self < other) or (self == other) + + diff --git a/faampy/aimms/file_list.py b/faampy/aimms/file_list.py new file mode 100644 index 0000000..60a0b39 --- /dev/null +++ b/faampy/aimms/file_list.py @@ -0,0 +1,73 @@ +import os +import sys + +import file_info + + +class File_List(list): + """ + A list of File_Info objects. The list can be sorted and filtered which can + be useful for batch processing. + + For example it is possible to (i) get all DECADES rawdlu and flight-constant + files from a path, (ii) filter those for the latest revisions and reprocess + them. + + """ + def __init__(self, path): + """ + Get all FAAM data files in the path. + + :param path: path which will be walked and checked for FAAM data files + """ + self.Path = path + if os.path.isdir(path): + for root, subFolders, files in os.walk(self.Path): + for f in files: + if file_info.get_data_type_from_filename(f): + self.append(file_info.File_Info(os.path.join(root, f))) + else: + sys.stdout.write('%s is not a directory.\n' % path) + self.sort() + + def filter_by_data_type(self, dtype): + """ + Filtering by data type. 
+ """ + if not dtype in file_info.DATA_TYPES: + sys.stdout.write('Submitted dtype unknown.\nValid data types are: %s\n' % ', '.join(sorted(file_info.DATA_TYPES.keys()))) + + bad_index = [] + for i in self: + if not i.data_type == dtype: + bad_index.append(i) + for b in bad_index: + self.remove(b) + + def filter_latest_revision(self): + """ + Compresses the list and keeps only the latest revision file for a FID + """ + bad_index = [] + self.sort(key=lambda i: '%4s_%s_%s_%0.3i' % (i.fid, i.date, i.data_type, i.rev)) + self.reverse() + for i in range(len(self)-1): + if ((self[i].fid, self[i].date, self[i].data_type)) == ((self[i+1].fid, self[i+1].date, self[i+1].data_type)): + bad_index.append(self[i+1]) + for b in bad_index: + self.remove(b) + self.sort() + + def __str__(self): + output = '' + for i in self: + output += '%s\n' % (i.filename,) + return output + + def get_filenames(self): + """ + Returns the filenames + """ + result = [os.path.join(i.path, i.filename) for i in self] + return result + diff --git a/faampy/aimms/findcorefile.pro b/faampy/aimms/findcorefile.pro new file mode 100644 index 0000000..4c61615 --- /dev/null +++ b/faampy/aimms/findcorefile.pro @@ -0,0 +1,25 @@ +FUNCTION findcorefile, basedir, fnum, core1hz + +; FUNCTION: FINDCOREFILE +; +; PURPOSE: Returns a string value containing the full path/filename information for a FAAM Core NetCDF file. +; +; ARGUMENTS: BASEDIR - String variable containing the base directory in which to start searching. This assumes that data files are organised +; within a directory structure: /project/obr/project_name with sub-directories for Core and +; other data below that level. +; Example: /pr + +fnum=STRLOWCASE(fnum) +help,fnum +corefile=basedir+'/faam_core/'+'*'+fnum+'*.nc' +corefile=file_search(corefile) +print,'Core: ',corefile + +core1hz=basedir+'/faam_core/'+'*'+fnum+'*_1hz.nc' +core1hz=file_search(core1hz) +print,'Core_1hz: ',core1hz + + +return, core1hz +END + \ No newline at end of file diff --git a/faampy/aimms/qa_figure.py b/faampy/aimms/qa_figure.py new file mode 100644 index 0000000..27d08bd --- /dev/null +++ b/faampy/aimms/qa_figure.py @@ -0,0 +1,195 @@ +core_filename = '/home/axel/Dropbox/campaigns/vanaheim2016/b988-oct-20/core_faam_20161020_v004_r0_b988.nc' +aimms_filename = '/home/axel/Dropbox/campaigns/vanaheim2016/b988-oct-20/metoffice-aimms_faam_20161020_v003_r0_b988.nc' + +core_filename = '/home/axel/Dropbox/campaigns/wintex2017/b994-feb-03/core_faam_20170203_v004_r0_b994.nc' +aimms_filename = '/mnt/faamarchive/scanner/aimms/metoffice-aimms_faam_20170203_v003_r0_b994.nc' + + +import sys +sys.path.insert(0, '/home/axel/git-repos/faampy/') + + +import netCDF4 +import pandas as pd +import numpy as np + +import matplotlib.gridspec as gridspec + +from faampy.core.utils import get_mpl_time +from matplotlib.dates import num2date, date2num +from matplotlib import dates +import matplotlib.pyplot as plt +from matplotlib.lines import Line2D + +plt.style.use('seaborn-deep') + + +# plot time series of the u,v,w and delta; add top figure with altitude and liquid water content +# plot time series of t and rh and delta +# plot spectra for u,v,w,tas +# plot scatter plots matrix for u,v,w,tas + +def plot_wind_vector_components(): + pass + + +core_ds = netCDF4.Dataset(core_filename, 'r') +aimms_ds = netCDF4.Dataset(aimms_filename, 'r') + + +#def get_data(var, core_ds, aimms_ds): +varnames = [['U_C', 'U'], + ['V_C', 'V'], + ['W_C', 'W'], + ['TAS', 'TAS']] + +cols_c, cols_a = zip(*varnames) +t_c = np.array(num2date(get_mpl_time(core_ds, 
32))) +t_a = netCDF4.num2date(aimms_ds.variables['TIME'][:], aimms_ds.variables['TIME'].units) + + +df_colnames = ['core_%s' % s.lower() for s in cols_a] +df_c = pd.DataFrame(index=t_c.ravel(), columns=df_colnames) +for i,j in zip(df_colnames, cols_c): + df_c[i] = core_ds.variables[j][:].data.ravel() +df_colnames = ['aimms_%s' % s.lower() for s in cols_a] +df_a = pd.DataFrame(index=t_a, columns=df_colnames) +df_a = df_a.tz_localize('UTC') +for i,j in zip(df_colnames, cols_a): + df_a[i] = aimms_ds.variables[j][:].data.ravel() + +# filter data frame; only use data points when the aircraft was in the air +wow = core_ds.variables['WOW_IND'][:].data +wow_ix = (np.min(np.where(wow == 0)[0]), + np.max(np.where(wow == 0)[0])) +t_ix = (t_c[wow_ix[0],0], t_c[wow_ix[1],0]) +df_c = df_c[(df_c.index >= t_ix[0]) & (df_c.index < t_ix[1])] +df_a = df_a[(df_a.index >= t_ix[0]) & (df_a.index < t_ix[1])] + +#a = aimms_data.variables[varnames[1]][:] +#wow = core_data.variables['WOW'][:] + + +# time series plots +fig = plt.figure() +gs = gridspec.GridSpec(4, 1, left=0.1, hspace=0.05) + +_ax = {} +_ax['ax_u'] = fig.add_subplot(gs[3]) # axes u wind component +_ax['ax_v'] = fig.add_subplot(gs[2], sharex=fig.get_axes()[0]) # axes v wind component +_ax['ax_w'] = fig.add_subplot(gs[1], sharex=fig.get_axes()[0]) # axes w wind component +_ax['ax_tas'] = fig.add_subplot(gs[0], sharex=fig.get_axes()[0]) # axes tas wind component + + +for wv in ['u', 'v', 'w', 'tas']: + #df_c['core_'+wv].plot(ax=_ax['ax_'+wv], label='core-'+wv, ylabe) + #df_a['aimms_'+wv].plot(ax=_ax['ax_'+wv], label='aimms-'+wv)) + ax = _ax['ax_'+wv] + l1 = ax.plot_date(date2num(df_c.index.to_pydatetime()), df_c['core_'+wv].values, '-', label='core') + l2 = ax.plot_date(date2num(df_a.index.to_pydatetime()), df_a['aimms_'+wv].values, '-', label='aimms') + #ax.legend() + ax.grid(True) + ax.text(0.02, 0.95, wv, verticalalignment='top', transform=ax.transAxes) + ax.set_ylabel(r'%s $(ms^{-1})$' % wv) + # the delta is plotted for 1Hz averaged data to keep it simple + #delta = df_c['core_'+wv].resample('S').mean()-df_a['aimms_'+wv].resample('S').mean() + delta = df_c['core_'+wv].resample('S')-df_a['aimms_'+wv].resample('S') + ax2 = _ax['ax_'+wv].twinx() + l3 = ax2.plot_date(date2num(delta.index.to_pydatetime()), delta.values, '-', color='firebrick', label=r'$\Delta$') + ax2.set_ylabel(r'$\Delta \quad (ms^{-1})$') + if wv != 'u': + for label in ax.get_xticklabels()[:]: + label.set_visible(False) + else: + hfmt = dates.DateFormatter('%H:%M') + ax.xaxis.set_major_locator(dates.MinuteLocator(byminute=range(0, 60, 30))) + ax.xaxis.set_major_formatter(hfmt) + ax.set_xlabel('time (utc)') + + if wv == 'tas': + lines = l1+l2+l3 + labs = [l.get_label() for l in lines] + ax.legend(lines,labs, loc='upper right') + + + + +# scatter plots +fig = plt.figure() +gs = gridspec.GridSpec(2, 2) + +_ax = {} +_ax['ax_u'] = fig.add_subplot(gs[3], aspect='equal') # axes u wind component +_ax['ax_v'] = fig.add_subplot(gs[2], aspect='equal') # axes v wind component +_ax['ax_w'] = fig.add_subplot(gs[1], aspect='equal') # axes w wind component +_ax['ax_tas'] = fig.add_subplot(gs[0], aspect='equal') # axes tas wind component + +for wv in ['u', 'v', 'w', 'tas']: + ax = _ax['ax_'+wv] + x = np.array(df_c['core_'+wv].resample('S')) + y = np.array(df_a['aimms_'+wv].resample('S')) + ax.plot(x, y, '.') + ax.grid(True) + _lim=(np.min([ax.axes.get_xlim()[0], ax.axes.get_ylim()[0]]), + np.max([ax.axes.get_xlim()[1], ax.axes.get_ylim()[1]])) + ax.set_xlim(_lim) + ax.set_ylim(_lim) + l = 
Line2D([0,1],[0,1], color='0.3', transform=ax.transAxes) + ax.add_line(l) + ax.text(0.05, 0.95, wv, verticalalignment='top', transform=ax.transAxes) + + + +def running_mean(x, N): + return np.convolve(x, np.ones((N,))/N)[(N-1):] + + +def power_spectrum(data, *args): + """ + calculates the power spectrum + + """ + if args: + freq = args[0] + else: + freq = data.shape[1] + + ps = np.abs(np.fft.fft(data.ravel()))**2 + time_step = 1./freq + freqs = np.fft.fftfreq(data.size, time_step) + idx = np.argsort(freqs) + return(freqs[idx], ps[idx]) + + +# spectra plots + +fig = plt.figure() +gs = gridspec.GridSpec(2, 2) + +_ax = {} +_ax['ax_u'] = fig.add_subplot(gs[3]) # axes u wind component +_ax['ax_v'] = fig.add_subplot(gs[2]) # axes v wind component +_ax['ax_w'] = fig.add_subplot(gs[1]) # axes w wind component +_ax['ax_tas'] = fig.add_subplot(gs[0]) # axes tas wind component + +for wv in ['u', 'v', 'w', 'tas']: + ax = _ax['ax_'+wv] + c_freqs, c_ps = power_spectrum(np.array(df_c['core_'+wv]), 32) + a_freqs, a_ps = power_spectrum(np.array(df_a['aimms_'+wv]), 20) + + c_freqs = c_freqs[:-1] + a_freqs = a_freqs[:-1] + c_ps = c_ps[:-1] + a_ps = a_ps[:-1] + + ax.plot(c_freqs[c_freqs < 15.5], running_mean(c_ps, 200)[c_freqs < 15.5]) + ax.plot(a_freqs[a_freqs < 9.5], running_mean(a_ps, 200)[a_freqs < 9.5]) + + ax.set_xscale('log') + ax.set_yscale('log') + ax.set_ylim(1, 10**8) + ax.set_xlim(0.1, 16) + xticks = [1, 2, 3, 4, 5, 6, 8, 10, 16] + ax.set_xticks(np.array(xticks)) + ax.set_xticklabels([str(i) for i in xticks]) + ax.text(0.05, 0.95, wv, verticalalignment='top', transform=ax.transAxes) diff --git a/faampy/avaps/__init__.py b/faampy/avaps/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/faampy/avaps/dropsonde.py b/faampy/avaps/dropsonde.py new file mode 100644 index 0000000..5dc9a10 --- /dev/null +++ b/faampy/avaps/dropsonde.py @@ -0,0 +1,399 @@ +#!/usr/bin/python + +''' +Created on 19 May 2010 + +@author: axel +''' + +import datetime +import matplotlib +import matplotlib.pyplot as plt +import netCDF4 +import numpy as np +import os +import re +import sys +import tempfile +import zipfile +import time + +#from faampy._3rdparty.haversine import points2distance, recalculate_coordinate + +from haversine import points2distance, recalculate_coordinate + +_DROPSONDE_ICON = '\x89PNG\r\n\x1a\n\x00\x00\x00\rIHDR\x00\x00\x00 \x00\x00\x00 \x08\x03\x00\x00\x00D\xa4\x8a\xc6\x00\x00\x00\x01sRGB\x00\xae\xce\x1c\xe9\x00\x00\x003PLTE@\x00\x00\t\x06\x03I\x17\x182.*{&)bICFPc\xa99=\xcd28wP7\xe5_`\xaa{Zt\x97\xc8\xa1\x92\x93\xd7\x86\x82\xf7\xb2\xae\xd8\xcf\xd0o\x99\xb5\xad\x00\x00\x00\x01tRNS\x00@\xe6\xd8f\x00\x00\x00\x01bKGD\x00\x88\x05\x1dH\x00\x00\x00\tpHYs\x00\x00\x0b\x13\x00\x00\x0b\x13\x01\x00\x9a\x9c\x18\x00\x00\x00\x07tIME\x07\xda\n\x05\t\x0e \xa8#K\xb3\x00\x00\x00\xe5IDAT8\xcb\xa5\x92\xc1b\xc4 \x08D\x1d\x81\x88\xa4\xa8\xff\xff\xb5\x8b\xb9\xb4\xdb\x86\xf4\xb0\x1c\xbc\xccs\x98I,\xe5m\x18\x95[\xab(w#\x8a\xc6\x18\xc3\xcc\x82\xf9\x03\xd5\xb5\xe2r \xd56\x81\xfe\x8e\x88\xa9\x1a+W\xe6\xc6\x9b0\x0e\xb39\xe7\x0f\x07SkZyg\x08*\x0e\xc4\x1e\xf9\x06\x94Uu\xef\x0e9|b~\xa7\x08\xddj\xd5\x06\xde\x0c\xdf4\xc1\x0eo\xd0\xba\xe7\xbe\xe8\x1c\x83m\xb4L\x8ei\x91{"\x95K!\x9b\xfa _A\xcag\xfa\xbf\xc0\xfa\xc4`\x01 B\xde\x91B~\x04.\xfd\x198\x8e\r\xa4\xba\xf4.TS\x00\xd4\xcfM +Jr\x06@\x90L\xef}\x03\xe9\x02\xa1K\x17Pjp\x06 O\x0f\xa1x\x17\x81g_\xf9\xab,\x97\xa8\x90U\\\xcb\x9d\x8e\xb5\x8e\xfb\xeb\x0e\x12w\xc0\xb3\x84\x88\x8aN\xf9\x82\xb0p\xd9?)\x01^\xa0$\x07L\x0c\x13\x1c\x17\x00\x00\x00\x00IEND\xaeB`\x82' + +_KML_HEADER = """ + + 
+ Dropsonde + 0 +""" + +_KML_PLACEMARK = """ + %s + %s + + +""" + +_KML_POINT = """ + absolute + + %f,%f,%f + + """ + +_KML_LINESTRING = """ + 1 + 1 + absolute + + %s + + + + +""" + + +_KML_FOOTER = """ + + +""" + + +class Dropsonde(object): + """Class that processes the dropsonde files. It's main purpose + at the moment is to create a kmz-file, that is viewable in googleearth.""" + + def __init__(self): + self.kmz_filename = None + self.kmz_path = None + #creates directory structure for the kmz + self.kmz_tmp_directory = tempfile.mkdtemp() + os.mkdir(os.path.join(self.kmz_tmp_directory, 'icons')) + os.mkdir(os.path.join(self.kmz_tmp_directory, 'figures')) + icon = open(os.path.join(self.kmz_tmp_directory, 'icons', 'dropsonde_32x32.png'), 'wb') + icon.write(_DROPSONDE_ICON) + icon.close() + + def __decdeg2dms__(self, dd): + """converts degree representation of lon/lat to decimal""" + mnt,sec = divmod(dd * 3600, 60) + deg,mnt = divmod(mnt, 60) + return deg,mnt,sec + + def __zip__(self): + files4zipping = [] + for root, subFolders, files in os.walk(self.kmz_tmp_directory): + for file in files: + files4zipping.append(os.path.join(root,file)) + #if not self.kmz_file: + # self.setOutputfile() + outfile = os.path.join(self.kmz_path, self.kmz_filename) + zip = zipfile.ZipFile(outfile, mode='w') + for file in files4zipping: + zipname = file[len(self.kmz_tmp_directory) + (len(os.sep)):] + zip.write(file, zipname) + + def close(self): + self.ds.close() + + def set_outpath(self, path): + if os.path.exists(path): + self.kmz_path = path + else: + os.mkdir(path) + self.kmz_path = path + + def process(self): + self.kml = _KML_HEADER + for ncfile in self.FileList: + self.read(ncfile) + self.calc_drift() + self.create_figure() + self.create_kml() + self.close() + self.kml += _KML_FOOTER + self.write_kmz() + self.__zip__() + + def read(self, file): + """Wrapper for the netcdf or text file read functions""" + self.kmz_filename = os.path.splitext(os.path.basename(file))[0] + '.kmz' + if os.path.splitext(file)[1] == '.nc': + self.__read_netcdf__(file) + else: + self.__read_txt__(file) + + def __read_txt__(self, txtfile): + #open the netcdf file + f = open(txtfile, 'r') + data = f.readlines() + self.lat_raw, self.lon_raw, self.alt_raw = [],[], [] + self.alt = [] + self.press, self.rh_raw, self.wspd_raw, self.wdir_raw, self.dz, self.sat_num, self.temp_raw = [], [], [], [], [], [], [] + self.theta_raw, self.gps_alt = [], [] + self.id='' + self.launch_time='' + self.project_name='' + self.mission_id='' + + for line in data: + if line.startswith('AVAPS-T'): + if 'LAU' in line: + self.id = line.split()[2] + elif 'Launch Time' in line: + self.launch_time = line[45:].strip() + elif 'Project Name' in line: + self.project_name=line.split(':')[1].split(',')[0].strip() + self.mission_id=line.split(':')[1].split(',')[1].strip() + elif ((self.id == '') and (line.split()[1] == 'END')): + self.id = line.split()[2] + elif line.startswith('AVAPS-D'): + self.lon_raw.append(float(line.split()[11])) + self.lat_raw.append(float(line.split()[12])) + self.alt_raw.append(float(line.split()[19])) + + R = 8.3114472 # gas constant + T = float(line.split()[5]) + 273.15# current temperature in K + cp = 1.0038 # heat capacity of air + P = float(line.split()[5]) + P_0 = 1000.0 + self.theta_raw.append(T * (P/P_0)**(R/cp)) + self.press.append(float(line.split()[5])) + self.temp_raw.append(float(line.split()[6])) + self.rh_raw.append(float(line.split()[7])) + self.wspd_raw.append(float(line.split()[9])) + 
self.wdir_raw.append(float(line.split()[8])) + self.dz.append(float(line.split()[10])) + self.sat_num.append(float(line.split()[14])) + self.alt.append(float(line.split()[12])) + + # create empty lists for latitude, longitude, altitude + self.lat = []; self.lon = []; self.alt =[] + + for i in range(len(self.lat_raw)): + if not (-999 in [self.lat_raw[i],self.lon_raw[i], self.alt_raw[i]]): + if not (99999 in [self.lat_raw[i],self.lon_raw[i], self.alt_raw[i]]): + if not (99 in [self.lat_raw[i],self.lon_raw[i], self.alt_raw[i]]): + self.lat.append(self.lat_raw[i]) + self.lon.append(self.lon_raw[i]) + self.alt.append(self.alt_raw[i]) + + def __read_netcdf__(self, ncfile): + + # open the netcdf file + self.ds = netCDF4.Dataset(ncfile, 'r') + + self.id = self.ds.SoundingDescription.split()[1] + self.launch_time = datetime.datetime.strptime(self.ds.variables['base_time'].string, '%a %b %d %H:%M:%S %Y') + + # create empty lists for latitude, longitude, alitude + self.lat = []; self.lon = []; self.alt =[] + + self.lat_raw = list(self.ds.variables['lat'][:]) + self.lon_raw = list(self.ds.variables['lon'][:]) + self.alt_raw = list(self.ds.variables['alt'][:]) + self.theta_raw = list(self.ds.variables['theta'][:]) + self.rh_raw = list(self.ds.variables['rh'][:]) + wspd_raw = list(self.ds.variables['wspd'][:]) + wdir_raw = list(self.ds.variables['wdir'][:]) + + for i in range(len(self.lat_raw)): + if self.lat_raw[i] != -999 and self.lon_raw[i] != -999 and self.alt_raw[i] != -999: + self.lat.append(self.lat_raw[i]) + self.lon.append(self.lon_raw[i]) + self.alt.append(self.alt_raw[i]) + + def __get_fid__(self): + """get the flight number from the netcdf global attributes""" + fid = None + pattern = '[b,B]\d{3}' + #check several attributes to make sure that we get the fid + vars = [self.ds.SoundingDescription,] + + for var in vars: + fid = re.search('[b,B]\d{3}', var) + if fid: + result = fid.group().lower() + return result + + def create_kml(self): + kml = "" + fall_time = max(self.ds.variables['time'][:]) - min(self.ds.variables['time'][:]) + + self.kml_name = '%s-%s' % (self.__get_fid__(), + re.split('[_,.]', self.ds.SoundingDescription)[1]) + + description = "" +self.launch_time.strftime('%Y-%m-%d %H:%M:%S')+"
<br><br><b>Summary</b><br><br>" + \
+                      """
+                      <br>First time stamp: """ + \
+                      time.strftime('%H:%M:%S', time.gmtime(min(self.ds.variables['time'][:]))) + \
+                      """<br>Last time stamp: """ + \
+                      time.strftime('%H:%M:%S', time.gmtime(max(self.ds.variables['time'][:]))) + \
+                      """<br>Fall time: """ + \
+                      '%im%is (%isecs)' % (fall_time//60, fall_time%60, fall_time) + \
+                      "<br>Drift: " + str(int(round(self.drift_tot * 1000))) + "m " + \
+                      "(NS: " + str(int(round(self.drift_ns * 1000))) + "m, EW: " + str(int(round(self.drift_ew * 1000))) + "m)" + \
+                      "<br><br><b>Profiles</b><br><img src=figures/%s.png width=470>" % (self.id,) + "<br>
" + \ + "]]>' + lat_lon_alt = "" + for i in range(len(self.lat)): + lat_lon_alt += '%f,%f,%f\n' % (float(self.lon[i]), + float(self.lat[i]), + float(self.alt[i])) + + # point feature; location of the icon + pt_lat, pt_lon, pt_alt = float(self.lat[-1]), float(self.lon[-1]), float(self.alt[-1]) + + kml += _KML_PLACEMARK % (self.kml_name, description) + kml += _KML_POINT % (pt_lon, pt_lat, pt_alt) + kml += _KML_LINESTRING % (lat_lon_alt) + self.kml = kml + + def create_figure(self): + + params = {'axes.labelsize': 9, + 'font.size': 9, + 'xtick.labelsize': 8, + 'ytick.labelsize': 8, + 'text.usetex': False} + + plt.clf() + plt.rcParams.update(params) + + ymin = 0; ymax = 10 + + line_style = 'b-' + line_width = '2' + + plt.figure(1, figsize=(5,4), dpi=120) + + font = matplotlib.font_manager.FontProperties() + #font.set_size(7) + + alt = self.ds.variables['alt'][:].data / 1000.0 #convert height to km + theta = self.ds.variables['theta'][:].data - 273.15 + dp = self.ds.variables['dp'][:].data + tdry = self.ds.variables['tdry'][:].data + wspd = self.ds.variables['wspd'][:].data + wdir = self.ds.variables['wdir'][:].data + u = self.ds.variables['u_wind'][:].data + v = self.ds.variables['v_wind'][:].data + + plt.subplots_adjust(hspace = 0.25, wspace = 0.15) + + + # --- 1st plot --- + + plt.subplot(1,2,1) + ix = np.where((tdry != -999) & (alt > 0)) + plt.plot(tdry[ix], alt[ix], '-', lw=3, label='tdry') + ix = np.where((dp != -999) & (alt > 0)) + plt.plot(dp[ix], alt[ix], '-', lw=3, label='dp') + ix = np.where((theta > -100) & (alt > 0)) + plt.plot(theta[ix], alt[ix], '-', lw=3, label='theta') + plt.grid() + plt.xlabel('temperature (C)') + plt.legend(loc='upper right') + plt.ylabel('height (km)') + + # --- 2nd plot --- + + plt.subplot(1,2,2) + ix = np.where((wspd[:] > 0) & (alt > 0)) + p1, = plt.plot(wspd[:][ix], alt[ix], '-', color='blue', lw=3, label='wspd') + plt.xlabel('wspd (ms-1)') + plt.twiny() + plt.plot(wdir[:][ix], alt[ix], '-', color='green', lw=3, label='wdir') + plt.xlim(0,360) + plt.xlabel('wdir (deg)') + plt.ylabel('altitude (m)') + plt.legend() + #plt.tight_layout() + self.fig_filename = os.path.join(self.kmz_tmp_directory, 'figures', self.id + '.png') + plt.savefig(self.fig_filename) + plt.close() + + def write_kml(self): + f = open(os.path.join(self.kmz_tmp_directory, 'doc.kml'), 'w') + f.write(self.kml) + f.close() + + def calc_drift(self): + start_point = (self.__decdeg2dms__(self.lon[-1]), self.__decdeg2dms__(self.lat[-1])) + end_point = (self.__decdeg2dms__(self.lon[0]), self.__decdeg2dms__(self.lat[0])) + #calculate north-south drift + self.drift_ns = points2distance((self.__decdeg2dms__(self.lon[-1]), self.__decdeg2dms__(self.lat[-1])), + (self.__decdeg2dms__(self.lon[-1]), self.__decdeg2dms__(self.lat[ 0]))) + #calculate east-west drift + self.drift_ew = points2distance((self.__decdeg2dms__(self.lon[-1]), self.__decdeg2dms__(self.lat[-1])), + (self.__decdeg2dms__(self.lon[ 0]), self.__decdeg2dms__(self.lat[-1]))) + #calculate total drift + self.drift_tot = points2distance(start_point, end_point) + + +def process(iput, opath): + if os.path.isdir(iput): + iput_file_list = [] + for root, subFolders, files in os.walk(iput): + for f in files: + #print(f) + #if f.startswith('faam-dropsonde'): + if re.match('faam-dropsonde.*proc.nc', f): + iput_file_list.append(os.path.join(root, f)) + else: + iput_file_list = [iput,] + + for ifile in iput_file_list: + try: + d = Dropsonde() + d.read(ifile) + d.set_outpath(opath) + d.calc_drift() + d.create_figure() + d.create_kml() + d.write_kml() + 
d.__zip__() + except: + pass + +if __name__ == '__main__': + import argparse + parser = argparse.ArgumentParser(description="Creates a kmz file from dropsonde data.") + parser.add_argument('--outpath', action="store", type=str, default=os.environ['HOME'], help='Directory where the kmz file will be stored. Default: $HOME.') + parser.add_argument('input', action="store", type=str, help='Individual dropsonde file or folder which contains dropsonde data.') + args = parser.parse_args() + process(args.input, args.outpath) + sys.stdout.write('Done ...\n') + + +#ifile = '/home/axel/Dropbox/campaigns/wintex2016/b949-mar-09/faam-dropsonde_faam_20160309125648_r0_b949_raw.nc' +#ifile = '/home/axel/b949_avaps/D20160309_125648QC.nc' +#d = Dropsonde() +#d.read(ifile) +#d.set_outpath('/home/axel/') +#d.calc_drift() +#d.create_figure() +#d.create_kml() +#d.write_kml() +#d.__zip__() diff --git a/faampy/avaps/haversine.py b/faampy/avaps/haversine.py new file mode 100644 index 0000000..f6342a5 --- /dev/null +++ b/faampy/avaps/haversine.py @@ -0,0 +1,72 @@ +#coding:UTF-8 +""" + Python implementation of Haversine formula + Copyright (C) <2009> Bartek GĂłrny, Mobitel Monitoring sp. z o.o. + + This program is free software: you can redistribute it and/or modify + it under the terms of the GNU General Public License as published by + the Free Software Foundation, either version 3 of the License, or + (at your option) any later version. + + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + GNU General Public License for more details. + + You should have received a copy of the GNU General Public License + along with this program. If not, see . +""" + +import math + + +def recalculate_coordinate(val, _as=None): + """ + Accepts a coordinate as a tuple (degree, minutes, seconds) + You can give only one of them (e.g. only minutes as a floating point number) and it will be duly + recalculated into degrees, minutes and seconds. + Return value can be specified as 'deg', 'min' or 'sec'; default return value is a proper coordinate tuple. + """ + deg, min, sec = val + # pass outstanding values from right to left + min = (min or 0) + int(sec) / 60 + sec = sec % 60 + deg = (deg or 0) + int(min) / 60 + min = min % 60 + # pass decimal part from left to right + dfrac, dint = math.modf(deg) + min = min + dfrac * 60 + deg = dint + mfrac, mint = math.modf(min) + sec = sec + mfrac * 60 + min = mint + if _as: + sec = sec + min * 60 + deg * 3600 + if _as == 'sec': return sec + if _as == 'min': return sec / 60 + if _as == 'deg': return sec / 3600 + return deg, min, sec + + +def points2distance(start, end): + """ + Calculate distance (in kilometers) between two points given as (long, latt) pairs + based on Haversine formula (http://en.wikipedia.org/wiki/Haversine_formula). + Implementation inspired by JavaScript implementation from http://www.movable-type.co.uk/scripts/latlong.html + Accepts coordinates as tuples (deg, min, sec), but coordinates can be given in any form - e.g. + can specify only minutes: + (0, 3133.9333, 0) + is interpreted as + (52.0, 13.0, 55.998000000008687) + which, not accidentally, is the lattitude of Warsaw, Poland. 
+ """ + start_long = math.radians(recalculate_coordinate(start[0], 'deg')) + start_latt = math.radians(recalculate_coordinate(start[1], 'deg')) + end_long = math.radians(recalculate_coordinate(end[0], 'deg')) + end_latt = math.radians(recalculate_coordinate(end[1], 'deg')) + d_latt = end_latt - start_latt + d_long = end_long - start_long + a = math.sin(d_latt/2)**2 + math.cos(start_latt) * math.cos(end_latt) * math.sin(d_long/2)**2 + c = 2 * math.atan2(math.sqrt(a), math.sqrt(1-a)) + #return 6371 * c + return 6378.137 * c diff --git a/faampy/avaps/thermo.py b/faampy/avaps/thermo.py new file mode 100644 index 0000000..22cfdc9 --- /dev/null +++ b/faampy/avaps/thermo.py @@ -0,0 +1,121 @@ +#!/usr/bin/env python +# encoding: utf-8 +""" +thermo.py + +Various thermodynamic relationship for ice and water. + +Created by Geoff Cureton on 2009-04-04. +Copyright (c) 2011 University of Wisconsin SSEC. All rights reserved. +""" + +file_Date = '$Date$' +file_Revision = '$Revision$' +file_Author = '$Author$' +file_HeadURL = '$HeadURL$' +file_Id = '$Id$' + +__author__ = 'G.P. Cureton ' +__version__ = '$Id$' +__docformat__ = 'Epytext' + +from scipy import log10 + +def rh_to_mr( rh, p, t) : + ''' + Returns mixing ratio, in g/kg, given relative humidity in %, + pressure in hPa and temperature in K. + ''' + return rh * 0.01 * satmix(p, t) + +def rh_to_mr_wat( rh, p, t) : + ''' + Returns mixing ratio over water, in g/kg, given relative humidity in %, + pressure in hPa and temperature in K. + ''' + return rh * 0.01 * satmixwat(p, t) + +def rh_to_mr_ice( rh, p, t) : + ''' + Returns mixing ratio over ice, in g/kg, given relative humidity in %, + pressure in hPa and temperature in K. + ''' + return rh * 0.01 * satmixice(p, t) + +def mr_to_rh( mr, p, t) : + ''' + Returns relative humidity in %, given the mixing ratio in g/kg, + pressure in hPa and temperature in K. + ''' + return mr * 100. / satmix(p, t) + +def mr_to_rh_wat( mr, p, t) : + ''' + Returns relative humidity in %, given the mixing ratio over water in g/kg, + pressure in hPa and temperature in K. + ''' + return mr * 100. / satmixwat(p, t) + +def mr_to_rh_ice( mr, p, t) : + ''' + Returns relative humidity in %, given the mixing ratio over ice in g/kg, + pressure in hPa and temperature in K. + ''' + return mr * 100. / satmixice(p, t) + +def satmix( p, t) : + ''' + Returns saturation mixing ratio in g/kg, given pressure in hPa and + temperature in K. + ''' + if (t > 253.) : + return satmixwat(p, t) + else : + return satmixice(p, t) + +def satmixwat( p, t) : + ''' + Returns saturation mixing ratio over water, in g/kg, given pressure in hPa and + temperature in K. + ''' + es = svpwat(t) + return (622. * es)/p + +def satmixice( p, t) : + ''' + Returns saturation mixing ratio over ice, in g/kg, given pressure in hPa and + temperature in K. + ''' + es = svpice(t); + return (622. * es) / p; + + +def svpwat(t) : + ''' + Returns saturation vapor pressure over water, in hPa, given temperature in K. + + ''' + + a0 = 0.999996876e0 + a1 = -0.9082695004e-2 + a2 = 0.7873616869e-4 + a3 = -0.6111795727e-6 + a4 = 0.4388418740e-8 + a5 = -0.2988388486e-10 + a6 = 0.2187442495e-12 + a7 = -0.1789232111e-14 + a8 = 0.1111201803e-16 + a9 = -0.3099457145e-19 + b = 0.61078e+1 + t -= 273.16 + return (b / ((a0+t*(a1+t*(a2+t*(a3+t*(a4+t*(a5+t*(a6+t*(a7+t*(a8+t*a9)))))))))**8.)) + +def svpice( t) : + ''' + Returns saturation vapor pressure over ice, in hPa, given temperature in K. + The Goff-Gratch equation (Smithsonian Met. Tables, 5th ed., pp. 
350, 1984) + ''' + a = 273.16 / t + exponent = -9.09718 * (a - 1.) - 3.56654 * log10(a) + 0.876793 * (1. - 1./a) + log10(6.1071) + + return 10.0**exponent diff --git a/faampy/avaps/utils.py b/faampy/avaps/utils.py new file mode 100644 index 0000000..ea67600 --- /dev/null +++ b/faampy/avaps/utils.py @@ -0,0 +1,64 @@ +''' +Created on 16 Nov 2011 + +@author: axel +''' + +import netCDF4 + +def get_sonde_id(ifile): + sonde_id=None + + if ifile.endswith('.nc'): + ds=netCDF4.Dataset(ifile, 'r') + sonde_id=str.split(str(ds.SoundingDescription))[1] + ds.close() + return sonde_id + + + f=open(ifile, 'r') + lines=f.readlines() + f.close() + for line in lines: + try: + if line.split()[1] == 'STA' or line.split()[1] == 'LAU': + sonde_id = line.split()[2] + elif line.startswith('Sonde ID'): + sonde_id = line.split()[2][:-1] + else: + pass + except: + pass + return sonde_id + + +def conv_height_to_pressure(gps_alt, p_0=None): + """Calculates pressure from height using the barometric formula. + + see: http://en.wikipedia.org/wiki/Atmospheric_pressure + """ + if not p_0: + p_0 = 1013.25 # sea level standard atmospheric pressure [Pa] + L=0.0065 # temperature lapse rate [K/m] + T_0=288.15 # sea level standard temperature [K] + R=8.31447 # universal gas constant [J/(mol*K)] + M=0.0289644 # molar mass of dry air [kg/mol] + g=9.80665 # Earth-surface gravitational acceleration [m/s^2] + p=p_0 * (1. - (L*gps_alt)/T_0)**((g*M)/(R*L)) + return p + + +def conv_pressure_to_height(pressure, p_0=None): + """Calculates height from pressure using the barometric formula. + + see: http://en.wikipedia.org/wiki/Atmospheric_pressure + """ + if not p_0: + p_0=1013.25 # sea level standard atmospheric pressure [Pa] + L=0.0065 # temperature lapse rate [K/m] + T_0=288.15 # sea level standard temperature [K] + R=8.31447 # universal gas constant [J/(mol*K)] + M=0.0289644 # molar mass of dry air [kg/mol] + g=9.80665 # Earth-surface gravitational acceleration [m/s^2] + alt=((pressure/p_0)**((R*L)/(g*M)) - 1.0) * (-T_0/L) + return alt diff --git a/faampy/core/__init__.py b/faampy/core/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/faampy/core/calclib.py b/faampy/core/calclib.py new file mode 100644 index 0000000..febfc63 --- /dev/null +++ b/faampy/core/calclib.py @@ -0,0 +1,50 @@ +import numpy as np + + +def is_slr(ds, s_ix, e_ix): + """Checks if a time interval is a straight and level run using + the ROLL_GIN and P9_STAT variables from the FAAM core netcdf. 
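+
+    In terms of the criteria dict below, an interval qualifies only if the
+    GIN roll angle never exceeds +-3 degrees and P9_STAT (static pressure,
+    units assumed mb) varies by less than 2 over the interval.
+
+    Usage sketch (file name and index range purely illustrative):
+
+        ds = netCDF4.Dataset('core_faam_20130403_v004_r0_b768.nc', 'r')
+        slr = is_slr(ds, 20000, 21000)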
+ + """ + criteria = {'ROLL_GIN': 3, 'P9_STAT': 2} + roll_gin = ds.variables['ROLL_GIN'][s_ix:e_ix, :] + p9_stat = ds.variables['P9_STAT'][s_ix:e_ix, :] + ix1 = (np.where(np.abs(roll_gin ) > criteria['ROLL_GIN']))[0].size + ix2 = (np.max(p9_stat) - np.min(p9_stat)) > criteria['P9_STAT'] + if ix1 or ix2: + result = False + else: + result = True + return result + + +def calc_flight_level(pressure): + """https://www.brisbanehotairballooning.com.au/faqs/education/113-pressure-altitude-conversion.html + + """ + fl = (10**((np.log10(np.array(pressure) / 1013.25))/5.2558797) -1) / (-6.8755856 * 10**-6*100) + return fl.astype(int) + + +def calc_qnh(pressure, height_amsl): + """pressure in mb + and height_amsl: height above mean sea level in meters + + http://en.wikipedia.org/wiki/Barometric_formula + + """ + L = 0.0065 # temperature lapse rate [K/m] + T_0 = 288.15 # sea level standard temperature [K] + R = 8.31447 # universal gas constant [J/(mol*K)] + M = 0.0289644 # molar mass of dry air [kg/mol] + g = 9.80665 # Earth-surface gravitational acceleration [m/s^2] + qnh = pressure / ((1.0 - (L * (height_amsl)/T_0))**((g*M)/(R*L))) + return qnh + + +testing = False +if testing: + print(calc_flight_level(np.array([1000, 900, 800, 700, 400]))) + print(calc_qnh(900., 3200., 40)) + print(calc_qnh(1015., 107., 0.)) + diff --git a/faampy/core/faam_data.py b/faampy/core/faam_data.py new file mode 100644 index 0000000..e5ea033 --- /dev/null +++ b/faampy/core/faam_data.py @@ -0,0 +1,585 @@ +# -*- coding: utf-8 -*- + +import itertools +import netCDF4 +import numpy as np +import datetime +import os +import osgeo.ogr +import pandas as pd +import re +import sys + +from faampy._3rdparty import rdp # Ramer-Douglas-Peucker algorithm (RDP) + +DEBUG = True + + +NETCDF_VARIABLE_IDS = """515,Time +516,IAS_RVSM +517,TAS_RVSM +520,TAT_DI_R +525,TAT_ND_R +529,TDEW_GE +535,LWC_JW_U +537,BTHEIM_U +538,INU_ACLF +539,INU_ACLS +540,INU_ACLU +548,AOA +549,AOSS +557,INU_VZ +560,INU_ROLL +561,INU_PTCH +562,INU_HDG +563,INU_GSPD +564,INU_DRFT +565,INU_PITR +566,INU_HDGR +567,INU_ROLR +568,CPC_CONC +572,TWC_EVAP +574,O3_TECO +575,HGT_RADR +576,PS_RVSM +577,Q_RVSM +578,PALT_RVS +579,CAB_PRES +580,LAT_GPS +581,LON_GPS +582,GPS_ALT +583,GPS_VELN +584,GPS_VELE +585,GPS_VELZ +602,NV_LWC_U +605,NV_TCW_U +616,ROLL_GIN +617,PTCH_GIN +618,HDG_GIN +620,TRCK_GIN +621,GSPD_GIN +622,ROLR_GIN +623,PITR_GIN +624,HDGR_GIN +625,ACLF_GIN +626,ACLS_GIN +627,ACLD_GIN +642,SOL_AZIM +643,SOL_ZEN +648,PSAP_LIN +649,PSAP_LOG +660,CAB_TEMP +664,TWC_DET +666,TWC_TSAM +674,UPP_VIS_RED_SIG +673,UPP_VIS_CLR_SIG +675,UPP_I/R_SIGNAL +676,UPP_VIS_CLR_ZERO +677,UPP_VIS_RED_ZERO +678,UPP_I/R_ZERO +679,UPP_VIS_CLR_TEMP +680,UPP_VIS_RED_TEMP +681,UPP_I/R_TEMP +682,LWR_VIS_CLR_SIG +683,LWR_VIS_RED_SIG +684,LWR_I/R_SIGNAL +685,LWR_VIS_CLR_ZERO +686,LWR_VIS_RED_ZERO +687,LWR_I/R_ZERO +688,LWR_VIS_CLR_TEMP +689,LWR_VIS_RED_TEMP +690,LWR_I/R_TEMP +714,V_C +715,U_C +716,W_C +723,BTHEIM_C +724,LWC_JW_U +725,TWC_TDEW +730,LAT_INUC +731,LON_INUC +735,VN_INUC +736,VE_INUC +610,LAT_GIN +611,LON_GIN +612,ALT_GIN +613,VELN_GIN +614,VELE_GIN +615,VELD_GIN +760,NEPH_PR +761,NEPH_T +762,TSC_BLUU +763,TSC_GRNU +764,TSC_REDU +765,BSC_BLUU +766,BSC_GRNU +767,BSC_REDU +770,NO_TECO +771,NO2_TECO +772,NOX_TECO +773,P0_S10 +774,PA_TURB +775,PB_TURB +778,P9_STAT +779,TAS +781,PSP_TURB +782,CO_AERO +1019,SW_DN_C +1020,RED_DN_C +1021,IR_DN_C +1022,SW_UP_C +1023,RED_UP_C +1024,IR_UP_C""" + + +def flatten(l): + """ + flattens a list; get rids of all nested lists + :param l: input list to be 
flattened + """ + return [item for sublist in l for item in sublist] + + +class Translator(dict): + """ + Dictionary for translating old variable names like 'PARA0515' to more + meaningful names as they have been in us more recently. This nomination was + used for early FAAM flights and was inherited from MRF days. + + """ + def __init__(self): + keys = ['PARA%.4i' % int(i.split(',')[0]) for i in NETCDF_VARIABLE_IDS.split('\n')] + keys += ['PARA%.4iFLAG' % int(i.split(',')[0]) for i in NETCDF_VARIABLE_IDS.split('\n')] + vals = [i.split(',')[1].strip() for i in NETCDF_VARIABLE_IDS.split('\n')] + vals += [i.split(',')[1].strip()+'_FLAG' for i in NETCDF_VARIABLE_IDS.split('\n')] + for k,v in zip(keys, vals): + self[k] = v + + +class Coords(list): + + def __init__(self, epsilon=0.01): + self.Simple_mask = [] + self.Epsilon = epsilon + + def as_wkt(self, simplified=False, as_type='MULTIPOINT'): + """ + Returns the coordinates in well-known text format + + :param boolean simplified: If set returns a geometry with a reduced + number of points + + """ + if simplified: + xyz = self.simplified() + else: + xyz = self + + if as_type.upper().startswith('LINESTRING'): + return "LINESTRINGZ(" + ','.join(['%f %f %f' % tuple(item) for item in xyz]) + ")" + elif as_type.upper() == 'MULTIPOINT': + return "MULTIPOINT(" + ','.join(['%f %f %f' % tuple(item) for item in xyz]) + ")" + elif as_type.upper() == 'POINT': + return ["POINT(%f %f %f)" % tuple(item) for item in xyz] + + def as_kml(self, simplified=True, extrude=1, tessellate=1): + """ + Return kml formatted string + :return : kml string + + """ + if simplified: + xyz = self.simplified() + else: + xyz=self + kml = "" + kml += "" + kml += str(extrude) + kml += "" + kml += "" + kml += str(tessellate) + kml += "" + kml +="" + kml += '\n'.join(['%f,%f,%f' % tuple(item) for item in xyz]) + kml +="" + kml += "" + return kml + + def get_bbox(self): + """ + Returns boundary box for the coordinates. Useful for setting up + the map extent for plotting on a map. + :return tuple: corner coordinates (llcrnrlat, urcrnrlat, llcrnrlon, + urcrnrlon) + """ + x, y, z = zip(self) + llcrnrlat = np.nanmin(y) + urcrnrlat = np.nanmax(y) + llcrnrlon = np.nanmin(x) + urcrnrlon = np.nanmax(x) + return (llcrnrlat, + urcrnrlat, + llcrnrlon, + urcrnrlon) + + def _simplify_(self, step=10): + """ + Simplifies the coordinates by reducing the number using the + Ramer-Douglas-Peucker algorithm (RDP). The list of coordinates + itself is not shrinked, but rather a mask array is produced. If + *self.simplified* is called the mask is used as an index. + + :param int step: step size for array slicing. Using only every + 10th value for example speeds things up considerably. 
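+
+        The mask is padded back out to the full, unsliced length (step-1
+        False entries per kept sample) so that simplified() can apply it to
+        this list directly via itertools.compress.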
+ """ + + xyz = [list(i) for i in self] # copy the coordinates into a list + # in the case that the first and the last coordinate are identical the + # rdp algorithmen fails; a quick pop fixes that + while xyz[0] == xyz[-1]: + xyz.pop() + self.pop() + # use only every 10th value to speed things up + # TODO: impact should be checked at some point + m = rdp.rdp(xyz[::step], epsilon=self.Epsilon, return_mask=True) + # flatten = lambda l: [item for sublist in l for item in sublist] + m = flatten([[m, ]+(step-1)*[False] for i in m]) + while len(m) < self.__len__(): + m.append(False) + self.Simple_mask = m + return + + def simplified(self): + """ + Returns the reduced number of coordinates + """ + if not self.Simple_mask: + self._simplify_() + return list(itertools.compress(self, self.Simple_mask)) + + +class FAAM_Dataset(object): + """ + Dataset class which has much in common with the netCDF4.Dataset. The class + has methods that helps to perform common tasks like merging and can copy + + + """ + + def __init__(self, filename): + """ + :param filename: FAAM core filename to read in + """ + + translate = Translator() + self.coords = Coords() + self.variables = {} + self.ds = netCDF4.Dataset(filename, 'r') + self.ds.set_auto_mask(False) + + self.ncattr = {} + self.ncattr['Conventions'] = 'NCAR-RAF/nimbus' + self.ncattr['Version'] = '1.5' + # Get all the variables and copy them over + for var_name in self.ds.variables.keys(): + # Fix and oddity where the variables was named altitude + if var_name == 'altitude': + self.variables['ALT_GIN'] = self.ds.variables[var_name] + # Make sure that time variable is always + # called Time and not TIME or time + elif var_name.lower() == 'time': + self.variables['Time'] = self.ds.variables[var_name] + elif var_name.startswith('PARA'): + self.variables[translate[var_name]] = self.ds.variables[var_name] + nan_ix = np.isnan(self.variables[translate[var_name]]) + self.variables[translate[var_name]][:][nan_ix] = -9999.0 + else: + self.variables[var_name] = self.ds.variables[var_name] + nan_ix = np.isnan(self.variables[var_name]) + self.variables[var_name][:][nan_ix] = -9999. 
+ # make sure that there are no nan's in the data + + # Copy all the global attributes + for attr in self.ds.ncattrs(): + self.ncattr[attr] = self.ds.__getattribute__(attr) + + # make sure that FLIGHT and DATE are set as global attributes + if 'FLIGHT' in self.ncattr.keys(): + if 'TITLE' in self.ncattr.keys(): + self.ncattr['FLIGHT'] = self.TITLE.split()[2].lower() + elif 'FLIGHT_NUMBER' in self.ncattr.keys(): + self.ncattr['FLIGHT'] = self.FLIGHT_NUMBER.lower() + + if 'Time' in self.ds.variables.keys(): + dt = datetime.datetime.strptime(str(self.ds.variables['Time'].units).strip(), 'seconds since %Y-%m-%d 00:00:00 +0000') + elif 'TIME' in self.ds.variables.keys(): + dt = datetime.datetime.strptime(str(self.ds.variables['TIME'].units).strip(), 'seconds since %Y-%m-%d 00:00:00 +0000') + elif 'time' in self.ds.variables.keys(): + dt = datetime.datetime.strptime(re.findall('\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}', self.ds.variables['time'].units)[0], '%Y-%m-%d 00:00:00') + elif hasattr(self.ds, 'Flight_Date'): + dt = datetime.datetime.strptime(self.ds.Flight_Date, '%d-%b-%y') + elif 'PARA0515' in self.ds.variables.keys(): + dt = datetime.datetime.strptime(self.ds.title.split()[-1], '%d-%b-%y') + else: + dt = datetime.datetime(self.ds.DATE[2], self.ds.DATE[1], self.ds.DATE[0]) + self.ncattr['DATE'] = [dt.day, dt.month, dt.year] + + if 'WOW_IND' not in self.variables.keys(): + # Estimate the WOW_IND using indicated air speed + wow = np.array([1]*self.variables['Time'].size) + if len(self.variables['IAS_RVSM'].shape) == 1: + ias_rvsm=self.variables['IAS_RVSM'][:] + else: + ias_rvsm=self.variables['IAS_RVSM'][:,0] + ix = np.where((ias_rvsm > 60) & (ias_rvsm < 300))[0] + wow[ix] = 0 + self.variables['WOW_IND'] = wow[:] + + # using the more sophisticated np.datetime64 data type + base_time = np.datetime64('%i-%0.2i-%0.2iT00:00:00' % (self.ncattr['DATE'][2], self.ncattr['DATE'][1], self.ncattr['DATE'][0])) + + self.index = base_time + np.array(self.variables['Time'][:].ravel(), dtype=np.int) + + self._set_coordinates_() + + self.Geometry=osgeo.ogr.CreateGeometryFromWkt("LINESTRING (" + ','.join(['%f %f %f' % tuple(i) for i in self.coords.simplified()])+ ")") + + def _set_coordinates_(self): + lon_var_name = None + if 'LAT_GIN' in self.variables.keys(): + lon_var_name = 'LON_GIN' + lat_var_name = 'LAT_GIN' + alt_var_name = 'ALT_GIN' + self.ncattr['Coordinates'] = 'LON_GIN LAT_GIN ALT_GIN Time' + elif 'LAT_GPS' in self.variables.keys(): + lon_var_name = 'LON_GPS' + lat_var_name = 'LAT_GPS' + alt_var_name = 'GPS_ALT' + self.ncattr['Coordinates'] = 'LON_GPS LAT_GPS GPS_ALT Time' + if not lon_var_name: + return + if self.variables[lon_var_name].size == 0: + return + + if len(self.variables[lon_var_name][:].shape) > 1: + x = self.variables[lon_var_name][:, 0].ravel() + y = self.variables[lat_var_name][:, 0].ravel() + z = self.variables[alt_var_name][:, 0].ravel() + else: + x = self.variables[lon_var_name][:].ravel() + y = self.variables[lat_var_name][:].ravel() + z = self.variables[alt_var_name][:].ravel() + + wow = self.variables['WOW_IND'] + + # filter good values + ix = np.where((x > -180) & (x < 180) & (y > -90) & (y < 90) & (z != -9999.0) & (x != 0.0) & (wow != 1))[0] + for i in zip(list(x[ix]), list(y[ix]), list(z[ix])): + self.coords.append(i) + return + + def merge(self, recarray, index='', varnames=[], delay=0): + """ + Merges in a numpy recarray with the FAAM_Dataset using concurring + timestamps + + :param recarray: A numpy numpy.recarray with named data + :type recarray: numpy.recarray + :param 
index: Name of the column/field that contains the timestamp. + Note that the merging only works on timestamps. The maximum time + resolution is 1sec. + :type index: str + :param varnames: List of varnames from the input array that should be + merged + :type varnames: list of strings + :param int delay: instruments have a time offset compared to the core + data. For example the FGGA is aboute four seconds slower than the + core temperature measurements. The delay keyword takes this care of + this and shifts the data. + """ + BASE_TIME = self.index[0] + + if not varnames: + other_var_names = list(recarray.dtype.names) + + if not index: + # This needs testing + # guess field name for index by looping through some common names + recarray_keys = recarray.dtype.fields.keys() + index_name_list = ['timestamp', 'time', 'datetime'] + for i, name in enumerate(index_name_list): + if name in [item.lower() for item in recarray_keys]: + index = recarray_keys[i] + break + + # if we made it that far we don't have a proper index name + sys.stdout.write('No index for merging found ... Leaving ...\n') + return + + else: + other_index = recarray[index] + # Need to make sure that the datatype is datetime64[s] otherwise + # the merging does not work + if index in other_var_names: + other_var_names.remove(index) + + # The following stuff does what reverse_indices in idl does, + # does not work with the datetime64 data type. We simplify + # things by converting it to secs since BASE_TIME + # so that we can work with two integer numbers + own_index = np.array(self.index-BASE_TIME, dtype=np.int32) + other_index = np.int32((recarray['timestamp']-BASE_TIME).astype('timedelta64[s]')) + bins = np.linspace(own_index[0]-0.5, own_index[-1]+0.5, (len(own_index)+1)) + ind = np.digitize(other_index, bins)-1 + ind_mask = np.select([ind < 0, ind >= len(bins)-1], [0, 0], default=1) + + for other_var_name in other_var_names: + # Make sure that the data are numbers and not string or object type + if recarray[other_var_name].dtype in [np.dtype(np.object), np.dtype(np.str)]: + continue + new_array = np.array([np.nan, ]*len(own_index)) + new_array[ind[ind_mask == 1]] = recarray[other_var_name][ind_mask == 1] + if delay: + new_array = np.roll(new_array, delay) + new_array[-delay] = np.nan + self.variables[other_var_name] = np.copy(new_array) + + def as_dataframe(self, varnames=[]): + """ + Returns the Dataset as a pandas.Dataframe using the timestamp as index, + which opens the world of pandas to you. Only the first column of two + dimensional data sets is grabbed, because pandas does not handle + multidimensional data very well. + + :param varnames: list of variable names that should be exported as + DataFrame. 
By default all are exported + :type varnames: list + :return: returns a pandas Dataframe with the Timestamp as index + :type return: pandas.Dataframe + """ + if not varnames: + varnames = sorted(self.variables.keys()) + varnames.remove('Time') + else: + varnames = list(set(self.variables.keys()).intersection(varnames)) + + df = pd.DataFrame(index=self.index) + for v in varnames: + shp = self.variables[v].shape + if len(shp) == 2: + data = np.copy(self.variables[v][:, 0]) + else: + data = np.copy(self.variables[v][:]) + df_tmp = pd.DataFrame(data[:], index=self.index, columns=[v, ]) + df = pd.concat([df, df_tmp], axis=1) + # set all missing values to nan + df[df == -9999.0] = np.nan + return df + + def as_kml(self, extrude=1, tessellate=1): + """ + Returns a kml linestring which represents the flight track of the + current dataset + + :param extrude: whether the linestring is extruded + :type extrude: boolean + :param tessellate: whether the linestring is tesselated + :type tessellate: boolean + :return: kml string + """ + template = """ + + + %s-%s-Flight-Track + 0 + + %s + %s + + + +""" + linestring = "" + linestring += "%s%s" % (str(extrude), str(tessellate)) + linestring += (''.join(self.Geometry.ExportToKML().split(''))) + kml = template % (self.FLIGHT, + datetime.datetime(self.DATE[2], self.DATE[1], self.DATE[0]).strftime('%Y-%m-%d'), + self.FLIGHT, + linestring) + return kml + + def close(self): + """ + Closing the Dataset + """ + self.ds.close() + + def write(self, outfilename, v_name_list=[], as_1Hz=True, clobber=False): + """ + Writing the dataset out as netCDF + + :param outfilename: path for the new netCDF + :type outfilename: str + :param v_name_list: list of variables names that should be written. By + default all variables are added to the netCDF + :type v_name_list: list + :param as_1Hz: Writes only 1Hz data out. If the variable + is avaiable in higher frequency only the first value within the + second is used rather than the average from the number of data + points + :type as_1Hz: boolean + :param clobber: Overwrites the files if it exists + :type clobber: boolean + """ + + if os.path.exists(outfilename) : + if not clobber: + sys.stdout.write('File exists ... Leaving ...\n') + return + else: + sys.stdout.write('File exists ... Will overwrite it ...\n') + + # create the netCDF4 output dataset + dsout = netCDF4.Dataset(outfilename, 'w', clobber=clobber) + + # Write the global attributes + for k, v in self.ncattr.items(): + dsout.setncattr(k ,v) + + if not v_name_list: + v_name_list = self.variables.keys() + + + # Now the dimensions + for dname, the_dim in self.ds.dimensions.iteritems(): + dsout.createDimension(dname, len(the_dim) if not the_dim.isunlimited() else None) + outVar = dsout.createVariable(dname, int, ('Time',), fill_value=-9999.) + outvar[:] = self.variables[dname][:] + if dname in v_name_list: + v_name_list.remove(dname) + # Writing the variables + for v_name in v_name_list: + varin = self.variables[v_name] + + if hasattr(varin, 'datatype'): + datatype = varin.datatype + dimensions = varin.dimensions + else: + datatype='f8' + dimensions = ('Time', 'sps01') + + if as_1Hz: + outVar = dsout.createVariable(v_name, datatype, ('Time',), fill_value=-9999.) 
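+                # 1Hz output keeps only the first sample of each second
+                # (column 0 of a (Time, sps) array) rather than an average,
+                # as the docstring above notes.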
+ if hasattr(varin, 'getncattr'): + outVar.setncatts({k: varin.getncattr(k) for k in varin.ncattrs()}) + if len(self.variables[v_name].shape) == 2: + outVar_data = self.variables[v_name][:,0] + else: + outVar_data = self.variables[v_name][:] + else: + outVar = dsout.createVariable(v_name, datatype, dimensions) + if hasattr(varin, 'getncattr'): + outVar.setncatts({k: varin.getncattr(k) for k in varin.ncattrs()}) + outVar_data = self.variables[v_name][:] + outVar_data[np.isnan(outVar_data)] = -9999. + outVar[:] = outVar_data + dsout.close() diff --git a/faampy/core/faam_spatial.py b/faampy/core/faam_spatial.py new file mode 100644 index 0000000..e7ab893 --- /dev/null +++ b/faampy/core/faam_spatial.py @@ -0,0 +1,80 @@ +# -*- coding: utf-8 -*- +""" +Created on Wed Nov 30 11:00:50 2016 + +@author: axel +""" + +import os +import pyspatialite.dbapi2 as db +import sys + +class FAAM_Spatial_DB(object): + + def __init__(self, db_file): + self.db_file = db_file + self.conn = db.connect(db_file) + if not os.path.exists(db_file): + self.setup() + return 'DB created ... \n' + + def info(self): + # Test that the spatialite extension has been loaded: + cur = self.conn.execute('SELECT sqlite_version(), spatialite_version()') + info = cur.fetchall() + sys.stdout.write('%s %s\n' % (info[0][0], info[1][0])) + + def setup(self): + cur=self.conn.cursor() + # creating a LINESTRING table + sql = 'CREATE TABLE flight_tracks (' + sql += 'fid TEXT NOT NULL PRIMARY KEY,' + sql += 'date DATE);' + cur.execute(sql) + sql = 'SELECT InitSpatialMetadata(1);' + #http://stackoverflow.com/questions/24047634/performance-problems-using-sqlite-function-initspatialmetadata-to-create-a-spa + sql = 'SELECT InitSpatialMetadata(1);' + cur.execute(sql) + sql = "SELECT AddGeometryColumn('flight_tracks', 'the_geom', 4326, 'LINESTRINGZ', 'XYZ');" + cur.execute(sql) + self.conn.commit() + return + + def insert(self, fid, dt, wkt, overwrite=False): + cur = self.conn.cursor() + if overwrite: + sql = """SELECT * from flight_tracks where fid=%s;""" % (fid,) + cur.execute(sql) + res = cur.fetchone() + if res: + cur.execute("DELETE FROM flight_tracks where fid=%s;" % (fid,)) + + geom = "GeomFromText('" + wkt + "', 4326)" + sql = "INSERT INTO flight_tracks (fid, date, the_geom) " + sql += "VALUES ('%s', '%s', %s);" % (fid, dt.strftime('%Y-%m-%d'), geom) + cur = self.conn.cursor() + cur.execute(sql) + self.conn.commit() + return + + def clean(self): + cur = self.conn.cursor() + cur.execute("DELETE FROM flight_tracks;") + self.conn.commit() + return + + def check_exists(self, fid): + sql = 'SELECT fid FROM flight_tracks;' + cur = self.conn.cursor() + cur.execute(sql) + if fid in [i[0] for i in cur.fetchall()]: + return True + else: + return False + + def close(self): + self.conn.close() + + + + diff --git a/faampy/core/flight_summary.py b/faampy/core/flight_summary.py new file mode 100644 index 0000000..b7faa7b --- /dev/null +++ b/faampy/core/flight_summary.py @@ -0,0 +1,541 @@ +#!/usr/bin/python +''' +FlightSummary module for processing the Flight Managers flight summary. +The class parses the original text file and extract all entries. The module +works with both the "old" (Horace) format and the "new" (DECADES) one. + +All event entries are in a list. 
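+
+The interactive session below shows the typical workflow: parse a flight
+summary, look up an event's index range in the core netCDF, plot data for
+that event and export kml/html/txt versions via process().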
+ + + +In [1]: import matplotlib.pyplot as plt + +In [2]: import netCDF4 + +In [3]: import faampy.fltsumm.FlightSummary as flsum + +In [4]: fltsummfile='/home/axel/Dropbox/cast2014/fltsums/edited/flight-sum_faam_20140124_r0_b825.csv' +In [5]: ncfile='/home/axel/Dropbox/cast2014/b825-jan-24/core_processed/core_faam_20140124_v004_r2_b825.nc' + +In [6]: outpath='/home/axel/' + +In [7]: fs=flsum.FlightSummary(fltsummfile) + +In [8]: ds=netCDF4.Dataset(ncfile, 'r') + +In [9]: #Print a summary of the flight summary to stdout + +In [10]: print(fs) +Name: taxy +Start Time: 2014-01-24 22:54:50 +End Time: +Comment: + +############################## +Name: Profile 1 +Start Time: 2014-01-24 23:01:33 +End Time: 2014-01-24 23:35:02 +Comment: + +############################## +Name: T/O +Start Time: 2014-01-24 23:01:33 +End Time: +Comment: Guam + +############################## +Name: Run 1 +Start Time: 2014-01-24 23:10:13 +End Time: 2014-01-24 23:28:36 +Comment: + +############################## +Name: Run 2 +Start Time: 2014-01-24 23:35:10 +End Time: 2014-01-24 23:56:42 +Comment: + +############################## +Name: ! +Start Time: 2014-01-24 23:42:41 +End Time: +Comment: slight climb + +... + +In [11]: print('Number of FS Entries: %i' % len(fs.Entries)) +Number of FS Entries: 9 + +In [12]: + +In [12]: #get the 4th event from the + +In [13]: e04=fs.Entries[3] + +In [14]: print(e04) +Name: Run 1 +Start Time: 2014-01-24 23:10:13 +End Time: 2014-01-24 23:28:36 +Comment: + +In [15]: #get the indeces that correspond to the netCDF + +In [16]: e04.set_index(ds) + +In [17]: #pull the non deiced temperature from the netCDF + +In [18]: data=ds.variables['TAT_ND_R'][e04.Index,:] + +In [19]: plt.plot(data.ravel()) +Out[19]: [] + +In [20]: plt.title('%s\n%s to %s' % (e04.Name, e04.Start_time[-8:], e04.Stop_time[-8:])) +Out[20]: + +In [21]: plt.show() + +In [22]: #create html and kml files + +In [23]: fs=flsum.process(fltsummfile, ncfile, outpath) + +In [24]: + +''' + +import os +import re +import sys +import datetime +import lxml.html +import netCDF4 +import numpy as np +import types + +import faampy.core.utils + + +def __two_point_event_as_kml__(self): + TEMPLATE=""" + %s + #line + <![CDATA[]]> + + + absolute + + %s + + + + absolute + + %s + + + + 1 + 1 + absolute + + %s + + + + +""" + + pt1_coord_string='%f,%f,%f\n' % (self.Coords[0][0], + self.Coords[0][1], + self.Coords[0][2]) + pt2_coord_string='%f,%f,%f\n' % (self.Coords[-1][0], + self.Coords[-1][1], + self.Coords[-1][2]) + ls_coord_string='' + for c in self.Coords[::15]: + ls_coord_string+='%f,%f,%f\n' % (c[0], c[1], c[2]) + + result=TEMPLATE % (self.Name, + pt1_coord_string, + pt2_coord_string, + ls_coord_string) + self.kml=result + return result + + +def __one_point_event_as_kml__(self): + TEMPLATE=""" + %s + <![CDATA[]]> + + absolute + + %s + + + +""" + + pt_cor_string='%f,%f,%f\n' % (self.Coords[0], + self.Coords[1], + self.Coords[2]) + + result=TEMPLATE % (self.Name, pt_cor_string) + self.kml=result + return result + + +class Event(object): + + Start_time, Start_time_48, Stop_time, Stop_time_48, Coords, format=None, None, None, None, None, None + Name, Start_height, Stop_height, Hdg, Comment='','','','','' + + def __init__(self): + pass + + def fix_time(self, ds): + if self.format == 'decades': + basetime=faampy.core.utils.get_base_time(ds) + td=datetime.datetime.strptime(self.Start_time, '%Y-%m-%d %H:%M:%S')-basetime + #self.Start_time_48=datetime.datetime.strptime("%02d:%02d:%02d" % (td.seconds/3600, td.seconds%3600/60, (td.seconds%3600)%60), 
'%H:%M:%S') + seconds=td.total_seconds() + self.Start_time_48="%02d%02d%02d" % (seconds/3600, seconds%3600/60, (seconds%3600)%60) + #self.Start_time_48=basetime.strftime('%Y-%m-%d')+' '+self.Start_time.strftime('%H:%M:%S') + if self.Stop_time: + td=datetime.datetime.strptime(self.Stop_time, '%Y-%m-%d %H:%M:%S')-basetime + seconds=td.total_seconds() + self.Stop_time_48="%02d%02d%02d" % (seconds/3600, seconds%3600/60, (seconds%3600)%60) + #self.Stop_time_48=basetime.strftime('%Y-%m-%d')+' '+self.Stop_time + else: + self.Start_time_48=self.Start_time + self.Stop_time_48=self.Stop_time + + def set_index(self, ds): + if self.format == 'decades': + start_time=datetime.datetime.strptime(self.Start_time, '%Y-%m-%d %H:%M:%S').strftime('%H%M%S') + if self.Stop_time: + stop_time=datetime.datetime.strptime(self.Stop_time, '%Y-%m-%d %H:%M:%S').strftime('%H%M%S') + td=datetime.datetime.strptime(self.Start_time, '%Y-%m-%d %H:%M:%S')-faampy.core.utils.get_base_time(ds) + if td.total_seconds() > 86400: + start_time='%02d' % (int(start_time[0:2])+24)+start_time[2:] + + if self.Stop_time: + td=datetime.datetime.strptime(self.Stop_time, '%Y-%m-%d %H:%M:%S')-faampy.core.utils.get_base_time(ds) + if td.total_seconds() > 86400: + stop_time='%02d' % (int(stop_time[0:2])+24)+stop_time[2:] + elif self.format == 'horace': + start_time=self.Start_time + if self.Stop_time: + stop_time=self.Stop_time + if self.Stop_time: + self.Index=range(faampy.core.utils.get_index_from_hhmmss(ds, start_time), + faampy.core.utils.get_index_from_hhmmss(ds, stop_time)) + else: + self.Index=faampy.core.utils.get_index_from_hhmmss(ds, start_time) + + def set_coords(self, ds): + lon_gin=ds.variables['LON_GIN'][self.Index, 0] + lat_gin=ds.variables['LAT_GIN'][self.Index, 0] + alt_gin=ds.variables['ALT_GIN'][self.Index, 0] + if lon_gin.size > 1: + self.Coords=zip(lon_gin.tolist(), + lat_gin.tolist(), + alt_gin.tolist()) + else: + self.Coords=(float(lon_gin), + float(lat_gin), + float(alt_gin)) + if np.all(np.isnan(self.Coords)): + self.Coords=None + + def as_kml(self): + if self.Stop_time: + self.as_kml=types.MethodType(__two_point_event_as_kml__, self) + else: + self.as_kml=types.MethodType(__one_point_event_as_kml__, self) + self.as_kml() + return self.kml + + def as_txt(self): + fmt="%-6s %-6s %-19s %-17s %3s %s" + vals=[] + #vals.append(datetime.datetime.strptime(self.Start_time, '%Y-%m-%d %H:%M:%S').strftime('%H%M%S')) + vals.append(self.Start_time_48) + if self.Stop_time: + #vals.append(datetime.datetime.strptime(self.Stop_time, '%Y-%m-%d %H:%M:%S').strftime('%H%M%S')) + vals.append(self.Stop_time_48) + else: + vals.append('') + if len(self.Name) > 19: + spl=self.Name.split() + self.Name=spl[0] + self.Comment=' '.join(spl[1:]+[self.Comment]) + vals.append(self.Name) + if self.Stop_height: + vals.append('%.2f - %.2f kft' % (float(self.Start_height), float(self.Stop_height))) + else: + vals.append('%.2f kft' % (float(self.Start_height),)) + vals.append(self.Hdg) + vals.append(self.Comment) + result=fmt % tuple(vals) + return result + + def __str__(self): + result='' + result+='Name: %s\n' % (self.Name) + result+='Start Time: %s\n' % (self.Start_time) + result+='End Time: %s\n' % (self.Stop_time) + result+='Comment: %s\n' % (self.Comment) + return result + + +class FlightSummary(object): + """from FlightSummary import * + fs=FlightSummary(fltsummfile) + + """ + + def __init__(self, fltsumm_file): + self.format=None + self.fid=None + self.date=None + self.basetime=None + self.filename=fltsumm_file + self.revision=None + self.Entries=[] + 
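+        # parse() fills Entries with one Event per flight-summary row,
+        # sorted by start time; the flight date is recovered from the
+        # '_faam_YYYYMMDD' part of the filename where possible.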
+ if not os.path.exists(fltsumm_file): + sys.stdout.write("File does not exist!\n") + else: + self.__read__() + self.parse() + try: + self.date=datetime.datetime.strptime(os.path.basename(self.filename).split('_faam_')[1][0:8],'%Y%m%d') + except: + pass + + + def __read__(self): + f=open(self.filename, 'r') + self.txt=f.readlines() + f.close() + + def parse(self): + + if ' ', ' '*5, txt) + + ix_s=re.search('', txt).start(0) + ix_e=re.search('
', txt).end(0) + s=txt[ix_s:ix_e] + + html=lxml.html.fromstring(s) + + tbl = [] + rows = html.cssselect("tr") + for row in rows: + tbl.append([]) + for td in row.cssselect("td"): + #tbl[-1].append(unicode(td.text_content())) + tbl[-1].append(remove_non_ascii(td.text_content())) + + for line in tbl: + if not line: + continue + print(line) + e=Event() + if self.basetime: + e.basetime=self.basetime + e.Start_time=re.sub(':', '', line[1]) + e.Stop_time=re.sub(':', '', line[6]) + e.Name=line[0] + + tmp=re.sub('kft', '', line[3]).strip() + tmp=re.findall('[+-]?\d+.\d+', tmp) + if tmp: e.Start_height=tmp[0] + tmp=re.sub('kft', '', line[8]).strip() + tmp=re.findall('[+-]?\d+.\d+', tmp) + if tmp: e.Stop_height=tmp[0] + e.Hdg=line[2] + if len(line) > 11: + e.Comment=line[12] + else: + e.Comment='' + e.format='decades' + e.format='horace' + self.Entries.append(e) + self.Entries.sort(key= lambda x: x.Start_time) + return + + for line in self.txt: + if not line.strip(): + continue + elif line[0:6].isdigit(): + e=Event() + e.Start_time=line[0:6] + if line[8:14].strip(): + e.Stop_time=line[8:14] + e.Name = line[17:37].strip() + tmp=line[37:52].strip() + tmp=re.sub('kft', '', tmp).strip() + heights=re.findall('[+-]?\d+.\d+', tmp) + e.Start_height=heights[0] + if len(heights) > 1: + e.Stop_height=heights[1] + e.Hdg=line[55:58].strip() + e.Comment=line[59:].strip() + e.format='horace' + self.Entries.append(e) + elif len(line.split(',')) > 2: + print(line) + line=line.split(',') + #skip header line + if ('Event' in line[0]): + continue + e=Event() + if self.basetime: + e.basetime=self.basetime + e.Start_time=line[1] + e.Stop_time=line[6] + e.Name=line[0] + e.Start_height=line[3] + e.Stop_height=line[8] + e.Hdg=line[2] + if len(line) > 11: + e.Comment=line[11] + else: + e.Comment='' + e.format='decades' + self.Entries.append(e) + self.Entries.sort(key= lambda x: x.Start_time) + + def as_kml(self, ofile=None, fid='', date=''): + kml='' + header = """ + + + %s-Flight-Summary + + %s-%s + 1 +""" % (fid, fid, date) + kml+=header + for e in self.Entries: + if e.Coords: + try: + kml+=e.as_kml() + except: + pass + kml+="\n\n" + if ofile: + f=open(ofile, 'w') + f.write(kml) + f.close() + return kml + + def __str__(self, ofile=None): + result='' + for e in self.Entries: + result+=e.__str__() + result+=30*'#'+'\n' + return result + + def as_txt(self, ofile=None, fid='', date=''): + """Format Flight Summary as text file + """ + result=""" FLIGHT SUMMARY +Flight No %s +Date: %s +Project: +Location: + +Start End +Time Time Event Height (s) Hdg Comments +---- ---- ----- ---------- --- -------- +""" % (fid, date) + for e in self.Entries: + result+=e.as_txt().strip()+'\n' + if ofile: + f=open(ofile, 'w') + f.write(result) + f.close() + return result + + + def as_html(self, ofile=None): + """format Flight Summary as html table + + """ + html='\n' + row=['Name', 'Start Time', 'Start Height
<br>(kft)', 'Stop Time', 'Stop Height<br>
(kft)', 'Comment'] + html+='\n\n\n' + for e in self.Entries: + row=[e.Name, e.Start_time, e.Start_height, e.Stop_time, e.Stop_height, e.Comment] + row2=[] + for i in row: + if i == None: + row2.append('') + else: + row2.append(i) + row=row2 + html+='\n\n\n' + html+='
<tr><th>'+'</th><th>'.join(row)+'</th></tr>
<tr><td>'+'</td><td>'.join(row)+'</td></tr>
\n' + if ofile: + f=open(ofile, 'w') + f.write(html) + f.close() + return html + + +def process(fltsummfile, ncfile, outpath): + ds=netCDF4.Dataset(ncfile, 'r') + basetime=faampy.core.utils.get_base_time(ds) + fid=faampy.core.utils.get_fid(ds) + fs=FlightSummary(fltsummfile) + #for i in range(len(fs.Entries)): + for ent in fs.Entries: + try: + ent.fix_time(ds) + ent.set_index(ds) + ent.set_coords(ds) + except: + pass + basename='flight-sum_faam_%s_r0_%s' % (basetime.strftime('%Y%m%d'), fid) + fs.as_kml(ofile=os.path.join(outpath, basename+'.kml'), fid=fid, date=basetime.strftime('%d/%m/%Y')) + fs.as_html(ofile=os.path.join(outpath, basename+'.html')) + fs.as_txt(ofile=os.path.join(outpath, basename+'.txt'), fid=fid, date=basetime.strftime('%d/%m/%Y')) + ds.close() + return fs + + +def _argparser(): + import argparse + from argparse import RawTextHelpFormatter + sys.argv.insert(0, 'faampy flight_summary') + parser=argparse.ArgumentParser(prog='faampy flight_summary', + description=__doc__, + formatter_class=RawTextHelpFormatter) + parser.add_argument('fltsummfile', action="store", type=str, help='Flight Summary file') + parser.add_argument('ncfile', action="store", type=str, help='core_faam netCDF') + parser.add_argument('outpath', action="store", type=str, help='outpath where the newly formatted flight summaries will be saved') + return parser + + +def main(): + parser = _argparser() + args = parser.parse_args() + process(args.fltsummfile, args.ncfile, args.outpath) + sys.stdout.write('Done ...\n') + + +if __name__ == '__main__': + main() diff --git a/faampy/core/nc_to_csv.py b/faampy/core/nc_to_csv.py new file mode 100644 index 0000000..7832c9f --- /dev/null +++ b/faampy/core/nc_to_csv.py @@ -0,0 +1,195 @@ +#!/usr/bin/env python + +""" +Extracts variables from netCDF and writes those to a csv file. + +""" + + +from faampy.core.utils import get_mpl_time +from matplotlib.dates import num2date + +import netCDF4 +import numpy as np +import os +import re +import sys +import zipfile + + +def get_fid(ds): + """ + Get the flight ID ([b,d]nnn) from netCDF4.Dataset. 
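+    Note: despite the '[b,d]nnn' label above, the regular expression below
+    accepts b-, c- and p-prefixed ids (case-insensitive), taken from the
+    'title' or 'Title' global attribute.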
+ + :param ds: core_faam dataset + :type param: netCDF4.Dataset + + >>> ncfile = 'core_faam_20130403_v004_r0_b768.nc' + >>> ds = netCDF4.Dataset(ncfile, 'r') + >>> print(get_fid(ds)) + b768 + + """ + # parse the title global attribute of the netCDF + if hasattr(ds, 'title'): + result = str(re.findall('[b,B,c,C,p,P]\d{3}', ds.title)[0]) + result = result.lower() + elif hasattr(ds, 'Title'): + result = str(re.findall('[b,B,c,C,p,P]\d{3}', ds.Title)[0]) + result = result.lower() + else: + result = "" + return result + + +VARNAMES = ['ACLD_GIN', + 'ACLF_GIN', + 'ACLS_GIN', + 'ALT_GIN', + 'AOA', + 'AOSS', + 'GSPD_GIN', + 'HDGR_GIN', + 'HDG_GIN', + 'HGT_RADR', + 'IAS_RVSM', + 'LAT_GIN', + 'LON_GIN', + 'LWC_JW_U', + 'NV_LWC_U', + 'NV_TWC_U', + 'P0_S10', + 'P9_STAT', + 'PALT_RVS', + 'PA_TURB', + 'PB_TURB', + 'PITR_GIN', + 'PSP_TURB', + 'PS_RVSM', + 'PTCH_GIN', + 'Q_RVSM', + 'ROLL_GIN', + 'ROLR_GIN', + 'TAS', + 'TAS_RVSM', + 'TAT_DI_R', + 'TAT_ND_R', + 'TDEW_GE', + 'TRCK_GIN', + 'TWC_EVAP', + 'TWC_TDEW', + 'U_C', + 'U_NOTURB', + 'VELD_GIN', + 'VELE_GIN', + 'VELN_GIN', + 'VMR_CR2', + 'VMR_C_U', + 'V_C', + 'V_NOTURB', + 'WOW_IND', + 'WVSS2F_VMR', + 'WVSS2R_VMR', + 'W_C'] + + +def process(ds, varnames, outfile=None, include_flag=False): + """ + :param ds: netCDF4.Dataset object + :param varnamess: list of variables ot + :param outfile: csv filename output + :param include_flag: adding flags + + """ + + if include_flag: + new_varnames = [] + for p in varnames: + new_varnames.append(p) + new_varnames.append(p+'_FLAG') + varnames = new_varnames + + n = ds.variables['Time'][:].size + dt = [i[0].strftime('%Y-%m-%d %H:%M:%S') for i in num2date(get_mpl_time(ds, 1))] + + header =','.join(['utc',] + varnames) + + _dtype = [('utc', 'S20'), ] + _fmt = ['%s', ] + for v in varnames: + if v.endswith('_FLAG'): + _dtype.append((v, int)) + _fmt.append('%i') + else: + _dtype.append((v, float)) + _fmt.append('%.3f') + + result = np.recarray((n,), dtype=_dtype) + + for v in result.dtype.fields.keys(): + if v.endswith('_FLAG'): + result[v] = 3 + else: + result[v] = -9999. + result['utc'] = dt + + for v in varnames: + if v.lower() in [i.lower() for i in ds.variables.keys()]: + if len(ds.variables[v][:].shape) == 2: + result[v] = ds.variables[v][:, 0] + else: + result[v] = ds.variables[v][:] + if v.endswith('_FLAG'): + result[v][result[v] < 0] = 3 + + if outfile: + lines = [] + for r in result: + lines.append(','.join(_fmt[:]) % tuple(list(r)[:])) + out = open(outfile, 'w') + out.write(header + '\n' + '\n'.join(lines) + '\n') + out.close() + return (result, header) + + +if __name__ == '__main__': + import argparse + + parser = argparse.ArgumentParser(description='Creates a csv file from a netCDF.') + parser.add_argument('ncfile', action='store', type=str, + help="FAAM core file *or* a directory. When input is a directory the script finds recursively all 1hz netCDF files inside thed processes them.") + parser.add_argument('--outpath', + action='store', + type=str, + default=os.environ['HOME'], + help="Outpath for the csv file.") + parser.add_argument('--suffix', + action='store', + type=str, + default='_extract', + help="Outpath for the csv file.") + args = parser.parse_args() + + file_list=[] + if os.path.isdir(args.ncfile): + pattern = 'core_faam_\d{8}.*_?\d{3}.nc' + for root, subFolders, files in os.walk(args.ncfile): + for f in files: + if re.match(pattern, os.path.basename(f)): + file_list.append(os.path.join(root, f)) + if not file_list: + sys.stdout.write('No core netcdf files found in ... 
%s\n Leaving \n\n' % args.ncfile) + sys.exit() + else: + file_list = [args.ncfile,] + + for f in file_list: + sys.stdout.write('Processing ... %s\n' % (f,)) + ds = netCDF4.Dataset(f, 'r') + fid = get_fid(ds) + outfile = os.path.join(args.outpath, os.path.basename(f)[:-3] + args.suffix + '.csv') + hdr, data = process(ds, VARNAMES, outfile=outfile, include_flag=True) + arc = zipfile.ZipFile(os.path.splitext(outfile)[0]+'.zip', 'w', zipfile.ZIP_DEFLATED) + arc.write(outfile, os.path.basename(outfile)) + sys.stdout.write('Output written to ... \n %s\n' % (outfile,)) + arc.close() diff --git a/faampy/core/utils.py b/faampy/core/utils.py new file mode 100644 index 0000000..ec2f0e9 --- /dev/null +++ b/faampy/core/utils.py @@ -0,0 +1,324 @@ +import datetime +import netCDF4 +import numpy as np +import os +import pandas as pd +import re +import sys +import shutil + +from matplotlib.dates import date2num, num2date + + +def sub_nans(ncfilename): + dst=os.path.splitext(ncfilename)[0]+'_edited'+os.path.splitext(ncfilename)[1] + shutil.copy(ncfilename, dst) + ds=netCDF4.Dataset(dst, 'a') + for k in ds.variables.keys(): + if not k.endswith('_FLAG'): + dat=ds.variables[k][:] + if hasattr(dat, 'mask'): + dat[dat.mask]=-9999 + dat[np.isnan(dat)]=-9999 + ds.variables[k][:]=dat + ds.close() + + +def data_filter(ds, var_filter, verbose=None): + """ + :param ds: + :type ds: netCDF4.Dataset + :param var_filter: filter definition in form of list of tuplesa tuples + + filter(ds, [('Time', (20000 , 22000)), ('GIN_ALT', (0, 40000))] + """ + ix = np.empty(0) + n = len(ds.variables['Time']) + for item in var_filter: + key = item[0] + val = item[1] + tmp_var = ds.variables[key][:].ravel() + ix_tmp = np.where((tmp_var < val[0]) | (tmp_var > val[1]))[0] + if verbose: + sys.stdout.write('Filtering %s ... 
%.2f %% removed' % (key, float(ix_tmp.size)/float(tmp_var.size)*100.0)) + ix = np.concatenate((ix, ix_tmp)) + bad_index = set(np.unique(ix)) + full_index = set(range(n)) + good_index = list(full_index.difference(bad_index)) + if verbose: + sys.stdout.write('Remaining points: %i (%5.2f percent)\n' % (len(good_index), float(len(good_index))/float(len(n))*100.0)) + return good_index + + +def conv_secs_to_time(secs, no_colons=True): + """converts seconds past midnight to a time string HH:MM:SS + + :param int secs: seconds past midnight + :param no_colons: by default HH, MM, SS are not separated by ':' + default True + :type no_colons: boolean + :return time string: + """ + hh = secs/3600 + mm = (secs % 3600)/60 + ss = (secs % 3600)%60 + if no_colons: + result = '%.2i%.2i%.2i' % (hh, mm, ss) + else: + result = '%.2i:%.2i:%.2i' % (hh, mm, ss) + return result + + +def conv_time_to_secs(hhmmss): + """converts time string as it used for ecample in the flight summary + to seconds past midnight + + :param str hhmmss: timestamp + + >>> conv_time_to_secs('122312') + Out[1]: 44592 + >>> + + """ + hhmmss=hhmmss.strip() + l_hhmmss=list(hhmmss) + while ':' in l_hhmmss: + l_hhmmss.remove(':') + hhmmss=''.join(l_hhmmss) + hhmmss=hhmmss[-6:] + result=int(hhmmss[0:2])*3600+int(hhmmss[2:4])*60+int(hhmmss[4:6]) + return result + + +def get_fid(ds): + """ + Get the flight ID ([b,c]nnn) from a netCDF4.Dataset + + :param ds: core_faam dataset + :type param: netCDF4.Dataset + + >>> ncfile = 'core_faam_20130403_v004_r0_b768.nc' + >>> ds = netCDF4.Dataset(ncfile, 'r') + >>> print(get_fid(ds)) + b768 + + """ + #parse the title global attribute of the netcdf + if hasattr(ds, 'title'): + result = str(re.findall('[b,B,c,C,p,P]\d{3}', ds.title)[0]) + result = result.lower() + elif hasattr(ds, 'Title'): + result = str(re.findall('[b,B,c,C,p,P]\d{3}', ds.Title)[0]) + result = result.lower() + else: + result = "" + + return result + + +def get_base_time(ds): + """Get the base time from the units of the time dimension + + :param ds: core_faam dataset + :type param: netCDF4.Dataset + :return datetime: + :type return: datetime.datetime object + """ + if 'Time' in ds.variables.keys(): + result=datetime.datetime.strptime(str(ds.variables['Time'].units).strip(), + 'seconds since %Y-%m-%d 00:00:00 +0000') + elif 'TIME' in ds.variables.keys(): + result=datetime.datetime.strptime(str(ds.variables['TIME'].units).strip(), + 'seconds since %Y-%m-%d 00:00:00 +0000') + elif 'time' in ds.variables.keys(): + result=datetime.datetime.strptime(re.findall('\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}', ds.variables['time'].units)[0], '%Y-%m-%d 00:00:00') + elif 'PARA0515' in ds.variables.keys(): + result=datetime.datetime.strptime(ds.title.split()[-1], '%d-%b-%y') + else: + return None + return result + + +def get_mpl_time(ds, *freq): + """Return a float wihich is usabale for plt.plot_date from matplotlib. 
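+    The values are days since the matplotlib date epoch: seconds past
+    midnight divided by 86400 plus date2num(get_base_time(ds)). For
+    freq > 1 the per-second stamps are spread evenly across each second.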
+ + :param ds: core_faam dataset + :type param: netCDF4.Dataset + :param freq: frequency of the time stamp default=1; if freq > 1 a multidimensional array is returned + :return: array containing the matplotlib timestamps + :rtype: numpy.array + + >>> ds = netCDF4.Dataset('core_faam_20130403_v004_r0_b768.nc', 'r') + >>> t_1hz = get_mpl_time(ds) + >>> t_1hz.shape + Out[1]: (37137,) + >>> t_32hz = get_mpl_time(ds, 32) + >>> t_32hz.shape + Out[1]: (37137, 32) + >>> plot_date(t_32hz.ravel(), ds.variables['U_C'][:].ravel(), 'b-') + >>> + + """ + if 'Time' in ds.variables.keys(): + vtime=ds.variables['Time'][:] + elif 'time' in ds.variables.keys(): + vtime=ds.variables['time'][:] + elif 'TIME' in ds.variables.keys(): + vtime=ds.variables['TIME'][:] + #in old core files the 'Time' variable was c2alled PARA0515 + elif 'PARA0515' in ds.variables.keys(): + vtime=ds.variables['PARA0515'][:] + else: + return None + vtime=np.array(vtime) + if freq: + rows = len(vtime) + vtime = vtime.repeat(freq[0]).reshape((rows, freq[0])) + np.array(range(freq[0]), dtype=np.float64)/freq[0] + result=np.float64(vtime/86400.) + np.float64(date2num(get_base_time(ds))) + return result + + +def get_flight_duration(ds, verbose=False): + """Estimates the flight durations of a flight. If the Weight on Wheels + indicator flag is available in the netCDF4 dataset, this variable is + used for determing take-off and landing. Alternatively the method looks + at the indicated airspeed (IAS) and ground speed (GSPD) data that are + greater than 60ms-1 and lower tahn 200ms-1 + + :param ds: core_faam dataset + :type param: netCDF4.Dataset + :param verbose: more output if needed + :type verbose: boolean + """ + + ias=ds.variables['IAS_RVSM'][:] + if len(ds.variables['IAS_RVSM'].shape) > 1: + ias=ias[:,0].ravel() + else: + ias=ias[:] + + if 'GSPD_GIN' in ds.variables.keys(): + gspd=ds.variables['GSPD_GIN'][:] + if len(gspd.shape) > 1: + gspd=gspd[:,0].ravel() + + if 'WOW_IND' in ds.variables.keys(): + wow_ind=ds.variables['WOW_IND'][:] + if len(wow_ind.shape) > 1: + wow_ind=wow_ind[:,0].ravel() + + if 'WOW_IND' in ds.variables.keys(): + ix=np.where(wow_ind.filled() == 0)[0] + ix_min, ix_max=np.min(ix), np.max(ix) + dur=ds.variables['Time'][ix_max]-ds.variables['Time'][ix_min] + elif 'GSPD_GIN' in ds.variables.keys(): + #filter for indicated airspeed greater 60 + ix=np.where((ias > 60) & (ias < 200) & (gspd > 60)) + ix_min, ix_max=np.min(ix), np.max(ix) + dur=ds.variables['Time'][ix_max]-ds.variables['Time'][ix_min] + else: + if not isinstance(ias, np.ndarray): + ias=ias.data + #filter for indicated airspeed greater 60 + ix=np.where((ias > 60) & (ias < 200)) + ix_min, ix_max=np.min(ix), np.max(ix) + dur=ds.variables['Time'][ix_max]-ds.variables['Time'][ix_min] + + if verbose: + sys.stdout.write('*'*40+'\n') + sys.stdout.write(' FID: %s\n' % (get_fid(ds),)) + sys.stdout.write(' T/O: %s\n' % conv_secs_to_time(np.min(ds.variables['Time'][ix_min]))) + sys.stdout.write(' Land: %s\n' % conv_secs_to_time(np.min(ds.variables['Time'][ix_max]))) + sys.stdout.write(' Duration: %s (%i secs)\n' % (conv_secs_to_time(dur), dur)) + sys.stdout.write('*'*40+'\n') + return float(dur) + + +def get_index_from_secs(ds, secs): + """Return index for seconds + :param ds: + :type ds: netCDF4.Dataset + :param secs: seconds past midnight + :type secs: int + """ + ix = np.where(ds.variables['Time'][:] == secs) + return ix[0][0] + + +def get_index_from_hhmmss(ds, hhmmss): + """return the index from a FAAM core netcdf where the 'Time'-variables matches + the time 
'hhmmss' string matches the + + + + >>> import netCDF4 + >>> ds = netCDF4.Dataset('core_faam_20130225_v004_r0_b753.nc', 'r') + >>> get_index_from_hhmmss(ds, '120000') + Out[1]: 7668 + >>> + + """ + # convert time in string format to seconds past midnight + secs=conv_time_to_secs(hhmmss) + ix=get_index_from_secs(ds, secs) + return ix + + +def get_index(ds, inp): + """ + wrapper for the methods + get_index_from_secs + and + get_index_from_hhmmss + + function determines what type the input is + and calls the appropriate function accordingly. + + :param ds: core_faam dataset + :type param: netCDF4.Dataset + """ + if isinstance(inp, int): + ix=get_index_from_secs(ds, inp) + elif len(inp) == 6 and re.match('\d{6}', inp): + ix=get_index_from_hhmmss(ds, inp) + else: + pass + return ix + + +def core_to_pandas(ds): + """converts a netCDF4.Dataset into a pandas Dataframe using the timestamp + as index. + + ..note: Only the first column of the two dimensional data set is grabbed, + because of performance issues. + + :param ds: core_faam dataset + :type param: netCDF4.Dataset + :return: pandas.Dataframe + :type return: pandas.Dataframe + + """ + + # TODO: make it work for the full dataset too + vars=sorted(ds.variables.keys()) + vars.remove('Time') + + index=get_mpl_time(ds, 1) + index=num2date(index.ravel()) + #initialize an empty Dataframe + df=pd.DataFrame(index=index) + for v in vars: + shp=ds.variables[v].shape + if len(shp) ==2: + data=np.copy(ds.variables[v][:,0].data).ravel() + else: + data=np.copy(ds.variables[v][:].data) + df_tmp=pd.DataFrame(data[:], index=index, columns=[v,]) + df = pd.concat([df, df_tmp], axis=1) + #set all missing values to nan + df[df == -9999.0] = np.nan + #set timezone to None otherwise there might be issues merging the data + #frame with others + df.index.tz=None + return df diff --git a/faampy/data_io/__init__.py b/faampy/data_io/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/faampy/data_io/chem.py b/faampy/data_io/chem.py new file mode 100644 index 0000000..38e2839 --- /dev/null +++ b/faampy/data_io/chem.py @@ -0,0 +1,47 @@ +import datetime +import os + +import numpy as np +import pandas as pd + + +def read_nox(ifile): + _date = datetime.datetime.strptime('%d%m%y', os.path.basename(ifile).split('_')[1]) + year = _date.year + month = _date.month + day = _date.day + nox_dateparse = lambda x: pd.datetime(year, month, day) + \ + datetime.timedelta(seconds=int(float(float(x) % 1)*86400.)) + + df_nox = pd.read_csv(ifile, parse_dates=[0], date_parser=nox_dateparse) + df_nox = df_nox.set_index('TheTime') # Setting index + t = df_nox.index.values + df_nox['timestamp'] = t.astype('datetime64[s]') # Converting index data type + df_nox = df_nox[['timestamp', 'no_conc', 'no2_conc', 'nox_conc']] + df_nox[df_nox < 0] = np.nan + return df_nox + + +def read_fgga(ifile) : + fgga_dateparse = lambda x: pd.datetime.utcfromtimestamp(int(x)) + fgga_names = ['identifier', 'packet_length', 'timestamp', 'ptp_sync', + 'MFM', 'flight_num', 'CPU_Load', 'USB_disk_space', 'ch4', + 'co2', 'h2o', 'press_torr', 'temp_c', 'fit_flag', + 'rda_usec', 'rdb_usec', 'ch4_ppb', 'co2_ppm', + 'MFC_1_absolute_pressure', 'MFC_1_temperature', + 'MFC_1volumetic_flow', 'MFC_1mass_flow', 'MFC_1set_point', + 'V1', 'V2', 'V3', 'V4', 'restart_FGGA', 'FGGA_Pump', + 'CAL_MFC_1Set_Value'] + df_fgga = pd.read_csv(ifile, + names=fgga_names, + delimiter=',', + parse_dates=[2], + date_parser=fgga_dateparse, + skiprows=100) # To be sure to skip the header + + # Using the Valve states for flagging out 
calibration periods + # TODO: add time buffer around calibration periods + df_fgga.loc[df_fgga['V1'] != 0, 'ch4_ppb'] = np.nan + df_fgga.loc[df_fgga['V2'] != 0, 'co2_ppm'] = np.nan + df_fgga.loc[df_fgga['V2'] != 0, 'ch4_ppb'] = np.nan + return df_fgga \ No newline at end of file diff --git a/faampy/data_io/cp.py b/faampy/data_io/cp.py new file mode 100644 index 0000000..fb3e076 --- /dev/null +++ b/faampy/data_io/cp.py @@ -0,0 +1,48 @@ +import sys + + +import netCDF4 +import numpy as np +from numpy.lib import recfunctions +import pandas as pd +from faampy.core.utils import get_mpl_time +from matplotlib.dates import num2date + + +def read_core_cloud(ifile): + """reads in the core cloud data and + + :param str ifile: core cloud netcdf file + :return: pandas.Dataframe + :type return: pandas.Dataframe + + """ + ds = netCDF4.Dataset(ifile, 'r') + vars = sorted(ds.variables.keys()) + vars.remove('Time') + + # create and indexed pandas DataFrame + tindex = get_mpl_time(ds, 1) + tindex = num2date(tindex.ravel()) + # initialize an empty Dataframe + df = pd.DataFrame(index=tindex) + for v in vars: + shp = ds.variables[v].shape + if not shp[0] == len(index): + continue + if len(shp) == 2: + data = np.copy(ds.variables[v][:,0].data).ravel() + else: + data = np.copy(ds.variables[v][:]) + df_tmp = pd.DataFrame(data[:].ravel(), index=index, columns=[v,]) + df = pd.concat([df, df_tmp], axis=1) + + df[df == -9999.0] = np.nan # set all missing values to nan + t = df.index.values + df['timestamp'] = t.astype('datetime64[s]') # Converting index data type + + # TODO: Check that this is really necessary + # set timezone to None otherwise there might be issues merging + # this DataFrame with others + df.index.tz = None + return df diff --git a/faampy/data_io/nimrod_to_nc.py b/faampy/data_io/nimrod_to_nc.py new file mode 100644 index 0000000..44e49e3 --- /dev/null +++ b/faampy/data_io/nimrod_to_nc.py @@ -0,0 +1,331 @@ +# -*- coding: utf-8 -*- + +""" +This script converts the NIMROD weather radar data format into a netCDF with +the dimensions: + + * Timestamp + * Longitude + * Latitude + +The original array in the NIMROD data fits the OSGB 1936 spatial reference +system. However, to make the results work with for example cis it is necessary +to warp the array to EPSG:4326 so that longitude and latitude are available as +dimensions. + +The created netCDF results were tested with the cistools_. If the netCDF +stores more than one timestamp it is necessary to extract one layer using the +subset command like this:: + + cis subset rain_intensity:nimrod.nc timestamp=['2012-03-04T00:50'] \ +-o nimrod_20120304T0050.nc + +The above command extracts the data for the timestamp '2012-03-04T00:50' and +writes a new netCDF with the filename "nimrod_20120304T0050.nc". + +Now it is possible to plot the data as a heatmap using cis:: + + cis plot rain_intensity:nimrod_20120304T0050.nc + +Maybe there is a way to skip the subset step but so far I have not found it. + +.. 
_cistools: http://www.cistools.net/ + +""" + + +import array +import datetime +import gzip +import netCDF4 +from multiprocessing import Pool +import numpy as np +import os +from osgeo import osr, gdal +import shutil +import struct +import sys +import tarfile +import tempfile +import time + + +_TEMP_FOLDER=tempfile.mkdtemp() + +# taken from: http://badc.nerc.ac.uk/browse/badc/ukmo-nimrod/software/python/read_nimrod.py +def read_nimrod(pathed_file, quite=True): + + file_id=open(pathed_file,"rb") + record_length,=struct.unpack(">l", file_id.read(4)) + if record_length != 512: + sys.stdout.write("Unexpected record length: %i" % record_length) + sys.exit(1) + + gen_ints=array.array("h") + gen_reals=array.array("f") + spec_reals=array.array("f") + characters=array.array("c") + spec_ints=array.array("h") + + gen_ints.read(file_id, 31) + gen_ints.byteswap() + + gen_reals.read(file_id, 28) + gen_reals.byteswap() + + spec_reals.read(file_id, 45) + spec_reals.byteswap() + characters.read(file_id, 56) + spec_ints.read(file_id, 51) + spec_ints.byteswap() + + record_length,=struct.unpack(">l", file_id.read(4)) + if record_length != 512: + raise ("Unexpected record length", record_length) + + chars=characters.tostring() + + if not quite: + sys.stdout.write("\nDate %4.4d%2.2d%2.2d Time %2.2d:%2.2d Grid %d x %d\n" %(gen_ints[0], gen_ints[1], gen_ints[2], gen_ints[3], gen_ints[4], gen_ints[15], gen_ints[16])) + sys.stdout.write("start northing %.1f, row interval %.1f, start easting %.1f, column interval %.1f\n" %(gen_reals[2], gen_reals[3], gen_reals[4], gen_reals[5])) + sys.stdout.write("Units are %s\n" % chars[0:8]) + sys.stdout.write("Data source is %s\n" % chars[8:32]) + sys.stdout.write("Parameter is %s\n" % chars[32:55]) + for i in range(gen_ints[22]): + print (i+108, spec_ints[i]) + for i in gen_reals: + print(i) + for i in spec_reals: + print(i) + #Read data + cols=gen_ints[15] + rows=gen_ints[16] + array_size=cols*rows + + record_length,=struct.unpack(">l", file_id.read(4)) + if record_length != array_size*2: + raise ("Unexpected record length", record_length) + + data=array.array("h") + try: + data.read(file_id, array_size) + record_length,=struct.unpack(">l", file_id.read(4)) + if record_length != array_size*2: + raise ("Unexpected record length", record_length) + data.byteswap() + except: + sys.stdout.write("Read failed\n") + file_id.close() + result={'data': np.array(data), + 'cols': cols, + 'rows': rows, + 'start_northing': gen_reals[2], + 'row_interval': gen_reals[3], + 'start_easting': gen_reals[4], + 'column_interval': gen_reals[5], + 'year': gen_ints[0], + 'month': gen_ints[1], + 'day': gen_ints[2], + 'hour': gen_ints[3], + 'min': gen_ints[4]} + return result + + +def unzip(gzip_fname): + """ + uncompression of gzip file + """ + with gzip.open(gzip_fname, 'rb') as in_file: + s = in_file.read() + # Now store the uncompressed data + path_to_store = gzip_fname[:-3] # remove the '.gz' from the filename + # store uncompressed file data from 's' variable + with open(path_to_store, 'w') as f: + f.write(s) + + +def untar(fname): + """ + extracting files from tar + """ + tar = tarfile.open(fname) + tar.extractall(path=_TEMP_FOLDER) + tar.close() + + +def extract(tar_file): + """ + extract all data files from the tar-file into a temporary directory and unzip them + + """ + untar(tar_file) + for i in [os.path.join(_TEMP_FOLDER, gzip_file) for gzip_file in os.listdir(_TEMP_FOLDER)]: + if i.endswith('.gz'): + unzip(i) + + +def warp(nimrod_dataset): + """ + Warps the data array into one that has 
longitude/latitude as axes an fits + the EPSG:4326 spatial reference system. The original array has the srs + EPSG:27700 (OSGB 1936). + :param nimrod_dataset: dictionary containing the data from the NIMROD file + """ + + # http://gis.stackexchange.com/questions/139906/replicating-result-of-gdalwarp-using-gdal-python-bindings + # Create synthetic data + gtiff_drv = gdal.GetDriverByName('MEM') + cols, rows = nimrod_dataset['cols'], nimrod_dataset['rows'] + raster = np.reshape(nimrod_dataset['data'], (cols, rows)) + raster = np.int16(raster) + top_left = (nimrod_dataset['start_easting'], nimrod_dataset['start_northing']) + pixel_height = nimrod_dataset['column_interval'] + pixel_width = nimrod_dataset['row_interval'] + + src_srs = osr.SpatialReference() + src_srs.ImportFromEPSG(27700) + + src_geotran = [top_left[0], pixel_width, 0, + top_left[1], 0, -pixel_height] + + rows, cols = raster.shape + src_ds = gtiff_drv.Create( + 'test_epsg3413.tif', + cols, rows, 1, + gdal.GDT_Byte) + src_ds.SetGeoTransform(src_geotran) + src_ds.SetProjection(src_srs.ExportToWkt()) + src_ds.GetRasterBand(1).WriteArray(raster) + + # Transform to EPSG: 4326 + dest_srs = osr.SpatialReference() + dest_srs.ImportFromEPSG(4326) + + int_ds = gdal.AutoCreateWarpedVRT(src_ds, src_srs.ExportToWkt(), dest_srs.ExportToWkt()) + nimrod_dataset['data_warped'] = int_ds.GetRasterBand(1).ReadAsArray() + nimrod_dataset['GeoTransform'] = int_ds.GetGeoTransform() + + src_ds = None + int_ds = None + + return nimrod_dataset + + +def nimrod_to_nc(nimrod_file_list, ncoutfilename): + """ + Converts the nimrod data into netCDF. The output netCDF has the dimensions + * Timestamp + * Longitude + * Latitude + + :param list nimrod_file_list: list of files that should be added to the netCDF. + These are the extracted and unzipped data files from the tar-file. The tar + file contains all the files for one day in five minute timestamps (288 files). + :param string ncoutfilename: name of the new netCDF that will be created + """ + + # check that nimrod_file_list is not a string or unicode and convert to + # list if so + if isinstance(nimrod_file_list, (str, unicode)): + nimrod_file_list = [nimrod_file_list,] + + nimrod_list = [] + for f in nimrod_file_list: + # Only work with the uncompressed files + if ((os.path.exists(f)) and (f.endswith('.dat'))): + nimrod_list.append(read_nimrod(f)) + + ds = netCDF4.Dataset(ncoutfilename, 'w', clobber=True) + + #GeoTransformInfo = None + + pool = Pool(processes=_NUM_PROCESSES) + nimrod_list = pool.map(warp, nimrod_list) + + nimrod_list.sort(key=lambda x: datetime.datetime(x['year'], x['month'], x['day'], x['hour'], x['min'])) + + GeoTransformInfo = nimrod_list[0]['GeoTransform'] + + rows, cols = nimrod_list[0]['data_warped'].shape + + ds.Conventions = "CF-1.6" + + # Create the dimensions + ds.createDimension('lon', int(cols)) + ds.createDimension('lat', int(rows)) + ds.createDimension('timestamp', None) + + lat = ds.createVariable('lat', np.float32, ('lat',)) + lat.units = 'degrees_north' + lat.standard_name = 'latitude' + lat.valid_range = [-90.0, 90.0] + + lon = ds.createVariable('lon', np.float32, ('lon',)) + lon.units = 'degrees_east' + lon.standard_name = 'longitude' + lon.valid_range = [-180.0, 180.0] + + times = ds.createVariable('timestamp', np.float64, ('timestamp',)) + times.units = 'hours since 0001-01-01 00:00:00' + times.calendar = 'gregorian' + + # The zlib option is awesome. 
The compression makes the result much smaller + rain = ds.createVariable('rain_intensity', np.float32, ('lat', 'lon', 'timestamp'), fill_value=-9999., zlib=True) + rain.units = 'mm/hr' + + lon[:] = GeoTransformInfo[0] + np.arange(cols, dtype=np.float32) * GeoTransformInfo[1] + lat[:] = GeoTransformInfo[3] + np.arange(rows, dtype=np.float32) * GeoTransformInfo[5] + + times[:] = netCDF4.date2num([datetime.datetime(nimrod['year'], + nimrod['month'], + nimrod['day'], + nimrod['hour'], + nimrod['min']) for nimrod in nimrod_list], units = times.units, calendar = times.calendar) + + data = np.zeros((rows, cols, len(nimrod_list)), dtype=np.float32) + for i in range(len(nimrod_list)): + data[:,:,i] = np.array(nimrod_list[i]['data_warped'], dtype=np.float32)/32. + + data[data <= 0] = -9999. + rain[:] = data + + ds.sync() + ds.close() + return + + +def _argparser(): + import argparse + from argparse import RawTextHelpFormatter + sys.argv.insert(0, 'faampy nimrod_to_nc') + parser=argparse.ArgumentParser(description=__doc__, + formatter_class=RawTextHelpFormatter) + parser.add_argument('rain_radar_tar_file', action="store", type=str, help='MetOffice compressed rain radar file') + parser.add_argument('-n', '--number_of_processes', action="store", type=int, required=False, + default=2, + help='Number of processes that can be used.') + parser.add_argument('-o', '--outpath', action="store", type=str, required=False, + default=os.environ['HOME'], + help='Directory where the netCDF file will be stored. Default: $HOME.') + return parser + + +def main(): + global _NUM_PROCESSES + start_time = time.time() + parser = _argparser() + args = parser.parse_args() + _NUM_PROCESSES = args.number_of_processes + extract(args.rain_radar_tar_file) + nimrod_file_list = [os.path.join(_TEMP_FOLDER, f) for f in os.listdir(_TEMP_FOLDER)] + ncoutfilename = os.path.join(args.outpath, os.path.basename(args.rain_radar_tar_file).split('.')[0]+'.nc') + nimrod_to_nc(nimrod_file_list, ncoutfilename) + # Delete the _TEMP_FOLDER where we kept all the unzipped files + shutil.rmtree(_TEMP_FOLDER) + sys.stdout.write('Done ... ') + sys.stdout.write('Processing time %i seconds ... 
\n' % (time.time()-start_time)) + sys.stdout.write('netCDF written to\n %s.\n' % ncoutfilename) + + +if __name__ == '__main__': + main() diff --git a/faampy/fltcons/FltConsDiff.py b/faampy/fltcons/FltConsDiff.py new file mode 100644 index 0000000..bd03146 --- /dev/null +++ b/faampy/fltcons/FltConsDiff.py @@ -0,0 +1,140 @@ +''' +Created on 30 Dec 2009 + +@author: axel +''' + +import CoreData + +import copy +import difflib +import os +import re + + +def unique(alist): + """returns a list of unique values and keeps the order of their occurence""" + + aset = set(alist) + index = [] + for val in aset: + index.append( alist.index( val )) + + tmp = zip( index, list(aset) ) + tmp.sort() + newlist = [] + for val in tmp: + newlist.append( val[1] ) + + return newlist + + +def __getFlightConstants__(ncfilename): + + cd = CoreData() + cd.open(ncfilename) + + try: + fc = cd.ds.Flight_Constants + except: + pass + + try: + fc = cd.ds.FLIGHT_CONSTANTS + except: + pass + + cd.close() + + return fc.split('\n') + + + +class FlightConstants(object): + + def __init__(self, Filelist): + + self.Filelist = Filelist + + + def diff( self, id1, id2, r1=None, r2=None ): + + cdfl = CoreData.FileList() + cdfl.__createFileList__() + fn1 = cdf.__filterByFlightNumberAndRevision__( id1, r1 ) + fn2 = cdf.__filterByFlightNumberAndRevision__( id2, r2 ) + + fc1 = __getFlightConstants__( fn1 ) + fc2 = __getFlightConstants__( fn2 ) + + differ = difflib.HtmlDiff( tabsize=4, wrapcolumn=40 ) + html = differ.make_file( fc1, fc2, context=False ) + + outfile = open( '/home/axel/tmp/difftest.html', 'w' ) + outfile.write(html) + outfile.close() + + + def get_table( self ): + + for file in self.Filelist: + #print(file) + cd = CoreData.Summary() + cd.open( file ) + cd.get_file_info() + cd.get_flight_info() + cd.get_data_info() + cd.get_var_info() + cd.get_flicon_info() + + self.table.append(cd) + + cd.close() + del(cd) + + + def check_calfactor(self, par=None): + + caltable = [] + for entry in self.table: + caltable.append(entry) + + #sort by flying time + _time = [] + _cal = [] + for entry in caltable: + _time.append( time.mktime( entry.file_info['created'] )) + _cal.append( entry.cal_info[par][0] ) + + ucal = unique(_cal) + print('INFO: %i different cal factors' % len(ucal)) + + numit = dict(zip(ucal, range(len(ucal)))) + new = [] + for i in _cal: + new.append( numit[i] ) + + print(new) + for i in ucal: print(i) + + +#if __name__ == '__main__': +calfactor_list = ['TASCORR', 'CALCABT', 'GELIMS', 'CALGE', 'PRTCCAL', 'HEIMCAL', 'INSLEVL', 'CALLWC', 'CALNPRS', 'CALNTMP', 'CALNBTS', 'CALNGTS', 'CALNRTS', 'CALNBBS', 'CALNGBS', 'CALNRBS', 'CALNHUM', 'CALNSTS', 'CALNVLW', 'CALNVLR', 'CALNVLC', 'CALNVTW', 'CALNVTR', 'CALNVTC', 'CALRSL', 'CALRST', 'CALO3', 'CALO3P', 'CALO3T', 'CALO3F', 'CALO3MX', 'CALNO', 'CALNO2', 'CALNOX', 'CALNOMX', 'CALSO2', 'CALCOMR', 'CALCOMX', 'CALCABP', 'CALS9SP', 'CALPLIN', 'CALPLOG', 'CALUP1S', 'CALUP2S', 'CALUIRS', 'CALLP1S', 'CALLP2S', 'CALLIRS', 'CALCUCF', 'CALCURF', 'CALCUIF', 'CALCLCF', 'CALCLRF', 'CALCLIF', 'TRFCTR', 'CALDIT', 'CALNDT', 'CALTP1', 'CALTP2', 'CALTP3', 'CALTP4', 'CALTP5', 'AOA_A0', 'AOA_A1', 'AOSS_B0', 'AOSS_B1', 'TOLER', 'TASCOR1', 'ALPH0', 'ALPH1', 'BET0', 'BET1', 'CALTNOS', 'CALTSAM', 'CALTAMB', 'CALTSRC', 'CALHTR1', 'CALHTR2', 'CALISRC', 'INSPOSN'] + +cdfl = CoreData.FileList() +cdfl.__createFileList__() +cdfl.__filterLatestRevison__() +cdfl.__sortByFlightNumber__() + + +FC = FlightConstants(cdfl.FileList[420:]) +#FC.diff(457, 460, r1=1, r2=0) + +FC.get_table() +#FC.check_calfactor() +for cal in 
calfactor_list: + FC.check_calfactor(cal) + +print('*** END ***') + + diff --git a/faampy/fltcons/Plot.py b/faampy/fltcons/Plot.py new file mode 100644 index 0000000..bb9f4e5 --- /dev/null +++ b/faampy/fltcons/Plot.py @@ -0,0 +1,193 @@ +''' +Created on 13 Jan 2010 + +@author: axel +''' + +import sys + +import matplotlib +import matplotlib.pyplot as plt +import os +import re +import scipy + +import faampy +import faampy.utils + +#from orderedset import OrderedSet +from faampy.fltcons.db import DB + + +import collections + +# http://code.activestate.com/recipes/576694-orderedset/ +class OrderedSet(collections.MutableSet): + + def __init__(self, iterable=None): + self.end = end = [] + end += [None, end, end] # sentinel node for doubly linked list + self.map = {} # key --> [key, prev, next] + if iterable is not None: + self |= iterable + + def __len__(self): + return len(self.map) + + def __contains__(self, key): + return key in self.map + + def add(self, key): + if key not in self.map: + end = self.end + curr = end[1] + curr[2] = end[1] = self.map[key] = [key, curr, end] + + def discard(self, key): + if key in self.map: + key, prev, next = self.map.pop(key) + prev[2] = next + next[1] = prev + + def __iter__(self): + end = self.end + curr = end[2] + while curr is not end: + yield curr[0] + curr = curr[2] + + def __reversed__(self): + end = self.end + curr = end[1] + while curr is not end: + yield curr[0] + curr = curr[1] + + def pop(self, last=True): + if not self: + raise KeyError('set is empty') + key = self.end[1][0] if last else self.end[2][0] + self.discard(key) + return key + + def __repr__(self): + if not self: + return '%s()' % (self.__class__.__name__,) + return '%s(%r)' % (self.__class__.__name__, list(self)) + + def __eq__(self, other): + if isinstance(other, OrderedSet): + return len(self) == len(other) and list(self) == list(other) + return set(self) == set(other) + + +def fid_as_integer(fid): + if fid[0].lower() == 'c': + offset = 1000 + else: + offset = 0 + return int(fid[1:]) + offset + + +class Plot(object): + + def __init__ (self, par): + self.par = par + self.outpath = None + self.Figure = None + + def get_data(self): + """gets the data for the plot""" + fcdb = DB() + fcdb.connect() + sql = """SELECT f.par,f.fid,f.rev,f.rdate,f.line,f.fname + FROM ( SELECT fid, max(rev) AS maxrev + FROM fltcons GROUP BY fid ORDER BY fid + ) AS x INNER JOIN fltcons AS f ON f.fid=x.fid AND f.rev=x.maxrev AND f.par='%s' ORDER BY x.fid""" % (self.par) + + cur = fcdb.con.cursor() + cur.execute(sql) + data = cur.fetchall() + cur.close() + par, fid, rev, rdate, cal, fname = zip(*data) + newcal = [] + for c in cal: + c = c.split(' ', 1)[1] + c = c.split('!')[0] + c = c.strip() + newcal.append(c) + cal = newcal + allBarData = [] + for c in OrderedSet(cal): + curBarData, beg, end = ([], None, None) + for i in range(len(fid)): + if c == cal[i]: + if not beg and not end: + #beg = int(fid[i]) + #end = int(fid[i]) + beg = fid_as_integer(fid[i]) + end = fid_as_integer(fid[i]) + else: + end = fid_as_integer(fid[i]) + if i == len(fid) - 1: + end = fid_as_integer(fid[i]) + curBarData.append((beg, end-beg)) + else: + if (beg and end): + curBarData.append((beg, end-beg)) + beg = None + end = None + else: + pass + + allBarData.append(curBarData) + self.allBarData = allBarData + print(curBarData) + pltData = [] + for i in range(len( allBarData)): + for n in allBarData[i]: + pltData.append((float(n[0])-0.5, n[1])) + self.pltData = pltData + return pltData + + def create(self): + allBarData = self.allBarData + 
pltData = self.pltData + fig = plt.figure(figsize=(8, 2)) + ax = fig.add_subplot(111) + ax.grid(True) + xsize, ysize = (8, 3.5) # x-size for all plots + fcol = ['#a6cee3', + '#1f78b4', + '#b2df8a', + '#33a02c', + '#fb9a99', + '#e31a1c', + '#fdbf6f', + '#ff7f00', + '#cab2d6', + '#6a3d9a', + '#ffff99', + '#b15928']*12 + ypos = 2 + for i in range(1): + xpltData = [] + for pair in pltData: + xpltData.append((pair[0], pair[1]+1)) + ax.broken_barh(xpltData, (1, 8), facecolors=fcol[0:len(pltData)]) + ax.set_ylim(0, 10) + ax.set_yticks([5]) + ax.set_yticklabels([' ']) + ax.set_ylabel(self.par) + fig.canvas.draw() + xlabels = [item.get_text() for item in ax.get_xticklabels()] + for i, l in enumerate(xlabels): + if not l: + continue + if int(l) < 1000: + xlabels[i] = 'b%0.3i' % int(l) + else: + xlabels[i] = 'c%0.3i' % (int(l)-999) + ax.set_xticklabels(xlabels) + fig.savefig(os.path.join(faampy.fltcons.FIGURES_PATH, str.strip(self.par) + '.png')) + self.Figure = fig diff --git a/faampy/fltcons/Summary.py b/faampy/fltcons/Summary.py new file mode 100644 index 0000000..02367d3 --- /dev/null +++ b/faampy/fltcons/Summary.py @@ -0,0 +1,68 @@ +''' + +Created on 20 Dec 2010 + +@author: axel +''' + +import re +from faampy.fltcons.db import DB +#from faampy.utils import * + + +class Summary(object): + + def __init__ (self, par, filtered=True): + self.filtered = filtered + self.par = par + self.Data = None + self.Flag = None + self.__fetchData__() + self.__flagData__() + + def __flagData__( self ): + + self.Flag = [] + self.Flag.append(0) + for i in range(1, len(self.Data)): + cal0 = re.sub(r'\s', '', str( self.Data[i-1][4].split('!')[0])) + cal1 = re.sub(r'\s', '', str( self.Data[i][4].split('!')[0])) + if cal0 == cal1: + self.Flag.append(self.Flag[-1]) + else: + self.Flag.append(self.Flag[-1]+1) + + def __fetchData__(self): + fcdb = DB() + fcdb.connect() + + if self.filtered: + sql = """SELECT f.par,f.fid,f.rev,f.rdate,f.line,f.fname + FROM ( SELECT fid, max(rev) AS maxrev + FROM fltcons GROUP BY fid ORDER BY fid + ) AS x INNER JOIN fltcons AS f ON f.fid=x.fid AND f.rev=x.maxrev AND f.par='%s'""" % (self.par) + else: + sql = """SELECT par,fid,rev,rdate,line FROM fltcons WHERE par='%s' ORDER BY fid,rev""" % (self.par) + cur = fcdb.con.cursor() + cur.execute(sql) + self.Data = cur.fetchall() + cur.close() + + def __str__(self): + ref = -9999 + + out = "" + out += 80 * "=" + "\n" + out += " %2s | %12s | %5s | %3s | %10s | %s \n" % ('I', 'Par', 'Fid', 'rev', 'date', 'Value/Comment') + out += 80 * "=" + "\n" + for i in range(len(self.Data)): + if self.Flag[i] != ref: + out += " %02i | %12s | %5s | %3s | %10s | %s \n" % (self.Flag[i], self.Data[i][0], self.Data[i][1],self.Data[i][2],self.Data[i][3],self.Data[i][4]) + if self.filtered: + ref = self.Flag[i] + out += 80 * "=" + return out + + +if __name__ == '__main__': + pass diff --git a/faampy/fltcons/__init__.py b/faampy/fltcons/__init__.py new file mode 100644 index 0000000..831b960 --- /dev/null +++ b/faampy/fltcons/__init__.py @@ -0,0 +1,29 @@ +import os + +__version__ = '0.3' +__author__ = 'axll[at]faam[dot]ac[dot]uk' + +DB_NAME = os.path.join(os.environ['HOME'], '.faampy', 'dbs', 'fltcons.sqlite') +FIGURES_PATH = os.path.join(os.environ['HOME'], '.faampy', 'figures', 'fltcons') + +# Setting for web service +#FIGURES_PATH = '/home/htdocs.dacru/figures/fltcons/' +FIGURES_URL = '/' + +PARAMETERS = ['TASCORR', 'CALCABT', 'GELIMS', 'CALGE', 'PRTCCAL', + 'HEIMCAL', 'INSLEVL', 'CALLWC', 'CALNPRS', 'CALNTMP', + 'CALNBTS', 'CALNGTS', 'CALNRTS', 'CALNBBS', 'CALNGBS', 
'CALNRBS', 'CALNHUM', 'CALNSTS', 'CALNVLW', 'CALNVLR',
              'CALNVLC', 'CALNVTW', 'CALNVTR', 'CALNVTC', 'CALRSL',
              'CALRST', 'CALO3', 'CALO3P', 'CALO3T', 'CALO3F',
              'CALO3MX', 'CALNO', 'CALNO2', 'CALNOX', 'CALNOMX',
              'CALSO2', 'CALCOMR', 'CALCOMX', 'CALCABP', 'CALS9SP',
              'CALPLIN', 'CALPLOG', 'CALUP1S', 'CALUP2S', 'CALUIRS',
              'CALLP1S', 'CALLP2S', 'CALLIRS', 'CALCUCF', 'CALCURF',
              'CALCUIF', 'CALCLCF', 'CALCLRF', 'CALCLIF', 'TRFCTR',
              'CALDIT', 'CALNDT', 'CALTP1', 'CALTP2', 'CALTP3',
              'CALTP4', 'CALTP5', 'AOA_A0', 'AOA_A1', 'AOSS_B0',
              'AOSS_B1', 'TOLER', 'TASCOR1', 'ALPH0', 'ALPH1',
              'BET0', 'BET1', 'CALTNOS', 'CALTSAM', 'CALTAMB',
              'CALTSRC', 'CALHTR1', 'CALHTR2', 'CALISRC', 'INSPOSN',
              'BUCK']
diff --git a/faampy/fltcons/cgi-fltcons-summary.py b/faampy/fltcons/cgi-fltcons-summary.py
new file mode 100644
index 0000000..c405e38
--- /dev/null
+++ b/faampy/fltcons/cgi-fltcons-summary.py
@@ -0,0 +1,114 @@
#!/usr/bin/env python

import sys
import os
import matplotlib
matplotlib.use('Agg')
import cgi

import faampy.fltcons
from faampy.fltcons.db import DB
from faampy.fltcons.Summary import Summary
from faampy.fltcons.Plot import Plot

fltcons_list = faampy.fltcons.PARAMETERS
fltcons_list.sort()

#faampy.fltcons.FIGURES_PATH = '/home/axel/.faampy/tmp/'

dirname, filename = os.path.split(os.path.abspath(__file__))

faampy.fltcons.FIGURES_PATH = os.path.join(dirname, '..', 'img')


spacer = "<br>\n"
form_header = """<form method="post" action="">
<table>
<tr>
<td>Flight-Constant:</td>
<td><select name="cgi_fltcons">
"""

form_options = """<option value="%s">%s</option>"""

form_footer = """</select></td>
<td><input type="checkbox" name="cgi_filtered" %s> Filter off</td>
<td><input type="submit" value="Submit"></td>
</tr>
</table>
</form>
"""

html_header = 'Content-Type: text/html\n\n'

html_body_header = """<html>
<head><title>Flight-Constant-Browser</title></head>
<body>
"""

html_body_footer = """</body>
</html>"""


def showForm(fltcons, filtered=None):

    if not filtered:
        filtered = False
        filtered_checked_txt = 'checked'
    else:
        filtered_checked_txt = ''

    html = ""
    opt = ""
    if fltcons:
        # the currently selected parameter goes to the top of the list
        opt += form_options % (fltcons, fltcons)

    for fltcon in fltcons_list:
        opt += form_options % (fltcon, fltcon)

    if fltcons:
        fcs = Summary(fltcons, filtered=filtered)
        fcs_txt = "<pre>\n" + fcs.__str__() + "\n</pre>"

        fcp = faampy.fltcons.Plot.Plot(fltcons)
        fcp.get_data()
        fcp.create()
        filename = os.path.join(faampy.fltcons.FIGURES_PATH, fltcons + '.png')
        fcp.Figure.savefig(filename)

        img_url = os.path.join(faampy.fltcons.FIGURES_URL, fltcons + '.png')
        fcs_plot = """<img src="%s">""" % ('/img'+img_url)
    else:
        fcs_txt = ""
        fcs_plot = ""

    html += html_header + \
        html_body_header + \
        form_header + \
        opt + \
        form_footer % (filtered_checked_txt) + \
        fcs_plot + \
        6 * spacer + \
        "
" + \ + fcs_txt + \ + html_body_footer + print(html) + + +def process(): + form = cgi.FieldStorage() + # get flt-constant parameter name + if form.has_key('cgi_fltcons'): + fltcons = form['cgi_fltcons'].value + else: + fltcons = None + if form.getvalue('cgi_filtered'): + filter_value = False + else: + filter_value = True + showForm(fltcons, filtered=filter_value) + + +if __name__ == '__main__': + process() + diff --git a/faampy/fltcons/db.py b/faampy/fltcons/db.py new file mode 100644 index 0000000..8f858b8 --- /dev/null +++ b/faampy/fltcons/db.py @@ -0,0 +1,69 @@ +''' +Created on 14 Dec 2010 + +@author: axel +''' + +import sys + +import os + +import sqlite3 as dbapi +import re +import time + +import faampy +import faampy.fltcons + +from faampy.fltcons.Parser import Parser + +from faampy.utils import * + + +#faampy.fltcons.DB_NAME = os.path.join(faampy.__path__[0], '..', 'files', 'db', 'faam_fltcons.sqlite') +FLTCONS_DB_NAME = os.path.join(faampy.FAAMPY_DATA_PATH, 'db', 'faam_fltcons_db.sqlite') + +class DB(object): + + def __init__(self): + pass + + def connect(self): + self.con = dbapi.connect(FLTCONS_DB_NAME) + + def disconnect(self): + self.con.close() + + def setup(self): + """Create the fltcons table""" + if not hasattr(self, 'con'): + self.connect() + sql = """CREATE TABLE fltcons (par VARCHAR(20), fid INTEGER, rev VARCHAR(4), rdate DATE, line TEXT, fname TEXT);""" + cur = self.con.cursor() + cur.execute(sql) + self.con.commit() + + def insert(self, par, fid, rev, rdate, line, fname, commit=True): + template="""INSERT INTO fltcons VALUES ('%s', '%s', '%s', '%s', '%s', '%s');""" + if 'list' in str(type(par)): + for i in range(len(par)): + sql = template % (par[i], fid[i], rev[i], rdate[i], line[i], fname[i]) + cur = self.con.cursor() + cur.execute(sql) + else: + sql = template % (par, fid, rev, rdate, line, fname) + cur = self.con.cursor() + cur.execute(sql) + # Only commit if commit keyword is set, which is the default + # Otherwise the sql statement is returned + if commit: + self.con.commit() + return sql + + def clean(self): + sql = "DELETE from fltcons;" + cur = self.con.cursor() + cur.execute(sql) + + def query(self, par): + pass diff --git a/faampy/fltcons/parser.py b/faampy/fltcons/parser.py new file mode 100644 index 0000000..54e1cc5 --- /dev/null +++ b/faampy/fltcons/parser.py @@ -0,0 +1,72 @@ +''' +Created on 31 Oct 2011 + +@author: axel +''' + +import netCDF4 +import os +import sys +import datetime + +import faampy +import faampy.fltcons +from faampy.utils.file_info import get_revision_from_filename, \ + get_fid_from_filename + + +class Parser(object): + + def __init__(self): + pass + + def parse(self, ifile): + """Parsing""" + result = {} + + if os.path.splitext(ifile)[1] == '.nc': + ds = netCDF4.Dataset(ifile, 'r') + txt = ds.Flight_Constants + lines = txt.split('\n') + ds.close() + elif os.path.splitext(ifile)[1] == '.txt': + f = open(ifile, 'r') + lines = f.readlines() + f.close() + else: + sys.stdout.write('Can not handle %s ...\n' % ifile) + return + fltcs_list = faampy.fltcons.PARAMETERS + + rev = get_revision_from_filename(ifile) + fid = get_fid_from_filename(ifile) + + rev_date = None + tmplines = lines[:] + while not rev_date and tmplines: + line = tmplines.pop() + try: + rev_date = datetime.datetime.strptime(line.split('-')[1].strip(), '%d %b %Y') + except: + pass + try: + rev_date = datetime.datetime.strptime(line.split('-')[1].strip(), '%d %B %Y') + except: + pass + rev_date = rev_date.strftime('%Y-%m-%d') + if not rev_date: + rev_date = '2099-01-01' + + for 
line in lines: + if line.split(' ')[0] in fltcs_list: + par = line.split(' ')[0] + result[par] = (fid, rev, rev_date, line.strip('\n')) + return result + +#p = Parser() +#x=p.parse(os.path.join(fl[-1].path, fl[-1].filename))# +#print(x) +#f = '/mnt/faamarchive/badcMirror/data/2014/b847-feb-18/core_raw/flight-cst_faam_20140218_r0_b847.txt' +#P = Parser() +#d = P.parse(f) +#print(d) diff --git a/faampy/fltcons/server.py b/faampy/fltcons/server.py new file mode 100644 index 0000000..d6c17ff --- /dev/null +++ b/faampy/fltcons/server.py @@ -0,0 +1,46 @@ +''' +Created on 8 May 2013 + +@author: axel +''' + + +import sys +sys.path.insert(0, '/home/axel/git-repos/faampy') + + +import BaseHTTPServer +import CGIHTTPServer +import cgitb; cgitb.enable() ## This line enables CGI error reporting + +import shutil +import os +import sys +import tempfile + +HOST_NAME = 'localhost' +PORT_NUMBER = 8080 + +# create temporary directory from where we will run the http server +TMP_DIR = tempfile.mkdtemp() +os.mkdir(os.path.join(TMP_DIR, 'cgi-bin')) +os.mkdir(os.path.join(TMP_DIR, 'img')) + +sys.stdout.write('HTTP-Server running from: %s ...\n' % TMP_DIR) + +src = os.path.join('/home/axel/git-repos/faampy/faampy', 'fltcons', 'cgi-fltcons-summary.py') +dst = os.path.join(TMP_DIR, 'cgi-bin', 'cgi-fltcons-summary.py') + +shutil.copy(src, dst) + +os.chmod(dst, 0777) + +os.chdir(TMP_DIR) + +sys.stdout.write('Now go to: http://localhost:8080/cgi-bin/cgi-fltcons-summary.py\n') +server=BaseHTTPServer.HTTPServer +handler=CGIHTTPServer.CGIHTTPRequestHandler +server_address = ("", PORT_NUMBER) +#handler.cgi_directories = ["/cgi-bin"] +httpd = server((HOST_NAME, PORT_NUMBER), handler) +httpd.serve_forever() diff --git a/faampy/fltcons/update.py b/faampy/fltcons/update.py new file mode 100644 index 0000000..b0658b0 --- /dev/null +++ b/faampy/fltcons/update.py @@ -0,0 +1,69 @@ +''' +Created on 14 Dec 2010 + +@author: axel +''' + +import os +import sys +sys.path.insert(0, '/home/axel/git-repos/faampy') + +from faampy.fltcons.parser import Parser +from faampy.fltcons.db import DB +from faampy.utils.file_list import File_List + + + +def update(inpath=None, clean=False, root_path=None, verbose=False): + fl = File_List(inpath) + fl.filter_by_data_type('flight-cst') + + fcdb = DB() + fcdb.connect() + if clean: + fcdb.clean() + + parser = Parser() + + for f in fl: + if verbose: + sys.stdout.write('%s\n' % f) + #try: + d = parser.parse(os.path.join(f.path, f.filename)) + #except: + # sys.stdout.write('Problem parsing %s ...\n' % f.filename) + # continue + + if d: + par, fid, rev, rdate, line, fname = [], [], [], [], [], [] + for k in d.keys(): + par.append(k) + fid.append(d[k][0]) # strip the b from the fid + rev.append(d[k][1]) + rdate.append(d[k][2]) + line.append(d[k][3]) + fname.append(f.filename) + try: + fcdb.insert(par, fid, rev, rdate, line, fname) + except: + pass + + +if __name__ == '__main__': + import argparse + parser = argparse.ArgumentParser(description='Flight Constants database creator for instrument debugging purposes.') + parser.add_argument('--inpath', + action='store', + type=str, + help="directory where the flight constants are stored") + parser.add_argument('-c', '--clean', + action='store', + type=bool, + default=False, + help="Clean db before inserting any data") + args = parser.parse_args() + update(inpath=args.inpath, clean=args.clean) + + +#update(inpath='/media/axel/F60AD5E60AD5A3C1/badcMirror/data/', clean=True) +#inpath='/home/data/faam/badc/2016' diff --git a/faampy/mapping/__init__.py 
b/faampy/mapping/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/faampy/mapping/dem.py b/faampy/mapping/dem.py new file mode 100644 index 0000000..e0d834f --- /dev/null +++ b/faampy/mapping/dem.py @@ -0,0 +1,88 @@ +''' +Created on 6 Aug 2012 + +@author: axel +''' + +import os + +import re +import subprocess +import sys + +import faampy + + + +def get_srtm_files(path): + result = [] + file_list = os.listdir(path) + for f in file_list: + if os.path.splitext(f)[1] == '.tif': + result.append(os.path.join(path, f)) + return result + + +def get_tif_boundaries(tif_filename): + cmd = """gdalinfo %s""" % (tif_filename) + proc = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE) + out = proc.stdout.read().splitlines() + boundaries = {} + for line in out: + if line.startswith('Upper Left'): + boundaries['ul'] = [float(e) for e in re.split('\(|\)|,', line)[1:3]] + elif line.startswith('Lower Left'): + boundaries['ll'] = [float(e) for e in re.split('\(|\)|,', line)[1:3]] + elif line.startswith('Upper Right'): + boundaries['ur'] = [float(e) for e in re.split('\(|\)|,', line)[1:3]] + elif line.startswith('Lower Right'): + boundaries['lr'] = [float(e) for e in re.split('\(|\)|,', line)[1:3]] + else: + pass + return boundaries + + +class DEM(object): + """Uses gdal and the srtm files that can be downloaded for free from the + internet. + """ + def __init__(self, srtm_data_path=None): + if not srtm_data_path: + self.SRTM_DATA_PATH = faampy.SRTM_DATA_PATH + if not os.path.exists(self.SRTM_DATA_PATH): + sys.stdout.write('SRTM data path: %s does not exists.\n' % (self.SRTM_DATA_PATH)) + self.SRTM_Files = {} # initialize an empty dictionary + # fill-up the dictionary using the srtm basename as key and add + # the four boundaries as values + for f in get_srtm_files(self.SRTM_DATA_PATH): + self.SRTM_Files[os.path.basename(f)] = {'bounds': get_tif_boundaries(f)} + + def find_srtm_file(self, lon, lat): + for k, v in self.SRTM_Files.items(): + boundary = v['bounds'] + # check if coordinates are inside boundary box + if (lon >= boundary['ul'][0]) and (lat <= boundary['ul'][1]) and \ + (lon <= boundary['lr'][0]) and (lat >= boundary['lr'][1]): + return k + return + + def get_elevation(self, lon, lat): + """ + gives the elevation for a given coordinate + :param lon: longitude WGS84 + :param lat: latitude WGS84 + """ + tif_filename = self.find_srtm_file(lon, lat) + if not tif_filename: + return + cmd = """gdallocationinfo %s -wgs84 %f %f""" % (os.path.join(self.SRTM_DATA_PATH, tif_filename), lon, lat) + proc = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE) + out = proc.stdout.read().splitlines() + for line in out: + if line.strip().startswith('Value'): + return float(line.split(':')[1]) + return + +#faampy.SRTM_DATA_PATH = '/home/axel/srtm' +#dem=DEM() +#eprint(dem.get_elevation(3, 50)) diff --git a/faampy/mapping/ge_lidar_curtain.py b/faampy/mapping/ge_lidar_curtain.py new file mode 100644 index 0000000..91aed85 --- /dev/null +++ b/faampy/mapping/ge_lidar_curtain.py @@ -0,0 +1,543 @@ +#!/usr/bin/python + +""" +Lidar curtain kmz creator for google-earth + +""" + +import idlsave +import math +import os +import scipy +import subprocess +import tempfile + +import matplotlib.pyplot as plt + +################################################################################## +""" + Python implementation of Haversine formula + Copyright (C) <2009> Bartek Grony, Mobitel Monitoring sp. z o.o. 
+ + This program is free software: you can redistribute it and/or modify + it under the terms of the GNU General Public License as published by + the Free Software Foundation, either version 3 of the License, or + (at your option) any later version. + + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + GNU General Public License for more details. + + You should have received a copy of the GNU General Public License + along with this program. If not, see . +""" + + + +def recalculate_coordinate(val, _as=None): + """ + Accepts a coordinate as a tuple (degree, minutes, seconds) + You can give only one of them (e.g. only minutes as a floating point number) and it will be duly + recalculated into degrees, minutes and seconds. + Return value can be specified as 'deg', 'min' or 'sec'; default return value is a proper coordinate tuple. + + :param tuple val: coordinate as tuple (degree, minutes, seconds) + """ + deg, min, sec = val + # pass outstanding values from right to left + min = (min or 0) + int(sec) / 60 + sec = sec % 60 + deg = (deg or 0) + int(min) / 60 + min = min % 60 + # pass decimal part from left to right + dfrac, dint = math.modf(deg) + min = min + dfrac * 60 + deg = dint + mfrac, mint = math.modf(min) + sec = sec + mfrac * 60 + min = mint + if _as: + sec = sec + min * 60 + deg * 3600 + if _as == 'sec': return sec + if _as == 'min': return sec / 60 + if _as == 'deg': return sec / 3600 + return deg, min, sec + + +def points2distance(start, end): + """ + Calculate distance (in kilometers) between two points given as (long, latt) pairs + based on Haversine formula (http://en.wikipedia.org/wiki/Haversine_formula). + Implementation inspired by JavaScript implementation from http://www.movable-type.co.uk/scripts/latlong.html + Accepts coordinates as tuples (deg, min, sec), but coordinates can be given in any form - e.g. + can specify only minutes: + (0, 3133.9333, 0) + is interpreted as + (52.0, 13.0, 55.998000000008687) + which, not accidentally, is the lattitude of Warsaw, Poland. 
+ """ + start_long = math.radians(recalculate_coordinate(start[0], 'deg')) + start_latt = math.radians(recalculate_coordinate(start[1], 'deg')) + end_long = math.radians(recalculate_coordinate(end[0], 'deg')) + end_latt = math.radians(recalculate_coordinate(end[1], 'deg')) + d_latt = end_latt - start_latt + d_long = end_long - start_long + a = math.sin(d_latt/2)**2 + math.cos(start_latt) * math.cos(end_latt) * math.sin(d_long/2)**2 + c = 2 * math.atan2(math.sqrt(a), math.sqrt(1-a)) + #return 6371 * c + return 6378.137 * c +################################################################# + + + +KML_HEADER_TEMPLATE = """ +""" + +KML_FOLDER_EXPANDABLE_START_TEMPLATE = """%s0""" + +KML_FOLDER_START_TEMPLATE = """%s0 +""" +KML_FOLDER_END_TEMPLATE = """""" + +#Input: lon, lat, curtain_ref, img_ref, img_ref +KML_PLACEMARK_TEMPLATE = """ +%s + + +absolute + +%.2f +%.2f +0 + + +%.2f +0 +0 + + +1 +1 +1 + + +files/%s + + + +%s +../images/%s + + + + +""" + +KML_FOOTER_TEMPLATE = """""" + +DAE_TEMPLATE = """ + + + +Google SketchUp 6.0.312 + + +Z_UP + + + +../images/%s + + + + + + + + + + + + +kdm-kml_curtain-image + + + + +kdm-kml_curtain-image-surface + + + + + +0.000000 0.000000 0.000000 1 + + +0.000000 0.000000 0.000000 1 + + + + + +0.000000 0.000000 0.000000 1 + + +20.000000 + + +0.100000 + + +1 1 1 1 + + +0.000000 + + + + + + + + + + + + + + + +%s + + + + + + + + + + +0 -0.0254 0 +0 0.0254 0 + + + + + + + + + +0 0 1 0 0 1 1 +1 + + + + + + + + + + + + + + +

<p>0 0 0 1 0 1 2 0 2 0 1 0 2 1 2 1 1 1 3 0 3 2 0 2 1 0 1 3 1 3 1 1 1 2 1 2</p>
</triangles>
</mesh>
</geometry>
</library_geometries>
<library_visual_scenes>
<visual_scene id="SketchUpScene" name="SketchUpScene">
<node name="kml_curtain">
<instance_geometry url="#mesh1-geometry">
<bind_material>
<technique_common>
<instance_material symbol="kdm-kml_curtain-image" target="#kdm-kml_curtain-image"/>
</technique_common>
</bind_material>
</instance_geometry>
</node>
</visual_scene>
</library_visual_scenes>
<scene>
<instance_visual_scene url="#SketchUpScene"/>
</scene>
</COLLADA>
"""
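# The template below supplies the four corner vertices (x, y, z) of the
# curtain in metres: x carries the run length, z the over-scaled altitude
# and y is always zero. A sketch of how it is filled in by process()
# further down::
#
#     dae_values = DAE_VALUES_TEMPLATE % (run_length, run_altitude,
#                                         run_length, run_altitude)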
DAE_VALUES_TEMPLATE = """0.0 0 0
%.1f 0 0
0.0 0 %.1f
%.1f 0 %.1f
"""


def convert_bearing(hdg):
    """Heading value conversion."""
    # hdg is strange: 0 = East, 45 = SE, 90 = South, 180 = West
    result = hdg - 90.0
    if result < 0:
        result = result + 360.0
    return result


def get_run_bearing(ldata_run):
    """inspired by
    http://stackoverflow.com/questions/4913349/haversine-formula-in-python-bearing-and-distance-between-two-gps-points
    """
    lon1 = ldata_run['lid_longitude'][0]
    lon2 = ldata_run['lid_longitude'][-1]
    lat1 = ldata_run['lid_latitude'][0]
    lat2 = ldata_run['lid_latitude'][-1]
    lon1, lat1, lon2, lat2 = map(math.radians, [lon1, lat1, lon2, lat2])

    # only the bearing part of the referenced recipe is needed here
    dLon = lon2 - lon1
    y = math.sin(dLon) * math.cos(lat2)
    x = math.cos(lat1) * math.sin(lat2) - math.sin(lat1) * math.cos(lat2) * math.cos(dLon)
    bearing = math.atan2(y, x)
    bearing = math.degrees(bearing)
    bearing = (bearing + 360.0) % 360.0
    return bearing


# TODO: Use the faampy flight summary module instead
def parse_flightsummary(flight_summary_file):
    """Get the runs and their start and end times from the flight
    summary.

    """
    result = []
    f = open(flight_summary_file, 'r')
    lines = f.readlines()
    f.close()
    for line in lines:
        if 'run' in str.lower(line[17:28]):
            try:
                name = str.strip(line[17:28])
                stime = line[0:6]
                etime = line[8:14]
                # if the run is shorter than 5 min we don't bother
                if time2secs(etime)-time2secs(stime) > 300:
                    result.append((name, stime, etime))
            except:
                pass
    return result


# TODO: replace the local routine with the faampy.util one
def time2secs(time_string):
    """Converts a time string of the format HHMMSS into seconds past
    midnight.

    """
    time_string = str.strip(time_string)
    result = float(time_string[0:2]) * 3600. + \
        float(time_string[2:4]) * 60 + \
        float(time_string[4:6])
    return result


def extract_run_data(ldata, start_time=None, end_time=None, start_index=None, end_index=None):
    """Extract the data for a run;
    start_time and end_time in the form HHMMSS."""
    if (start_time and end_time):
        s_secs = time2secs(start_time)
        e_secs = time2secs(end_time)
        s_ind = scipy.where(s_secs < ldata['lid_time'])[0][0]
        e_ind = scipy.where(e_secs > ldata['lid_time'])[0][-1]
    else:
        s_ind = start_index
        e_ind = end_index
    run_data = {}
    run_data['lid_time'] = ldata['lid_time'][s_ind:e_ind]
    run_data['lid_longitude'] = ldata['lid_longitude'][s_ind:e_ind]
    run_data['lid_latitude'] = ldata['lid_latitude'][s_ind:e_ind]
    run_data['lid_altitude'] = ldata['lid_altitude'][s_ind:e_ind]
    run_data['lid_reldep'] = ldata['lid_reldep'][:, s_ind:e_ind]
    run_data['lid_height'] = ldata['lid_height']
    run_data['lid_pr2'] = ldata['lid_pr2'][:, :, s_ind:e_ind]
    return run_data


def get_run_length(ldata_run):
    """
    :return float run_length: run length in metres
    """
    lon1 = ldata_run['lid_longitude'][0]
    lon2 = ldata_run['lid_longitude'][-1]
    lat1 = ldata_run['lid_latitude'][0]
    lat2 = ldata_run['lid_latitude'][-1]
    run_length = points2distance(((lon1, 0, 0), (lat1, 0, 0)), ((lon2, 0, 0), (lat2, 0, 0)))
    return run_length * 1000.  # convert to metres
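# A quick sanity check of points2distance above (coordinates are given as
# (deg, min, sec) tuples; the expected value is rounded)::
#
#     >>> start = ((0, 0, 0), (51, 30, 0))   # 0 degrees E, 51 degrees 30' N
#     >>> end = ((1, 0, 0), (51, 30, 0))     # one degree further east
#     >>> round(points2distance(start, end))
#     69.0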
def get_run_start_coordinates(ldata_run):
    """
    :return tuple coords: start coordinates (lon, lat) of the lidar run
    """
    lon = ldata_run['lid_longitude'][0]
    lat = ldata_run['lid_latitude'][0]
    return (lon, lat)


def lidar_plot(data, filename):
    """Plot a lidar profile and save it to filename."""
    figure = plt.figure(1, (10, 8), 80)
    colormap = plt.get_cmap('jet', 1200)
    x = data['lid_time']
    y = data['lid_height']
    z = data['lid_pr2'][0, :, :]
    # mask everything outside the 1 to 1200 range
    mask1 = scipy.where(z > 1, 1, 0)
    mask2 = scipy.where(z < 1200, 1, 0)
    z = (z*mask1)*mask2

    levels = range(0, 1200, 1)
    plt.contourf(x, y, z, levels, cmap=colormap)
    plt.xlim(x.min(), x.max())
    plt.ylim(0, 10000)
    # remove any margins
    plt.subplots_adjust(left=0.0, right=1.0, bottom=0.0, top=1.0)
    figure.savefig(filename)
    plt.clf()


def process(fid, lidar_file, flight_summary_file, step=None, alt_scale_factor=None):
    if not step:
        step = 1
    if not alt_scale_factor:
        alt_scale_factor = 5

    ROOT_TMP_PATH = tempfile.mkdtemp()
    # create temporary folders for the images and the kml files
    os.mkdir(os.path.join(ROOT_TMP_PATH, 'files'))
    os.mkdir(os.path.join(ROOT_TMP_PATH, 'images'))
    ldata = idlsave.read(lidar_file)

    kml_doc = ''
    img_cnt = 0
    kml_doc += KML_HEADER_TEMPLATE
    kml_doc += KML_FOLDER_EXPANDABLE_START_TEMPLATE % (fid + ' Lidar ')
    event_cnt = 0

    fltsumm = parse_flightsummary(flight_summary_file)

    for event_cnt in range(len(fltsumm)):
        run_data = extract_run_data(ldata, start_time=fltsumm[event_cnt][1], end_time=fltsumm[event_cnt][2])

        # if there are no data we leave here
        if not run_data['lid_time'].any() or min(run_data['lid_time']) == max(run_data['lid_time']):
            continue

        kml_doc += KML_FOLDER_START_TEMPLATE % (fltsumm[event_cnt][0])
        for i in range(0, len(run_data['lid_time']), step):
            data = extract_run_data(run_data, start_index=i, end_index=i+step+1)
            img_name = os.path.join(ROOT_TMP_PATH, 'images', 'lidar_%.4i.png' % img_cnt)
            lidar_plot(data, img_name)
            slon, slat = get_run_start_coordinates(data)
            run_bearing = get_run_bearing(data)
            run_bearing = convert_bearing(run_bearing)

            kml_doc += KML_PLACEMARK_TEMPLATE % (img_name, slon, slat, run_bearing, 'curtain_%.4i.dae' % img_cnt, img_name, img_name)

            run_length = get_run_length(data)
            run_altitude = 10000.0 * alt_scale_factor
            dae_values = DAE_VALUES_TEMPLATE % (run_length, run_altitude, run_length, run_altitude)

            f = open(os.path.join(ROOT_TMP_PATH, 'files', 'curtain_%.4i.dae' % img_cnt), 'w')
            f.write(DAE_TEMPLATE % (os.path.basename(img_name), dae_values))
            f.close()
            img_cnt += 1
        kml_doc += KML_FOLDER_END_TEMPLATE

    kml_doc += KML_FOLDER_END_TEMPLATE
    kml_doc += KML_FOOTER_TEMPLATE

    f = open(os.path.join(ROOT_TMP_PATH, 'doc.kml'), 'w')
    f.write(kml_doc)
    f.close()

    kmz_filename = os.path.join(os.environ['HOME'], '%s_lidar_curtain.kmz' % (fid))
    print('Writing kmz file to: %s' % (kmz_filename))
    # TODO: try to remove the subprocess dependency
    cmd = """cd %s && zip --quiet -r %s doc.kml files/ images/""" % (ROOT_TMP_PATH, kmz_filename)

    proc = subprocess.Popen(cmd, shell=True)
    proc.wait()


def _argparser():
    import argparse
    import sys  # sys is not imported at module level
    sys.argv.insert(0, 'faampy ge_lidar_curtain')
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument('fid', action="store", type=str,
                        help='flight id like: b612.')
    parser.add_argument('lidar_file', action="store", type=str,
                        help='idl sav-file that contains the lidar data.')
    
parser.add_argument('flight_summary', action="store", type=str, + help='common flight-summary file as stored at the BADC.') + parser.add_argument('-s', '--step', action="store", type=int, default=1, required=False, + help='step size for lidar data plots. If for example step=5, \ + then only every fifth lidar profile is used. Using this option can speed up the process.') + parser.add_argument('-a', '--alt-scale-factor', action="store", type=int, default=5, required=False, + help='overscales the altitude by this factor e.g. with \ + the default alt-scale-factor of 5: 10000m becomes 50000m in google-earth.') + args = parser.parse_args() + return parser + + +def main(): + parser = _argparser() + args = parser.parse_args() + process(args.fid, + args.lidar_file, + args.flight_summary, + step=args.step, + alt_scale_factor=args.alt_scale_factor) + + +if __name__ == '__main__': + main() diff --git a/faampy/mapping/ge_ncas_airquality.py b/faampy/mapping/ge_ncas_airquality.py new file mode 100644 index 0000000..1d4bdb1 --- /dev/null +++ b/faampy/mapping/ge_ncas_airquality.py @@ -0,0 +1,136 @@ +#!/usr/bin/env python + +""" +Script downloads the model output from the NCAS air quality model from: + + https://sci.ncas.ac.uk/airquality/ + +and creates a kmz file that is viewable in google-earth. This allows for +example FAAM flight tracks to be overlaid on top of the model images. + +""" + +import datetime +import requests +import shutil +import simplekml +import subprocess +import tempfile +import os +import sys + +response = requests.get('https://sci.ncas.ac.uk/airquality/') + +ROOT_URL='https://sci.ncas.ac.uk' + + +TEMPDIR=tempfile.mkdtemp() # temporary directory where the png images will be saved +VARIABLES=['O3', 'NOx', 'PM25'] # variables for which models will be downloaded + +# ground control points and boundaries are static for the NCAS air quality output and were derived using the georeferencer tool from qgis +gcps = "-gcp 209.743 43.3586 -8 60 -gcp 281.457 43.0282 -4 60 -gcp 352.443 42.896 0 60 -gcp 209.412 112.428 -8 58 -gcp 281.655 112.627 -4 58 -gcp 352.575 112.561 0 58 -gcp 209.412 178.458 -8 56 -gcp 281.853 178.59 -4 56 -gcp 352.575 178.226 0 56 -gcp 209.545 240.356 -8 54 -gcp 281.457 240.488 -4 54 -gcp 352.575 240.356 0 54 -gcp 209.743 299.809 -8 52 -gcp 281.721 299.313 -4 52 -gcp 352.509 299.809 0 52 -gcp 209.677 356.552 -8 50 -gcp 281.589 356.552 -4 50 -gcp 352.509 356.651 0 50 " +miny, maxy, minx, maxx = 47.186609484953294, 61.20747141385112, -19.282507889150953, 13.009442407021385 + + +def get_image(url): + """ + download image from the website + + :param url: http address + + """ + response = requests.get(url, stream=True) + local_image_filename=os.path.join(TEMPDIR, os.path.split(url)[-1].encode('ascii')) + with open(local_image_filename, 'wb') as out_file: + shutil.copyfileobj(response.raw, out_file) + del response + return local_image_filename + + +def img_to_gtiff(img_filename, tif_filename): + """convert png to geotiff using ground control points + + :param img_filename: model png image file name + :param tif_filename: geo-tiff file warped to EPSG\:4326 + """ + tif_tmp_filename = tempfile.mkstemp(suffix='.tif')[1] + + cmd1 = """gdal_translate -of GTiff %s "%s" "%s" """ % (gcps, img_filename, tif_tmp_filename) + cmd2 = """gdalwarp -t_srs EPSG:4326 -order -tps -co COMPRESS=NONE "%s" "%s" """ % (tif_tmp_filename, tif_filename) + + proc1 = subprocess.Popen(cmd1, shell=True) + proc1.wait() + proc2 = subprocess.Popen(cmd2, shell=True) + proc2.wait() + return + + +def 
parse_ncas_airquality(): + image_lists = {v: [] for v in VARIABLES} + lines = response.text.split('\n') + for line in lines: + if line.strip().startswith('images'): + line = line.replace('\\/', '/') + line = line.replace('"', '') + line.encode('ascii') + images = line[line.find("[")+1:line.find("]")] + if 'O3_' in line: + image_lists['O3'] = images.split(',') + elif 'NOx_' in line: + image_lists['NOx'] = images.split(',') + elif 'PM25_' in line: + image_lists['PM25'] = images.split(',') + return image_lists + + +def process(outpath, limit=None): + image_lists=parse_ncas_airquality() + if not limit: + limit = len(image_lists[image_lists.keys()[0]]) + kml = simplekml.Kml() + for var in VARIABLES: + folder = kml.newfolder(name=var) + folder.open = 1 + for img in image_lists[var][0:limit]: + image_filename = get_image(ROOT_URL+img.encode('ascii')) + img_to_gtiff(image_filename, image_filename[:-3]+'tif') + image_filename = image_filename[:-3]+'tif' + timestamp = datetime.datetime.strptime(img.split('/')[-1][-17:-4].encode(), '%Y-%m-%d_%H') + timestamp = timestamp.strftime('%Y-%m-%dT%H:%M') + ground = folder.newgroundoverlay(name=timestamp) + ground.visibility = 0 + ground.icon.href = image_filename + ground.gxlatlonquad.coords = [(minx, miny), (maxx, miny), + (maxx, maxy), (minx, maxy)] + + outfile = os.path.join(outpath, 'ncas_airquality_%s.kmz' % ''.join(image_lists[VARIABLES[0]][0].split('/')[-1][3:-7].split('-'))) + kml.savekmz(outfile) + return outfile + + +def _argparser(): + import argparse + sys.argv.insert(0, 'faampy ge_ncas_airquality') + parser = argparse.ArgumentParser(description=__doc__) + parser.add_argument('-o', '--outpath', + action="store", + default=os.environ['HOME'], + type=str, + help='outpath') + parser.add_argument('-l', '--limit', + action="store", + type=int, + default=0, + help='maximum number of model images to be processed') + return parser + + +def main(): + parser = _argparser() + args = parser.parse_args() + outfile = process(args.outpath) + sys.stdout.write('Created ... %s. 
\n' % outfile) + + +if __name__ == '__main__': + main() diff --git a/faampy/mapping/ge_ncvar_to_kml.py b/faampy/mapping/ge_ncvar_to_kml.py new file mode 100644 index 0000000..631e98e --- /dev/null +++ b/faampy/mapping/ge_ncvar_to_kml.py @@ -0,0 +1,207 @@ +#!/usr/bin/env python + +import datetime +import netCDF4 +import numpy as np +import os +import re +import sys + +from faampy.core.utils import conv_time_to_secs, conv_secs_to_time, \ + get_index_from_secs, get_fid + + +KML_HEADER=""" + + + %s + 1 +""" + +KML_FOOTER="""""" + +KML_PLACEMARK_TEMPLATE=""" + + %s@%im + 0 + + + + absolute + + %.5f,%.5f,%.5f + + + + 1 + 1 + absolute + + %s + + + + +""" + + +def get_run_kml(run_data, ds, var, offset, scale_factor, time_lag): + """extracts the data for the specific run + """ + s_index=get_index_from_secs(ds, conv_time_to_secs(run_data[1])) + e_index=get_index_from_secs(ds, conv_time_to_secs(run_data[2])) + + lon=ds.variables['LON_GIN'][:] + if len(lon.shape) > 1: + lon=lon[s_index:e_index,0] + else: + lon=lon[s_index:e_index] + lon=list(lon) + + lat=ds.variables['LAT_GIN'][:] + if len(lat.shape) > 1: + lat=lat[s_index:e_index,0] + else: + lat=lat[s_index:e_index] + lat=list(lat) + + gin_alt=ds.variables['ALT_GIN'][:] + if len(gin_alt.shape) > 1: + gin_alt=gin_alt[s_index:e_index,0] + else: + gin_alt=gin_alt[s_index:e_index] + gin_alt=list(gin_alt) + + if len(ds.variables[var][:].shape) > 1: + alt=(ds.variables[var][:][int(s_index+time_lag):int(e_index+time_lag), 0]+offset)*scale_factor + else: + alt=(ds.variables[var][:][int(s_index+time_lag):int(e_index+time_lag)]+offset)*scale_factor + + if var+'_FLAG' in ds.variables.keys(): + alt_flag=ds.variables[var+'_FLAG'][:] + if len(alt_flag.shape) > 1: + alt_flag=alt_flag[s_index+time_lag:e_index+time_lag, 0] + else: + alt_flag=alt_flag[s_index+time_lag:e_index+time_lag] + alt_flag=list(alt_flag) + + alt[alt_flag != 0]=0 + + alt=list(alt) + + lon2, lat2, alt2 = [], [], [] + for i in range(len(lon)): + if np.isfinite([lon[i], lat[i], alt[i]]).all(): + if lon[i] == 0.0: + continue + if lat[i] == 0.0: + continue + lon2.append(lon[i]) + lat2.append(lat[i]) + alt2.append(alt[i]) + lon = lon2 + lat = lat2 + alt = alt2 + linestring_txt = ['%.5f,%.5f,%.5f' % (lon[i], lat[i], alt[i]) for i in range(len(lon))] + linestring_txt = '\n'.join(linestring_txt) + result = KML_PLACEMARK_TEMPLATE % (run_data[0], gin_alt[0], lon[0], lat[0], alt[0], linestring_txt) + return result + + +def process(ncfile, ncvar, time_lag, offset, scale_factor, outpath, _RUNS): + ds = netCDF4.Dataset(ncfile, 'r') + fid = get_fid(ds) + try: + datestring = os.path.basename(ncfile).split('_')[2]+'_' + except: + datestring = '' + #kml_filename=os.path.join(out_path, fid + '-' + os.path.basename(ncfile).split('_')[2] + '_' + ncvar.lower() + '.kml') + kml_filename = os.path.join(outpath, fid+'-'+datestring+ncvar.lower()+'.kml') + kml = open(kml_filename, 'w') + kml.write(KML_HEADER % (fid + '-' +datetime.datetime(ds.DATE[2], ds.DATE[1], ds.DATE[0]).strftime('%d-%m-%Y') + '-' + ncvar)) + + for run in _RUNS: + run_kml = get_run_kml(run, ds, ncvar, offset, scale_factor, time_lag) + kml.write(run_kml) + kml.write(KML_FOOTER) + kml.close() + ds.close() + return + + +def _argparser(): + import argparse + sys.argv.insert(0, 'faampy ge_ncvar_to_kml') + parser = argparse.ArgumentParser(description='Creates a kml-profile plot for specfic netCDF variable',) + parser.add_argument('--offset', action='store', type=float, default=0.0, + help='Offset value. 
Value is removed from variable before profiles are created')
    parser.add_argument('--scale_factor', action='store', type=float, default=45,
                        help='Scaling factor, multiplier for the netCDF variable.')
    parser.add_argument('--time_lag', action='store', type=float, default=0.0,
                        help='time lag between variable and GIN measurement caused by inlets')
    parser.add_argument('--fltsumm', action='store', type=str,
                        help='Path to flight summary file for the specific flight')
    parser.add_argument('ncvar', action='store', type=str,
                        help="FAAM core netCDF variable name used for the profile.")
    parser.add_argument('faam_core_netcdf', action='store', type=str,
                        help="FAAM core netCDF data file")
    parser.add_argument('outpath', action='store', type=str,
                        help='Path to where the kml file is written to.')
    return parser


def main():
    parser = _argparser()
    args = parser.parse_args()

    if args.fltsumm:
        from faampy.core.flight_summary import FlightSummary
        fs = FlightSummary(args.fltsumm)
        _RUNS = []
        for e in fs.Entries:
            if re.findall('run|leg|box', e.Name.lower()):
                _RUNS.append((e.Name, e.Start_time, e.Stop_time))
    else:
        ds = netCDF4.Dataset(args.faam_core_netcdf, 'r')
        if 'IAS_RVSM' in ds.variables.keys():
            if len(ds.variables['IAS_RVSM'][:].shape) == 2:
                ias = ds.variables['IAS_RVSM'][:, 0].ravel()
            else:
                ias = ds.variables['IAS_RVSM'][:].ravel()
            if not isinstance(ias, np.ndarray):
                ias = ias.data
            # filter for indicated airspeed greater than 60
            ix = np.where(ias > 60)
            ix_min, ix_max = np.min(ix), np.max(ix)
        else:
            ix_min = 60
            v = ds.variables[ds.variables.keys()[0]][:]
            ix_max = v.shape[0]-60

        _RUNS = [('Full flight',
                  conv_secs_to_time(ds.variables['Time'][ix_min], no_colons=True),
                  conv_secs_to_time(ds.variables['Time'][ix_max], no_colons=True)), ]
        ds.close()
    #print(args.ncvar, _RUNS,)
    process(args.faam_core_netcdf,
            args.ncvar,
            args.time_lag,
            args.offset,
            args.scale_factor,
            args.outpath,
            _RUNS)


if __name__ == '__main__':
    main()
diff --git a/faampy/mapping/ge_nimrod_to_kmz.py b/faampy/mapping/ge_nimrod_to_kmz.py
new file mode 100644
index 0000000..c6c8638
--- /dev/null
+++ b/faampy/mapping/ge_nimrod_to_kmz.py
@@ -0,0 +1,353 @@
#!/usr/bin/python

"""
Extracts all rain radar data from the tar file and

  * creates an 8-bit png image file with a custom colour palette
  * converts the png to a gtiff and adds the projection EPSG:27700=OSGB1936
  * warps the gtiff image to EPSG:4326
  * creates one kmz file with a folder which contains all gtiff-4326 files
    as GroundOverlays

The original UKMO NIMROD data files can be found at the BADC in:

  http://badc.nerc.ac.uk/browse/badc/ukmo-nimrod/data/composite/uk-1km/

The script is very wasteful in terms of disk space. The temporary folder
that is created for storing the image files can grow to several GB. The
script deletes the temporary folder by default.

At the moment the script only works with the 1km composite data, but it
should be straightforward to adapt the script for other rain radar data
files that are supplied in NIMROD format.

Due to the fact that this script makes use of many common linux command line
tools (zip, tar, gunzip, gdal_translate, gdalwarp) it is unlikely that it
will work on a Windows OS.
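A sketch of the intended command line use (the tar file name is a made-up
example that follows the BADC naming convention the script relies on, with
the date in the third underscore-separated field)::

    faampy ge_nimrod_to_kmz metoffice-c-band-rain-radar_uk_20120303_1km-composite.dat.gz.tar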
+ +""" + + +import datetime +import gdal +import numpy as np +import subprocess +import shutil +import tempfile +from PIL import Image + +import os, sys +import struct, array, time + + +_KML_HEADER=""" + + + %s + 0 +""" + +_KML_GROUNDOVERLAY=""" + + %s + %s + + %s + 0 + 0 + b5ffffff + + files/%s + 0.75 + + + %f + %f + %f + %f + + +""" + +_KML_FOOTER=""" + +""" + + +#see: http://badc.nerc.ac.uk/browse/badc/ukmo-nimrod/doc/radar_key.gif +#((bin_lower_threshold, bin_upper_threshold), (R, G, B)) +_PALETTE=[(( 0.00, 0.25), ( 15, 31, 151)), # darkblue + (( 0.25, 0.50), ( 2, 77, 178)), # mediumblue + (( 0.50, 1.00), ( 58, 108, 255)), # lightblue + (( 1.00, 2.00), ( 5, 182, 5)), # green + (( 2.00, 4.00), (220, 205, 7)), # yellow + (( 4.00, 8.00), (255, 154, 0)), # orange + (( 8.00, 16.00), (202, 49, 51)), # fuchsia + ((16.00, 32.00), (255, 2, 255)), # white + ((-2.00, -0.5), (150, 150, 150))] # lightgrey + + +#taken from: http://badc.nerc.ac.uk/browse/badc/ukmo-nimrod/software/python/read_nimrod.py +def read_nimrod(pathed_file, quite=True): + + file_id=open(pathed_file,"rb") + record_length,=struct.unpack(">l", file_id.read(4)) + if record_length != 512: + sys.stdout.write("Unexpected record length: %i" % record_length) + sys.exit(1) + + gen_ints=array.array("h") + gen_reals=array.array("f") + spec_reals=array.array("f") + characters=array.array("c") + spec_ints=array.array("h") + + gen_ints.read(file_id, 31) + gen_ints.byteswap() + + gen_reals.read(file_id, 28) + gen_reals.byteswap() + + spec_reals.read(file_id, 45) + spec_reals.byteswap() + characters.read(file_id, 56) + spec_ints.read(file_id, 51) + spec_ints.byteswap() + + record_length,=struct.unpack(">l", file_id.read(4)) + if record_length != 512: + raise ("Unexpected record length", record_length) + + #for i in range(len(gen_ints)): print i+1, gen_ints[i] + #for i in range(len(gen_reals)): print i+32, gen_reals[i] + chars=characters.tostring() + + if not quite: + sys.stdout.write("\nDate %4.4d%2.2d%2.2d Time %2.2d:%2.2d Grid %d x %d\n" %(gen_ints[0], gen_ints[1], gen_ints[2], gen_ints[3], gen_ints[4], gen_ints[15], gen_ints[16])) + sys.stdout.write("start northing %.1f, row interval %.1f, start easting %.1f, column interval %.1f\n" %(gen_reals[2], gen_reals[3], gen_reals[4], gen_reals[5])) + sys.stdout.write("Units are %s\n" % chars[0:8]) + sys.stdout.write("Data source is %s\n" % chars[8:32]) + sys.stdout.write("Parameter is %s\n" % chars[32:55]) + for i in range(gen_ints[22]): + print(i+108, spec_ints[i]) + for i in gen_reals: + print(i) + for i in spec_reals: + print(i) + #Read data + cols=gen_ints[15] + rows=gen_ints[16] + array_size=cols*rows + + record_length,=struct.unpack(">l", file_id.read(4)) + if record_length != array_size*2: + raise ("Unexpected record length", record_length) + + data=array.array("h") + try: + data.read(file_id, array_size) + record_length,=struct.unpack(">l", file_id.read(4)) + if record_length != array_size*2: + raise ("Unexpected record length", record_length) + data.byteswap() + except: + sys.stdout.write("Read failed\n") + file_id.close() + result={'data': np.array(data), + 'cols': cols, + 'rows': rows, + 'start_northing': gen_reals[2], + 'row_interval': gen_reals[3], + 'start_easting': gen_reals[4], + 'column_interval': gen_reals[5], + 'year': gen_ints[0], + 'month': gen_ints[1], + 'day': gen_ints[2], + 'hour': gen_ints[3], + 'min': gen_ints[4]} + return result + + +def calc_kmz_boundaries(tiff_file): + ds=gdal.Open(tiff_file) + width=ds.RasterXSize + height=ds.RasterYSize + gt=ds.GetGeoTransform() + 
+    minx = gt[0]
+    miny = gt[3] + width*gt[4] + height*gt[5]
+    maxx = gt[0] + width*gt[1] + height*gt[2]
+    maxy = gt[3]
+    ds = None
+    return (miny, maxy, minx, maxx)
+
+
+def get_palette():
+    palette = _PALETTE
+    img_palette = [0, 0, 0] + [item for sublist in palette for item in sublist[1]]
+    return img_palette
+
+
+def data_to_img(data, dim, img_filename):
+    """Converts the nimrod data array to an 8-bit png file with a custom
+    palette.
+
+    """
+    cols = dim[0]
+    rows = dim[1]
+    im = Image.fromarray(np.uint8(data.reshape(cols, rows)), 'P')
+    png_palette = get_palette()
+    im.putpalette(png_palette)
+    im.save(img_filename, transparency=0)
+
+
+def img_to_gtiff(nimrod, img_filename):
+    """The original data array is in EPSG:27700. Each pixel represents
+    1000x1000m.
+
+    """
+    tif_27700_filename = os.path.splitext(img_filename)[0] + '_epsg27700.tif'
+    tif_filename = os.path.splitext(img_filename)[0] + '.tif'
+    tif_filename = os.path.join(os.path.dirname(tif_filename), 'files',
+                                os.path.basename(tif_filename))
+    # The datum of the data array is the British National Grid
+    # OSGB1936 == EPSG:27700 (http://spatialreference.org/ref/epsg/27700/).
+    # The corner values are derived from the information in the nimrod
+    # header (see the read_nimrod function): start northing, row interval,
+    # start easting, column interval. Each pixel is 1000m x 1000m in size
+    # and the array is 2175x1725.
+    translate_str = "-a_srs EPSG:27700 -a_ullr %i %i %i %i" % (nimrod['start_easting'],
+                                                               nimrod['start_northing'],
+                                                               nimrod['start_easting']+nimrod['rows']*nimrod['row_interval'],
+                                                               nimrod['start_northing']-nimrod['cols']*nimrod['column_interval'])
+
+    # add projection/datum information to the img and write a GTiff file
+    cmd = """gdal_translate -q -of GTiff %s "%s" "%s" """ % (translate_str, img_filename, tif_27700_filename)
+    proc = subprocess.Popen(cmd, shell=True)
+    proc.wait()
+    # reproject the just created tif-file to EPSG:4326, which is the
+    # projection google-earth needs
+    cmd = """gdalwarp -q -t_srs EPSG:4326 -srcnodata 0 -dstalpha "%s" "%s" """ % (tif_27700_filename, tif_filename)
+    proc = subprocess.Popen(cmd, shell=True)
+    proc.wait()
+    return tif_filename
+
+
+def gtiff_to_kmz(kmz_filename, date):
+    """Converts the geotiff files into a kmz file. One large kml file is
+    created, which contains a number of GroundOverlays.
+
+    """
+    kml = _KML_HEADER % ('ukmo-rain-radar %s' % (date.strftime('%Y-%m-%d')))
+
+    file_list = [os.path.join(_TEMP_FOLDER, 'files', f) for f in os.listdir(os.path.join(_TEMP_FOLDER, 'files'))]
+    file_list.sort()
+    for f in file_list:
+        timestamp = datetime.datetime.strptime(os.path.basename(f).split('_')[2], '%Y%m%d%H%M')
+        time_span_begin = (timestamp - datetime.timedelta(seconds=150)).strftime('%Y-%m-%dT%H:%M:00Z')
+        time_span_end = (timestamp + datetime.timedelta(seconds=150)).strftime('%Y-%m-%dT%H:%M:00Z')
+        time_stamp = timestamp.strftime('%Y-%m-%d %H:%M')
+        miny, maxy, minx, maxx = calc_kmz_boundaries(f)
+        kml += _KML_GROUNDOVERLAY % (time_span_begin,
+                                     time_span_end,
+                                     time_stamp,
+                                     os.path.basename(f),
+                                     miny, maxy, minx, maxx)
+    kml += _KML_FOOTER
+    f = open(os.path.join(_TEMP_FOLDER, 'doc.kml'), 'w')
+    f.write(kml)
+    f.close()
+    cmd = """cd %s && zip -rq %s doc.kml files/ """ % (_TEMP_FOLDER, kmz_filename)
+    proc = subprocess.Popen(cmd, shell=True)
+    proc.wait()
+
+
+def untar(tar_file):
+    """Extracts all data files from the tar-file into a temporary directory.
+
+    """
+    cmd1 = """tar xvf %s -C %s > /dev/null""" % (tar_file, _TEMP_FOLDER)
+    cmd2 = """gunzip -q %s/*gz""" % (_TEMP_FOLDER)
+    proc = subprocess.Popen(cmd1, shell=True)
+    proc.wait()
+    proc = subprocess.Popen(cmd2, shell=True)
+    proc.wait()
+
+
+def scale_data(data):
+    """Bins the data values into the palette classes. Units of the nimrod
+    data are mm/h*32.
+
+    """
+    palette = _PALETTE
+    tmp = data.copy()
+    for i in range(len(palette)):
+        lthre = palette[i][0][0]
+        uthre = palette[i][0][1]
+        ix = np.where((data/32. > lthre) & (data/32. <= uthre))[0]
+        tmp[ix] = i + 1
+    return tmp
+
+
+def create_kmz_filename(tar_file, outpath):
+    """Creates a kmz filename which contains location, resolution and date.
+
+    """
+    # TODO: needs adjusting when the script is adapted to handle other data
+    # files than the 1km-UK-composite
+    kmz_filename = os.path.join(outpath, 'ukmo-rain-radar_uk_1km_%s.kmz' % (os.path.basename(tar_file).split('_')[2]))
+    date = datetime.datetime.strptime(os.path.basename(tar_file).split('_')[2], '%Y%m%d')
+    return (kmz_filename, date)
+
+
+def process(tar_file, outpath):
+    untar(tar_file)
+    data_file_list = os.listdir(_TEMP_FOLDER)
+    data_file_list.sort()
+    for d in data_file_list:
+        if d.endswith('dat'):
+            nimrod = read_nimrod(os.path.join(_TEMP_FOLDER, d))
+            data = scale_data(nimrod['data'])
+            # dimensions
+            dim = (nimrod['cols'], nimrod['rows'])
+            img_filename = os.path.join(_TEMP_FOLDER, os.path.splitext(d)[0]+'.png')
+            data_to_img(data, dim, img_filename)
+            img_to_gtiff(nimrod, img_filename)
+    kmz_filename, date = create_kmz_filename(tar_file, outpath)
+    gtiff_to_kmz(kmz_filename, date)
+    sys.stdout.write('\nKMZ written to: %s \n' % (kmz_filename))
+
+
+def _argparser():
+    import argparse
+    from argparse import RawTextHelpFormatter
+    sys.argv.insert(0, 'faampy ge_nimrod_to_kmz')
+    parser = argparse.ArgumentParser(description=__doc__,
+                                     formatter_class=RawTextHelpFormatter)
+    parser.add_argument('rain_radar_tar_file', action="store", type=str,
+                        help='MetOffice compressed rain radar file')
+    parser.add_argument('-o', '--outpath', action="store", type=str, required=False,
+                        default=os.environ['HOME'],
+                        help='Directory where the kmz file will be stored. Default: $HOME.')
+    parser.add_argument('-k', '--keep-folder', action="store_true", required=False, default=False,
+                        help='If the option is set, the temporary directory will *not* be deleted. Default: False')
+    return parser
+
+
+def main():
+    global _TEMP_FOLDER
+    _TEMP_FOLDER = tempfile.mkdtemp(dir=os.path.join(os.environ['HOME'], 'tmp'))
+    if not os.path.exists(os.path.join(_TEMP_FOLDER, 'files')):
+        os.mkdir(os.path.join(_TEMP_FOLDER, 'files'))
+    parser = _argparser()
+    args = parser.parse_args()
+    # test that the input file is the 1km-composite
+    if not args.rain_radar_tar_file.endswith('1km-composite.dat.gz.tar'):
+        sys.stdout.write('Sorry, the script currently only works with the UK 1km composite file. \nLeaving ... \n')
+        sys.exit()
+    start_time = time.time()
+    process(args.rain_radar_tar_file, args.outpath)
+    # clean up behind ourselves
+    if not args.keep_folder:
+        sys.stdout.write('Deleting temporary folder %s ... \n' % (_TEMP_FOLDER))
+        shutil.rmtree(_TEMP_FOLDER)
+    else:
+        sys.stdout.write('Keeping files in %s \n' % (_TEMP_FOLDER))
+    sys.stdout.write('Processing time %i seconds ... \n' % (time.time()-start_time))
+    sys.stdout.write('Leaving ... \n\n')
+
+
+if __name__ == '__main__':
+    main()
diff --git a/faampy/mapping/ge_photo_album.py b/faampy/mapping/ge_photo_album.py
new file mode 100644
index 0000000..a2e8945
--- /dev/null
+++ b/faampy/mapping/ge_photo_album.py
@@ -0,0 +1,170 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+"""
+Photo album creator for google-earth from georeferenced photos.
+The script produces a kmz file for google-earth with all the photos
+in the input folder. The images need to contain GPS location information.
+This kind of information can be added to the image files using a tool
+like gpscorrelate and a gpx file which contains the track of the flight.
+
+"""
+
+import os
+import simplekml
+import sys
+import datetime as dt
+import imghdr
+
+import faampy
+
+from PIL import Image
+from PIL.ExifTags import TAGS, GPSTAGS
+
+# https://gist.githubusercontent.com/erans/983821/raw/a0fc31610ab044946387cef81ee3ff3b117ab0a6/get_lat_lon_exif_pil.py
+
+def get_exif_data(image):
+    """Returns a dictionary from the exif data of a PIL Image item.
+    Also converts the GPS tags."""
+    exif_data = {}
+    info = image._getexif()
+    if info:
+        for tag, value in info.items():
+            decoded = TAGS.get(tag, tag)
+            if decoded == "GPSInfo":
+                gps_data = {}
+                for t in value:
+                    sub_decoded = GPSTAGS.get(t, t)
+                    gps_data[sub_decoded] = value[t]
+                exif_data[decoded] = gps_data
+            else:
+                exif_data[decoded] = value
+    return exif_data
+
+
+def _get_if_exist(data, key):
+    if key in data:
+        return data[key]
+    return None
+
+
+def _convert_to_degrees(value):
+    """Helper function to convert the GPS coordinates stored in the EXIF to
+    degrees in float format."""
+    d0 = value[0][0]
+    d1 = value[0][1]
+    d = float(d0) / float(d1)
+
+    m0 = value[1][0]
+    m1 = value[1][1]
+    m = float(m0) / float(m1)
+
+    s0 = value[2][0]
+    s1 = value[2][1]
+    s = float(s0) / float(s1)
+
+    return d + (m / 60.0) + (s / 3600.0)
+
+
+def _convert_to_dms(deg, direction):
+    """direction is either 'NS' or 'EW'."""
+    m, s = divmod(deg*3600, 60)
+    d, m = divmod(m, 60)
+    if direction == 'NS':
+        if d >= 0:
+            _dir = 'N'
+        else:
+            _dir = 'S'
+    elif direction == 'EW':
+        if d >= 0:
+            _dir = 'E'
+        else:
+            _dir = 'W'
+    return "%id %i' %.2f\" %s" % (abs(d), abs(m), abs(s), _dir)
+
+
+def get_lat_lon(exif_data):
+    """Returns the latitude and longitude, if available, from the provided
+    exif_data (obtained through get_exif_data above)."""
+    lat = None
+    lon = None
+    alt = None
+    gps_datetime = None
+
+    if "GPSInfo" in exif_data:
+        gps_info = exif_data["GPSInfo"]
+
+        gps_latitude = _get_if_exist(gps_info, "GPSLatitude")
+        gps_latitude_ref = _get_if_exist(gps_info, 'GPSLatitudeRef')
+        gps_longitude = _get_if_exist(gps_info, 'GPSLongitude')
+        gps_longitude_ref = _get_if_exist(gps_info, 'GPSLongitudeRef')
+        gps_altitude = _get_if_exist(gps_info, 'GPSAltitude')
+        if gps_altitude:
+            alt = float(gps_altitude[0])/float(gps_altitude[1])
+        if gps_latitude and gps_latitude_ref and gps_longitude and gps_longitude_ref:
+            lat = _convert_to_degrees(gps_latitude)
+            if gps_latitude_ref != "N":
+                lat = 0 - lat
+            lon = _convert_to_degrees(gps_longitude)
+            if gps_longitude_ref != "E":
+                lon = 0 - lon
+            DateTime = gps_info['GPSDateStamp']+'T'+':'.join(['%i' % i[0] for i in gps_info['GPSTimeStamp']])
+            gps_datetime = dt.datetime.strptime(DateTime, '%Y:%m:%dT%H:%M:%S')
+
+    return (lat, lon, alt, gps_datetime)
+
+
+def process(path):
+    kml = simplekml.Kml()
+    iconfilename = os.path.join(os.path.dirname(os.path.realpath(faampy.__file__)),
+                                '..', 'files', 'icons', 'camera_32x32.png')
+    iconpath = kml.addfile(iconfilename)
+    for (dirpath, dirnames, filenames) in os.walk(path):
+        for filename in sorted(filenames):
+            fullpath = os.path.join(dirpath, filename)
+            _type = imghdr.what(fullpath)
+            if _type != 'jpeg':
+                continue
+            img = Image.open(fullpath)
+            exif_data = get_exif_data(img)
+            img.close()
+            Lat, Long, Alt, DT = get_lat_lon(exif_data)
+            sys.stdout.write('%s: Lat: %s, Long: %s, Alt: %s\n' % (fullpath, Lat, Long, Alt))
+            if Lat:
+                x, y = Lat, Long
+                point = kml.newpoint(name=filename, coords=[(y, x)])
+                picpath = kml.addfile(fullpath)
+                # Note: will not work if .JPG is used, must be lower case
+                fn = 'files/' + os.path.splitext(filename)[0] + '.jpg'
+                balstylestring = '<![CDATA['
+                balstylestring += "Date: " + DT.strftime('%d-%m-%Y')
+                balstylestring += ' Time: ' + DT.strftime("%H:%M:%S UTC")
+                balstylestring += '<br>Lon: ' + _convert_to_dms(y, 'EW') + ' Lat: ' + _convert_to_dms(x, 'NS') + ' Alt: ' + str(int(Alt)) + 'm'
+                balstylestring += '<br><img src="' + fn + '" width=400>]]>'
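+                # the balloon text is HTML inside a CDATA block, so
+                # google-earth renders the markup as-is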
+                point.style.balloonstyle.text = balstylestring
+                point.style.iconstyle.icon.href = 'files/camera_32x32.png'
+    return kml
+
+
+def _argparser():
+    import argparse
+    sys.argv.insert(0, 'faampy ge_photo_album')
+    parser = argparse.ArgumentParser(description=__doc__)
+    parser.add_argument('path', action="store", type=str,
+                        help='directory which holds the photographs. All photographs in the directory will be added to the photo album.')
+    parser.add_argument('outfile',
+                        action="store",
+                        type=str,
+                        help='outfile name')
+    return parser
+
+
+def main():
+    parser = _argparser()
+    args = parser.parse_args()
+    kml = process(args.path)
+    kml.savekmz(args.outfile, format=True)
+    sys.stdout.write('Created ... %s. \n' % args.outfile)
+
+
+if __name__ == '__main__':
+    main()
diff --git a/faampy/mapping/ge_shapes.py b/faampy/mapping/ge_shapes.py
new file mode 100644
index 0000000..2954eee
--- /dev/null
+++ b/faampy/mapping/ge_shapes.py
@@ -0,0 +1,72 @@
+import numpy as np
+from numpy import arcsin, sin, cos, arctan2
+
+ELGIN = (1.8362333333, 57.0110555556, 0)
+WDIR = 90.
+
+LENGTH = 65.0
+WIDTH = 1.0
+
+
+KML_LINESTRING_TEMPLATE = """<Placemark>
+  <name>%s</name>
+  <description>%s</description>
+  <LineString>
+    <tessellate>1</tessellate>
+    <coordinates>
+      %s
+    </coordinates>
+  </LineString>
+</Placemark>
+"""
+
+KML_HEADER_TEMPLATE = """<?xml version="1.0" encoding="UTF-8"?>
+<kml xmlns="http://www.opengis.net/kml/2.2">
+<Document>
+"""
+
+KML_FOOTER_TEMPLATE = """</Document>
+</kml>
+"""
+
+
+def point_from_distance_and_bearing(origin, bearing, distance):
+    """Destination point given distance and bearing from a start point;
+    see http://www.movable-type.co.uk/scripts/latlong.html
+    """
+    R = 6371.0    # mean Earth radius in km
+    d = distance
+    theta = np.deg2rad(bearing)
+    lat1 = np.deg2rad(origin[1])
+    lon1 = np.deg2rad(origin[0])
+    lat2 = arcsin(sin(lat1)*cos(d/R) + cos(lat1)*sin(d/R)*cos(theta))
+    lon2 = lon1 + arctan2(sin(theta) * sin(d/R)*cos(lat1),
+                          cos(d/R) - sin(lat1)*sin(lat2))
+    return (np.rad2deg(lon2), np.rad2deg(lat2))
+
+
+def create_kml(name, origin, bearing, distance):
+    lon, lat = [], []
+    for org in origin:
+        lon.append(org[0])
+        lat.append(org[1])
+    linestring_txt = ""
+    for i in range(len(name)):
+        desc = 'Bearing: %6.2f' % bearing[i]
+        lon2, lat2 = point_from_distance_and_bearing((lon[i], lat[i]), bearing[i], distance[i])
+        linestring_coord = '%.5f,%.5f,%.5f\n' % (lon[i], lat[i], 0)
+        linestring_coord += '%.5f,%.5f,%.5f\n' % (lon2, lat2, 0)
+        linestring_txt += KML_LINESTRING_TEMPLATE % (name[i], desc, linestring_coord)
+    kml = KML_HEADER_TEMPLATE + linestring_txt + KML_FOOTER_TEMPLATE
+    return kml
diff --git a/faampy/mapping/ge_was_to_kmz.py b/faampy/mapping/ge_was_to_kmz.py
new file mode 100644
index 0000000..af82ee7
--- /dev/null
+++ b/faampy/mapping/ge_was_to_kmz.py
@@ -0,0 +1,216 @@
+#!/usr/bin/python
+
+'''
+WAS log as google-earth overlay. Filling times are represented
+as lines showing the location of the bottle fill.
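+
+Example (flight id and file names are hypothetical)::
+
+    faampy ge_was_to_kmz B123.WAS core_faam_20170101_v004_r0_b123.nc $HOME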
+ +''' + +import netCDF4 +import numpy as np +import os +import re +import sys +import shutil +import tempfile +import zipfile + +import faampy + +_KML_HEADER = """ + + + + + + %s + 0 +""" + +_KML_FOOTER = """ +""" + +_KML_WAS_BOTTLE = """ + %s@%i m + + %s to %s@%im + + #was + + + absolute + + %8.4f,%8.4f,%8.2f + + + + absolute + + %s + + + + +""" + +# global variables +_KMZ_TMP_DIRECTORY = tempfile.mkdtemp() +_FID = '' + +def __zip__(outfile): + files4zipping = [] + for root, subFolders, files in os.walk(_KMZ_TMP_DIRECTORY): + for f in files: + files4zipping.append(os.path.join(root,f)) + zip = zipfile.ZipFile(outfile, mode='w') + for file in files4zipping: + zipname = file[len(_KMZ_TMP_DIRECTORY) + (len(os.sep)):] + zip.write(file, zipname) + return + + +def __setup_kmz__(): + icon_path = os.path.join(_KMZ_TMP_DIRECTORY, 'icons') + os.mkdir(icon_path) + icon_file = os.path.join(os.path.dirname(faampy.__file__), + '..', 'files', 'icons', + 'was_bottle_32x32.png') + shutil.copy(icon_file, icon_path) + + +def t2s(s): + s = s.split(':') + result = int(s[0]) * 3600 + int(s[1]) * 60 + int(s[2]) + return result + + +def get_index(ds, secs): + ix = np.where(ds.variables['Time'][:] == secs) + return ix[0] + + +def read_was_log(was_log_file): + result = [] + f = open(was_log_file, 'r') + lines = f.readlines() + f.close() + for line in lines: + if (line.strip()[0]).isdigit(): + result.append(str.split(line)) + return result + + +def parse_was_log(was_log_file, ds): + was_log_lines = read_was_log(was_log_file) + plcmark_txt = "" + for was in was_log_lines: + if not was: + continue + # get coordinates for bottle start + s_secs = t2s(was[1]) + s_ix = int(get_index(ds, s_secs)) + e_secs = t2s(was[2]) + e_ix = int(get_index(ds, e_secs)) + if len(ds.variables['ALT_GIN'][:].shape) == 2: + s_lon = ds.variables['LON_GIN'][:][s_ix, 0] + s_lat = ds.variables['LAT_GIN'][:][s_ix, 0] + s_alt = ds.variables['ALT_GIN'][:][s_ix, 0] + # get coordinates for bottle end + e_lon = ds.variables['LON_GIN'][:][e_ix, 0] + e_lat = ds.variables['LAT_GIN'][:][e_ix, 0] + e_alt = ds.variables['ALT_GIN'][:][e_ix, 0] + else: + s_lon = ds.variables['LON_GIN'][:][s_ix] + s_lat = ds.variables['LAT_GIN'][:][s_ix] + s_alt = ds.variables['ALT_GIN'][:][s_ix] + # get coordinates for bottle end + e_lon = ds.variables['LON_GIN'][:][e_ix] + e_lat = ds.variables['LAT_GIN'][:][e_ix] + e_alt = ds.variables['ALT_GIN'][:][e_ix] + + linstring_txt = "%f,%f,%f\n%f,%f,%f" % (s_lon, s_lat, s_alt, + e_lon, e_lat, e_alt) + plcmark_txt += _KML_WAS_BOTTLE % (_FID+': ' +was[0], s_alt, was[1], was[2], s_alt, s_lon, s_lat, s_alt, linstring_txt) + return plcmark_txt + + +def process(was_log_file, ncfile, outpath): + global _FID + + __setup_kmz__() + _FID = re.search('[bBcC]\d{3}', ncfile).group() + ds = netCDF4.Dataset(ncfile, 'r') + if hasattr(ds, 'title'): + flight_desc = str(ds.title).split()[2] + '-' + str(ds.title).split()[4] + '-WAS-Bottles' + elif hasattr(ds, 'Title'): + flight_desc = str(ds.Title).split()[2] + '-' + str(ds.Title).split()[4] + '-WAS-Bottles' + else: + flight_desc = '' + was_kml = open(os.path.join(_KMZ_TMP_DIRECTORY, 'doc.kml'), 'w') + plcmark_txt = parse_was_log(was_log_file, ds) + ds.close() + was_kml.write(_KML_HEADER % (flight_desc)) + was_kml.write(plcmark_txt) + was_kml.write(_KML_FOOTER) + was_kml.close() + __zip__(os.path.join(outpath, 'faam-was_%s.kmz' % _FID)) + + +def __get_ncfile__(path, fid): + pattern = '.*core_faam.*%s_.*.nc' % str.lower(fid) + for root, subFolders, files in os.walk(path): + for f in files: + if 
re.match(pattern, f): + return os.path.join(root, f) + + +def _argparser(): + import argparse + sys.argv.insert(0, 'faampy ge_was_to_kmz') + parser = argparse.ArgumentParser(description=__doc__) + parser.add_argument('was_log_file', action="store", type=str, + help='Input WAS log file or folder which contains log files') + parser.add_argument('ncfile', action="store", type=str, + help='input netCDF-file or path to netcdf files') + parser.add_argument('outpath', action="store", type=str, + help='outpath for kmz file') + return parser + + +def main(): + parser = _argparser() + args = parser.parse_args() + if os.path.isdir(args.was_log_file): + tmp = os.listdir(args.was_log_file) + new_list = [] + for t in tmp: + if re.match('[b,B]\d{3}.WAS', t): + new_list.append(os.path.join(args.was_log_file, t)) + args.was_log_file = new_list + else: + args.was_log_file = [args.was_log_file, ] + + for was_log_file in args.was_log_file: + if os.path.isdir(args.ncfile): + ncfile = __get_ncfile__(args.ncfile, os.path.basename(os.path.splitext(was_log_file)[0])) + else: + ncfile = args.ncfile + try: + process(was_log_file, ncfile, args.outpath) + except: + sys.stdout.write('Problem processing ... %s\n' % (os.path.basename(was_log_file))) + + +if __name__ == '__main__': + main() diff --git a/faampy/mapping/geoid.py b/faampy/mapping/geoid.py new file mode 100644 index 0000000..585dc74 --- /dev/null +++ b/faampy/mapping/geoid.py @@ -0,0 +1,223 @@ +#!/usr/bin/python + +# url: https://github.com/vandry/geoidheight/blob/master/geoid.py + +# This file is mostly a straight translation of +# GeographicLib/src/Geoid.cpp from C++ to Python +# by Kim Vandry +# +# /** +# * \file Geoid.cpp +# * \brief Implementation for GeographicLib::Geoid class +# * +# * Copyright (c) Charles Karney (2009) +# * and licensed under the LGPL. 
For more information, see +# * http://geographiclib.sourceforge.net/ +# **********************************************************************/ +# +# Geoid height grade not supported + +import os +import mmap +import struct + +class GeoidBadDataFile(Exception): + pass + +class GeoidHeight(object): + """Calculate the height of the WGS84 geoid above the + ellipsoid at any given latitude and longitude + :param name: name to PGM file containing model info + download from http://geographiclib.sourceforge.net/1.18/geoid.html + """ + c0 = 240 + c3 = ( + ( 9, -18, -88, 0, 96, 90, 0, 0, -60, -20), + ( -9, 18, 8, 0, -96, 30, 0, 0, 60, -20), + ( 9, -88, -18, 90, 96, 0, -20, -60, 0, 0), + (186, -42, -42, -150, -96, -150, 60, 60, 60, 60), + ( 54, 162, -78, 30, -24, -90, -60, 60, -60, 60), + ( -9, -32, 18, 30, 24, 0, 20, -60, 0, 0), + ( -9, 8, 18, 30, -96, 0, -20, 60, 0, 0), + ( 54, -78, 162, -90, -24, 30, 60, -60, 60, -60), + (-54, 78, 78, 90, 144, 90, -60, -60, -60, -60), + ( 9, -8, -18, -30, -24, 0, 20, 60, 0, 0), + ( -9, 18, -32, 0, 24, 30, 0, 0, -60, 20), + ( 9, -18, -8, 0, -24, -30, 0, 0, 60, 20), + ) + + c0n = 372 + c3n = ( + ( 0, 0, -131, 0, 138, 144, 0, 0, -102, -31), + ( 0, 0, 7, 0, -138, 42, 0, 0, 102, -31), + ( 62, 0, -31, 0, 0, -62, 0, 0, 0, 31), + (124, 0, -62, 0, 0, -124, 0, 0, 0, 62), + (124, 0, -62, 0, 0, -124, 0, 0, 0, 62), + ( 62, 0, -31, 0, 0, -62, 0, 0, 0, 31), + ( 0, 0, 45, 0, -183, -9, 0, 93, 18, 0), + ( 0, 0, 216, 0, 33, 87, 0, -93, 12, -93), + ( 0, 0, 156, 0, 153, 99, 0, -93, -12, -93), + ( 0, 0, -45, 0, -3, 9, 0, 93, -18, 0), + ( 0, 0, -55, 0, 48, 42, 0, 0, -84, 31), + ( 0, 0, -7, 0, -48, -42, 0, 0, 84, 31), + ) + + c0s = 372 + c3s = ( + ( 18, -36, -122, 0, 120, 135, 0, 0, -84, -31), + (-18, 36, -2, 0, -120, 51, 0, 0, 84, -31), + ( 36, -165, -27, 93, 147, -9, 0, -93, 18, 0), + (210, 45, -111, -93, -57, -192, 0, 93, 12, 93), + (162, 141, -75, -93, -129, -180, 0, 93, -12, 93), + (-36, -21, 27, 93, 39, 9, 0, -93, -18, 0), + ( 0, 0, 62, 0, 0, 31, 0, 0, 0, -31), + ( 0, 0, 124, 0, 0, 62, 0, 0, 0, -62), + ( 0, 0, 124, 0, 0, 62, 0, 0, 0, -62), + ( 0, 0, 62, 0, 0, 31, 0, 0, 0, -31), + (-18, 36, -64, 0, 66, 51, 0, 0, -102, 31), + ( 18, -36, 2, 0, -66, -51, 0, 0, 102, 31), + ) + + #def __init__(self, name="egm2008-1.pgm"): + def __init__(self, name="/usr/share/GeographicLib/geoids/egm2008-1.pgm"): + self.offset = None + self.scale = None + + if not os.path.exists(name): + alt_name=['/usr/share/geographiclib/geoids/egm2008-1.pgm',] + for alt in alt_name: + if os.path.exists(alt): + name=alt + break + + with open(name, "r") as f: + line = f.readline() + if line != "P5\012" and line != "P5\015\012": + raise GeoidBadDataFile("No PGM header") + headerlen = len(line) + while True: + line = f.readline() + if len(line) == 0: + raise GeoidBadDataFile("EOF before end of file header") + headerlen += len(line) + if line.startswith('# Offset '): + try: + self.offset = int(line[9:]) + except (ValueError, e): + raise GeoidBadDataFile("Error reading offset", e) + elif line.startswith('# Scale '): + try: + self.scale = float(line[8:]) + except (ValueError, e): + raise GeoidBadDataFile("Error reading scale", e) + elif not line.startswith('#'): + try: + self.width, self.height = map(int, line.split()) + except (ValueError, e): + raise GeoidBadDataFile("Bad PGM width&height line", e) + break + line = f.readline() + headerlen += len(line) + levels = int(line) + if levels != 65535: + raise GeoidBadDataFile("PGM file must have 65535 gray levels") + if self.offset is None: + raise GeoidBadDataFile("PGM file does 
not contain offset") + if self.scale is None: + raise GeoidBadDataFile("PGM file does not contain scale") + + if self.width < 2 or self.height < 2: + raise GeoidBadDataFile("Raster size too small") + + fd = f.fileno() + fullsize = os.fstat(fd).st_size + + if fullsize - headerlen != self.width * self.height * 2: + raise GeoidBadDataFile("File has the wrong length") + + self.headerlen = headerlen + self.raw = mmap.mmap(fd, fullsize, mmap.MAP_SHARED, mmap.PROT_READ) + + self.rlonres = self.width / 360.0 + self.rlatres = (self.height - 1) / 180.0 + self.ix = None + self.iy = None + + def _rawval(self, ix, iy): + if iy < 0: + iy = -iy; + ix += self.width/2; + elif iy >= self.height: + iy = 2 * (self.height - 1) - iy; + ix += self.width/2; + if ix < 0: + ix += self.width; + elif ix >= self.width: + ix -= self.width + + return struct.unpack_from('>H', self.raw, + (iy * self.width + ix) * 2 + self.headerlen + )[0] + + def get(self, lat, lon, cubic=True): + if lon < 0: + lon += 360 + fy = (90 - lat) * self.rlatres + fx = lon * self.rlonres + iy = int(fy) + ix = int(fx) + fx -= ix + fy -= iy + if iy == self.height - 1: + iy -= 1 + + if ix != self.ix or iy != self.iy: + self.ix = ix + self.iy = iy + if not cubic: + self.v00 = self._rawval(ix, iy) + self.v01 = self._rawval(ix+1, iy) + self.v10 = self._rawval(ix, iy+1) + self.v11 = self._rawval(ix+1, iy+1) + else: + v = ( + self._rawval(ix , iy - 1), + self._rawval(ix + 1, iy - 1), + self._rawval(ix - 1, iy ), + self._rawval(ix , iy ), + self._rawval(ix + 1, iy ), + self._rawval(ix + 2, iy ), + self._rawval(ix - 1, iy + 1), + self._rawval(ix , iy + 1), + self._rawval(ix + 1, iy + 1), + self._rawval(ix + 2, iy + 1), + self._rawval(ix , iy + 2), + self._rawval(ix + 1, iy + 2) + ) + if iy == 0: + c3x = GeoidHeight.c3n + c0x = GeoidHeight.c0n + elif iy == self.height - 2: + c3x = GeoidHeight.c3s + c0x = GeoidHeight.c0s + else: + c3x = GeoidHeight.c3 + c0x = GeoidHeight.c0 + self.t = [ + sum([ v[j] * c3x[j][i] for j in range(12) ]) / float(c0x) + for i in range(10) + ] + if not cubic: + a = (1 - fx) * self.v00 + fx * self.v01 + b = (1 - fx) * self.v10 + fx * self.v11 + h = (1 - fy) * a + fy * b + else: + h = ( + self.t[0] + + fx * (self.t[1] + fx * (self.t[3] + fx * self.t[6])) + + fy * ( + self.t[2] + fx * (self.t[4] + fx * self.t[7]) + + fy * (self.t[5] + fx * self.t[8] + fy * self.t[9]) + ) + ) + return self.offset + self.scale * h \ No newline at end of file diff --git a/faampy/mapping/nc_to_gpx.py b/faampy/mapping/nc_to_gpx.py new file mode 100644 index 0000000..5c0b66c --- /dev/null +++ b/faampy/mapping/nc_to_gpx.py @@ -0,0 +1,114 @@ +#!/usr/bin/env python + +""" +Creates a gpx file from a FAAM core netcdf data file. + +GPX files are a standard xml-format, which is understood by many programs. +The gpx file from a flight can for example be used to georeference photographs +taken during a flight. + +see: https://github.com/freefoote/gpscorrelate + +The geotagged images can then in a next step be overlaid on maps using the +LonLat information from the file header. 
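+
+Example (the core file name is hypothetical)::
+
+    faampy nc_to_gpx core_faam_20170214_v004_r0_b999.nc $HOME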
+ +""" + +import datetime +import netCDF4 +import os +import sys + +from matplotlib.dates import num2date + +from faampy.core.utils import get_mpl_time, get_base_time, get_fid + +_TIMESTAMP_FORMAT = '%Y-%m-%dT%H:%M:%SZ' + +_GPX_HEADER = """ + +""" + +_GPX_FOOTER = "\n" + +_GPX_RTE_TEMPLATE = "%s\n" + +_GPX_RTEPT_TEMPLATE = """ + %f + + +""" + +_GPX_TRK_TEMPLATE_START = "\n" +_GPX_TRK_TEMPLATE_END = "\n" + +_GPX_TRKPT_TEMPLATE = """ + %f + + +""" + + +def lonlatalt_to_gpx(lon, lat, alt, *args): + if args: + timestamp = [t.strftime(_TIMESTAMP_FORMAT) for t in args[0]] + else: + now = datetime.datetime.utcnow() + timestamp = [(now+datetime.timedelta(seconds=i)).strftime(_TIMESTAMP_FORMAT) for i in range(len(lon))] + result = "" + result += _GPX_HEADER + result += _GPX_TRK_TEMPLATE_START + trkp = [_GPX_TRKPT_TEMPLATE % (lat[i], lon[i], alt[i], timestamp[i]) for i in range(len(lat))] + result += ''.join(trkp) + result += _GPX_TRK_TEMPLATE_END + result += _GPX_FOOTER + return result + + +def nc_to_gpx(ncfile, outpath): + ds = netCDF4.Dataset(ncfile, 'r') + lon = ds.variables['LON_GIN'][:, 0] + lat = ds.variables['LAT_GIN'][:, 0] + alt = ds.variables['ALT_GIN'][:, 0] + t = num2date(get_mpl_time(ds)) + + outfilename = '%s_%s.gpx' % (get_fid(ds), get_base_time(ds).strftime('%Y%m%d')) + outfile = os.path.join(outpath, outfilename) + outtxt = lonlatalt_to_gpx(lon, lat, alt, t) + f = open(outfile, 'w') + f.write(outtxt) + ds.close() + f.close() + + +def _argparser(): + import argparse + from argparse import RawTextHelpFormatter + sys.argv.insert(0, 'faampy nc_to_gpx') + parser = argparse.ArgumentParser(prog = 'faampy nc_to_gpx', description=__doc__, + formatter_class=RawTextHelpFormatter) + parser.add_argument('ncfile', + action="store", + type=str, + help='input netCDF-file') + parser.add_argument('outpath', + action="store", + nargs='?', + type=str, + default=os.environ['HOME'], + help='file name of output file') + return parser + +def main(): + parser = _argparser() + args = parser.parse_args() + nc_to_gpx(args.ncfile, args.outpath) + + +if __name__ == '__main__': + main() diff --git a/faampy/mapping/sat_tracker.py b/faampy/mapping/sat_tracker.py new file mode 100644 index 0000000..d318d26 --- /dev/null +++ b/faampy/mapping/sat_tracker.py @@ -0,0 +1,392 @@ +#! 
/usr/bin/env python +""" +More satellite track information can be found at: + + http://www.n2yo.com/ + +Popular platforms: + + =========== ====== ====== + Platform Sensor ID + =========== ====== ====== + CALIPSO 29108 + ISS CATS 25544 + TERRA MODIS 25994 + LANDSAT8 39084 + SENTINEL-2A 40697 + SENTINEL-3A 41335 + =========== ====== ====== + +Example:: + + faampy sat_tracker track --show_map "-38 35 -20 43" 39084 13-02-2017 17-02-2017 60 + +""" + +import datetime +import ephem +import errno +import math +import numpy as np +import os +import re +import sys +import urllib2 as urllib + +import matplotlib.pyplot as plt +from mpl_toolkits.basemap import Basemap + +from faampy._3rdparty import texttable + +def __parse_time__(date_string): + fmt='%d-%m-%YT%H:%M:%S' + time_datetime = None + try: + time_datetime = datetime.datetime.strptime(date_string.strip(), fmt) + except: + time_datetime = datetime.datetime.strptime(date_string.strip()+'T00:00:00', fmt) + finally: + pass + return time_datetime + + +#http://stackoverflow.com/questions/600268/mkdir-p-functionality-in-python +def mkdir_p(path): + try: + os.makedirs(path) + except OSError as exc: # Python >2.5 + if exc.errno == errno.EEXIST and os.path.isdir(path): + pass + else: raise + + +class TLE(dict): + + def __init__(self, no_internet=False): + self.urls = ['http://www.celestrak.com/NORAD/elements/science.txt', + 'http://www.celestrak.com/NORAD/elements/resource.txt', + 'http://www.celestrak.com/NORAD/elements/weather.txt', + 'http://www.celestrak.com/NORAD/elements/stations.txt'] + + self.tle_dir=os.path.join(os.getenv('HOME'), '.faampy', 'tle') + #create hidden tle directory in $HOME if it does not exist + if not os.path.exists(self.tle_dir): + mkdir_p(self.tle_dir) + self.read_tle(no_internet) + + def __read_tle_from_file__(self): + """reads the TLE information from locally stored files in $HOME/.tle""" + self.tle_txt = '' + #get latest directory + d=os.listdir(self.tle_dir)[-1] + #read all tle files + for infile in d: + if not infile.endswith('~'): + infi = open( os.path.join( self.tle_dir, infile ), 'r' ) + self.tle_txt += infi.read() + self.tle_txt+='\r\n' + infi.close() + + def __read_tle_from_url__(self): + self.tle_txt='' + #fetch the tle from the urls + utcnow=datetime.datetime.utcnow().strftime('%Y%m%d_%H%M%S') + os.mkdir(os.path.join(self.tle_dir, utcnow)) + content=[] + for url in self.urls: + c=urllib.urlopen( url ).read() + content.append(c) + outfile=os.path.join(self.tle_dir, utcnow, os.path.split(url)[1]) + out = open(outfile, 'w' ) + out.write(c) + out.close() + self.tle_txt+='\r\n'.join(content) + + def read_tle(self, no_internet): + """reads the tle information either from a url or a text file + first of all the script tries to get the most recent info from an url. 
+ Failing that it uses the local tle files stored in $HOME/.tle """ + if no_internet: + try: + self.__read_tle_from_file__() + self.__parse_tle__() + except: + pass + else: + try: + self.__read_tle_from_url__() + self.__parse_tle__() + except: + pass + + def __parse_tle__( self ): + """parse and feed everything in a dictionary""" + #split tle at linebreaks + tle_txt = self.tle_txt.split('\r\n') + #loop over tle_txt and put everything in a dictionary + for i in range(len(tle_txt)): + if tle_txt[i].startswith('1'): + key=tle_txt[i].split()[1].strip('U') + #every tle entry is three lines long + val=tle_txt[i-1:i+2] + self[key]=val + + def get(self, sat_id): + """get the tle for a specific satellite""" + sat_tle=self[sat_id] + result=ephem.readtle(sat_tle[0], sat_tle[1], sat_tle[2]) + return result + + def __str__(self): + output='' + table=[(self[k][0], k) for k in self.keys()] + table.sort() + for i in table: + output+='%s %s\n' % i + return output + + +class Map(object): + + def __init__(self, llcrnrlon=-180,llcrnrlat=-80,urcrnrlon=180,urcrnrlat=80): + lat_0=(llcrnrlat+urcrnrlat)/2. + lon_0=(llcrnrlon+urcrnrlon)/2. + self.m=Basemap(llcrnrlon=llcrnrlon,llcrnrlat=llcrnrlat,urcrnrlon=urcrnrlon,urcrnrlat=urcrnrlat,projection='merc', lat_0=lat_0, lon_0=lon_0, resolution='h') + lat_range=urcrnrlat-llcrnrlat + lon_range=urcrnrlon-llcrnrlon + for lat_interval in [30,15,10,5,3,2,1]: + if float(lat_range)/float(lat_interval) >= 5.0: break + for lon_interval in [30,15,10,5,3,2,1]: + if float(lon_range)/float(lon_interval) >= 5.0: break + + self.m.drawmeridians(range(-180, 180, lon_interval), labels=[1,0,0,1], fontsize=12) + self.m.drawparallels(range( -90, 90, lat_interval), labels=[1,1,0,1], fontsize=12) + self.m.drawcoastlines() + self.m.drawcountries() + self.m.drawmapboundary() + + def oplot_sattrack(self, trkpts): + label, lons, lats = zip(*trkpts) + + # The following section takes care of the occassions when the satellite + # goes over the edge form -180 to 180 or vice versa. 
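+        # (NaN/None break points are inserted below so that matplotlib does
+        # not draw a line straight across the map)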
+ # This section is not very nice and needs improvement + _label, _lons, _lats = [label[0],], [lons[0],], [lats[0],] + for i in range(1, len(trkpts)): + if ((lons[i] > 0) and (lons[i-1] < 0) and (np.abs(lons[i] > 120))): + _label.append(None) + _label.append(label[i]) + _lons.append(np.nan) + _lons.append(lons[i]) + _lats.append(np.nan) + _lats.append(lats[i]) + elif ((lons[i] < 0) and (lons[i-1] > 0) and (np.abs(lons[i] > 120))): + _label.append(None) + _label.append(label[i]) + _lons.append(np.nan) + _lons.append(lons[i]) + _lats.append(np.nan) + _lats.append(lats[i]) + else: + _label.append(label[i]) + _lons.append(lons[i]) + _lats.append(lats[i]) + label = _label[:] + lons = _lons[:] + lats = _lats[:] + + x, y=self.m(lons, lats) + x = np.array(x) + ix = np.where(~np.isfinite(x))[0] + x = list(x) + if len(ix) > 0: + for i in ix: + x[i] = None + y[i] = None + + + + self.m.plot(x, y, lw=1.5) + self.m.plot(x, y, 'o', color='orange') + for i in range(len(label)): + if label[i]: + plt.annotate(label[i], xy=(x[i], y[i]), fontsize=8) + plt.annotate('Created: %sZ' % (datetime.datetime.utcnow().strftime('%d-%m-%Y %H:%M:%S')), xy=(0.02, 0.02), xycoords='figure fraction', fontsize=8) + + +class SatTrack(object): + + def __init__(self, sat_id, start_time, end_time, timestep): + self.trkpts=[] + self.sat_id=sat_id + self.start_time=start_time + self.end_time=end_time + self.timestep=timestep + + def create_table(self): + tab=texttable.Texttable() + tab.set_chars(['-','|','+','-']) + tab.set_deco(tab.HEADER | tab.VLINES | tab.BORDER) + tab.set_cols_align(['r','r','r']) + tab.set_cols_width([19, 7, 6]) + tab.set_precision(2) + tab.add_rows(self.trkpts) + tab.header(['time (utc)', 'lon', 'lat']) + return tab + + def get_tles(self): + self.tle_dict=TLE() + + def calc(self): + #tle=tles[satellite_name] + sat_id=self.sat_id + start_time=self.start_time + end_time=self.end_time + timestep=self.timestep + + self.sat=ephem.readtle(self.tle_dict[sat_id][0], + self.tle_dict[sat_id][1], + self.tle_dict[sat_id][2]) + + + start_time_datetime=__parse_time__(start_time) + end_time_datetime=__parse_time__(end_time) + timestamp=start_time_datetime + + while True: + if timestamp > end_time_datetime: + break + obs.date=timestamp + self.sat.compute(obs) + self.trkpts.append([timestamp.strftime('%d-%m-%YT%H:%M:%S'), + math.degrees(self.sat.sublong), + math.degrees(self.sat.sublat)]) + timestamp+=datetime.timedelta(seconds=timestep) + + def __str__(self): + output='' + ttable=self.create_table() + #Output table to the screen + output+='\n' + output+='\n'.join(self.tle_dict[self.sat_id]) + output+='\n'*2 + output+=ttable.draw() + output+='\n' + #return (data, satellite_name, tle, ttable.draw()) + return output + + + +obs=ephem.Observer() +obs.lat='14.9429' +obs.lon='-23.4839' + +def _argparser(): + import argparse + from argparse import RawTextHelpFormatter + sys.argv.insert(0, 'faampy sat_tracker') + parser = argparse.ArgumentParser(description=__doc__, + formatter_class=RawTextHelpFormatter) + subparsers = parser.add_subparsers( + title='subcommands', description='valid subcommands', + help='additional help') + parser_tle = subparsers.add_parser('tle') + parser_tle.add_argument('sat_name', action="store", nargs='?', type=str, default='all', + help='shows list of available satellites and their IDs') + parser_track=subparsers.add_parser('track') + parser_track.add_argument('sat_id', nargs="*", action='store', type=str, + help="Satellite ID(s). 
If more than one satellite track should be calculated the ids should be separated by commas ") + parser_track.add_argument('start_time', action='store', type=str, + help="date in the format DD-MM-YYYY or dd-mm-YYYYTHH:MM:SS") + parser_track.add_argument('end_time', action='store', type=str, + help="date in the format DD-MM-YYYY or dd-mm-YYYYTHH:MM:SS") + parser_track.add_argument('timestep', + action='store', + type=int, + help="Timestep in seconds") + parser_track.add_argument('-w', '--write_to_file', action="store_true", required=False, default=False, + help='If flag is set the output is stored to a file in the $HOME directory. Default: False') + parser_track.add_argument('-m', '--show_map', nargs='?', required=False, const='-180 -80 180 80', + help='Boundary for the map in the form "left_longitude bottom_latitude right_longitude top_latitude". The input has ') + return parser + + +def main(): + parser = _argparser() + args = parser.parse_args() + FINISHED=False + #print(args) + tle_dict=TLE() + + try: + if args.sat_name: + if args.sat_name == 'all': + print(tle_dict) + else: + for k in tle_dict.keys(): + if args.sat_name.lower() in tle_dict[k][0].lower(): + print('%s %s\n' % (tle_dict[k][0], k)) + FINISHED = True + except: + pass + + # TODO: there must be a better way to get out + if FINISHED: + sys.exit() + + for s in args.sat_id: + s = SatTrack(s, args.start_time, args.end_time, args.timestep) + s.tle_dict = tle_dict + s.calc() + print(s) + + sat_name = s.sat.name + sat_name = re.sub(' ', '_', s.sat.name) + sat_name = re.sub('\(', '', s.sat.name) + sat_name = re.sub('\)', '', s.sat.name) + + if args.write_to_file == True: + ofname = os.path.join(os.environ['HOME'], '%s_%s.txt' % (sat_name.lower(), __parse_time__(args.start_time).strftime('%Y%m%d_%H%M%S'))) + outfile = open(ofname, 'w') + outfile.write(s.__str__()) + outfile.close() + if args.show_map: + llcrnrlon, llcrnrlat, urcrnrlon, urcrnrlat = args.show_map.split() + m = Map(llcrnrlon=float(llcrnrlon), + llcrnrlat=float(llcrnrlat), + urcrnrlon=float(urcrnrlon), + urcrnrlat=float(urcrnrlat)) + m.oplot_sattrack(s.trkpts) + title = '%s\n%s to %s' % (sat_name, + __parse_time__(args.start_time).strftime('%Y-%m-%dT%H:%M:%S'), + __parse_time__(args.end_time).strftime('%Y-%m-%dT%H:%M:%S')) + plt.title(title) + imgfilename = os.path.join(os.environ['HOME'], + 'sat_track_%s_%s_to_%s' % (sat_name.replace(' ', '_'), + __parse_time__(args.start_time).strftime('%Y%m%dT%H%M%S'), + __parse_time__(args.end_time).strftime('%Y%m%dT%H%M%S'))) + plt.savefig(imgfilename) + sys.stdout.write('Map saved as: \n %s\n\n' % (imgfilename,)) + plt.clf() + + +if __name__ == '__main__': + main() + + + +TESTING = False +if TESTING: + today = datetime.datetime.utcnow().strftime('%Y-%m-%d') + tomorrow = (datetime.datetime.utcnow()+datetime.timedelta(days=1)).strftime('%Y-%m-%d') + + tle = TLE() + s = SatTrack('39084', today, tomorrow, 60) + s.tle_dict = tle + s.calc() + llcrnrlon, llcrnrlat, urcrnrlon, urcrnrlat = -38, 35, 3, 53 + m = Map(llcrnrlon=float(llcrnrlon), + llcrnrlat=float(llcrnrlat), + urcrnrlon=float(urcrnrlon), + urcrnrlat=float(urcrnrlat)) + m.oplot_sattrack(s.trkpts) diff --git a/faampy/mapping/utils.py b/faampy/mapping/utils.py new file mode 100644 index 0000000..890fdd3 --- /dev/null +++ b/faampy/mapping/utils.py @@ -0,0 +1,238 @@ +import datetime +import numpy as np +import os +import subprocess +import time +import tempfile + +import georasters as gr + +_TIMESTAMP_FORMAT = '%Y-%m-%dT%H:%M:%SZ' + +def conv_bearing_to_angle(bearing): + """bearing 
is the angle on a compass + + """ + result = np.mod(450-bearing, 360) + return result + + +def conv_angle_to_bearing(angle): + result = np.mod(450-angle, 360) + return result + + +def dec_to_dms_(deg, direction): + """direction is either 'ns' or 'ew' + + """ + d = int(deg) + md = abs(deg - d) * 60 + m = int(md) + sd = (md - m) * 60 + if d < 0: + direction = direction[1].upper() + else: + direction = direction[0].upper() + result = '%s%.2i:%.2i:%f' % (direction, abs(d), m, sd) + return result + + +def translate_coord(lon, lat, origin): + """coordinate translation to a global origin + + """ + start_long = lon*0 + np.radians(origin[0]) + start_latt = lat*0 + np.radians(origin[1]) + end_long = np.radians(lon) + end_latt = np.radians(lat) + + d_latt = end_latt - start_latt + d_long = end_long - start_long + d_latt = d_latt * 0.0 + a = np.sin(d_latt/2)**2 + np.cos(start_latt) * np.cos(end_latt) * np.sin(d_long/2)**2 + c = 2 * np.arctan2(np.sqrt(a), np.sqrt(1-a)) + x = 6371.0 * c * 1000.0 + ix = np.where(end_long < start_long) + x[ix] = x[ix] * -1.0 + + d_latt = end_latt - start_latt + d_long = end_long - start_long + d_long = d_long * 0.0 + a = np.sin(d_latt/2)**2 + np.cos(start_latt) * np.cos(end_latt) * np.sin(d_long/2)**2 + c = 2 * np.arctan2(np.sqrt(a), np.sqrt(1-a)) + y = 6371.0 * c * 1000.0 + ix = np.where(end_latt < start_latt) + y[ix] = y[ix] * -1.0 + return(x,y) + + +def rotate_coord(x, y, angle): + """coordinate system rotation + + """ + x_rot = x * np.cos(np.deg2rad(angle)) + y * np.sin(np.deg2rad(angle)) + y_rot = -x * np.sin(np.deg2rad(angle)) + y * np.cos(np.deg2rad(angle)) + return(x_rot, y_rot) + + +def get_wgs84_offset(coords): + """Uses the GeoidEval routines from [1] to get the offset between GPS data and + sea level. + + #to get the difference between WGS84 and mean sea level + GeoidEval --input-file /tmp/tmpt35as + + example: + >>> coord = [(50, 0), (51, 2), (54, 2)] + >>> offset = get_wgs84_offset(coord) + >>> print(offset + [45.0389, 44.4422, 42.6927] + + [1] http://geographiclib.sourceforge.net/ + [2] http://www.esri.com/news/arcuser/0703/geoid1of3.html + """ + #lon, lat = zip(*coord) + if not isinstance(coords, list): + coords = [coords, ] + + lon, lat = zip(*coords) + lon_str = [dec_to_dms_(l, 'ns') for l in lon] + lat_str = [dec_to_dms_(l, 'ew') for l in lat] + coords_str = zip(lon_str, lat_str) + fd, fn = tempfile.mkstemp() + fd = open(fn, 'w') + fd.writelines(['%s %s\n' % l for l in coords_str]) + fd.close() + geoideval = os.system('which GeoidEval') + result = subprocess.check_output('%s --input-file %s' % (geoideval, fn), shell=True) + result = [float(r) for r in result.split()] + return result + + +def calc_distance_to_point(coords, origin): + #TODO + if not isinstance(coords, list): + coords = [coords, ] + + + +def calc_distance_to_line(coords, line): + """ This works in a cartesian coordinate system + + point should be list of tuples tuple (x, y) + line should be tuple ((x1, y1), (x2, y2)) + """ + x3 = coords[0] + y3 = coords[1] + x1 = line[0][0] + y1 = line[0][1] + x2 = line[1][0] + y2 = line[1][1] + px = x2-x1 + py = y2-y1 + u = ((x3 - x1) * px + (y3 - y1) * py) / float(px*px + py*py) + if u > 1: + u = 1 + elif u < 0: + u = 0 + x = x1 + u * px + y = y1 + u * py + dx = x - x3 + dy = y - y3 + dist = np.sqrt(dx*dx + dy*dy) + return dist + + +#TODO: needs speed improvement +def simplify(coords, error=None, distance=None, timestep=None): + """uses the simplify option from gpsbabel + + """ + import gpsbabel + + if not error: + error = '0.2k' + if not distance: + 
distance = '10k' + if not timestep: + timestep = 120 + #crosstracked track + (lon, lat, alt) = zip(*coords) + now = datetime.datetime.utcnow() + timestamp = [(now+datetime.timedelta(seconds=i)).strftime(_TIMESTAMP_FORMAT) for i in range(len(lon))] + #gpxd = lonlatalt_to_gpx(lon, lat, alt) + gpxr = gpsbabel.GPXRoute() + n = len(lon) + for i in range(n): + gpxr.rtepts.append(gpsbabel.GPXWaypoint()) + gpxr.rtepts[-1].lat = lat[i] + gpxr.rtepts[-1].lon = lon[i] + gpxr.rtepts[-1].ele = alt[i] + gpxr.rtepts[-1].time = timestamp[i] + gpxd = gpsbabel.GPXData() + gpxd.rtes.append(gpxr) + + track_ct = gpsbabel.GPSBabel() + track_ct.procRoutes = True + track_ct.setInGpx(gpxd) + track_ct.addFilter('simplify', {'crosstrack': None, 'error': error}) + track_ct.captureStdOut() + (retcode, gpxd_crosstracked) = track_ct.execCmd() + + lon_tmp, lat_tmp, alt_tmp, timestamp_tmp = ([], [], [], []) + for rte in gpxd_crosstracked.rtes: + for pt in rte.rtepts: + lon_tmp.append(pt.lon) + lat_tmp.append(pt.lat) + alt_tmp.append(pt.ele) + timestamp_tmp.append(pt.time) + + track_ip = gpsbabel.GPSBabel() + track_ip.procRoutes = True + track_ip.setInGpx(gpxd) + track_ip.addFilter('position', {'distance': '50m'}) + track_ip.captureStdOut() + (retcode, gpxd_interpolated) = track_ip.execCmd() + + for rte in gpxd_interpolated.rtes: + for pt in rte.rtepts: + if (pt.time.hour * 3600 + pt.time.minute * 60 + pt.time.second) % timestep == 0: + lon_tmp.append(pt.lon) + lat_tmp.append(pt.lat) + alt_tmp.append(pt.ele) + timestamp_tmp.append(pt.time) + timesort = [time.mktime(tstamp.timetuple()) for tstamp in timestamp_tmp] + ix = list(index for index, item in sorted(enumerate(timesort), key=lambda item: item[1])) + lon = [float(lon_tmp[i]) for i in ix] + lat = [float(lat_tmp[i]) for i in ix] + alt = [float(alt_tmp[i]) for i in ix] + return (lon, lat, alt) + + +def is_point_on_land(coords, shape_file=None): + """Checks if a coords are over land or over water. This is done useing + a shape file of world boundaries and looping over all Polygons to see + if the point is in any of it. 
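+
+    Example (assumes the default TM_WORLD_BORDERS shape file is available)::
+
+        >>> is_point_on_land((-1.3, 51.6))    # over Oxfordshire
+        True
+        >>> is_point_on_land((-10.0, 48.0))   # over the open Atlantic
+        False
+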
+ + """ + + import fiona + from shapely.geometry import Point, shape + + if not shape_file: + shape_file='/home/data/mapdata/other/tm_world/TM_WORLD_BORDERS-0.3.shp' + + lon, lat=coords + pt=Point(lon, lat) + try: + fc=fiona.open(shape_file) + except: + pass + result=False + for feature in fc: + if shape(feature['geometry']).contains(pt): + result=True + fc.close() + return result + \ No newline at end of file diff --git a/faampy/mapping/world_map.py b/faampy/mapping/world_map.py new file mode 100644 index 0000000..2b03083 --- /dev/null +++ b/faampy/mapping/world_map.py @@ -0,0 +1,286 @@ +# -*- coding: utf-8 -*- + +import datetime +import itertools +import Image +import matplotlib as mpl +mpl.use('Agg') +import matplotlib.pyplot as plt +from mpl_toolkits.basemap import Basemap +import numpy as np +import os +import sys +import tempfile + +import faampy +from faampy.core.faam_spatial import FAAM_Spatial_DB +import json + + +##### SETTINGS ####################################### + +PAPER_SIZE='A4' +DPI=100 + +FLIGHT_TRACK_DATA_PATH='/home/axel/faam_kml/flight_track_data' + +OUTPATH='/home/axel/Dropbox/poster_map/' + +DAFT_FLIGHT_TRACKS=['b364', 'b358', 'b403', 'b657', 'b810', 'b811', 'b813', 'b815', 'b817', 'b818', 'b819', 'b820'] + +#BACKGROUND_IMAGE='/home/axel/Dropbox/poster_map/world_4326_10476x5238.png' + +BACKGROUND_IMAGE=True +BACKGROUND_IMAGE_PATH='/home/axel/Dropbox/poster_map/' +BACKGROUND_IMAGE_PATH = os.path.join(faampy.__path__[0], '..', 'files', 'img') +os.path.exists(BACKGROUND_IMAGE_PATH) +BACKGROUND_IMAGE_OVERWRITE=False +BACKGROUND_IMAGE_FILENAME='' +BACKGROUND_WARP=False +BACKGROUND_SCALE=1.0 + +PLOT_FLIGHT_TRACKS=True +PLOT_AIRPORTS=True + +RELOAD_FLIGHT_TRACKS=False + +# BoundaryBox +LLCRNRLON=-180.0 +LLCRNRLAT=-90 +URCRNRLON=180.0 +URCRNRLAT=90.0 + +###################################################### + + +plt.close('all') + +din_dict = {'a0': [(46.8, 33.1), int(12.0*1.41**4)], + 'a1': [(33.1, 23.4), int(12.0*1.41**3)], + 'a2': [(23.4, 16.5), int(12.0*1.41**2)], + 'a3': [(16.5, 11.7), int(12.0*1.41**1)], + 'a4': [(11.7, 8.27), int(12.0*1.41**0)]} + + +#mpl.rc({'text.usetex': True}) +#mpl.rc({'figure.figsize': din_dict[str.lower(PAPER_SIZE)][0]}) +#mpl.rc({'font.size': din_dict[str.lower(PAPER_SIZE)][1]}) + + +#http://gis.stackexchange.com/questions/99672/mapnik-rendering-with-osm-carto-style# + + +def create_background_img(imgx, imgy, outfile): + try: + import mapnik + except: + return + robin=mapnik.Projection('+proj=robin +lon_0=0 +x_0=0 +y_0=0 +datum=WGS84 +units=m +no_defs ') + # Change this to the bounding box you want + longlat = mapnik.Projection('+init=epsg:4326') + bounds = (LLCRNRLON, LLCRNRLAT, URCRNRLON, URCRNRLAT) + + map_uri=outfile + mapfile='osm.xml' + m = mapnik.Map(imgx,imgy) + mapnik.load_map(m,mapfile) + + # ensure the target map projection is mercator + m.srs = robin.params() + + bbox = mapnik.Envelope(*bounds) + + # Our bounds above are in long/lat, but our map + # is in spherical mercator, so we need to transform + # the bounding box to mercator to properly position + # the Map when we call `zoom_to_box()` + transform = mapnik.ProjTransform(longlat,robin) + robin_bbox = transform.forward(bbox) + #print(robin_bbox) + # Mapnik internally will fix the aspect ratio of the bounding box + # to match the aspect ratio of the target image width and height + # This behavior is controlled by setting the `m.aspect_fix_mode` + # and defaults to GROW_BBOX, but you can also change it to alter + # the target image size by setting aspect_fix_mode to GROW_CANVAS 
+ #m.aspect_fix_mode = mapnik.GROW_CANVAS + # Note: aspect_fix_mode is only available in Mapnik >= 0.6.0 + m.zoom_to_box(robin_bbox) + + # render the map to an image + im = mapnik.Image(imgx,imgy) + mapnik.render_to_file(m, map_uri, 'png') + + sys.stdout.write('output image to %s!\n' % map_uri) + + # Note: instead of creating an image, rendering to it, and then + # saving, we can also do this in one step like: + # mapnik.render_to_file(m, map_uri,'png') + + # And in Mapnik >= 0.7.0 you can also use `render_to_file()` to output + # to Cairo supported formats if you have Mapnik built with Cairo support + # For example, to render to pdf or svg do: + # mapnik.render_to_file(m, "image.pdf") + #mapnik.render_to_file(m, "image.svg") + + +def get_ax_size(DPI): + ax=plt.gca() + fig=plt.gcf() + bbox = ax.get_window_extent().transformed(fig.dpi_scale_trans.inverted()) + width, height = bbox.width, bbox.height + width*=float(DPI) + height*=float(DPI) + return int(width), int(height) + + +def plot_airports(m): + airports_file = os.path.join(faampy.FAAMPY_DATA_PATH, 'other', 'airports.txt') + data=np.genfromtxt(airports_file, delimiter=',', dtype=None, names='id,lats,lons') + x, y=m(data['lons'], data['lats']) + # the zorder setting makes sure that the airport locations are plotted on top of the flight tracks + m.scatter(x, y, c='orange', zorder=3, alpha=0.5) + + +def plot_flight_tracks(m, x, y): + m.plot(x, y, linewidth=1.0, color='0.4', alpha=0.7) + + +def get_flight_tracks(m): + db = FAAM_Spatial_DB(os.path.join(faampy.FAAMPY_DATA_PATH, 'db', 'faam_spatial_db.sqlite')) + sql = """SELECT fid from flight_tracks;""" + cur = db.conn.cursor() + cur.execute(sql) + fids = [fid[0] for fid in cur.fetchall()] + fids.sort() + # tidy up flight tracks + for daft in DAFT_FLIGHT_TRACKS: + if daft in fids: + fids.remove(daft) + + sql = """SELECT AsGeojson(Simplify(FT.the_geom, 0.01)) from flight_tracks FT where FT.fid in (%s);""" % (','.join(["'%s'" % fid for fid in fids])) + cur = db.conn.cursor() + cur.execute(sql) + result = cur.fetchall() + + x, y, z = [], [], [] + for r in result: + if not r: + continue + lon, lat, alt = zip(*(json.loads(r[0]))['coordinates']) + _x, _y = m(lon, lat) + x.append(_x) + x.append([None,]) + y.append(_y) + y.append([None,]) + z.append(alt) + z.append([None,]) + + #now flatten the coordinates + x = list(itertools.chain.from_iterable(x)) + y = list(itertools.chain.from_iterable(y)) + z = list(itertools.chain.from_iterable(z)) + return (x,y,z) + + +def _argparser(): + import argparse + from argparse import RawTextHelpFormatter + #sys.argv.insert(0, 'faampy world_map') + parser = argparse.ArgumentParser(description=__doc__, + formatter_class=RawTextHelpFormatter) + parser.add_argument('--din', action="store", type=str, help='Paper size', default='a4') + parser.add_argument('-o', '--outpath', action="store", type=str, required=False, + default=os.environ['HOME'], help='Directory where the images will be stored. 
Default: $HOME.') + parser.add_argument('--dpi', action="store", type=int, required=False, default=300, + help='resolution dot per inch') + parser.add_argument('--map_background', action="store_true", required=False, default=True, + help='whether background map is added or not') + return parser + + +def get_image_filename(width, height): + # find the most suitable image + img_filenames = [] + _tmp = os.listdir(BACKGROUND_IMAGE_PATH) + for t in _tmp: + if t.startswith('world_osm_'): + img_filenames.append(t) + img_widths = [float(i.split('_')[-1][:-4].split('x')[0]) for i in img_filenames] + img_heights = [float(i.split('_')[-1][:-4].split('x')[1]) for i in img_filenames] + ratio = [abs(((w*h)/(width*height))-1.) for w, h in zip(img_widths, img_heights)] + return img_filenames[ratio.index(min(ratio))] + + +def main(): + parser = _argparser() + args = parser.parse_args() + #print(args) + + mpl.rcParams['text.usetex'] = True + #mpl.rc({'figure.figsize': din_dict[str.lower(args.din)][0]}) + mpl.rcParams['figure.figsize'] = din_dict[str.lower(args.din)][0] + #mpl.rc({'font.size': din_dict[str.lower(args.din)][1]}) + mpl.rcParams['font.size'] = din_dict[str.lower(args.din)][1] + m = Basemap(projection='robin', lon_0=0, resolution='c') + + m.drawcoastlines(linewidth=0, zorder=0) + plt.tight_layout() + + #Dodgy way to get the correct size for the mapnik osm image + plt.savefig(os.path.join(tempfile.mktemp(suffix='.svg')), dpi=args.dpi) + width, height=get_ax_size(args.dpi) + #Determine size for the OSM background image in Robinson projection + sys.stdout.write('Size: %i x %i\n' % (width, height)) + + #Fix empty areas for the OSM background image + ocean_color=tuple([rgb/255. for rgb in (180,209,206)]) + ice_color=tuple([rgb/255. for rgb in (241,240,230)]) + #Arctic: Fill everything north of 85 degrees with ocean color + m.drawmapboundary(fill_color=ocean_color) + + #Antarctica: Fill everything south of -85 with ice_color using a Polygon + from matplotlib.patches import Polygon + coord=[m(x, y) for x,y in zip(range(-180, 181), [-85.,]*361)] + [m(x, y) for x,y in zip(range(180, -181,-1), [-90.,]*361)] + p=Polygon(coord, facecolor=ice_color, edgecolor='none') + plt.gca().add_patch(p) + + + if args.map_background: + # TODO + #BACKGROUND_IMAGE_FILENAME=os.path.join(BACKGROUND_IMAGE_PATH, 'world_osm_54030_%ix%i.png' % (width, height)) + BACKGROUND_IMAGE_FILENAME = os.path.join(BACKGROUND_IMAGE_PATH, get_image_filename(width, height)) + print(BACKGROUND_IMAGE_FILENAME) + if (not os.path.exists(BACKGROUND_IMAGE_FILENAME) or BACKGROUND_IMAGE_OVERWRITE): + create_background_img(width, height, BACKGROUND_IMAGE_FILENAME) + img=Image.open(BACKGROUND_IMAGE_FILENAME) + _filter = Image.ANTIALIAS + rimg = img.resize((width, height), _filter) + m.imshow(rimg, origin='upper') + #m.drawcoastlines() #uncomment this for testing purposes, to see if image fits + else: + m.drawcoastlines() + m.fillcontinents(color='coral',lake_color='aqua') + m.drawmapboundary(fill_color='aqua') + img=Image.open(BACKGROUND_IMAGE) + m.imshow(img, origin='upper') + + m.drawparallels(np.arange(-90,90,30),labels=[0,0,0,0],fontsize=12) + m.drawmeridians(np.arange(0,360,30),labels=[1,0,0,0],fontsize=12) + + x, y, z = get_flight_tracks(m) + plot_flight_tracks(m, x, y) + + if PLOT_AIRPORTS: + plot_airports(m) + + plt.tight_layout() + + plt.figtext(0.94, 0.05, r'Status %s' % datetime.datetime.utcnow().strftime('%d-%m-%Y'), size='small', ha='right') + #plt.figtext(0.02, 0.9, r"\textbf{\textit{FAAM}}" "\n" r"Facility for Airborne Atmospheric 
Measurements", size='x-large', color='blue') + + for fmt in ['png', 'pdf']: + plt.savefig(os.path.join(args.outpath, 'poster_flight_map_%s_%sdpi.%s' % (args.din.lower(), str(args.dpi).lower(), fmt)), dpi=int(args.dpi), transparent=True) + + +if __name__ == '__main__': + main() diff --git a/faampy/plotting/__init__.py b/faampy/plotting/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/faampy/plotting/map.py b/faampy/plotting/map.py new file mode 100644 index 0000000..09c76dc --- /dev/null +++ b/faampy/plotting/map.py @@ -0,0 +1,68 @@ +''' +Created on 7 Jul 2013 + +@author: axel +''' + +import matplotlib.pyplot as plt +from mpl_toolkits.basemap import Basemap +import scipy +import numpy as np + +#import faampy.mapping.FlightTrack + + +class Map(object): + + def __init__(self): + self.fig = plt.figure() + + def setup(self, ds, limit): + self.limit = limit + #self.ft = faampy.mapping.FlightTrack.FlightTrack() + #self.ft.set_rawlatlonalt_from_netcdf(ds) + + step = 15 + self.ft = {} + if len(ds.variables['LON_GIN'][:].shape) == 2: + self.ft['lon'] = ds.variables['LON_GIN'][:,0].ravel()[::step] + self.ft['lat'] = ds.variables['LAT_GIN'][:,0].ravel()[::step] + else: + self.ft['lon'] = ds.variables['LON_GIN'][:].ravel()[::step] + self.ft['lat'] = ds.variables['LAT_GIN'][:].ravel()[::step] + #self.ft.secs = [(x - self.ft.BaseTime).seconds for x in self.ft.time] + self.ft['secs'] = ds.variables['Time'][:][::step] + + + self.urcrnrlon = np.ceil(np.max(self.ft['lon'])) + self.llcrnrlon = np.floor(np.min(self.ft['lon'])) + self.urcrnrlat = np.ceil(np.max(self.ft['lat'])) + self.llcrnrlat = np.floor(np.min(self.ft['lat'])) + self.lat0 = (self.urcrnrlat + self.llcrnrlat) / 2.0 + self.lon0 = (self.urcrnrlon + self.llcrnrlon) / 2.0 + + def plot(self): + self.m = Basemap(llcrnrlon=self.llcrnrlon, + llcrnrlat=self.llcrnrlat, + urcrnrlon=self.urcrnrlon, + urcrnrlat=self.urcrnrlat, + resolution='h', + projection='merc', + lat_0=self.lat0, + lon_0=self.lon0) + self.m.drawcoastlines() + # draw parallels and meridians. 
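+        # the labels flags are [left, right, top, bottom]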
diff --git a/faampy/plotting/profile.py b/faampy/plotting/profile.py
new file mode 100644
index 0000000..a155646
--- /dev/null
+++ b/faampy/plotting/profile.py
@@ -0,0 +1,160 @@
+'''
+Created on 15 May 2013
+
+@author: axel
+'''
+
+import matplotlib as mpl
+import matplotlib.pyplot as plt
+import numpy as np
+import numpy.ma as ma
+import sys
+
+from matplotlib.ticker import MaxNLocator, ScalarFormatter
+
+import faampy.core.utils
+
+params = {'legend.fontsize': 10,}
+plt.rcParams.update(params)
+
+
+class Profile(object):
+
+    def __init__(self, *args):
+        self.NO_DATA = None
+        self.fig = None
+        self.x_data = []
+        self.y_data = []
+
+    def __get_mask__(self, parname):
+        if not parname+'_FLAG' in self.ds.variables.keys():
+            flag_data = np.zeros(self.ds.variables[parname].shape).astype(bool)
+        else:
+            flag_data = self.ds.variables[parname+'_FLAG'][:]
+        mask = np.ones(flag_data.shape).astype(bool)
+        for f in self.flag:
+            mask[flag_data == f] = False
+        mask[self.ds.variables[parname][:] == -9999] = True
+        return mask
+
+    def setup(self, ds, vars, *args):
+        self.ds = ds
+        self.vars = list(vars)
+        self.index = range(self.ds.variables['Time'].shape[0])
+
+        # check that all the parameters exist in the netCDF file
+        is_var = lambda var: var.upper() in [i.upper() for i in self.ds.variables.keys()]
+        pars = []
+        for l in self.vars:
+            pars.append([item for item in l if is_var(item)])
+            diff = set(l).difference(pars[-1])
+            if diff:
+                sys.stdout.write('Skip %s because it is not in dataset.\n' % (','.join(diff)))
+        if not [item for sublist in pars for item in sublist]:
+            self.NO_DATA = True
+            return 0
+        else:
+            self.NO_DATA = False
+            self.vars = pars
+
+        try:
+            self.index = faampy.core.utils.data_filter(self.ds, args[0])
+        except:
+            pass
+        self.index = sorted(self.index)
+        try:
+            self.flag = args[1]
+        except:
+            self.flag = [0, 1, 2, 3]
+
+        self.fig, self.axs = plt.subplots(ncols=len(self.vars), sharey=True)
+        if len(self.vars) == 1:
+            self.axs = np.array([self.axs,])
+
+        for i in range(len(self.vars)):
+            self.x_data.append([])
+            self.y_data.append([])
+            for j in range(len(self.vars[i])):
+                if len(ds.variables[self.vars[i][j]][:].shape) == 1:
+                    self.x_data[i].append(ds.variables['ALT_GIN'][self.index].ravel())
+                    mask = self.__get_mask__(self.vars[i][j])
+                    y = ma.masked_array(ds.variables[self.vars[i][j]][self.index].ravel(), mask=mask[self.index].ravel())
+                else:
+                    (rows, cols) = ds.variables[self.vars[i][j]][:].shape
+                    if cols > 32:
+                        self.x_data[i].append(ds.variables['ALT_GIN'][self.index,:].ravel())
+                        mask = self.__get_mask__(self.vars[i][j])
+                        y = ma.masked_array(ds.variables[self.vars[i][j]][self.index,0:64:2].ravel(),
mask=mask[self.index,0:64:2].ravel()) + else: + #self.x_data[i].append(ds.variables['ALT_GIN'][self.index,0:32:32/cols].ravel()) + self.x_data[i].append(ds.variables['ALT_GIN'][self.index,0:32:32/cols].ravel()) + mask = self.__get_mask__(self.vars[i][j]) + y = ma.masked_array(ds.variables[self.vars[i][j]][self.index,:].ravel(), mask=mask[self.index,:].ravel()) + #mask = self.__get_mask__(ds, vars[i][j]) + #y = ma.masked_array(ds.variables[vars[i][j]], mask=mask) + + #y = np.ravel(y[self.index,:]) + #y[y == -9999] = np.nan + #TODO + #y = self.__set_mask__(y, vars[i][j]) + #self.y_data[i].append(np.ravel(ds.variables[vars[i][j]][self.index,:])) + self.y_data[i].append(y) + + def plot(self): + for i in range(len(self.vars)): + for j in range(len(self.vars[i])): + #ax = self.subplt[i] + ax=self.axs[i] + #TODO: weird behaviour; there shouldn't be a need to sort the data + ix = np.argsort(self.x_data[i][j]) + x = self.x_data[i][j][ix] + y = self.y_data[i][j][ix] + ax.plot(y, x, '-', label=self.vars[i][j].lower()) + try: + xunits_str = ', '.join([(self.ds.variables[p].units).strip() for p in self.vars[i]]) + ax.set_xlabel(xunits_str) + except: + pass + + def plot_formatter(self): + #for ax in self.subplt: + ax=self.axs[0] + y_range=ax.get_ylim()[1]-ax.get_ylim()[0] + _ylim=(ax.get_ylim()[0]-(y_range/100.)*3, + ax.get_ylim()[1]+(y_range/100.)*3) + ax.set_ylim(_ylim) + for ax in self.axs: + ax.grid(b='on') + plt.setp(ax.get_yticklabels(), visible=False) + #http://stackoverflow.com/questions/4700614/how-to-put-the-legend-out-of-the-plot + #box = ax.get_position() + #ax.set_position([box.x0, box.y0 + box.height * 0.2, box.width, box.height * 0.8]) + #ax.set_position([box.x0, box.y0, box.width, box.height * 0.8]) + #ax.legend(loc='upper center', bbox_to_anchor=(0.5, -0.12)) + leg=ax.legend() + leg.get_frame().set_alpha(0.5) + if ax.get_xticklabels().__len__() > 5: + ax.xaxis.set_major_locator(MaxNLocator(5)) + ax.yaxis.set_major_formatter(ScalarFormatter(useOffset=False)) + #set y-axis lower limit to zero + cur_ylim = ax.get_ylim() + if cur_ylim[0] < 0: + ax.set_ylim((0, cur_ylim[1])) + + plt.setp((self.axs[0].get_yticklabels()), visible=True) + (self.axs[0]).set_ylabel('alt (m)') + + def get_figure(self): + return(self.fig) + diff --git a/faampy/plotting/quicklooks.py b/faampy/plotting/quicklooks.py new file mode 100644 index 0000000..44f2842 --- /dev/null +++ b/faampy/plotting/quicklooks.py @@ -0,0 +1,247 @@ +""" +Module to create quicklooks from a netCDF using the flight summary +and a configuration file, that defines the plot layout (quicklook.cfg) + + +The config file should look similar to this:: + +[['BTHEIM_U']] +[['TSC_BLUU', 'TSC_GRNU', 'TSC_REDU'], ['BSC_BLUU', 'BSC_GRNU', 'BSC_REDU']] +[['PSAP_LIN'], ['PSAP_LOG'], ['PSAP_FLO', 'PSAP_TRA']] +[['NEPH_PR'], ['NEPH_T']] +[['NV_TWC_C', 'NV_LWC_C']] +[['WVSS2R_VMR', 'WVSS2F_VMR']] +[['PTCH_GIN',], ['TAS', 'TAS_RVSM']] + +""" + + +import ast +import sys +import netCDF4 +import os +import re +import numpy as np + +import matplotlib as mpl +if __name__ == '__main__': mpl.use('Agg') +import matplotlib.pyplot as plt + +from faampy.core.utils import get_fid, conv_time_to_secs, \ + get_index_from_hhmmss, \ + conv_secs_to_time, get_flight_duration +from faampy.core.flight_summary import FlightSummary, Event +from timeseries import Timeseries +from profile import Profile +from scatter import Scatter +from skewt import SkewT +from map import Map + +try: + import seaborn +except ImportError: + sys.stdout.write('seaborn not available ...\n') + + + + + +def 
process(fs, ds, outpath, flag=None, no_overwrite=False, config_file=None):
+
+    if not flag:
+        flag = [0]
+    if not config_file:
+        _CATS = [[['BTHEIM_U']],
+                 [['TSC_BLUU', 'TSC_GRNU', 'TSC_REDU'], ['BSC_BLUU', 'BSC_GRNU', 'BSC_REDU']],
+                 [['PSAP_LIN'], ['PSAP_LOG'], ['PSAP_FLO', 'PSAP_TRA']],
+                 [['NEPH_PR'], ['NEPH_T']],
+                 [['NV_TWC_C', 'NV_LWC_C']],
+                 [['WVSS2R_VMR', 'WVSS2F_VMR']],
+                 [['PTCH_GIN',], ['TAS', 'TAS_RVSM']]]
+    else:
+        f = open(config_file, 'r')
+        lines = f.readlines()
+        f.close()
+        _CATS = [ast.literal_eval('({0})'.format(line)) for line in lines]
+        _CATS = filter(bool, _CATS)  # remove empty lines
+
+    plot_type = 'timeseries'
+    Start_time = fs.Entries[[x.Name.lower() for x in fs.Entries].index('t/o')].Start_time
+    # search from the end of the flight summary for the landing entry
+    fs.Entries.reverse()
+    try:
+        Stop_time = fs.Entries[[x.Name.lower() for x in fs.Entries].index('land')].Start_time
+    except:
+        Stop_time = ''
+    fs.Entries.reverse()
+    if not Stop_time:
+        duration = get_flight_duration(ds)
+        Stop_time = conv_secs_to_time(conv_time_to_secs(Start_time)+duration, no_colons=True)
+    Name = 'full-flight'
+    e = Event()
+    e.Start_time = Start_time
+    e.Stop_time = Stop_time
+    e.Name = Name
+    fs.Entries.insert(0, e)
+
+    cnt = -1
+
+    for e in fs.Entries:
+        cnt += 1
+        if re.findall('full-flight', e.Name.lower()):
+            # make a full flight plot
+            plot_type = 'timeseries'
+        elif re.findall('run|leg|box', e.Name.lower()):
+            plot_type = 'timeseries'
+        elif re.findall('prof', e.Name.lower()):
+            plot_type = 'profile'
+        else:
+            continue
+
+        fid = get_fid(ds)
+
+        sys.stdout.write('\n%s: %s to %s\n' % (e.Name, e.Start_time, e.Stop_time))
+        if e.Start_time and e.Stop_time:
+            x1 = conv_time_to_secs(e.Start_time)
+            e.Start_time = re.sub(':', '', conv_secs_to_time(x1))
+            x2 = conv_time_to_secs(e.Stop_time)
+            e.Stop_time = re.sub(':', '', conv_secs_to_time(x2))
+        else:
+            continue
+        limit = [('Time', (x1, x2)),]
+
+        outfile = os.path.join(outpath, '%s_e%.2i_%s_to_%s_%s.png' % (fid, cnt, e.Start_time, e.Stop_time, 'map'))
+        if os.path.exists(outfile) and no_overwrite:
+            # keep the existing map
+            pass
+        else:
+            try:
+                m = Map()
+                m.setup(ds, [x1, x2])
+                m.plot()
+                fig = m.get_figure()
+                fig.suptitle('%s-%s - %s to %s' % (fid, e.Name, e.Start_time, e.Stop_time))
+                fig.savefig(outfile)
+                plt.close(fig)
+                sys.stdout.write('Created ... %s\n' % outfile)
+            except:
+                pass
+
+        for cat in _CATS:
+            plt.close('all')
+            if 'WVSS2' in cat[0][0]:
+                flag = [0, 1]
+            else:
+                flag = [0,]
+            # flattened parameter string
+            par_str = '-'.join([item.lower() for sublist in cat for item in sublist])
+            outfile = os.path.join(outpath, '%s_e%.2i_%s_to_%s_%s_%s.png' % (fid, cnt, e.Start_time, e.Stop_time, plot_type, par_str))
+            if os.path.exists(outfile) and no_overwrite:
+                pass
+            else:
+                try:
+                    if plot_type == 'timeseries':
+                        p = Timeseries()
+                    else:
+                        p = Profile()
+                    # plot for the time limits and only data values that are flagged 0
+                    p.setup(ds, cat, limit, flag)
+                    if not p.NO_DATA:
+                        p.plot()
+                        p.plot_formatter()
+                        fig = p.get_figure()
+                        fig.suptitle('%s-%s - %s to %s' % (get_fid(ds), e.Name, e.Start_time, e.Stop_time))
+                        plt.savefig(outfile)
+                        sys.stdout.write('Created ... 
%s\n' % outfile) + plt.close(fig) + except: + outfile=os.path.join(outpath, '%s_e%.2i_%s_to_%s_%s_%s.png' % (fid, cnt, e.Start_time, e.Stop_time, plot_type, par_str)) + sys.stdout.write('Could not create ... %s\n' % outfile) + + if plot_type == 'profile': + outfile=os.path.join(outpath, '%s_e%.2i_%s_to_%s_%s.png' % (fid, cnt, e.Start_time, e.Stop_time, 'skewt')) + if os.path.exists(outfile) and no_overwrite == True: + pass + else: + try: + #sort index + s_ix=get_index_from_hhmmss(ds, e.Start_time) + e_ix=get_index_from_hhmmss(ds, e.Stop_time) + + if len(ds.variables['PS_RVSM'].shape) == 2: + ix=np.argsort(np.mean(ds.variables['PS_RVSM'][:], axis=1).ravel()) + pres=np.mean(ds.variables['PS_RVSM'][:], axis=1).ravel()[s_ix:e_ix] + dewp=np.mean(ds.variables['TDEW_GE'][:], axis=1).ravel()[s_ix:e_ix]-273.15 + + if 'U_C' in ds.variables.keys(): + u=np.mean(ds.variables['U_C'][:], axis=1).ravel()[s_ix:e_ix] + v=np.mean(ds.variables['V_C'][:], axis=1).ravel()[s_ix:e_ix] + elif 'U_NOTURB' in ds.variables.keys(): + u=np.mean(ds.variables['U_NOTURB'][:], axis=1).ravel()[s_ix:e_ix] + v=np.mean(ds.variables['V_NOTURB'][:], axis=1).ravel()[s_ix:e_ix] + else: + u=pres[:]*0.0 + v=pres[:]*0.0 + tat_di_r=np.mean(ds.variables['TAT_DI_R'][:], axis=1).ravel()[s_ix:e_ix] + else: + ix=np.argsort(ds.variables['PS_RVSM'][:]) + pres=ds.variables['PS_RVSM'][:][s_ix:e_ix] + dewp=ds.variables['TDEW_GE'][:][s_ix:e_ix]-273.15 + + if 'U_C' in ds.variables.keys(): + u=ds.variables['U_C'][:][s_ix:e_ix] + v=ds.variables['V_C'][:][s_ix:e_ix] + elif 'U_NOTURB' in ds.variables.keys(): + u=ds.variables['U_NOTURB'][:][s_ix:e_ix] + v=ds.variables['V_NOTURB'][:][s_ix:e_ix] + else: + u=pres[:]*0.0 + v=pres[:]*0.0 + tat_di_r=ds.variables['TAT_DI_R'][:][s_ix:e_ix] + + theta=(tat_di_r*(1000./pres)**(0.286)).ravel() + s=SkewT() + s.set_data(pres, theta, dewp, u, v) + if np.max(pres) > 900 and (np.max(pres)-np.min(pres) > 500): + s.zoom=False + else: + s.zoom=True + s.plot() + fig=s.get_figure() + fig.suptitle('%s-%s - %s to %s' % (get_fid(ds), e.Name, e.Start_time, e.Stop_time)) + outfile=os.path.join(outpath, '%s_e%.2i_%s_to_%s_%s.png' % (fid, cnt, e.Start_time, e.Stop_time, 'skewt')) + sys.stdout.write('Created ... %s\n' % outfile) + fig.savefig(outfile) + plt.close(fig) + except: + outfile=os.path.join(outpath, '%s_e%.2i_%s_to_%s_%s.png' % (fid, cnt, e.Start_time, e.Stop_time, 'skewt')) + sys.stdout.write('Could not create ... 
%s\n' % outfile) + + +def _argparser(): + import argparse + from argparse import RawTextHelpFormatter + sys.argv.insert(0, 'faampy quicklooks') + parser = argparse.ArgumentParser(prog = 'faampy quicklooks', description=__doc__, + formatter_class=RawTextHelpFormatter) + parser.add_argument('ncfile', action="store", type=str, help='FAAM core netCDF') + parser.add_argument('fltsumm', action="store", type=str, help='FAAM Flight Summary file') + parser.add_argument('outpath', action="store", type=str, help='outpath where all the quicklook figures will be saved') + parser.add_argument('--config_file', action="store", type=str, help='config file that defines the plots that are produced') + return parser + + +def main(): + parser = _argparser() + args = parser.parse_args() + fs = FlightSummary(args.fltsumm) + ds = netCDF4.Dataset(args.ncfile, 'r') + process(fs, ds, args.outpath, config_file=args.config_file) + sys.stdout.write('Done ...\n') + ds.close() + + +if __name__ == '__main__': + main() diff --git a/faampy/plotting/skewt.py b/faampy/plotting/skewt.py new file mode 100644 index 0000000..1ed2895 --- /dev/null +++ b/faampy/plotting/skewt.py @@ -0,0 +1,296 @@ +# -*- coding:utf-8 -*- +""" +Plots skewT-lnP-diagram from WRF-output file. +@author Geir Arne Waagbø +@see http://code.google.com/p/pywrfplot/ + + Formulas taken from Rogers&Yau: A short course in cloud physics (Third edition) + Some inspiration from: + http://www.atmos.washington.edu/~lmadaus/pyscript/plot_wrf_skewt.txt +""" + +import math +import netCDF4 +import numpy as np +import matplotlib.pyplot as plt + +#from pywrfplotParams import T_zero,T_base,kappa,barb_increments,P_bot +#from pywrfplotUtils import gamma_s,td,e,openWRF,getDimensions + +# P_top must be the same as what is used in the WRF simulation +P_top = 10**4 +P_bot = 10**5 + +T_base = 300.0 +T_zero = 273.15 +L = 2.501e6 # latent heat of vaporization +R = 287.04 # gas constant air +Rv = 461.5 # gas constant vapor +eps = R/Rv +cp = 1005. +cv = 718. +kappa = (cp-cv)/cp +g = 9.81 + +barb_increments = {'half': 2.5,'full':5.0,'flag':25.0} + + +skewness = 37.5 +# Defines the ranges of the plot, do not confuse with P_bot and P_top +P_b = 105000. +P_t = 25000. +dp = 100. +plevs = np.arange(P_b,P_t-1,-dp) + + +# See formula 3.16 in Rogers&Yau +a = 2./7. 
+b = eps*L*L/(R*cp) +c = a*L/R + + +def calc_flight_level(pres): + result=(10**((np.log10(np.array(pres)/1013.25))/5.2558797) -1) / (-6.8755856 * 10**-6*100) + return result + +def gamma_s(T,p): + """Calculates moist adiabatic lapse rate for T (Celsius) and p (Pa) + Note: We calculate dT/dp, not dT/dz + See formula 3.16 in Rogers&Yau for dT/dz, but this must be combined with + the dry adiabatic lapse rate (gamma = g/cp) and the + inverse of the hydrostatic equation (dz/dp = -RT/pg) + + """ + esat = es(T) + wsat = eps*esat/(p-esat) # Rogers&Yau 2.18 + numer = a*(T+T_zero) + c*wsat + denom = p * (1 + b*wsat/((T+T_zero)**2)) + return numer/denom # Rogers&Yau 3.16 + +def es(T): + """Returns saturation vapor pressure (Pascal) at temperature T (Celsius) + Formula 2.17 in Rogers&Yau + + """ + return 611.2*np.exp(17.67*T/(T+243.5)) + +def e(w,p): + """Returns vapor pressure (Pa) at mixing ratio w (kg/kg) and pressure p (Pa) + Formula 2.18 in Rogers&Yau + + taken from: pywrfplotUtils.py + https://code.google.com/p/pywrfplot/source/checkout + """ + return w*p/(w+eps) + +def td(e): + """Returns dew point temperature (C) at vapor pressure e (Pa) + Insert Td in 2.17 in Rogers&Yau and solve for Td + + taken from: pywrfplotUtils.py + https://code.google.com/p/pywrfplot/source/checkout + """ + return 243.5 * np.log(e/611.2)/(17.67-np.log(e/611.2)) + + + +class SkewT(object): + """ + This is the method to use from the outside + + """ + + def __init__(self): + self.zoom = False + + def set_data(self, pres, theta, dewp, u, v): + self.P = pres * 100 + self.theta = theta + self.dewp = dewp + self.u = u + self.v = v + + + def plot(self): + plt.clf() + fig = plt.figure() + + _isotherms() + # + _dry_adiabats() + _moist_adiabats() + + x1 = _temperature(self.theta, self.P) + x2 = _dewpoint(self.dewp, self.P) + + #plt.axis([xlim0, xlim1, ylim1, ylim0]) + #plt.xlabel('Temperature ($^{\circ}\! C$) at 1000hPa') + xticks = np.arange(-40,51,5) + plt.xticks(xticks,['' if tick%10!=0 else str(tick) for tick in xticks]) + plt.ylabel('Pressure (mb)') + yticks = np.arange(P_bot,P_t-1,-10**4) + + #format labels + #plt.yticks(yticks, ['%4.0i' % (yt/100) for yt in yticks]) + + #sfcT = nc.variables['T2'][time,y,x]-T_zero + #sfcT = 7.6307006835937727 + #sfcP = nc.variables['PSFC'][time,y,x] + #sfcP = 100000. + #sfcW = nc.variables['Q2'][time,y,x] + #sfcW = 0.0063032461 + #sfcTd = td(e(sfcW,sfcP)) + #plt.suptitle('Bakketemp: %4.1f$^{\circ}\! C$ Duggpunkt: %3.1f$^{\circ}\! C$ Trykk: %5.1f hPa' % (sfcT,sfcTd,0.01*sfcP), \ + # fontsize=10, x = 0.5, y = 0.03) + + #plt.show() + #plt.close() + + #zoom into area that is covered + if self.zoom: + xlim0=np.min(x2) // 5.0 * 5.0 + xlim1=((np.max(x1) // 5.0) + 2) * 5.0 + ylim0=np.min(self.P) // 50 * 50 + ylim1=(np.max(self.P) // 50 + 1) * 50 + + if np.abs(ylim1-ylim0) < 6000: + yticks=np.arange(P_bot,P_t-1,(-10**4)/20.) + _isobars(step_size=(-10**4)/20.) + #format labels + plt.yticks(yticks, ['%4.0i' % (yt/100) for yt in yticks]) + delta=500 + elif np.abs(ylim1-ylim0) < 9000: + yticks=np.arange(P_bot,P_t-1,(-10**4)/10.) + _isobars(step_size=(-10**4)/10.) + #format labels + plt.yticks(yticks, ['%4.0i' % (yt/100) for yt in yticks]) + delta=1000 + elif np.abs(ylim1-ylim0) < 20000: + yticks=np.arange(P_bot,P_t-1,(-10**4)/5.) + _isobars(step_size=(-10**4)/5.) + #format labels + plt.yticks(yticks, ['%4.0i' % (yt/100) for yt in yticks]) + delta=2000 + else: + yticks=np.arange(P_bot,P_t-1,(-10**4)/2.) + _isobars(step_size=(-10**4)/2.) 
+ #format labels + plt.yticks(yticks, ['%4.0i' % (yt/100) for yt in yticks]) + delta=5000 + ax = plt.gca() + ax.set_xlim((xlim0, xlim1)) + ax.set_ylim((ylim1, ylim0)) + else: + _isobars(step_size=(-10**4)/4.) + plt.axis([-40,50,P_b,P_t]) + yticks = np.arange(P_bot,P_t-1,-10**4) + #format labels + plt.yticks(yticks, ['%4.0i' % (yt/100) for yt in yticks]) + delta=2500 + + _windbarbs(self.u, self.v, self.P, delta) + + # add 2nd axis showing the flight level + ylim_left=plt.gca().get_ylim() + ax2=plt.twinx() + ax2.set_ylim((calc_flight_level(ylim_left[0]/100.), + calc_flight_level(ylim_left[1]/100.))) + ax2.yaxis.set_label_text('Flight Level') + plt.setp(ax2.yaxis.get_label(), visible=True) + plt.subplots_adjust(left=0.12, right=0.88, top=0.9, bottom=0.1) + self.fig=plt.gcf() + + def get_figure(self): + return self.fig + + +def _skewnessTerm(P): + return skewness * np.log(P_bot/P) + +def _isotherms(): + for temp in np.arange(-140,50,10): + plt.semilogy(temp + _skewnessTerm(plevs), plevs, basey=math.e, \ + color = ('blue' if temp <= 0 else 'red'), \ + linestyle=('solid' if temp == 0 else 'dashed'), linewidth = .5) + +def _isobars(step_size=None): + if not step_size: + step_size = -10**4 + press_ticks = np.arange(P_bot,P_t-1,step_size) + for n in press_ticks: + plt.plot([-40,50], [n,n], color = 'black', linewidth = .5) + + +def _dry_adiabats(): + for tk in T_zero+np.arange(-30,210,10): + dry_adiabat = tk * (plevs/P_bot)**kappa - T_zero + _skewnessTerm(plevs) + plt.semilogy(dry_adiabat, plevs, basey=math.e, color = 'brown', \ + linestyle='dashed', linewidth = .5) + +def _moist_adiabats(): + ps = [p for p in plevs if p<=P_bot] + for temp in np.concatenate((np.arange(-40.,10.1,5.),np.arange(12.5,45.1,2.5))): + moist_adiabat = [] + for p in ps: + temp -= dp*gamma_s(temp,p) + moist_adiabat.append(temp + _skewnessTerm(p)) + plt.semilogy(moist_adiabat, ps, basey=math.e, color = 'green', \ + linestyle = 'dotted', linewidth = .5) + + + + +def _windbarbs_org(nc,time,y,x,P): + uwind = 0.5*(nc.variables['U'][time,:,y,x]+nc.variables['U'][time,:,y,x+1]) + vwind = 0.5*(nc.variables['V'][time,:,y,x]+nc.variables['V'][time,:,y+1,x]) + zmax = len(uwind) + delta = 4 + baraxis = [45. 
for _j in range(0,zmax,delta)] + plt.barbs(baraxis,P[0:zmax:delta],uwind[0:zmax:delta],vwind[0:zmax:delta], \ + barb_increments=barb_increments, linewidth = .75, norm=norm) + +def _windbarbs(u, v, press, delta): + #delta = 2500 # equals 25mb + p_bin_min = int((np.min(press) // delta) * delta) + p_bin_max = int(((np.max(press) // delta)+1) * delta) + p_bins = np.array(range(p_bin_min, p_bin_max, delta)) + + ixs = np.digitize(press, p_bins) + uwind = [np.mean(u[ixs == ix]) for ix in list(set(ixs))] + vwind = [np.mean(v[ixs == ix]) for ix in list(set(ixs))] + + ax = plt.gca() + inv = ax.transLimits.inverted() + #x_pos, _none = inv.transform((0.92, 0)) + x_pos = inv.transform(np.array([[0.92,0]]))[0, 0] + baraxis = [x_pos] * len(p_bins) + plt.barbs(baraxis, p_bins, uwind, vwind, \ + barb_increments=barb_increments, linewidth = .75)#, transform=ax.transAxes) + + +def _temperature_org(nc,time,y,x,P): + theta = nc.variables['T'][time,:,y,x] + T_base + T = theta*(P/P_bot)**kappa - T_zero # Temperatur i halvflatene (C) + plt.semilogy(T + _skewnessTerm(P), P, basey=math.e, color = 'black', \ + linestyle='solid', linewidth = 1.5) + +def _temperature(theta, P): + T = theta*(P/P_bot)**kappa - T_zero # Temperatur i halvflatene (C) + plt.semilogy(T + _skewnessTerm(P), P, basey=math.e, color = 'black', \ + linestyle='solid', linewidth = 1.5) + return T + _skewnessTerm(P) + + +def _dewpoint_org(nc,time,y,x,P): + w = nc.variables['QVAPOR'][time,:,y,x] + plt.semilogy(td(e(w,P)) + _skewnessTerm(P), P, basey=math.e, color = 'red', \ + linestyle='solid', linewidth = 1.5) + + +def _dewpoint(dp, P): + #w = nc.variables['QVAPOR'][time,:,y,x] + plt.semilogy(dp + _skewnessTerm(P), P, basey=math.e, color = 'red', \ + linestyle='solid', linewidth = 1.5) + return dp + _skewnessTerm(P) + + diff --git a/faampy/plotting/timeseries.py b/faampy/plotting/timeseries.py new file mode 100644 index 0000000..5b2f8f3 --- /dev/null +++ b/faampy/plotting/timeseries.py @@ -0,0 +1,180 @@ +''' +Created on 15 May 2013 + +@author: axel +''' + +import matplotlib as mpl +import matplotlib.pyplot as plt +import numpy as np +import numpy.ma as ma +from matplotlib.ticker import MaxNLocator, ScalarFormatter +import sys + +import faampy.core.utils + +params = {'legend.fontsize': 10,} +plt.rcParams.update(params) + +# TODO: add xlabel and ylabel +# TODO: use flagging +# TODO: deal with parameter input that do not exist in the core faam data set + + +class Timeseries(object): + """Timeseries plotting class to create matplotlib figures from the FAAM + core netcdf data files. This is especially useful for creating quick + overview plots for qa purposes. + + Several parameters can be plotted in one axes or axes are stacked + vertically. 
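+
+    A minimal usage sketch (hypothetical file name; assumes a FAAM core
+    netCDF that contains the TAT_DI_R and TAT_ND_R temperature variables)::
+
+        import netCDF4
+        ds = netCDF4.Dataset('core_faam_20130715_v004_r0_b789.nc', 'r')
+        ts = Timeseries()
+        ts.setup(ds, [['TAT_DI_R', 'TAT_ND_R']])
+        ts.plot()
+        ts.plot_formatter()
+        ts.get_figure().savefig('b789_temperature_ts.png')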
+ + """ + + def __init__(self, *args, **kwargs): + """ + + """ + self.NO_DATA = None + self.fig = None + self.x_data = [] + self.y_data = [] + self.label = [] + + + def __get_mask__(self, parname): + if not parname+'_FLAG' in self.ds.variables.keys(): + flag_data = np.zeros(self.ds.variables[parname].shape).astype(bool) + else: + flag_data = self.ds.variables[parname+'_FLAG'][:] + mask = np.zeros(self.ds.variables[parname].shape).astype(bool) + if not parname+'_FLAG' in self.ds.variables.keys(): + return mask + + flag_data = self.ds.variables[parname+'_FLAG'][:] + for f in self.flag: + mask[flag_data == f] = False + mask[self.ds.variables[parname][:] == -9999] = True + return mask + + def setup(self, ds, vars, *args): + """ds: netCDF4.Dataset + vars: list of variable names that should be plotted + """ + self.ds=ds + self.vars=list(vars) + self.index=range(self.ds.variables['Time'].shape[0]) + + #check that all the pars exist in netcdf file + is_var = lambda var: var.upper() in [i.upper() for i in self.ds.variables.keys()] + pars = [] + for l in self.vars: + pars.append([item for item in l if is_var(item)]) + diff = set(l).difference(pars[-1]) + if diff: + sys.stdout.write('Skip %s because variable is not in dataset.\n' % (','.join(diff))) + if not [item for sublist in pars for item in sublist]: + self.NO_DATA = True + return 0 + else: + self.NO_DATA = False + self.vars = pars + + try: + self.index = faampy.core.utils.data_filter(self.ds, args[0]) + except: + self.index = range(0, self.ds.variables['Time'].size) + self.index=sorted(self.index) + try: + self.flag = args[1] + except: + self.flag = [0,1,2,3] + + #for i in range(len(self.vars)): + # self.subplt.append(self.fig.add_subplot(len(self.vars), 1, i+1)) + self.fig, self.axs=plt.subplots(nrows=len(self.vars), sharex=True) + if len(self.vars) == 1: + self.axs=np.array([self.axs,]) + + for i in range(len(self.vars)): + self.x_data.append([]) + self.y_data.append([]) + for j in range(len(self.vars[i])): + if len(ds.variables[self.vars[i][j]][:].shape) == 2: + (rows, cols) = ds.variables[self.vars[i][j]][:].shape + self.x_data[i].append(np.ravel(faampy.core.utils.get_mpl_time(ds, cols)[self.index,:])) + else: + self.x_data[i].append(np.ravel(faampy.core.utils.get_mpl_time(ds)[self.index])) + mask = self.__get_mask__(self.vars[i][j]) + #y = ma.masked_array(ds.variables[vars[i][j]][:], mask=mask) + #print(vars[i][j]) + #y = ds.variables[self.vars[i][j]][:] + #y = np.ravel(y[self.index,:]) + #y[y == -9999] = np.nan + #TODO + if len(ds.variables[self.vars[i][j]][:].shape) == 2: + y = ma.masked_array(ds.variables[self.vars[i][j]][self.index,:].ravel(), mask=mask[self.index,:].ravel()) + else: + y = ma.masked_array(ds.variables[self.vars[i][j]][self.index].ravel(), mask=mask[self.index].ravel()) + #y = self.__set_mask__(y, vars[i][j]) + #self.y_data[i].append(np.ravel(ds.variables[vars[i][j]][self.index,:])) + self.y_data[i].append(y) + + def plot(self): + for i in range(len(self.vars)): + for j in range(len(self.vars[i])): + #ax = self.subplt[i] + ax=self.axs[i] + #TODO: weird behaviour; there shouldn't be a need to sort the data + ix = np.argsort(self.x_data[i][j]) + x = self.x_data[i][j][ix] + y = self.y_data[i][j][ix] + ax.plot_date(x, y, '-', label=self.vars[i][j].lower()) + try: + yunits_str = ', '.join(set([(self.ds.variables[p].units).strip() for p in self.vars[i]])) + ax.set_ylabel(yunits_str) + except: + pass + + def plot_formatter(self): + ax=self.axs[0] + # + if ax.get_xlim()[1]-ax.get_xlim()[0] > 3600./86400.: + minloc = 
+    def plot_formatter(self):
+        ax = self.axs[0]
+        # pick the time locator depending on whether the plotted period
+        # is longer than one hour
+        if ax.get_xlim()[1]-ax.get_xlim()[0] > 3600./86400.:
+            xlocator = mpl.dates.HourLocator()
+            xformat = mpl.dates.DateFormatter('%H:%M')
+        else:
+            xlocator = mpl.dates.MinuteLocator()
+            xformat = mpl.dates.DateFormatter('%H:%M')
+        # add three percent padding on either side of the time axis
+        x_range = ax.get_xlim()[1]-ax.get_xlim()[0]
+        _xlim = (ax.get_xlim()[0]-(x_range/100.)*3,
+                 ax.get_xlim()[1]+(x_range/100.)*3)
+        ax.set_xlim(_xlim)
+        for ax in self.axs:
+            ax.grid(b='on')
+            try:
+                plt.setp(ax.get_xticklabels(), visible=False)
+            except:
+                pass
+            ax.xaxis.set_major_formatter(xformat)
+            ax.xaxis.set_major_locator(xlocator)
+            if len(ax.get_xticklabels()) > 6:
+                ax.xaxis.set_major_locator(MaxNLocator(6))
+            ax.yaxis.set_major_formatter(ScalarFormatter(useOffset=False))
+            # http://stackoverflow.com/questions/4700614/how-to-put-the-legend-out-of-the-plot
+            leg = ax.legend()
+            leg.get_frame().set_alpha(0.5)
+        plt.setp(ax.get_xticklabels(), visible=True)
+        ax.set_xlabel('utc')
+
+    def create_filename(self):
+        pass
+
+    def get_figure(self):
+        return self.fig
diff --git a/faampy/utils/__init__.py b/faampy/utils/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/faampy/utils/data_download.py b/faampy/utils/data_download.py
new file mode 100644
index 0000000..aa58c94
--- /dev/null
+++ b/faampy/utils/data_download.py
@@ -0,0 +1,63 @@
+# -*- coding: utf-8 -*-
+"""
+Script for downloading faampy data, which are updated regularly
+"""
+
+import faampy
+import os
+import tempfile
+import urllib2
+import sys
+import zipfile
+
+FAAMPY_DATA_URL = 'http://www.faam.ac.uk/axel_share/faampy_data.zip'
+
+
+def dlfile(url, local_zipfile):
+    # open the url
+    try:
+        sys.stdout.write("downloading %s\n" % url)
+        f = urllib2.urlopen(url)
+        # open our local file for writing
+        with open(local_zipfile, "wb") as local_file:
+            local_file.write(f.read())
+    # handle errors
+    except urllib2.HTTPError as e:
+        sys.stdout.write('HTTP Error: %s %s\n' % (e.code, url))
+    except urllib2.URLError as e:
+        sys.stdout.write('URL Error: %s %s\n' % (e.reason, url))
+    return
+
+
+def _argparser():
+    import argparse
+    from argparse import RawTextHelpFormatter
+    sys.argv.insert(0, 'faampy data_download')
+    parser = argparse.ArgumentParser(description=__doc__,
+                                     formatter_class=RawTextHelpFormatter)
+    parser.add_argument('password',
+                        action="store",
+                        type=str,
+                        help='zip file password')
+    return parser
+
+
+def main():
+    parser = _argparser()
+    args = parser.parse_args()
+    passwd = args.password
+    if not os.path.exists(faampy.FAAMPY_DATA_PATH):
+        os.mkdir(faampy.FAAMPY_DATA_PATH)
+        sys.stdout.write('Created %s ...\n' % faampy.FAAMPY_DATA_PATH)
+    local_zipfile = tempfile.mktemp(suffix='.zip')
+    dlfile(FAAMPY_DATA_URL, local_zipfile)
+    _zip = zipfile.ZipFile(local_zipfile)
+    _zip.extractall(path=faampy.FAAMPY_DATA_PATH, pwd=passwd)
+    return
+
+
+if __name__ == '__main__':
+    main()
diff --git a/faampy/utils/file_info.py b/faampy/utils/file_info.py
new file mode 100644
index 0000000..be477e5
--- /dev/null
+++ b/faampy/utils/file_info.py
@@ -0,0 +1,151 @@
+import os
+import re
+
+
+DATA_TYPES = {'core-hires': 'core_faam_20[0-9][0-9][0-1][0-9][0-3][0-9].*_r.*_[bBcC][0-9][0-9][0-9].nc$',
+              'core-lowres': 'core_faam_20[0-9][0-9][0-1][0-9][0-3][0-9].*_r.*_[bBcC][0-9][0-9][0-9]_1[Hh]z.nc$',
+              'core-descrip': 'core_faam_20[0-9][0-9][0-1][0-9][0-3][0-9].*_r.*_[bBcC][0-9][0-9][0-9]_descrip.txt$',
+              'core-quality': 'core_faam_20[0-9][0-9][0-1][0-9][0-3][0-9].*_r.*_[bBcC][0-9][0-9][0-9]_quality.txt$',
+              'dropsonde-proc': '.*dropsonde_faam_.*_r.*_[bBcC][0-9][0-9][0-9]_proc.nc$',
+              'dropsonde-raw': '.*dropsonde_faam_.*_r.*_[bBcC][0-9][0-9][0-9]_raw.nc$',
+              'dropsonde-descrip': '.*dropsonde_faam_.*_r.*_[bBcC][0-9][0-9][0-9]_descrip.txt$',
+              'flight-cst': 'flight-cst_faam_20[0-9][0-9][0-1][0-9][0-3][0-9]_r.*_[bBcC][0-9][0-9][0-9].txt$',
+              'flight-log': 'flight-log_faam_20[0-9][0-9][0-1][0-9][0-3][0-9].*_r.*_[bBcC][0-9][0-9][0-9].pdf$',
+              'flight-sum': 'flight-sum_faam_20[0-9][0-9][0-1][0-9][0-3][0-9].*_r.*_[bBcC][0-9][0-9][0-9].txt$',
+              'rawdrs': 'core_faam_20[0-9][0-9][0-1][0-9][0-3][0-9].*_r.*_[bB][0-9][0-9][0-9]_rawdrs.zip$',
+              'rawgin': 'core_faam_20[0-9][0-9][0-1][0-9][0-3][0-9].*_r.*_[bB][0-9][0-9][0-9]_rawgin.zip$',
+              'rawgps': 'core_faam_20[0-9][0-9][0-1][0-9][0-3][0-9].*_r.*_[bB][0-9][0-9][0-9]_rawgps.zip$',
+              'rawdlu': 'core_faam_20[0-9][0-9][0-1][0-9][0-3][0-9].*_r.*_[bB][0-9][0-9][0-9]_rawdlu.zip$'}
+
+
+def get_revision_from_filename(filename):
+    """
+    Extracts the revision number from the netCDF core filename
+
+    Example:
+    >>> ncfile = 'core_faam_20090529_v004_r1_b450.nc'
+    >>> get_revision_from_filename(ncfile)
+    1
+    >>>
+    """
+    fn = os.path.basename(filename)
+    fn = fn.split('.')[0]
+    parts = fn.split('_')
+    for p in parts:
+        if re.match('r\d', p):
+            result = int(p[1:])
+            return result
+    return
+
+
+def get_data_type_from_filename(filename):
+    """
+    Returns the data type for the input filename, determined using the
+    DATA_TYPES dictionary
+    """
+    for key in DATA_TYPES.keys():
+        if re.match(DATA_TYPES[key], os.path.basename(filename)):
+            return key
+    return
+
+
+def get_fid_from_filename(filename):
+    """
+    Extracts the flight number from the netCDF core filename
+
+    Example:
+    >>> ncfile = 'core_faam_20090529_v004_r1_b450.nc'
+    >>> get_fid_from_filename(ncfile)
+    'b450'
+    >>>
+    """
+    fn = os.path.basename(filename)
+    fn = fn.split('.')[0]
+    parts = fn.split('_')
+    for p in parts:
+        if re.match('[bBcC][0-9][0-9][0-9]', p):
+            return p.lower()
+    return
+
+
+def get_date_from_filename(filename):
+    """
+    Extracts the flight date from the netCDF core filename
+
+    Example:
+    >>> ncfile = 'core_faam_20090529_v004_r1_b450.nc'
+    >>> get_date_from_filename(ncfile)
+    '20090529'
+    >>>
+    """
+    fn = os.path.basename(filename)
+    fn = fn.split('.')[0]
+    parts = fn.split('_')
+    for p in parts:
+        # matches both the short (8 digit) and the long (14 digit) timestamp
+        if re.match('20\d{6}', p):
+            return p
+    return
+
+
+class File_Info(object):
+    """
+    Holds all file specific information for a FAAM data file:
+      * filename
+      * path
+      * flight number (fid)
+      * date
+      * revision
+      * data type
+    """
+    def __init__(self, filename):
+        self.filename = os.path.basename(filename)
+        self.path = os.path.dirname(filename)
+        self.fid = get_fid_from_filename(filename)
+        self.date = get_date_from_filename(filename)
+        self.rev = get_revision_from_filename(filename)
+        self.data_type = get_data_type_from_filename(filename)
+
+    def __str__(self):
+        output = '\n'
+        labels = ['Filename', 'Path', 'FID', 'Date', 'Revision', 'Data Type']
+        values = [self.filename,
+                  self.path,
+                  self.fid,
+                  self.date,
+                  str(self.rev),
+                  self.data_type]
+        for s in zip(labels, values):
+            output += '%9s: %s\n' % s
+        return output
+
+    def __eq__(self, 
other): + return ((self.fid, self.rev, self.data_type) == + (other.fid, other.rev, other.data_type)) + def __ne__(self, other): + return not self == other + def __gt__(self, other): + return (self.fid, self.rev) > (other.fid, other.rev) + def __lt__(self, other): + return (self.fid, self.rev) < (other.fid, other.rev) + def __ge__(self, other): + return (self > other) or (self == other) + def __le__(self, other): + return (self < other) or (self == other) + + diff --git a/faampy/utils/file_list.py b/faampy/utils/file_list.py new file mode 100644 index 0000000..60a0b39 --- /dev/null +++ b/faampy/utils/file_list.py @@ -0,0 +1,73 @@ +import os +import sys + +import file_info + + +class File_List(list): + """ + A list of File_Info objects. The list can be sorted and filtered which can + be useful for batch processing. + + For example it is possible to (i) get all DECADES rawdlu and flight-constant + files from a path, (ii) filter those for the latest revisions and reprocess + them. + + """ + def __init__(self, path): + """ + Get all FAAM data files in the path. + + :param path: path which will be walked and checked for FAAM data files + """ + self.Path = path + if os.path.isdir(path): + for root, subFolders, files in os.walk(self.Path): + for f in files: + if file_info.get_data_type_from_filename(f): + self.append(file_info.File_Info(os.path.join(root, f))) + else: + sys.stdout.write('%s is not a directory.\n' % path) + self.sort() + + def filter_by_data_type(self, dtype): + """ + Filtering by data type. + """ + if not dtype in file_info.DATA_TYPES: + sys.stdout.write('Submitted dtype unknown.\nValid data types are: %s\n' % ', '.join(sorted(file_info.DATA_TYPES.keys()))) + + bad_index = [] + for i in self: + if not i.data_type == dtype: + bad_index.append(i) + for b in bad_index: + self.remove(b) + + def filter_latest_revision(self): + """ + Compresses the list and keeps only the latest revision file for a FID + """ + bad_index = [] + self.sort(key=lambda i: '%4s_%s_%s_%0.3i' % (i.fid, i.date, i.data_type, i.rev)) + self.reverse() + for i in range(len(self)-1): + if ((self[i].fid, self[i].date, self[i].data_type)) == ((self[i+1].fid, self[i+1].date, self[i+1].data_type)): + bad_index.append(self[i+1]) + for b in bad_index: + self.remove(b) + self.sort() + + def __str__(self): + output = '' + for i in self: + output += '%s\n' % (i.filename,) + return output + + def get_filenames(self): + """ + Returns the filenames + """ + result = [os.path.join(i.path, i.filename) for i in self] + return result + diff --git a/files/icons/camera_32x32.png b/files/icons/camera_32x32.png new file mode 100644 index 0000000..f5febfd Binary files /dev/null and b/files/icons/camera_32x32.png differ diff --git a/files/icons/dot_blue_32x32.png b/files/icons/dot_blue_32x32.png new file mode 100644 index 0000000..9a5551d Binary files /dev/null and b/files/icons/dot_blue_32x32.png differ diff --git a/files/icons/dot_green_32x32.png b/files/icons/dot_green_32x32.png new file mode 100644 index 0000000..133a590 Binary files /dev/null and b/files/icons/dot_green_32x32.png differ diff --git a/files/icons/dot_orange_32x32.png b/files/icons/dot_orange_32x32.png new file mode 100644 index 0000000..1639cbd Binary files /dev/null and b/files/icons/dot_orange_32x32.png differ diff --git a/files/icons/dot_red_32x32.png b/files/icons/dot_red_32x32.png new file mode 100644 index 0000000..7802ec8 Binary files /dev/null and b/files/icons/dot_red_32x32.png differ diff --git a/files/icons/dropsonde_32x32.png 
b/files/icons/dropsonde_32x32.png new file mode 100644 index 0000000..20e7ccf Binary files /dev/null and b/files/icons/dropsonde_32x32.png differ diff --git a/files/icons/was_bottle_32x32.png b/files/icons/was_bottle_32x32.png new file mode 100644 index 0000000..f8c2495 Binary files /dev/null and b/files/icons/was_bottle_32x32.png differ diff --git a/requirements.txt b/requirements.txt new file mode 100644 index 0000000..655b14b --- /dev/null +++ b/requirements.txt @@ -0,0 +1,19 @@ +basemap +cartopy +ephem +exifread +fiona +gdal +georasters +haversine +matplotlib +netCDF4 +numpy +pandas +pillow +pyspatialite +requests +scipy +setuptools-git +simplekml +xarray diff --git a/setup.py b/setup.py new file mode 100644 index 0000000..d0feb0b --- /dev/null +++ b/setup.py @@ -0,0 +1,69 @@ +import os + +from setuptools import setup, find_packages + + +HERE = os.path.abspath(os.path.dirname(__file__)) + +# Get the long description from the README file +with open(os.path.join(HERE, 'README.rst')) as f: + long_description = f.read() + +with open('requirements.txt') as f: + required = f.read().splitlines() + +#http://thomas-cokelaer.info/blog/2012/03/how-to-embedded-data-files-in-python-using-setuptools/ +datadir = os.path.join('files') +datafiles = [(d, [os.path.join(d, f) for f in files]) + for d, folders, files in os.walk(datadir)] + + +def get_faampy_version(): + version = None + initpath = os.path.join(HERE, 'faampy', '__init__.py') + with open(initpath) as fd: + for line in fd: + if line.startswith('__version__'): + _, version = line.split('=') + version = version.strip()[1:-1] # Remove quotation characters + break + return version + + +setup(name = "faampy", + version = get_faampy_version(), + description = "python module for dealing with FAAM data", + author = "Axel Wellpott", + author_email = "axel dot wellpott at faam dot ac dot uk", + url = "http://www.faam.ac.uk", + package_dir = {'': '.'}, + packages=find_packages('.'), + # scripts are defined in the faampy.__init__ file + entry_points={ + 'console_scripts': [ + 'faampy = faampy:command_line',] + }, + license='LGPLv3', + platforms = ['linux'], + long_description = long_description, + install_requires = required, + include_package_data = True, + data_files = datafiles, + zip_safe = False, # http://stackoverflow.com/questions/27964960/pip-why-sometimes-installed-as-egg-sometimes-installed-as-files + keywords = ['FAAM', + 'Facility for Airborne Atmospheric Measurements', + 'NCAS', + 'MetOffice', + 'data', + 'science', + 'meteorology', + 'python', + 'plotting'], + classifiers=['Development Status :: 2 - Pre-Alpha', + 'License :: OSI Approved :: GNU Lesser General Public License v3 or later (LGPLv3+)', + 'Operating System :: POSIX :: Linux', + 'Programming Language :: Python', + 'Programming Language :: Python :: 2', + 'Programming Language :: Python :: 2.7', + 'Topic :: Scientific/Engineering', + 'Topic :: Scientific/Engineering :: Visualization'])
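
A short sketch of how the new utils classes are meant to be used together
(hypothetical path; filter_by_data_type and filter_latest_revision as defined
in faampy/utils/file_list.py above):

    from faampy.utils.file_list import File_List
    fl = File_List('/path/to/faam/data')
    fl.filter_by_data_type('core-hires')
    fl.filter_latest_revision()
    for fn in fl.get_filenames():
        print(fn)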