diff --git a/LICENSE b/LICENSE
index 261eeb9e9..d07669f34 100644
--- a/LICENSE
+++ b/LICENSE
@@ -199,3 +199,15 @@
    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    See the License for the specific language governing permissions and
    limitations under the License.
+
+====================================================================================
+The following applies to MochiKit, embedded in Closure in compiled JavaScript files.
+====================================================================================
+
+Copyright (c) 2005 Bob Ippolito. All rights reserved.
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/docker/Dockerfile b/docker/Dockerfile
deleted file mode 100644
index ac7b8eb62..000000000
--- a/docker/Dockerfile
+++ /dev/null
@@ -1,35 +0,0 @@
-FROM gcr.io/cloud-datalab/datalab:latest
-MAINTAINER Tyler Erickson
-
-# Install Earth Engine Python API dependencies.
-RUN apt-get update \
-    && apt-get install -y build-essential libssl-dev libffi-dev \
-    && pip install cryptography \
-    && apt-get purge -y build-essential libssl-dev libffi-dev \
-        dpkg-dev fakeroot libfakeroot:amd64 \
-    && apt-get autoremove -y \
-    && apt-get clean \
-    && rm -rf /var/lib/apt/lists/*
-
-# Install IPyLeaflet. The notebook library dependency is downgraded to
-# version 4.4.1 but the datalab repo warns about potential version issues:
-# https://github.com/googledatalab/datalab/blob/master/containers/base/Dockerfile#L139
-RUN pip install ipyleaflet \
-    && jupyter nbextension enable --py --sys-prefix ipyleaflet \
-    && pip install notebook==4.4.1
-
-# Install the Earth Engine Python API.
-RUN pip install earthengine-api
-
-# Install custom files in the container's /datalab directory.
-RUN cp /datalab/run.sh /datalab/base-run.sh
-ADD run.sh /datalab/
-RUN chmod a+x /datalab/run.sh
-
-# Add license information for the new libraries added.
-ADD datalab-ee.txt /datalab/
-RUN cat /datalab/datalab-ee.txt >> /datalab/web/static/datalab.txt \
-    && rm /datalab/datalab-ee.txt
-ADD license-ee.txt /datalab/
-RUN cat /datalab/license-ee.txt >> /datalab/license.txt \
-    && rm /datalab/license-ee.txt
diff --git a/docker/datalab-ee.txt b/docker/datalab-ee.txt
deleted file mode 100644
index 6f0d9d710..000000000
--- a/docker/datalab-ee.txt
+++ /dev/null
@@ -1,71 +0,0 @@
-
-## IPyLeaflet Library and Dependencies
-
-- ajv (MIT) [https://github.com/epoberezkin/ajv]
-- ansi_up (MIT) [https://github.com/drudru/ansi_up]
-- backbone (MIT) [https://github.com/jashkenas/backbone]
-- base64-js (MIT) [https://github.com/beatgammit/base64-js]
-- buffer-shims (MIT) [https://github.com/calvinmetcalf/buffer-shims]
-- co (MIT) [https://github.com/tj/co]
-- codemirror (MIT) [https://github.com/codemirror/CodeMirror]
-- core-util-is (MIT) [https://github.com/isaacs/core-util-is]
-- d3-format (BSD-3-Clause) [https://github.com/d3/d3-format]
-- dom-serializer (MIT) [https://github.com/cheeriojs/dom-renderer]
-- domelementtype (BSD-2-Clause) [https://github.com/FB55/domelementtype]
-- domhandler (BSD-2-Clause) [https://github.com/fb55/DomHandler]
-- domutils (BSD-2-Clause) [https://github.com/FB55/domutils]
-- entities (BSD-like) [https://github.com/fb55/node-entities]
-- font-awesome ((OFL-1.1 AND MIT)) [https://github.com/FortAwesome/Font-Awesome]
-- htmlparser2 (MIT) [https://github.com/fb55/htmlparser2]
-- inherits (ISC) [https://github.com/isaacs/inherits]
-- ipyleaflet (MIT) [https://pypi.python.org/pypi/ipyleaflet]
-- isarray (MIT) [https://github.com/juliangruber/isarray]
-- jquery-ui (MIT) [https://github.com/jquery/jquery-ui]
-- jquery (MIT) [https://github.com/jquery/jquery]
-- json-stable-stringify (MIT) [https://github.com/substack/json-stable-stringify]
-- jsonify (Public Domain) [https://github.com/substack/jsonify]
-- jupyter-js-widgets (BSD-3-Clause) [https://github.com/ipython/ipywidgets]
-- jupyter-js-widgets-alpha.0 (BSD-3-Clause) [https://github.com/jupyter-widgets/ipywidgets]
-- jupyter-leaflet (MIT) [https://github.com/ellisonbg/ipyleaflet]
-- jupyter-widgets-schema (BSD-3-Clause) [https://github.com/ipython/ipywidgets]
-- jupyter-widgets-schema-beta.2 (BSD-3-Clause) [https://github.com/jupyter-widgets/ipywidgets]
-- jupyterlab (BSD-3-Clause) [https://github.com/jupyterlab/jupyterlab]
-- leaflet-draw (MIT) [https://github.com/Leaflet/Leaflet.draw]
-- leaflet (BSD-2-Clause) [https://github.com/Leaflet/Leaflet]
-- lolex (BSD-3-Clause) [https://github.com/sinonjs/lolex]
-- marked (MIT) [https://github.com/chjj/marked]
-- minimist (MIT) [https://github.com/substack/minimist]
-- moment (MIT) [https://github.com/moment/moment]
-- path-posix (ISC) [https://github.com/jden/node-path-posix]
-- phosphor (BSD-3-Clause) [https://github.com/phosphorjs/phosphor]
-- process-nextick-args (MIT) [https://github.com/calvinmetcalf/process-nextick-args]
-- punycode (MIT) [https://github.com/bestiejs/punycode.js]
-- querystring (MIT) [https://github.com/Gozala/querystring]
-- querystringify (MIT) [https://github.com/unshiftio/querystringify]
-- readable-stream (MIT) [https://github.com/nodejs/readable-stream]
-- regexp-quote (MIT) [https://github.com/dbrock/node-regexp-quote]
-- requires-port (MIT) [https://github.com/unshiftio/requires-port]
-- safe-buffer (MIT) [https://github.com/feross/safe-buffer]
-- sanitize-html (MIT) [https://github.com/punkave/sanitize-html]
-- scriptjs (MIT) [https://github.com/ded/script.js]
-- semver (ISC) [https://github.com/npm/node-semver]
-- string_decoder (MIT) [https://github.com/rvagg/string_decoder]
-- types (MIT) [https://github.com/DefinitelyTyped/DefinitelyTyped]
-- underscore (MIT) [https://github.com/jashkenas/underscore]
-- url-join (MIT) [https://github.com/jfromaniello/url-join]
-- url-parse (MIT) [https://github.com/unshiftio/url-parse]
-- url (MIT) [https://github.com/defunctzombie/node-url]
-- util-deprecate (MIT) [https://github.com/TooTallNate/util-deprecate]
-- xtend (MIT) [https://github.com/Raynos/xtend]
-
-## Earth Engine API Library and Dependencies
-- asn1crypto (MIT) [https://pypi.python.org/pypi/asn1crypto]
-- cffi (MIT) [https://pypi.python.org/pypi/cffi]
-- cryptography (BSD or Apache License, Version 2.0) [https://pypi.python.org/pypi/cryptography]
-- earthengine-api (Apache License, Version 2.0) [https://pypi.python.org/pypi/earthengine-api]
-- idna (BSD-like) [https://pypi.python.org/pypi/idna]
-- ipaddress (PSF) [https://pypi.python.org/pypi/ipaddress]
-- pbr (Apache License, Version 2.0) [https://pypi.python.org/pypi/pbr]
-- pycparser (BSD) [https://pypi.python.org/pypi/pycparser]
-- pyOpenSSL (Apache License, Version 2.0) [https://pypi.python.org/pypi/pyOpenSSL]
-- setuptools (MIT) [https://pypi.python.org/pypi/setuptools]
diff --git a/docker/license-ee.txt b/docker/license-ee.txt
deleted file mode 100644
index e8f728a37..000000000
--- a/docker/license-ee.txt
+++ /dev/null
@@ -1,2392 +0,0 @@
-"""""
-ajv
-"""""
-The MIT License (MIT)
-
-Copyright (c) 2015 Evgeny Poberezkin
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in all
-copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
-SOFTWARE.
-""""" -ansi_up -""""" -(The MIT License) - -Copyright (c) 2011 Dru Nelson - -Permission is hereby granted, free of charge, to any person obtaining -a copy of this software and associated documentation files (the -'Software'), to deal in the Software without restriction, including -without limitation the rights to use, copy, modify, merge, publish, -distribute, sublicense, and/or sell copies of the Software, and to -permit persons to whom the Software is furnished to do so, subject to -the following conditions: - -The above copyright notice and this permission notice shall be -included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, -EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. -IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY -CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, -TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE -SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -""""" -asn1crypto -""""" -Copyright (c) 2015-2017 Will Bond - -Permission is hereby granted, free of charge, to any person obtaining a copy of -this software and associated documentation files (the "Software"), to deal in -the Software without restriction, including without limitation the rights to -use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies -of the Software, and to permit persons to whom the Software is furnished to do -so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. -""""" -backbone -""""" -Copyright (c) 2010-2017 Jeremy Ashkenas, DocumentCloud - -Permission is hereby granted, free of charge, to any person -obtaining a copy of this software and associated documentation -files (the "Software"), to deal in the Software without -restriction, including without limitation the rights to use, -copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the -Software is furnished to do so, subject to the following -conditions: - -The above copyright notice and this permission notice shall be -included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES -OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT -HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, -WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING -FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR -OTHER DEALINGS IN THE SOFTWARE. 
-""""" -base64-js -""""" -The MIT License (MIT) - -Copyright (c) 2014 - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. -""""" -buffer-shims -""""" -# Copyright (c) 2016 Calvin Metcalf - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -**THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE.** -""""" -cffi -""""" -Except when otherwise stated (look for LICENSE files in directories or -information at the beginning of each file) all software and -documentation is licensed as follows: - - The MIT License - - Permission is hereby granted, free of charge, to any person - obtaining a copy of this software and associated documentation - files (the "Software"), to deal in the Software without - restriction, including without limitation the rights to use, - copy, modify, merge, publish, distribute, sublicense, and/or - sell copies of the Software, and to permit persons to whom the - Software is furnished to do so, subject to the following conditions: - - The above copyright notice and this permission notice shall be included - in all copies or substantial portions of the Software. - - THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS - OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, - FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL - THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER - LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING - FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER - DEALINGS IN THE SOFTWARE. 
-""""" -co -""""" -(The MIT License) - -Copyright (c) 2014 TJ Holowaychuk <tj@vision-media.ca> - -Permission is hereby granted, free of charge, to any person obtaining -a copy of this software and associated documentation files (the -'Software'), to deal in the Software without restriction, including -without limitation the rights to use, copy, modify, merge, publish, -distribute, sublicense, and/or sell copies of the Software, and to -permit persons to whom the Software is furnished to do so, subject to -the following conditions: - -The above copyright notice and this permission notice shall be -included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, -EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. -IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY -CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, -TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE -SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -""""" -codemirror -""""" -MIT License - -Copyright (C) 2017 by Marijn Haverbeke and others - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. -""""" -core-util-is -""""" -Copyright Node.js contributors. All rights reserved. - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to -deal in the Software without restriction, including without limitation the -rights to use, copy, modify, merge, publish, distribute, sublicense, and/or -sell copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING -FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS -IN THE SOFTWARE. -""""" -cryptography -""""" -This software is made available under the terms of *either* of the licenses -found in LICENSE.APACHE or LICENSE.BSD. 
-under the terms of *both* these licenses.
-
-The code used in the OpenSSL locking callback and OS random engine is derived
-from the same in CPython itself, and is licensed under the terms of the PSF
-License Agreement.
-
-
-                                 Apache License
-                           Version 2.0, January 2004
-                        http://www.apache.org/licenses/
-
-   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
-
-   1. Definitions.
-
-      "License" shall mean the terms and conditions for use, reproduction,
-      and distribution as defined by Sections 1 through 9 of this document.
-
-      "Licensor" shall mean the copyright owner or entity authorized by
-      the copyright owner that is granting the License.
-
-      "Legal Entity" shall mean the union of the acting entity and all
-      other entities that control, are controlled by, or are under common
-      control with that entity. For the purposes of this definition,
-      "control" means (i) the power, direct or indirect, to cause the
-      direction or management of such entity, whether by contract or
-      otherwise, or (ii) ownership of fifty percent (50%) or more of the
-      outstanding shares, or (iii) beneficial ownership of such entity.
-
-      "You" (or "Your") shall mean an individual or Legal Entity
-      exercising permissions granted by this License.
-
-      "Source" form shall mean the preferred form for making modifications,
-      including but not limited to software source code, documentation
-      source, and configuration files.
-
-      "Object" form shall mean any form resulting from mechanical
-      transformation or translation of a Source form, including but
-      not limited to compiled object code, generated documentation,
-      and conversions to other media types.
-
-      "Work" shall mean the work of authorship, whether in Source or
-      Object form, made available under the License, as indicated by a
-      copyright notice that is included in or attached to the work
-      (an example is provided in the Appendix below).
-
-      "Derivative Works" shall mean any work, whether in Source or Object
-      form, that is based on (or derived from) the Work and for which the
-      editorial revisions, annotations, elaborations, or other modifications
-      represent, as a whole, an original work of authorship. For the purposes
-      of this License, Derivative Works shall not include works that remain
-      separable from, or merely link (or bind by name) to the interfaces of,
-      the Work and Derivative Works thereof.
-
-      "Contribution" shall mean any work of authorship, including
-      the original version of the Work and any modifications or additions
-      to that Work or Derivative Works thereof, that is intentionally
-      submitted to Licensor for inclusion in the Work by the copyright owner
-      or by an individual or Legal Entity authorized to submit on behalf of
-      the copyright owner. For the purposes of this definition, "submitted"
-      means any form of electronic, verbal, or written communication sent
-      to the Licensor or its representatives, including but not limited to
-      communication on electronic mailing lists, source code control systems,
-      and issue tracking systems that are managed by, or on behalf of, the
-      Licensor for the purpose of discussing and improving the Work, but
-      excluding communication that is conspicuously marked or otherwise
-      designated in writing by the copyright owner as "Not a Contribution."
-
-      "Contributor" shall mean Licensor and any individual or Legal Entity
-      on behalf of whom a Contribution has been received by Licensor and
-      subsequently incorporated within the Work.
-
-   2. Grant of Copyright License. Subject to the terms and conditions of
-      this License, each Contributor hereby grants to You a perpetual,
-      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
-      copyright license to reproduce, prepare Derivative Works of,
-      publicly display, publicly perform, sublicense, and distribute the
-      Work and such Derivative Works in Source or Object form.
-
-   3. Grant of Patent License. Subject to the terms and conditions of
-      this License, each Contributor hereby grants to You a perpetual,
-      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
-      (except as stated in this section) patent license to make, have made,
-      use, offer to sell, sell, import, and otherwise transfer the Work,
-      where such license applies only to those patent claims licensable
-      by such Contributor that are necessarily infringed by their
-      Contribution(s) alone or by combination of their Contribution(s)
-      with the Work to which such Contribution(s) was submitted. If You
-      institute patent litigation against any entity (including a
-      cross-claim or counterclaim in a lawsuit) alleging that the Work
-      or a Contribution incorporated within the Work constitutes direct
-      or contributory patent infringement, then any patent licenses
-      granted to You under this License for that Work shall terminate
-      as of the date such litigation is filed.
-
-   4. Redistribution. You may reproduce and distribute copies of the
-      Work or Derivative Works thereof in any medium, with or without
-      modifications, and in Source or Object form, provided that You
-      meet the following conditions:
-
-      (a) You must give any other recipients of the Work or
-          Derivative Works a copy of this License; and
-
-      (b) You must cause any modified files to carry prominent notices
-          stating that You changed the files; and
-
-      (c) You must retain, in the Source form of any Derivative Works
-          that You distribute, all copyright, patent, trademark, and
-          attribution notices from the Source form of the Work,
-          excluding those notices that do not pertain to any part of
-          the Derivative Works; and
-
-      (d) If the Work includes a "NOTICE" text file as part of its
-          distribution, then any Derivative Works that You distribute must
-          include a readable copy of the attribution notices contained
-          within such NOTICE file, excluding those notices that do not
-          pertain to any part of the Derivative Works, in at least one
-          of the following places: within a NOTICE text file distributed
-          as part of the Derivative Works; within the Source form or
-          documentation, if provided along with the Derivative Works; or,
-          within a display generated by the Derivative Works, if and
-          wherever such third-party notices normally appear. The contents
-          of the NOTICE file are for informational purposes only and
-          do not modify the License. You may add Your own attribution
-          notices within Derivative Works that You distribute, alongside
-          or as an addendum to the NOTICE text from the Work, provided
-          that such additional attribution notices cannot be construed
-          as modifying the License.
-
-      You may add Your own copyright statement to Your modifications and
-      may provide additional or different license terms and conditions
-      for use, reproduction, or distribution of Your modifications, or
-      for any such Derivative Works as a whole, provided Your use,
-      reproduction, and distribution of the Work otherwise complies with
-      the conditions stated in this License.
-
-   5. Submission of Contributions. Unless You explicitly state otherwise,
-      any Contribution intentionally submitted for inclusion in the Work
-      by You to the Licensor shall be under the terms and conditions of
-      this License, without any additional terms or conditions.
-      Notwithstanding the above, nothing herein shall supersede or modify
-      the terms of any separate license agreement you may have executed
-      with Licensor regarding such Contributions.
-
-   6. Trademarks. This License does not grant permission to use the trade
-      names, trademarks, service marks, or product names of the Licensor,
-      except as required for reasonable and customary use in describing the
-      origin of the Work and reproducing the content of the NOTICE file.
-
-   7. Disclaimer of Warranty. Unless required by applicable law or
-      agreed to in writing, Licensor provides the Work (and each
-      Contributor provides its Contributions) on an "AS IS" BASIS,
-      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
-      implied, including, without limitation, any warranties or conditions
-      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
-      PARTICULAR PURPOSE. You are solely responsible for determining the
-      appropriateness of using or redistributing the Work and assume any
-      risks associated with Your exercise of permissions under this License.
-
-   8. Limitation of Liability. In no event and under no legal theory,
-      whether in tort (including negligence), contract, or otherwise,
-      unless required by applicable law (such as deliberate and grossly
-      negligent acts) or agreed to in writing, shall any Contributor be
-      liable to You for damages, including any direct, indirect, special,
-      incidental, or consequential damages of any character arising as a
-      result of this License or out of the use or inability to use the
-      Work (including but not limited to damages for loss of goodwill,
-      work stoppage, computer failure or malfunction, or any and all
-      other commercial damages or losses), even if such Contributor
-      has been advised of the possibility of such damages.
-
-   9. Accepting Warranty or Additional Liability. While redistributing
-      the Work or Derivative Works thereof, You may choose to offer,
-      and charge a fee for, acceptance of support, warranty, indemnity,
-      or other liability obligations and/or rights consistent with this
-      License. However, in accepting such obligations, You may act only
-      on Your own behalf and on Your sole responsibility, not on behalf
-      of any other Contributor, and only if You agree to indemnify,
-      defend, and hold each Contributor harmless for any liability
-      incurred by, or claims asserted against, such Contributor by reason
-      of your accepting any such warranty or additional liability.
-
-   END OF TERMS AND CONDITIONS
-
-   APPENDIX: How to apply the Apache License to your work.
-
-      To apply the Apache License to your work, attach the following
-      boilerplate notice, with the fields enclosed by brackets "[]"
-      replaced with your own identifying information. (Don't include
-      the brackets!) The text should be enclosed in the appropriate
-      comment syntax for the file format. We also recommend that a
-      file or class name and description of purpose be included on the
-      same "printed page" as the copyright notice for easier
-      identification within third-party archives.
-
-   Copyright [yyyy] [name of copyright owner]
-
-   Licensed under the Apache License, Version 2.0 (the "License");
-   you may not use this file except in compliance with the License.
-   You may obtain a copy of the License at
-
-       http://www.apache.org/licenses/LICENSE-2.0
-
-   Unless required by applicable law or agreed to in writing, software
-   distributed under the License is distributed on an "AS IS" BASIS,
-   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-   See the License for the specific language governing permissions and
-   limitations under the License.
-
-Copyright (c) Individual contributors.
-All rights reserved.
-
-Redistribution and use in source and binary forms, with or without
-modification, are permitted provided that the following conditions are met:
-
-1. Redistributions of source code must retain the above copyright notice,
-   this list of conditions and the following disclaimer.
-
-2. Redistributions in binary form must reproduce the above copyright
-   notice, this list of conditions and the following disclaimer in the
-   documentation and/or other materials provided with the distribution.
-
-3. Neither the name of PyCA Cryptography nor the names of its contributors
-   may be used to endorse or promote products derived from this software
-   without specific prior written permission.
-
-THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
-ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
-WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
-DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
-ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
-(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
-LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
-ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
-SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-1. This LICENSE AGREEMENT is between the Python Software Foundation ("PSF"), and
-   the Individual or Organization ("Licensee") accessing and otherwise using Python
-   2.7.12 software in source or binary form and its associated documentation.
-
-2. Subject to the terms and conditions of this License Agreement, PSF hereby
-   grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce,
-   analyze, test, perform and/or display publicly, prepare derivative works,
-   distribute, and otherwise use Python 2.7.12 alone or in any derivative
-   version, provided, however, that PSF's License Agreement and PSF's notice of
-   copyright, i.e., "Copyright © 2001-2016 Python Software Foundation; All Rights
-   Reserved" are retained in Python 2.7.12 alone or in any derivative version
-   prepared by Licensee.
-
-3. In the event Licensee prepares a derivative work that is based on or
-   incorporates Python 2.7.12 or any part thereof, and wants to make the
-   derivative work available to others as provided herein, then Licensee hereby
-   agrees to include in any such work a brief summary of the changes made to Python
-   2.7.12.
-
-4. PSF is making Python 2.7.12 available to Licensee on an "AS IS" basis.
-   PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR IMPLIED. BY WAY OF
-   EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND DISCLAIMS ANY REPRESENTATION OR
-   WARRANTY OF MERCHANTABILITY OR FITNESS FOR ANY PARTICULAR PURPOSE OR THAT THE
-   USE OF PYTHON 2.7.12 WILL NOT INFRINGE ANY THIRD PARTY RIGHTS.
-
-5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON 2.7.12
-   FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS A RESULT OF
-   MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON 2.7.12, OR ANY DERIVATIVE
-   THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
-
-6. This License Agreement will automatically terminate upon a material breach of
-   its terms and conditions.
-
-7. Nothing in this License Agreement shall be deemed to create any relationship
-   of agency, partnership, or joint venture between PSF and Licensee. This License
-   Agreement does not grant permission to use PSF trademarks or trade name in a
-   trademark sense to endorse or promote products or services of Licensee, or any
-   third party.
-
-8. By copying, installing or otherwise using Python 2.7.12, Licensee agrees
-   to be bound by the terms and conditions of this License Agreement.
-"""""
-d3-format
-"""""
-Copyright 2010-2015 Mike Bostock
-All rights reserved.
-
-Redistribution and use in source and binary forms, with or without modification,
-are permitted provided that the following conditions are met:
-
-* Redistributions of source code must retain the above copyright notice, this
-  list of conditions and the following disclaimer.
-
-* Redistributions in binary form must reproduce the above copyright notice,
-  this list of conditions and the following disclaimer in the documentation
-  and/or other materials provided with the distribution.
-
-* Neither the name of the author nor the names of contributors may be used to
-  endorse or promote products derived from this software without specific prior
-  written permission.
-
-THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
-ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
-WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
-DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
-ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
-(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
-LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
-ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
-SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-"""""
-dom-serializer
-"""""
-License
-
-(The MIT License)
-
-Copyright (c) 2014 The cheeriojs contributors
-
-Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the 'Software'), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-""""" -domelementtype -""""" -Copyright (c) Felix Böhm -All rights reserved. - -Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: - -Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. - -Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. - -THIS IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS, -EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. -""""" -domhandler -""""" -Copyright (c) Felix Böhm -All rights reserved. - -Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: - -Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. - -Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. - -THIS IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS, -EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. -""""" -domutils -""""" -Copyright (c) Felix Böhm -All rights reserved. - -Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: - -Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. - -Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. - -THIS IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
-EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-"""""
-earthengine-api
-"""""
-                                 Apache License
-                           Version 2.0, January 2004
-                        http://www.apache.org/licenses/
-
-   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
-
-   1. Definitions.
-
-      "License" shall mean the terms and conditions for use, reproduction,
-      and distribution as defined by Sections 1 through 9 of this document.
-
-      "Licensor" shall mean the copyright owner or entity authorized by
-      the copyright owner that is granting the License.
-
-      "Legal Entity" shall mean the union of the acting entity and all
-      other entities that control, are controlled by, or are under common
-      control with that entity. For the purposes of this definition,
-      "control" means (i) the power, direct or indirect, to cause the
-      direction or management of such entity, whether by contract or
-      otherwise, or (ii) ownership of fifty percent (50%) or more of the
-      outstanding shares, or (iii) beneficial ownership of such entity.
-
-      "You" (or "Your") shall mean an individual or Legal Entity
-      exercising permissions granted by this License.
-
-      "Source" form shall mean the preferred form for making modifications,
-      including but not limited to software source code, documentation
-      source, and configuration files.
-
-      "Object" form shall mean any form resulting from mechanical
-      transformation or translation of a Source form, including but
-      not limited to compiled object code, generated documentation,
-      and conversions to other media types.
-
-      "Work" shall mean the work of authorship, whether in Source or
-      Object form, made available under the License, as indicated by a
-      copyright notice that is included in or attached to the work
-      (an example is provided in the Appendix below).
-
-      "Derivative Works" shall mean any work, whether in Source or Object
-      form, that is based on (or derived from) the Work and for which the
-      editorial revisions, annotations, elaborations, or other modifications
-      represent, as a whole, an original work of authorship. For the purposes
-      of this License, Derivative Works shall not include works that remain
-      separable from, or merely link (or bind by name) to the interfaces of,
-      the Work and Derivative Works thereof.
-
-      "Contribution" shall mean any work of authorship, including
-      the original version of the Work and any modifications or additions
-      to that Work or Derivative Works thereof, that is intentionally
-      submitted to Licensor for inclusion in the Work by the copyright owner
-      or by an individual or Legal Entity authorized to submit on behalf of
-      the copyright owner. For the purposes of this definition, "submitted"
-      means any form of electronic, verbal, or written communication sent
-      to the Licensor or its representatives, including but not limited to
-      communication on electronic mailing lists, source code control systems,
-      and issue tracking systems that are managed by, or on behalf of, the
-      Licensor for the purpose of discussing and improving the Work, but
-      excluding communication that is conspicuously marked or otherwise
-      designated in writing by the copyright owner as "Not a Contribution."
-
-      "Contributor" shall mean Licensor and any individual or Legal Entity
-      on behalf of whom a Contribution has been received by Licensor and
-      subsequently incorporated within the Work.
-
-   2. Grant of Copyright License. Subject to the terms and conditions of
-      this License, each Contributor hereby grants to You a perpetual,
-      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
-      copyright license to reproduce, prepare Derivative Works of,
-      publicly display, publicly perform, sublicense, and distribute the
-      Work and such Derivative Works in Source or Object form.
-
-   3. Grant of Patent License. Subject to the terms and conditions of
-      this License, each Contributor hereby grants to You a perpetual,
-      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
-      (except as stated in this section) patent license to make, have made,
-      use, offer to sell, sell, import, and otherwise transfer the Work,
-      where such license applies only to those patent claims licensable
-      by such Contributor that are necessarily infringed by their
-      Contribution(s) alone or by combination of their Contribution(s)
-      with the Work to which such Contribution(s) was submitted. If You
-      institute patent litigation against any entity (including a
-      cross-claim or counterclaim in a lawsuit) alleging that the Work
-      or a Contribution incorporated within the Work constitutes direct
-      or contributory patent infringement, then any patent licenses
-      granted to You under this License for that Work shall terminate
-      as of the date such litigation is filed.
-
-   4. Redistribution. You may reproduce and distribute copies of the
-      Work or Derivative Works thereof in any medium, with or without
-      modifications, and in Source or Object form, provided that You
-      meet the following conditions:
-
-      (a) You must give any other recipients of the Work or
-          Derivative Works a copy of this License; and
-
-      (b) You must cause any modified files to carry prominent notices
-          stating that You changed the files; and
-
-      (c) You must retain, in the Source form of any Derivative Works
-          that You distribute, all copyright, patent, trademark, and
-          attribution notices from the Source form of the Work,
-          excluding those notices that do not pertain to any part of
-          the Derivative Works; and
-
-      (d) If the Work includes a "NOTICE" text file as part of its
-          distribution, then any Derivative Works that You distribute must
-          include a readable copy of the attribution notices contained
-          within such NOTICE file, excluding those notices that do not
-          pertain to any part of the Derivative Works, in at least one
-          of the following places: within a NOTICE text file distributed
-          as part of the Derivative Works; within the Source form or
-          documentation, if provided along with the Derivative Works; or,
-          within a display generated by the Derivative Works, if and
-          wherever such third-party notices normally appear. The contents
-          of the NOTICE file are for informational purposes only and
-          do not modify the License. You may add Your own attribution
-          notices within Derivative Works that You distribute, alongside
-          or as an addendum to the NOTICE text from the Work, provided
-          that such additional attribution notices cannot be construed
-          as modifying the License.
-
-      You may add Your own copyright statement to Your modifications and
-      may provide additional or different license terms and conditions
-      for use, reproduction, or distribution of Your modifications, or
-      for any such Derivative Works as a whole, provided Your use,
-      reproduction, and distribution of the Work otherwise complies with
-      the conditions stated in this License.
-
-   5. Submission of Contributions. Unless You explicitly state otherwise,
-      any Contribution intentionally submitted for inclusion in the Work
-      by You to the Licensor shall be under the terms and conditions of
-      this License, without any additional terms or conditions.
-      Notwithstanding the above, nothing herein shall supersede or modify
-      the terms of any separate license agreement you may have executed
-      with Licensor regarding such Contributions.
-
-   6. Trademarks. This License does not grant permission to use the trade
-      names, trademarks, service marks, or product names of the Licensor,
-      except as required for reasonable and customary use in describing the
-      origin of the Work and reproducing the content of the NOTICE file.
-
-   7. Disclaimer of Warranty. Unless required by applicable law or
-      agreed to in writing, Licensor provides the Work (and each
-      Contributor provides its Contributions) on an "AS IS" BASIS,
-      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
-      implied, including, without limitation, any warranties or conditions
-      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
-      PARTICULAR PURPOSE. You are solely responsible for determining the
-      appropriateness of using or redistributing the Work and assume any
-      risks associated with Your exercise of permissions under this License.
-
-   8. Limitation of Liability. In no event and under no legal theory,
-      whether in tort (including negligence), contract, or otherwise,
-      unless required by applicable law (such as deliberate and grossly
-      negligent acts) or agreed to in writing, shall any Contributor be
-      liable to You for damages, including any direct, indirect, special,
-      incidental, or consequential damages of any character arising as a
-      result of this License or out of the use or inability to use the
-      Work (including but not limited to damages for loss of goodwill,
-      work stoppage, computer failure or malfunction, or any and all
-      other commercial damages or losses), even if such Contributor
-      has been advised of the possibility of such damages.
-
-   9. Accepting Warranty or Additional Liability. While redistributing
-      the Work or Derivative Works thereof, You may choose to offer,
-      and charge a fee for, acceptance of support, warranty, indemnity,
-      or other liability obligations and/or rights consistent with this
-      License. However, in accepting such obligations, You may act only
-      on Your own behalf and on Your sole responsibility, not on behalf
-      of any other Contributor, and only if You agree to indemnify,
-      defend, and hold each Contributor harmless for any liability
-      incurred by, or claims asserted against, such Contributor by reason
-      of your accepting any such warranty or additional liability.
-
-   END OF TERMS AND CONDITIONS
-
-   APPENDIX: How to apply the Apache License to your work.
-
-      To apply the Apache License to your work, attach the following
-      boilerplate notice, with the fields enclosed by brackets "[]"
-      replaced with your own identifying information. (Don't include
-      the brackets!) The text should be enclosed in the appropriate
-      comment syntax for the file format. We also recommend that a
-      file or class name and description of purpose be included on the
-      same "printed page" as the copyright notice for easier
-      identification within third-party archives.
-
-   Copyright [yyyy] [name of copyright owner]
-
-   Licensed under the Apache License, Version 2.0 (the "License");
-   you may not use this file except in compliance with the License.
-   You may obtain a copy of the License at
-
-       http://www.apache.org/licenses/LICENSE-2.0
-
-   Unless required by applicable law or agreed to in writing, software
-   distributed under the License is distributed on an "AS IS" BASIS,
-   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-   See the License for the specific language governing permissions and
-   limitations under the License.
-"""""
-entities
-"""""
-Copyright (c) Felix Böhm
-All rights reserved.
-
-Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
-
-Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
-
-Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
-
-THIS IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS,
-EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-"""""
-font-awesome
-"""""
-- The Font Awesome font is licensed under the SIL OFL 1.1:
-  - http://scripts.sil.org/OFL
-- Font Awesome CSS, LESS, and Sass files are licensed under the MIT License:
-  - https://opensource.org/licenses/mit-license.html
-- The Font Awesome documentation is licensed under the CC BY 3.0 License:
-  - http://creativecommons.org/licenses/by/3.0/
-- Attribution is no longer required as of Font Awesome 3.0, but much appreciated:
-  - `Font Awesome by Dave Gandy - http://fontawesome.io`
-- Full details: http://fontawesome.io/license/
-"""""
-htmlparser2
-"""""
-Copyright 2010, 2011, Chris Winberry. All rights reserved.
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to
-deal in the Software without restriction, including without limitation the
-rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
-sell copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in
-all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
-IN THE SOFTWARE.
-"""""
-idna
-"""""
-License
--------
-
-Copyright (c) 2013-2017, Kim Davies. All rights reserved.
-
-Redistribution and use in source and binary forms, with or without
-modification, are permitted provided that the following conditions are met:
-
-#. Redistributions of source code must retain the above copyright
-   notice, this list of conditions and the following disclaimer.
-
-#. Redistributions in binary form must reproduce the above
-   copyright notice, this list of conditions and the following
-   disclaimer in the documentation and/or other materials provided with
-   the distribution.
-
-#. Neither the name of the copyright holder nor the names of the
-   contributors may be used to endorse or promote products derived
-   from this software without specific prior written permission.
-
-#. THIS SOFTWARE IS PROVIDED BY THE CONTRIBUTORS "AS IS" AND ANY
-   EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
-   IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
-   PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR
-   CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-   SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-   LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-   DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-   THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-   (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
-   USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH
-   DAMAGE.
-
-Portions of the codec implementation and unit tests are derived from the
-Python standard library, which carries the `Python Software Foundation
-License `_:
-
-   Copyright (c) 2001-2014 Python Software Foundation; All Rights Reserved
-
-Portions of the unit tests are derived from the Unicode standard, which
-is subject to the Unicode, Inc. License Agreement:
-
-   Copyright (c) 1991-2014 Unicode, Inc. All rights reserved.
-   Distributed under the Terms of Use in
-   .
-
-   Permission is hereby granted, free of charge, to any person obtaining
-   a copy of the Unicode data files and any associated documentation
-   (the "Data Files") or Unicode software and any associated documentation
-   (the "Software") to deal in the Data Files or Software
-   without restriction, including without limitation the rights to use,
-   copy, modify, merge, publish, distribute, and/or sell copies of
-   the Data Files or Software, and to permit persons to whom the Data Files
-   or Software are furnished to do so, provided that
-
-   (a) this copyright and permission notice appear with all copies
-   of the Data Files or Software,
-
-   (b) this copyright and permission notice appear in associated
-   documentation, and
-
-   (c) there is clear notice in each modified Data File or in the Software
-   as well as in the documentation associated with the Data File(s) or
-   Software that the data or software has been modified.
-
-   THE DATA FILES AND SOFTWARE ARE PROVIDED "AS IS", WITHOUT WARRANTY OF
-   ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
-   WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-   NONINFRINGEMENT OF THIRD PARTY RIGHTS.
-   IN NO EVENT SHALL THE COPYRIGHT HOLDER OR HOLDERS INCLUDED IN THIS
-   NOTICE BE LIABLE FOR ANY CLAIM, OR ANY SPECIAL INDIRECT OR CONSEQUENTIAL
-   DAMAGES, OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE,
-   DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER
-   TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
-   PERFORMANCE OF THE DATA FILES OR SOFTWARE.
-
-   Except as contained in this notice, the name of a copyright holder
-   shall not be used in advertising or otherwise to promote the sale,
-   use or other dealings in these Data Files or Software without prior
-   written authorization of the copyright holder.
-"""""
-inherits
-"""""
-The ISC License
-
-Copyright (c) Isaac Z. Schlueter
-
-Permission to use, copy, modify, and/or distribute this software for any
-purpose with or without fee is hereby granted, provided that the above
-copyright notice and this permission notice appear in all copies.
-
-THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
-REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
-FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
-INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
-LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR
-OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
-PERFORMANCE OF THIS SOFTWARE.
-"""""
-ipaddress
-"""""
-This package is a modified version of cpython's ipaddress module.
-It is therefore distributed under the PSF license, as follows:
-
-PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2
---------------------------------------------
-
-1. This LICENSE AGREEMENT is between the Python Software Foundation
-("PSF"), and the Individual or Organization ("Licensee") accessing and
-otherwise using this software ("Python") in source or binary form and
-its associated documentation.
-
-2. Subject to the terms and conditions of this License Agreement, PSF hereby
Subject to the terms and conditions of this License Agreement, PSF hereby -grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce, -analyze, test, perform and/or display publicly, prepare derivative works, -distribute, and otherwise use Python alone or in any derivative version, -provided, however, that PSF's License Agreement and PSF's notice of copyright, -i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, -2011, 2012, 2013, 2014 Python Software Foundation; All Rights Reserved" are -retained in Python alone or in any derivative version prepared by Licensee. - -3. In the event Licensee prepares a derivative work that is based on -or incorporates Python or any part thereof, and wants to make -the derivative work available to others as provided herein, then -Licensee hereby agrees to include in any such work a brief summary of -the changes made to Python. - -4. PSF is making Python available to Licensee on an "AS IS" -basis. PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR -IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND -DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS -FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT -INFRINGE ANY THIRD PARTY RIGHTS. - -5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON -FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS -A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON, -OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. - -6. This License Agreement will automatically terminate upon a material -breach of its terms and conditions. - -7. Nothing in this License Agreement shall be deemed to create any -relationship of agency, partnership, or joint venture between PSF and -Licensee. This License Agreement does not grant permission to use PSF -trademarks or trade name in a trademark sense to endorse or promote -products or services of Licensee, or any third party. - -8. By copying, installing or otherwise using Python, Licensee -agrees to be bound by the terms and conditions of this License -Agreement. -""""" -ipyleaflet -""""" -The MIT License (MIT) - -Copyright (c) 2014 Brian E. Granger - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. -""""" -ipywidgets -""""" -Copyright (c) 2015-2016, Project Jupyter Contributors -All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are met: - -1. 
Redistributions of source code must retain the above copyright notice, this - list of conditions and the following disclaimer. - -2. Redistributions in binary form must reproduce the above copyright notice, - this list of conditions and the following disclaimer in the documentation - and/or other materials provided with the distribution. - -3. Neither the name of the copyright holder nor the names of its - contributors may be used to endorse or promote products derived from - this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" -AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE -IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE -FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL -DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR -SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, -OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. -""""" -isarray -""""" -(MIT) - -Copyright (c) 2013 Julian Gruber <julian@juliangruber.com> - -Permission is hereby granted, free of charge, to any person obtaining a copy of -this software and associated documentation files (the "Software"), to deal in -the Software without restriction, including without limitation the rights to -use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies -of the Software, and to permit persons to whom the Software is furnished to do -so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. -""""" -jquery -""""" -Copyright JS Foundation and other contributors, https://js.foundation/ - -This software consists of voluntary contributions made by many -individuals. For exact contribution history, see the revision history -available at https://github.com/jquery/jquery - -The following license applies to all parts of this software except as -documented below: - -==== - -Permission is hereby granted, free of charge, to any person obtaining -a copy of this software and associated documentation files (the -"Software"), to deal in the Software without restriction, including -without limitation the rights to use, copy, modify, merge, publish, -distribute, sublicense, and/or sell copies of the Software, and to -permit persons to whom the Software is furnished to do so, subject to -the following conditions: - -The above copyright notice and this permission notice shall be -included in all copies or substantial portions of the Software. 
- -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. - -==== - -All files located in the node_modules and external directories are -externally maintained libraries used by this software which have their -own licenses; we recommend you read them, as their terms may differ from -the terms above. -""""" -jquery-ui -""""" -Copyright jQuery Foundation and other contributors, https://jquery.org/ - -This software consists of voluntary contributions made by many -individuals. For exact contribution history, see the revision history -available at https://github.com/jquery/jquery-ui - -The following license applies to all parts of this software except as -documented below: - -==== - -Permission is hereby granted, free of charge, to any person obtaining -a copy of this software and associated documentation files (the -"Software"), to deal in the Software without restriction, including -without limitation the rights to use, copy, modify, merge, publish, -distribute, sublicense, and/or sell copies of the Software, and to -permit persons to whom the Software is furnished to do so, subject to -the following conditions: - -The above copyright notice and this permission notice shall be -included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. - -==== - -Copyright and related rights for sample code are waived via CC0. Sample -code is defined as all source code contained within the demos directory. - -CC0: http://creativecommons.org/publicdomain/zero/1.0/ - -==== - -All files located in the node_modules and external directories are -externally maintained libraries used by this software which have their -own licenses; we recommend you read them, as their terms may differ from -the terms above. -""""" -json-stable-stringify -""""" -This software is released under the MIT license: - -Permission is hereby granted, free of charge, to any person obtaining a copy of -this software and associated documentation files (the "Software"), to deal in -the Software without restriction, including without limitation the rights to -use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of -the Software, and to permit persons to whom the Software is furnished to do so, -subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS -FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR -COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER -IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -""""" -jsonify -""""" -public domain -""""" -jupyter-js-widgets -""""" -Copyright (c) 2015-2016, Project Jupyter Contributors -All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are met: - -1. Redistributions of source code must retain the above copyright notice, this - list of conditions and the following disclaimer. - -2. Redistributions in binary form must reproduce the above copyright notice, - this list of conditions and the following disclaimer in the documentation - and/or other materials provided with the distribution. - -3. Neither the name of the copyright holder nor the names of its - contributors may be used to endorse or promote products derived from - this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" -AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE -IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE -FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL -DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR -SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, -OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. -""""" -jupyter-leaflet -""""" -The MIT License (MIT) - -Copyright (c) 2014 Brian E. Granger - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. -""""" -jupyter-widgets-schema -""""" -Copyright (c) 2015-2016, Project Jupyter Contributors -All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are met: - -1. Redistributions of source code must retain the above copyright notice, this - list of conditions and the following disclaimer. - -2. 
Redistributions in binary form must reproduce the above copyright notice, - this list of conditions and the following disclaimer in the documentation - and/or other materials provided with the distribution. - -3. Neither the name of the copyright holder nor the names of its - contributors may be used to endorse or promote products derived from - this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" -AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE -IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE -FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL -DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR -SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, -OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. -""""" -jupyterlab -""""" -Copyright (c) 2015-2016, Project Jupyter Contributors -All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are met: - -1. Redistributions of source code must retain the above copyright notice, this - list of conditions and the following disclaimer. - -2. Redistributions in binary form must reproduce the above copyright notice, - this list of conditions and the following disclaimer in the documentation - and/or other materials provided with the distribution. - -3. Neither the name of the copyright holder nor the names of its - contributors may be used to endorse or promote products derived from - this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" -AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE -IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE -FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL -DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR -SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, -OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. -""""" -leaflet -""""" -Copyright (c) 2010-2016, Vladimir Agafonkin -Copyright (c) 2010-2011, CloudMade -All rights reserved. - -Redistribution and use in source and binary forms, with or without modification, are -permitted provided that the following conditions are met: - - 1. Redistributions of source code must retain the above copyright notice, this list of - conditions and the following disclaimer. - - 2. Redistributions in binary form must reproduce the above copyright notice, this list - of conditions and the following disclaimer in the documentation and/or other materials - provided with the distribution. 
- -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY -EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF -MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE -COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, -EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF -SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) -HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR -TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS -SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. -""""" -leaflet-draw -""""" -Copyright 2012-2017 Jacob Toye and Leaflet - -Permission is hereby granted, free of charge, to any person obtaining -a copy of this software and associated documentation files (the -"Software"), to deal in the Software without restriction, including -without limitation the rights to use, copy, modify, merge, publish, -distribute, sublicense, and/or sell copies of the Software, and to -permit persons to whom the Software is furnished to do so, subject to -the following conditions: - -The above copyright notice and this permission notice shall be -included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -""""" -lolex -""""" -Copyright (c) 2010-2014, Christian Johansen, christian@cjohansen.no. All rights reserved. - -Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: - -1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. - -2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. - -3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
-""""" -marked -""""" -Copyright (c) 2011-2014, Christopher Jeffrey (https://github.com/chjj/) - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. -""""" -minimist -""""" -This software is released under the MIT license: - -Permission is hereby granted, free of charge, to any person obtaining a copy of -this software and associated documentation files (the "Software"), to deal in -the Software without restriction, including without limitation the rights to -use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of -the Software, and to permit persons to whom the Software is furnished to do so, -subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS -FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR -COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER -IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -""""" -moment -""""" -Copyright (c) JS Foundation and other contributors - -Permission is hereby granted, free of charge, to any person -obtaining a copy of this software and associated documentation -files (the "Software"), to deal in the Software without -restriction, including without limitation the rights to use, -copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the -Software is furnished to do so, subject to the following -conditions: - -The above copyright notice and this permission notice shall be -included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES -OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT -HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, -WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING -FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR -OTHER DEALINGS IN THE SOFTWARE. -""""" -path-posix -""""" -Node's license follows: - -==== - -Copyright Joyent, Inc. and other Node contributors. All rights reserved. 
-Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to -deal in the Software without restriction, including without limitation the -rights to use, copy, modify, merge, publish, distribute, sublicense, and/or -sell copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING -FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS -IN THE SOFTWARE. - -==== -""""" -pbr -""""" - - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. 
- - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. 
You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. 
In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. -""""" -phosphor -""""" -Copyright (c) 2014-2017, PhosphorJS Contributors -All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are met: - -* Redistributions of source code must retain the above copyright notice, this - list of conditions and the following disclaimer. - -* Redistributions in binary form must reproduce the above copyright notice, - this list of conditions and the following disclaimer in the documentation - and/or other materials provided with the distribution. - -* Neither the name of the copyright holder nor the names of its - contributors may be used to endorse or promote products derived from - this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" -AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE -IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE -FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL -DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR -SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, -OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
-""""" -process-nextick-args -""""" -# Copyright (c) 2015 Calvin Metcalf - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -**THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE.** -""""" -punycode -""""" -Copyright Mathias Bynens - -Permission is hereby granted, free of charge, to any person obtaining -a copy of this software and associated documentation files (the -"Software"), to deal in the Software without restriction, including -without limitation the rights to use, copy, modify, merge, publish, -distribute, sublicense, and/or sell copies of the Software, and to -permit persons to whom the Software is furnished to do so, subject to -the following conditions: - -The above copyright notice and this permission notice shall be -included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -""""" -pycparser -""""" -pycparser -- A C parser in Python - -Copyright (c) 2008-2017, Eli Bendersky -All rights reserved. - -Redistribution and use in source and binary forms, with or without modification, -are permitted provided that the following conditions are met: - -* Redistributions of source code must retain the above copyright notice, this - list of conditions and the following disclaimer. -* Redistributions in binary form must reproduce the above copyright notice, - this list of conditions and the following disclaimer in the documentation - and/or other materials provided with the distribution. -* Neither the name of Eli Bendersky nor the names of its contributors may - be used to endorse or promote products derived from this software without - specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND -ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED -WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE -LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR -CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE -GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) -HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT -OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. -""""" -pyOpenSSL -""""" - - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. 
For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. 
The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. 
- - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. -""""" -querystring -""""" -Copyright 2012 Irakli Gozalishvili. All rights reserved. -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to -deal in the Software without restriction, including without limitation the -rights to use, copy, modify, merge, publish, distribute, sublicense, and/or -sell copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING -FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS -IN THE SOFTWARE. -""""" -querystringify -""""" -The MIT License (MIT) - -Copyright (c) 2015 Unshift.io, Arnout Kazemier, the Contributors. - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. 
-""""" -readable-stream -""""" -Node.js is licensed for use as follows: - -""" -Copyright Node.js contributors. All rights reserved. - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to -deal in the Software without restriction, including without limitation the -rights to use, copy, modify, merge, publish, distribute, sublicense, and/or -sell copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING -FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS -IN THE SOFTWARE. -""" - -This license applies to parts of Node.js originating from the -https://github.com/joyent/node repository: - -""" -Copyright Joyent, Inc. and other Node contributors. All rights reserved. -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to -deal in the Software without restriction, including without limitation the -rights to use, copy, modify, merge, publish, distribute, sublicense, and/or -sell copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING -FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS -IN THE SOFTWARE. -""" -""""" -regexp-quote -""""" -Copyright (c) 2012-2014 Daniel Brockman - -Permission is hereby granted, free of charge, to any person obtaining a copy of -this software and associated documentation files (the 'Software'), to deal in -the Software without restriction, including without limitation the rights to -use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of -the Software, and to permit persons to whom the Software is furnished to do so, -subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS -FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR -COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER -IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
-""""" -requires-port -""""" -The MIT License (MIT) - -Copyright (c) 2015 Unshift.io, Arnout Kazemier, the Contributors. - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. -""""" -safe-buffer -""""" -The MIT License (MIT) - -Copyright (c) Feross Aboukhadijeh - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. -""""" -sanitize-html -""""" -Copyright (c) 2013, 2014, 2015 P'unk Avenue LLC - -Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -""""" -scriptjs -""""" -/*! 
- * $script.js JS loader & dependency manager - * https://github.com/ded/script.js - * (c) Dustin Diaz 2014 | License MIT - */ -""""" -semver -""""" -The ISC License - -Copyright (c) Isaac Z. Schlueter and Contributors - -Permission to use, copy, modify, and/or distribute this software for any -purpose with or without fee is hereby granted, provided that the above -copyright notice and this permission notice appear in all copies. - -THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES -WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR -ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR -IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. -""""" -setuptools -""""" -Copyright (C) 2016 Jason R Coombs - -Permission is hereby granted, free of charge, to any person obtaining a copy of -this software and associated documentation files (the "Software"), to deal in -the Software without restriction, including without limitation the rights to -use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies -of the Software, and to permit persons to whom the Software is furnished to do -so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. -""""" -string_decoder -""""" -Node.js is licensed for use as follows: - -""" -Copyright Node.js contributors. All rights reserved. - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to -deal in the Software without restriction, including without limitation the -rights to use, copy, modify, merge, publish, distribute, sublicense, and/or -sell copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING -FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS -IN THE SOFTWARE. -""" - -This license applies to parts of Node.js originating from the -https://github.com/joyent/node repository: - -""" -Copyright Joyent, Inc. and other Node contributors. All rights reserved. 
-Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to -deal in the Software without restriction, including without limitation the -rights to use, copy, modify, merge, publish, distribute, sublicense, and/or -sell copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING -FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS -IN THE SOFTWARE. -""" -""""" -types -""""" -This project is licensed under the MIT license. -Copyrights are respective of each contributor listed at the beginning of each definition file. - -Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -""""" -underscore -""""" -Copyright (c) 2009-2017 Jeremy Ashkenas, DocumentCloud and Investigative -Reporters & Editors - -Permission is hereby granted, free of charge, to any person -obtaining a copy of this software and associated documentation -files (the "Software"), to deal in the Software without -restriction, including without limitation the rights to use, -copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the -Software is furnished to do so, subject to the following -conditions: - -The above copyright notice and this permission notice shall be -included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES -OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT -HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, -WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING -FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR -OTHER DEALINGS IN THE SOFTWARE. -""""" -url -""""" -The MIT License (MIT) - -Copyright Joyent, Inc. and other Node contributors. 
- -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. -""""" -url-join -""""" -MIT License - -Copyright (c) 2015 José F. Romaniello - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. -""""" -url-parse -""""" -The MIT License (MIT) - -Copyright (c) 2015 Unshift.io, Arnout Kazemier, the Contributors. - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. 
-""""" -util-deprecate -""""" -(The MIT License) - -Copyright (c) 2014 Nathan Rajlich - -Permission is hereby granted, free of charge, to any person -obtaining a copy of this software and associated documentation -files (the "Software"), to deal in the Software without -restriction, including without limitation the rights to use, -copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the -Software is furnished to do so, subject to the following -conditions: - -The above copyright notice and this permission notice shall be -included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES -OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT -HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, -WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING -FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR -OTHER DEALINGS IN THE SOFTWARE. -""""" -xtend -""""" -Copyright (c) 2012-2014 Raynos. - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. diff --git a/docker/run.sh b/docker/run.sh deleted file mode 100644 index 615cc035f..000000000 --- a/docker/run.sh +++ /dev/null @@ -1,31 +0,0 @@ -#!/bin/bash -# Copyright 2017 Google Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -TMPDIR=temp-repo - -mkdir -p /content/datalab -cd /content/datalab - -if [[ ! -d "docs-earthengine" ]]; then - echo "Adding Earth Engine docs to the Datalab container..." - # Clone the repository into a temporary directory. - git clone https://github.com/google/earthengine-api $TMPDIR - # Copy the IPython Notebook examples. - cp -R $TMPDIR/python/examples/ipynb/ docs-earthengine/ - # Delete the temporary directory. 
- rm -fr $TMPDIR -fi - -source /datalab/base-run.sh diff --git a/javascript/build/ee_api_js.js b/javascript/build/ee_api_js.js index 2b7760a88..62a306d65 100644 --- a/javascript/build/ee_api_js.js +++ b/javascript/build/ee_api_js.js @@ -45,7 +45,7 @@ var z=function(a,b,c){a||Ia("",null,b,Array.prototype.slice.call(arguments,2))}, g){var l=g[f];b.call(void 0,l,f,a)&&(d[e++]=l)}return d},Ta=Array.prototype.map?function(a,b,c){z(null!=a.length);return Array.prototype.map.call(a,b,c)}:function(a,b,c){for(var d=a.length,e=Array(d),g="string"===typeof a?a.split(""):a,f=0;f=arguments.length?Array.prototype.slice.call(a,b):Array.prototype.slice.call(a,b,c)} -function cb(a){var b=[];if(0>a-0)return[];for(var c=0;ca-0)return[];for(var c=0;cb.length)){var d=b[0].match(/\r\nContent-ID: ]*)>/)[1],e=Number(b[1].match(/^HTTP\S*\s(\d+)\s/)[1]);c(d,e,b.slice(2).join("\r\n\r\n"))}},oj=function(){var a=wj.replace(/\/api$/,"");return"window"in r&&!a.match(/^https?:\/\/content-/)?a.replace(/^(https?:\/\/)(.*\.googleapis\.com)$/,"$1content-$2"):a},yj=function(a,b,c){var d=[];a&&(d=d.concat(xj)); b&&d.push("https://www.googleapis.com/auth/devstorage.read_write");a=d=d.concat(c);c=b=0;for(var e={};cg)break;v++}return Tj(B.status,function(D){try{return B.getResponseHeader(D)}catch(Q){return null}},B.responseText,f,void 0,e,d)},Rj=function(a,b,c,d,e,g){var f=0,l={url:a,method:c,content:d,headers:e},m=Lj,q=null!=g?g:10;l.callback= function(v){v=v.target;if(429==v.getStatus()&&f');c[e]=d});b.he=c}else throw Error('"tensorDepths" option needs to have the form Object.');return b},pl=function(a,b){var c=new yf({ka:null,na:null,Hd:null,ie:null,F:dk(a.fileFormat)});if("GEO_TIFF"===c.F){if(a.fileDimensions&&a.tiffFileDimensions)throw Error('Export cannot set both "fileDimensions" and "tiffFileDimensions".'); -var d=a.tiffShardSize||a.shardSize;var e=!!a.tiffCloudOptimized;var g=!(!a.skipEmptyTiles&&!a.tiffSkipEmptyFiles),f=nl(a.fileDimensions||a.tiffFileDimensions);d=K(d);var l=a.noData;l=null!=l?new Zf({mf:Number(l)}):null;e=new Yf({Se:e,lg:g,bb:f,tileSize:d,Of:l});c.Hd=e}else"TF_RECORD_IMAGE"===c.F&&(c.ie=ol(a));"GOOGLE_CLOUD_STORAGE"===b?c.ka=il(a):c.na=jl(a);return c},ql=function(a,b){var c=new If({ka:null,na:null,F:"MP4"});"GOOGLE_CLOUD_STORAGE"===b?c.ka=il(a):c.na=jl(a);return c},rl=function(a){var b, +var d=a.tiffShardSize||a.shardSize;var e=!!a.tiffCloudOptimized;var g=!(!a.skipEmptyTiles&&!a.tiffSkipEmptyFiles),f=nl(a.fileDimensions||a.tiffFileDimensions);d=K(d);var l=a.tiffNoData;l=null!=l?new Zf({mf:Number(l)}):null;e=new Yf({Se:e,lg:g,bb:f,tileSize:d,Of:l});c.Hd=e}else"TF_RECORD_IMAGE"===c.F&&(c.ie=ol(a));"GOOGLE_CLOUD_STORAGE"===b?c.ka=il(a):c.na=jl(a);return c},ql=function(a,b){var c=new If({ka:null,na:null,F:"MP4"});"GOOGLE_CLOUD_STORAGE"===b?c.ka=il(a):c.na=jl(a);return c},rl=function(a){var b, c,d,e,g=K(null!=(b=a.endZoom)?b:a.maxZoom);b=K(null!=(c=a.startZoom)?c:a.minZoom);c=K(a.scale);var f=!(null!=(d=a.skipEmpty)?!d:!a.skipEmptyTiles);d=J(a.mapsApiKey);var l=nl(null!=(e=a.dimensions)?e:a.tileDimensions);e=K(a.stride);var m=K(a.minTimeMachineZoomSubset);a=K(a.maxTimeMachineZoomSubset);a=null==m&&null==a?null:new Ng({start:null!=m?m:0,end:a});return new Bf({df:g,mg:b,scale:c,kg:f,Hf:d,dimensions:l,ng:e,Fg:a})},nl=function(a){if(null==a)return null;var b=new $f({height:0,width:0});"string"=== typeof a&&(-1!==a.indexOf("x")?a=a.split("x").map(Number):-1!==a.indexOf(",")&&(a=a.split(",").map(Number)));if(Array.isArray(a))if(2===a.length)b.height=a[0],b.width=a[1];else 
if(1===a.length)b.height=a[0],b.width=a[0];else throw Error("Unable to construct grid from dimensions: "+a);else if("number"!==typeof a||isNaN(a))if(t(a)&&null!=a.height&&null!=a.width)b.height=a.height,b.width=a.width;else throw Error("Unable to construct grid from dimensions: "+a);else b.height=a,b.width=a;return b},il=function(a){var b= null;null!=a.writePublicTiles&&(b=a.writePublicTiles?"PUBLIC":"DEFAULT_OBJECT_ACL");return new Xe({oc:J(a.outputBucket),ea:J(a.outputPrefix),pc:a.bucketCorsUris||null,permissions:b})},jl=function(a){return new jf({pf:J(a.driveFolder),ea:J(a.driveFileNamePrefix)})},kl=function(a){return new Ve({name:pk(a.assetId)})},ll=function(a){if(!a)return null;var b=a;"string"===typeof a&&(a=a.split(","));if(Array.isArray(a))return new Ig({Wf:(a||[]).map(sl)});throw Error("Unable to build ranking rule from rules: "+ @@ -639,7 +639,7 @@ delete e.crsTransform);break;case "EXPORT_TILES":b=Ko(e,b);e=b.fileFormat;null== a&&(a="CSV");a=a.toUpperCase();switch(a){case "CSV":a="CSV";break;case "JSON":case "GEOJSON":case "GEO_JSON":a="GEO_JSON";break;case "KML":a="KML";break;case "KMZ":a="KMZ";break;case "SHP":a="SHP";break;case "TF_RECORD":case "TF_RECORD_TABLE":case "TFRECORD":a="TF_RECORD_TABLE";break;default:throw Error("Invalid file format "+a+". Supported formats are: 'CSV', 'GeoJSON', 'KML', 'KMZ', 'SHP', and 'TFRecord'.");}e.fileFormat=a;e=Ko(e,b);break;case "EXPORT_VIDEO":e=Lo(e);e=Ko(e,b);null!=e.crsTransform&& (e.crs_transform=e.crsTransform,delete e.crsTransform);break;case "EXPORT_VIDEO_MAP":e=Lo(e);e.version=e.version||"V1";e.stride=e.stride||1;e.tileDimensions={width:e.tileWidth||256,height:e.tileHeight||256};e=Ko(e,b);break;case "EXPORT_CLASSIFIER":e=Ko(e,b);break;default:throw Error("Unknown export type: "+e.type);}if(d&&null!=e.region){d=e;b=e.region;if(b instanceof S)b=b.ke();else if("string"===typeof b)try{b=Oa(JSON.parse(b))}catch(g){throw Error("Invalid format for region property. Region must be GeoJSON LinearRing or Polygon specified as actual coordinates or serialized as a string. See Export documentation."); }if(!(t(b)&&"type"in b))try{new yn(b)}catch(g){try{new Bn(b)}catch(f){throw Error("Invalid format for region property. Region must be GeoJSON LinearRing or Polygon specified as actual coordinates or serialized as a string. 
See Export documentation.");}}b=JSON.stringify(b);d.region=b}return e},Ko=function(a,b){switch(b){case "GOOGLE_CLOUD_STORAGE":a.outputBucket=a.bucket||"";a.outputPrefix=a.fileNamePrefix||a.path||"";delete a.fileNamePrefix;delete a.path;delete a.bucket;break;case "ASSET":a.assetId= -a.assetId||"";break;case "FEATURE_VIEW":a.mapName=a.mapName||"";break;case "BIGQUERY":a.table=a.table||"";break;default:b=ta(a.folder);if(!Xa(["string","undefined"],b))throw Error('Error: toDrive "folder" parameter must be a string, but is of type '+b+".");a.driveFolder=a.folder||"";a.driveFileNamePrefix=a.fileNamePrefix||"";delete a.folder;delete a.fileNamePrefix}return a},Mo={GEO_TIFF:["cloudOptimized","fileDimensions","shardSize"],TF_RECORD_IMAGE:"patchDimensions kernelSize compressed maxFileSize defaultValue tensorDepths sequenceData collapseBands maskedThreshold".split(" ")}, +a.assetId||"";break;case "FEATURE_VIEW":a.mapName=a.mapName||"";break;case "BIGQUERY":a.table=a.table||"";break;default:b=ta(a.folder);if(!Xa(["string","undefined"],b))throw Error('Error: toDrive "folder" parameter must be a string, but is of type '+b+".");a.driveFolder=a.folder||"";a.driveFileNamePrefix=a.fileNamePrefix||"";delete a.folder;delete a.fileNamePrefix}return a},Mo={GEO_TIFF:["cloudOptimized","fileDimensions","noData","shardSize"],TF_RECORD_IMAGE:"patchDimensions kernelSize compressed maxFileSize defaultValue tensorDepths sequenceData collapseBands maskedThreshold".split(" ")}, No={GEO_TIFF:"tiff",TF_RECORD_IMAGE:"tfrecord"},Lo=function(a){a.videoOptions=a.framesPerSecond||5;a.maxFrames=a.maxFrames||1E3;a.maxPixels=a.maxPixels||1E8;var b=a.fileFormat;null==b&&(b="MP4");b=b.toUpperCase();switch(b){case "MP4":b="MP4";break;case "GIF":case "JIF":b="GIF";break;case "VP9":case "WEBM":b="VP9";break;default:throw Error("Invalid file format "+b+". 
Supported formats are: 'MP4', 'GIF', and 'WEBM'.");}a.fileFormat=b;return a},Jo=function(a,b){var c=a.formatOptions;if(null==c)return{}; if(Object.keys(a).some(function(m){return null!==c&&m in c}))throw Error("Parameter specified at least twice: once in config, and once in config format options.");a=No[b];for(var d=Mo[b],e={},g=n(Object.entries(c)),f=g.next();!f.done;f=g.next()){var l=n(f.value);f=l.next().value;l=l.next().value;if(!Xa(d,f))throw Error('"'+f+'" is not a valid option, the image format "'+b+'""may have the following options: '+(d.join(", ")+'".'));e[a+f[0].toUpperCase()+f.substring(1)]=Array.isArray(l)?l.join():l}return e}, Io=["image","collection","classifier"];var Oo=function(a){if(!(this instanceof Oo))return Jm(Oo,arguments);if(a instanceof Oo)return a;Po();if("number"===typeof a)M.call(this,null,null),this.Zc=a;else if(a instanceof M)M.call(this,a.I,a.args,a.T),this.Zc=null;else throw Error("Invalid argument specified for ee.Number(): "+a);};x(Oo,M);w("ee.Number",Oo);var Qo=!1,Po=function(){Qo||(dn(Oo,"Number","Number"),Qo=!0)};Oo.prototype.encode=function(a){return"number"===typeof this.Zc?this.Zc:Oo.L.encode.call(this,a)}; @@ -701,17 +701,18 @@ h.Al=function(){if(!this.mc){var a=u(function(d){this.mc||(dq(this.ga,this.fa,d) h.ga=null;h.Si=null;h.La=null;h.ni=null;var Cq=["load","abort","error"],Dq=function(){y.call(this);this.Ga=!1};p(Dq,y);Dq.prototype.setActive=function(a){this.Ga=a};Dq.prototype.isActive=function(){return this.Ga};var yq=function(a,b){tq.call(this,a,b)};p(yq,tq);yq.prototype.Xe=function(){return new Dq};yq.prototype.Ad=function(a){a.Ta()};yq.prototype.Pf=function(a){return!a.Ua&&!a.isActive()};var Eq=function(a,b,c,d,e){Kc.call(this,a,b,c,d,e);this.minZoom=d.minZoom||0;this.maxZoom=d.maxZoom||20;if(!window.google||!window.google.maps)throw Error("Google Maps API hasn't been initialized.");this.tileSize=d.tileSize||new google.maps.Size(256,256);this.name=d.name;this.wg=new Fc;this.Qf=1;this.wa=e||null};p(Eq,Kc);h=Eq.prototype;h.Ne=function(a){return Zb(this,"tileevent",a)};h.ag=function(a){gc(a)}; h.getTile=function(a,b,c){if(ba.y||a.y>=1<\n","\n"," Run in Google Colab\n","\n"," View source on GitHub"]},{"cell_type":"markdown","metadata":{"id":"AC8adBmw-5m3","colab_type":"text"},"source":["# Introduction\n","\n","This is an Earth Engine <> TensorFlow demonstration notebook. Specifically, this notebook shows:\n","\n","1. Exporting training/testing data from Earth Engine in TFRecord format.\n","2. Preparing the data for use in a TensorFlow model.\n","2. Training and validating a simple model (Keras `Sequential` neural network) in TensorFlow.\n","3. Making predictions on image data exported from Earth Engine in TFRecord format.\n","4. Ingesting classified image data to Earth Engine in TFRecord format.\n","\n","This is intended to demonstrate a complete i/o pipeline. 
For a workflow that uses a [Google AI Platform](https://cloud.google.com/ai-platform) hosted model making predictions interactively, see [this example notebook](http://colab.research.google.com/github/google/earthengine-api/blob/master/python/examples/ipynb/Earth_Engine_TensorFlow_AI_Platform.ipynb)."]},{"cell_type":"markdown","metadata":{"id":"KiTyR3FNlv-O","colab_type":"text"},"source":["# Setup software libraries\n","\n","Import software libraries and/or authenticate as necessary."]},{"cell_type":"markdown","metadata":{"id":"dEM3FP4YakJg","colab_type":"text"},"source":["## Authenticate to Colab and Cloud\n","\n","To read/write from a Google Cloud Storage bucket to which you have access, it's necessary to authenticate (as yourself). *This should be the same account you use to login to Earth Engine*. When you run the code below, it will display a link in the output to an authentication page in your browser. Follow the link to a page that will let you grant permission to the Cloud SDK to access your resources. Copy the code from the permissions page back into this notebook and press return to complete the process.\n","\n","(You may need to run this again if you get a credentials error later.)"]},{"cell_type":"code","metadata":{"id":"sYyTIPLsvMWl","colab_type":"code","cellView":"code","colab":{}},"source":["from google.colab import auth\n","auth.authenticate_user()"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"Ejxa1MQjEGv9","colab_type":"text"},"source":["## Authenticate to Earth Engine\n","\n","Authenticate to Earth Engine the same way you did to the Colab notebook. Specifically, run the code to display a link to a permissions page. This gives you access to your Earth Engine account. *This should be the same account you used to login to Cloud previously*. Copy the code from the Earth Engine permissions page back into the notebook and press return to complete the process."]},{"cell_type":"code","metadata":{"id":"HzwiVqbcmJIX","colab_type":"code","cellView":"code","colab":{}},"source":["import ee\n","ee.Authenticate()\n","ee.Initialize()"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"iJ70EsoWND_0","colab_type":"text"},"source":["## Test the TensorFlow installation\n","\n","Import the TensorFlow library and check the version."]},{"cell_type":"code","metadata":{"id":"i1PrYRLaVw_g","colab_type":"code","cellView":"code","colab":{}},"source":["import tensorflow as tf\n","print(tf.__version__)"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"b8Xcvjp6cLOL","colab_type":"text"},"source":["## Test the Folium installation\n","\n","We will use the Folium library for visualization. Import the library and check the version."]},{"cell_type":"code","metadata":{"id":"YiVgOXzBZJSn","colab_type":"code","colab":{}},"source":["import folium\n","print(folium.__version__)"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"DrXLkJC2QJdP","colab_type":"text"},"source":["# Define variables\n","\n","This set of global variables will be used throughout. For this demo, you must have a Cloud Storage bucket into which you can write files. ([learn more about creating Cloud Storage buckets](https://cloud.google.com/storage/docs/creating-buckets)). You'll also need to specify your Earth Engine username, i.e. 
`users/USER_NAME` on the [Code Editor](https://code.earthengine.google.com/) Assets tab."]},{"cell_type":"code","metadata":{"id":"GHTOc5YLQZ5B","colab_type":"code","colab":{}},"source":["# Your Earth Engine username. This is used to import a classified image\n","# into your Earth Engine assets folder.\n","USER_NAME = 'username'\n","\n","# Cloud Storage bucket into which training, testing and prediction \n","# datasets will be written. You must be able to write into this bucket.\n","OUTPUT_BUCKET = 'your-bucket'\n","\n","# Use Landsat 8 surface reflectance data for predictors.\n","L8SR = ee.ImageCollection('LANDSAT/LC08/C01/T1_SR')\n","# Use these bands for prediction.\n","BANDS = ['B2', 'B3', 'B4', 'B5', 'B6', 'B7']\n","\n","# This is a training/testing dataset of points with known land cover labels.\n","LABEL_DATA = ee.FeatureCollection('projects/google/demo_landcover_labels')\n","# The labels, consecutive integer indices starting from zero, are stored in\n","# this property, set on each point.\n","LABEL = 'landcover'\n","# Number of label values, i.e. number of classes in the classification.\n","N_CLASSES = 3\n","\n","# These names are used to specify properties in the export of\n","# training/testing data and to define the mapping between names and data\n","# when reading into TensorFlow datasets.\n","FEATURE_NAMES = list(BANDS)\n","FEATURE_NAMES.append(LABEL)\n","\n","# File names for the training and testing datasets. These TFRecord files\n","# will be exported from Earth Engine into the Cloud Storage bucket.\n","TRAIN_FILE_PREFIX = 'Training_demo'\n","TEST_FILE_PREFIX = 'Testing_demo'\n","file_extension = '.tfrecord.gz'\n","TRAIN_FILE_PATH = 'gs://' + OUTPUT_BUCKET + '/' + TRAIN_FILE_PREFIX + file_extension\n","TEST_FILE_PATH = 'gs://' + OUTPUT_BUCKET + '/' + TEST_FILE_PREFIX + file_extension\n","\n","# File name for the prediction (image) dataset. The trained model will read\n","# this dataset and make predictions in each pixel.\n","IMAGE_FILE_PREFIX = 'Image_pixel_demo_'\n","\n","# The output path for the classified image (i.e. predictions) TFRecord file.\n","OUTPUT_IMAGE_FILE = 'gs://' + OUTPUT_BUCKET + '/Classified_pixel_demo.TFRecord'\n","# Export imagery in this region.\n","EXPORT_REGION = ee.Geometry.Rectangle([-122.7, 37.3, -121.8, 38.00])\n","# The name of the Earth Engine asset to be created by importing\n","# the classified image from the TFRecord file in Cloud Storage.\n","OUTPUT_ASSET_ID = 'users/' + USER_NAME + '/Classified_pixel_demo'"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"ZcjQnHH8zT4q","colab_type":"text"},"source":["# Get Training and Testing data from Earth Engine\n","\n","To get data for a classification model of three classes (bare, vegetation, water), we need labels and the value of predictor variables for each labeled example. We've already generated some labels in Earth Engine. Specifically, these are visually interpreted points labeled \"bare,\" \"vegetation,\" or \"water\" for a very simple classification demo ([example script](https://code.earthengine.google.com/?scriptPath=Examples%3ADemos%2FClassification)). For predictor variables, we'll use [Landsat 8 surface reflectance imagery](https://developers.google.com/earth-engine/datasets/catalog/LANDSAT_LC08_C01_T1_SR), bands 2-7."]},{"cell_type":"markdown","metadata":{"id":"0EJfjgelSOpN","colab_type":"text"},"source":["## Prepare Landsat 8 imagery\n","\n","First, make a cloud-masked median composite of Landsat 8 surface reflectance imagery from 2018.
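(For reference before the masking cell below: the cloud mask relies on two flag bits of the Landsat 8 surface reflectance `pixel_qa` band, bit 3 for cloud shadow and bit 5 for cloud. A minimal sketch of the bit arithmetic it performs, in plain Python rather than Earth Engine objects:)

```python
# Bit 3 (value 8) flags cloud shadow; bit 5 (value 32) flags cloud.
CLOUD_SHADOW_BIT = 2 ** 3  # 8
CLOUD_BIT = 2 ** 5         # 32

def is_clear(pixel_qa_value):
    # Keep a pixel only when both flag bits are zero.
    return (pixel_qa_value & CLOUD_SHADOW_BIT) == 0 and (pixel_qa_value & CLOUD_BIT) == 0

assert is_clear(0b000000)      # no flags set: clear pixel
assert not is_clear(0b100000)  # cloud bit set: masked out
```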
Check the composite by visualizing with folium."]},{"cell_type":"code","metadata":{"id":"DJYucYe3SPPr","colab_type":"code","colab":{}},"source":["# Cloud masking function.\n","def maskL8sr(image):\n"," cloudShadowBitMask = ee.Number(2).pow(3).int()\n"," cloudsBitMask = ee.Number(2).pow(5).int()\n"," qa = image.select('pixel_qa')\n"," mask = qa.bitwiseAnd(cloudShadowBitMask).eq(0).And(\n"," qa.bitwiseAnd(cloudsBitMask).eq(0))\n"," return image.updateMask(mask).select(BANDS).divide(10000)\n","\n","# The image input data is a 2018 cloud-masked median composite.\n","image = L8SR.filterDate('2018-01-01', '2018-12-31').map(maskL8sr).median()\n","\n","# Use folium to visualize the imagery.\n","mapid = image.getMapId({'bands': ['B4', 'B3', 'B2'], 'min': 0, 'max': 0.3})\n","map = folium.Map(location=[38., -122.5])\n","\n","folium.TileLayer(\n"," tiles=mapid['tile_fetcher'].url_format,\n"," attr='Map Data © Google Earth Engine',\n"," overlay=True,\n"," name='median composite',\n"," ).add_to(map)\n","map.add_child(folium.LayerControl())\n","map"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"UEeyPf3zSPct","colab_type":"text"},"source":["## Add pixel values of the composite to labeled points\n","\n","Some training labels have already been collected for you. Load the labeled points from an existing Earth Engine asset. Each point in this table has a property called `landcover` that stores the label, encoded as an integer. Here we overlay the points on imagery to get predictor variables along with labels."]},{"cell_type":"code","metadata":{"id":"iOedOKyRExHE","colab_type":"code","colab":{}},"source":["# Sample the image at the points and add a random column.\n","sample = image.sampleRegions(\n"," collection=LABEL_DATA, properties=[LABEL], scale=30).randomColumn()\n","\n","# Partition the sample approximately 70-30.\n","training = sample.filter(ee.Filter.lt('random', 0.7))\n","testing = sample.filter(ee.Filter.gte('random', 0.7))\n","\n","from pprint import pprint\n","\n","# Print the first couple points to verify.\n","pprint({'training': training.first().getInfo()})\n","pprint({'testing': testing.first().getInfo()})"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"uNc7a2nRR4MI","colab_type":"text"},"source":["## Export the training and testing data\n","\n","Now that there's training and testing data in Earth Engine and you've inspected a couple examples to ensure that the information you need is present, it's time to materialize the datasets in a place where the TensorFlow model has access to them. You can do that by exporting the training and testing datasets to tables in TFRecord format ([learn more about TFRecord format](https://www.tensorflow.org/tutorials/load_data/tf-records)) in your Cloud Storage bucket."]},{"cell_type":"code","metadata":{"id":"Pb-aPvQc0Xvp","colab_type":"code","colab":{}},"source":["# Make sure you can see the output bucket. You must have write access.\n","print('Found Cloud Storage bucket.' 
if tf.io.gfile.exists('gs://' + OUTPUT_BUCKET) \n"," else 'Can not find output Cloud Storage bucket.')"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"Wtoqj0Db1TmJ","colab_type":"text"},"source":["Once you've verified the existence of the intended output bucket, run the exports."]},{"cell_type":"code","metadata":{"id":"TfVNQzg8R6Wy","colab_type":"code","colab":{}},"source":["# Create the tasks.\n","training_task = ee.batch.Export.table.toCloudStorage(\n"," collection=training,\n"," description='Training Export',\n"," fileNamePrefix=TRAIN_FILE_PREFIX,\n"," bucket=OUTPUT_BUCKET,\n"," fileFormat='TFRecord',\n"," selectors=FEATURE_NAMES)\n","\n","testing_task = ee.batch.Export.table.toCloudStorage(\n"," collection=testing,\n"," description='Testing Export',\n"," fileNamePrefix=TEST_FILE_PREFIX,\n"," bucket=OUTPUT_BUCKET,\n"," fileFormat='TFRecord',\n"," selectors=FEATURE_NAMES)"],"execution_count":0,"outputs":[]},{"cell_type":"code","metadata":{"id":"QF4WGIekaS2s","colab_type":"code","colab":{}},"source":["# Start the tasks.\n","training_task.start()\n","testing_task.start()"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"q7nFLuySISeC","colab_type":"text"},"source":["### Monitor task progress\n","\n","You can see all your Earth Engine tasks by listing them. Make sure the training and testing tasks are completed before continuing."]},{"cell_type":"code","metadata":{"id":"oEWvS5ekcEq0","colab_type":"code","colab":{}},"source":["# Print all tasks.\n","pprint(ee.batch.Task.list())"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"43-c0JNFI_m6","colab_type":"text"},"source":["### Check existence of the exported files\n","\n","If you've seen the status of the export tasks change to `COMPLETED`, then check for the existence of the files in the output Cloud Storage bucket."]},{"cell_type":"code","metadata":{"id":"YDZfNl6yc0Kj","colab_type":"code","colab":{}},"source":["print('Found training file.' if tf.io.gfile.exists(TRAIN_FILE_PATH) \n"," else 'No training file found.')\n","print('Found testing file.' if tf.io.gfile.exists(TEST_FILE_PATH) \n"," else 'No testing file found.')"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"NA8QA8oQVo8V","colab_type":"text"},"source":["## Export the imagery\n","\n","You can also export imagery using TFRecord format. 
Specifically, export whatever imagery you want to be classified by the trained model into the output Cloud Storage bucket."]},{"cell_type":"code","metadata":{"id":"tVNhJYacVpEw","colab_type":"code","colab":{}},"source":["# Specify patch and file dimensions.\n","image_export_options = {\n"," 'patchDimensions': [256, 256],\n"," 'maxFileSize': 104857600,\n"," 'compressed': True\n","}\n","\n","# Setup the task.\n","image_task = ee.batch.Export.image.toCloudStorage(\n"," image=image,\n"," description='Image Export',\n"," fileNamePrefix=IMAGE_FILE_PREFIX,\n"," bucket=OUTPUT_BUCKET,\n"," scale=30,\n"," fileFormat='TFRecord',\n"," region=EXPORT_REGION.toGeoJSON()['coordinates'],\n"," formatOptions=image_export_options,\n",")"],"execution_count":0,"outputs":[]},{"cell_type":"code","metadata":{"id":"6SweCkHDaNE3","colab_type":"code","colab":{}},"source":["# Start the task.\n","image_task.start()"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"JC8C53MRTG_E","colab_type":"text"},"source":["### Monitor task progress"]},{"cell_type":"code","metadata":{"id":"BmPHb779KOXm","colab_type":"code","colab":{}},"source":["# Print all tasks.\n","pprint(ee.batch.Task.list())"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"SrUhA1JKLONj","colab_type":"text"},"source":["It's also possible to monitor an individual task. Here we poll the task until it's done. If you do this, please put a `sleep()` in the loop to avoid making too many requests. Note that this will block until complete (you can always halt the execution of this cell)."]},{"cell_type":"code","metadata":{"id":"rKZeZswloP11","colab_type":"code","colab":{}},"source":["import time\n","\n","while image_task.active():\n"," print('Polling for task (id: {}).'.format(image_task.id))\n"," time.sleep(30)\n","print('Done with image export.')"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"9vWdH_wlZCEk","colab_type":"text"},"source":["# Data preparation and pre-processing\n","\n","Read data from the TFRecord file into a `tf.data.Dataset`. Pre-process the dataset to get it into a suitable format for input to the model."]},{"cell_type":"markdown","metadata":{"id":"LS4jGTrEfz-1","colab_type":"text"},"source":["## Read into a `tf.data.Dataset`\n","\n","Here we are going to read a file in Cloud Storage into a `tf.data.Dataset`. ([these TensorFlow docs](https://www.tensorflow.org/guide/data) explain more about reading data into a `Dataset`). Check that you can read examples from the file. The purpose here is to ensure that we can read from the file without an error. The actual content is not necessarily human readable.\n","\n"]},{"cell_type":"code","metadata":{"id":"T3PKyDQW8Vpx","colab_type":"code","cellView":"code","colab":{}},"source":["# Create a dataset from the TFRecord file in Cloud Storage.\n","train_dataset = tf.data.TFRecordDataset(TRAIN_FILE_PATH, compression_type='GZIP')\n","# Print the first record to check.\n","print(iter(train_dataset).next())"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"BrDYm-ibKR6t","colab_type":"text"},"source":["## Define the structure of your data\n","\n","For parsing the exported TFRecord files, `featuresDict` is a mapping between feature names (recall that `featureNames` contains the band and label names) and `float32` [`tf.io.FixedLenFeature`](https://www.tensorflow.org/api_docs/python/tf/io/FixedLenFeature) objects. This mapping is necessary for telling TensorFlow how to read data in a TFRecord file into tensors. 
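(Before committing to a parsing spec, it can help to peek at what an exported record actually contains. A short sketch, assuming the `train_dataset` created above; `tf.train.Example.FromString` is the standard protobuf deserializer:)

```python
# Inspect the feature names stored in the first raw record.
raw_record = next(iter(train_dataset))
example = tf.train.Example.FromString(raw_record.numpy())
print(sorted(example.features.feature.keys()))  # expect the band names plus 'landcover'
```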
Specifically, **all numeric data exported from Earth Engine is exported as `float32`**.\n","\n","(Note: *features* in the TensorFlow context (i.e. [`tf.train.Feature`](https://www.tensorflow.org/api_docs/python/tf/train/Feature)) are not to be confused with Earth Engine features (i.e. [`ee.Feature`](https://developers.google.com/earth-engine/api_docs#eefeature)), where the former is a protocol message type for serialized data input to the model and the latter is a geometry-based geographic data structure.)"]},{"cell_type":"code","metadata":{"id":"-6JVQV5HKHMZ","colab_type":"code","cellView":"code","colab":{}},"source":["# List of fixed-length features, all of which are float32.\n","columns = [\n"," tf.io.FixedLenFeature(shape=[1], dtype=tf.float32) for k in FEATURE_NAMES\n","]\n","\n","# Dictionary with names as keys, features as values.\n","features_dict = dict(zip(FEATURE_NAMES, columns))\n","\n","pprint(features_dict)"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"QNfaUPbcjuCO","colab_type":"text"},"source":["## Parse the dataset\n","\n","Now we need to make a parsing function for the data in the TFRecord files. The data comes in flattened 2D arrays per record and we want to use the first part of the array for input to the model and the last element of the array as the class label. The parsing function reads data from a serialized [`Example` proto](https://www.tensorflow.org/api_docs/python/tf/train/Example) into a dictionary in which the keys are the feature names and the values are the tensors storing the value of the features for that example. ([These TensorFlow docs](https://www.tensorflow.org/tutorials/load_data/tfrecord) explain more about reading `Example` protos from TFRecord files)."]},{"cell_type":"code","metadata":{"id":"x2Q0g3fBj2kD","colab_type":"code","cellView":"code","colab":{}},"source":["def parse_tfrecord(example_proto):\n"," \"\"\"The parsing function.\n","\n"," Read a serialized example into the structure defined by featuresDict.\n","\n"," Args:\n"," example_proto: a serialized Example.\n","\n"," Returns:\n"," A tuple of the predictors dictionary and the label, cast to an `int32`.\n"," \"\"\"\n"," parsed_features = tf.io.parse_single_example(example_proto, features_dict)\n"," labels = parsed_features.pop(LABEL)\n"," return parsed_features, tf.cast(labels, tf.int32)\n","\n","# Map the function over the dataset.\n","parsed_dataset = train_dataset.map(parse_tfrecord, num_parallel_calls=5)\n","\n","# Print the first parsed record to check.\n","pprint(iter(parsed_dataset).next())"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"Nb8EyNT4Xnhb","colab_type":"text"},"source":["Note that each record of the parsed dataset contains a tuple. The first element of the tuple is a dictionary with bands for keys and the numeric value of the bands for values. The second element of the tuple is a class label."]},{"cell_type":"markdown","metadata":{"id":"xLCsxWOuEBmE","colab_type":"text"},"source":["## Create additional features\n","\n","Another thing we might want to do as part of the input process is to create new features, for example NDVI, a vegetation index computed from reflectance in two spectral bands. Here are some helper functions for that."]},{"cell_type":"code","metadata":{"id":"lT6v2RM_EB1E","colab_type":"code","cellView":"code","colab":{}},"source":["def normalized_difference(a, b):\n"," \"\"\"Compute normalized difference of two inputs.\n","\n"," Compute (a - b) / (a + b). 
If the denominator is zero, add a small delta.\n","\n"," Args:\n"," a: an input tensor with shape=[1]\n"," b: an input tensor with shape=[1]\n","\n"," Returns:\n"," The normalized difference as a tensor.\n"," \"\"\"\n"," nd = (a - b) / (a + b)\n"," nd_inf = (a - b) / (a + b + 0.000001)\n"," return tf.where(tf.math.is_finite(nd), nd, nd_inf)\n","\n","def add_NDVI(features, label):\n"," \"\"\"Add NDVI to the dataset.\n"," Args:\n"," features: a dictionary of input tensors keyed by feature name.\n"," label: the target label\n","\n"," Returns:\n"," A tuple of the input dictionary with an NDVI tensor added and the label.\n"," \"\"\"\n"," features['NDVI'] = normalized_difference(features['B5'], features['B4'])\n"," return features, label"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"nEx1RAXOZQkS","colab_type":"text"},"source":["# Model setup\n","\n","The basic workflow for classification in TensorFlow is:\n","\n","1. Create the model.\n","2. Train the model (i.e. `fit()`).\n","3. Use the trained model for inference (i.e. `predict()`).\n","\n","Here we'll create a `Sequential` neural network model using Keras. This simple model is inspired by examples in:\n","\n","* [The TensorFlow Get Started tutorial](https://www.tensorflow.org/tutorials/)\n","* [The TensorFlow Keras guide](https://www.tensorflow.org/guide/keras#build_a_simple_model)\n","* [The Keras `Sequential` model examples](https://keras.io/getting-started/sequential-model-guide/#multilayer-perceptron-mlp-for-multi-class-softmax-classification)\n","\n","Note that the model used here is purely for demonstration purposes and hasn't gone through any performance tuning."]},{"cell_type":"markdown","metadata":{"id":"t9pWa54oG-xl","colab_type":"text"},"source":["## Create the Keras model\n","\n","Before we create the model, there's still a wee bit of pre-processing to get the data into the right input shape and a format that can be used with cross-entropy loss. Specifically, Keras expects a list of inputs and a one-hot vector for the class. (See [the Keras loss function docs](https://keras.io/losses/), [the TensorFlow categorical identity docs](https://www.tensorflow.org/guide/feature_columns#categorical_identity_column) and [the `tf.one_hot` docs](https://www.tensorflow.org/api_docs/python/tf/one_hot) for details). \n","\n","Here we will use a simple neural network model with a 64 node hidden layer, a dropout layer and an output layer. Once the dataset has been prepared, define the model, compile it, and fit it to the training data. See [the Keras `Sequential` model guide](https://keras.io/getting-started/sequential-model-guide/) for more details."]},{"cell_type":"code","metadata":{"id":"OCZq3VNpG--G","colab_type":"code","cellView":"code","colab":{}},"source":["from tensorflow import keras\n","\n","# Add NDVI.\n","input_dataset = parsed_dataset.map(add_NDVI)\n","\n","# Keras requires inputs as a tuple. Note that the inputs must be in the\n","# right shape.
Also note that to use the categorical_crossentropy loss,\n","# the label needs to be turned into a one-hot vector.\n","def to_tuple(inputs, label):\n"," return (tf.transpose(list(inputs.values())),\n"," tf.one_hot(indices=label, depth=N_CLASSES))\n","\n","# Map the to_tuple function, shuffle and batch.\n","input_dataset = input_dataset.map(to_tuple).batch(8)\n","\n","# Define the layers in the model.\n","model = tf.keras.models.Sequential([\n"," tf.keras.layers.Dense(64, activation=tf.nn.relu),\n"," tf.keras.layers.Dropout(0.2),\n"," tf.keras.layers.Dense(N_CLASSES, activation=tf.nn.softmax)\n","])\n","\n","# Compile the model with the specified loss function.\n","model.compile(optimizer=tf.keras.optimizers.Adam(),\n"," loss='categorical_crossentropy',\n"," metrics=['accuracy'])\n","\n","# Fit the model to the training data.\n","model.fit(x=input_dataset, epochs=10)"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"Pa4ex_4eKiyb","colab_type":"text"},"source":["## Check model accuracy on the test set\n","\n","Now that we have a trained model, we can evaluate it using the test dataset. To do that, read and prepare the test dataset in the same way as the training dataset. Here we specify a batch size of 1 so that each example in the test set is used exactly once to compute model accuracy. For model steps, just specify a number larger than the test dataset size (ignore the warning)."]},{"cell_type":"code","metadata":{"id":"tE6d7FsrMa1p","colab_type":"code","cellView":"code","colab":{}},"source":["test_dataset = (\n"," tf.data.TFRecordDataset(TEST_FILE_PATH, compression_type='GZIP')\n"," .map(parse_tfrecord, num_parallel_calls=5)\n"," .map(add_NDVI)\n"," .map(to_tuple)\n"," .batch(1))\n","\n","model.evaluate(test_dataset)"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"nhHrnv3VR0DU","colab_type":"text"},"source":["# Use the trained model to classify an image from Earth Engine\n","\n","Now it's time to classify the image that was exported from Earth Engine. If the exported image is large, it will be split into multiple TFRecord files in its destination folder. There will also be a JSON sidecar file called \"the mixer\" that describes the format and georeferencing of the image. Here we will find the image files and the mixer file, getting some info out of the mixer that will be useful during model inference."]},{"cell_type":"markdown","metadata":{"id":"nmTayDitZgQ5","colab_type":"text"},"source":["## Find the image files and JSON mixer file in Cloud Storage\n","\n","Use `gsutil` to locate the files of interest in the output Cloud Storage bucket. 
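(As an alternative to the `gsutil` listing in the next cell, the same lookup can stay in Python; a sketch using TensorFlow's Cloud-Storage-aware file API, assuming the `OUTPUT_BUCKET` and `IMAGE_FILE_PREFIX` variables defined earlier:)

```python
# List the image export outputs without shelling out to gsutil.
exported = tf.io.gfile.glob('gs://' + OUTPUT_BUCKET + '/' + IMAGE_FILE_PREFIX + '*')
image_files_list = sorted(f for f in exported if f.endswith('.tfrecord.gz'))
json_file = next(f for f in exported if f.endswith('.json'))
```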
Check to make sure your image export task finished before running the following."]},{"cell_type":"code","metadata":{"id":"oUv9WMpcVp8E","colab_type":"code","colab":{}},"source":["# Get a list of all the files in the output bucket.\n","files_list = !gsutil ls 'gs://'{OUTPUT_BUCKET}\n","# Get only the files generated by the image export.\n","exported_files_list = [s for s in files_list if IMAGE_FILE_PREFIX in s]\n","\n","# Get the list of image files and the JSON mixer file.\n","image_files_list = []\n","json_file = None\n","for f in exported_files_list:\n"," if f.endswith('.tfrecord.gz'):\n"," image_files_list.append(f)\n"," elif f.endswith('.json'):\n"," json_file = f\n","\n","# Make sure the files are in the right order.\n","image_files_list.sort()\n","\n","pprint(image_files_list)\n","print(json_file)"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"RcjYG9fk53xL","colab_type":"text"},"source":["## Read the JSON mixer file\n","\n","The mixer contains metadata and georeferencing information for the exported patches, each of which is in a different file. Read the mixer to get some information needed for prediction."]},{"cell_type":"code","metadata":{"id":"Gn7Dr0AAd93_","colab_type":"code","colab":{}},"source":["import json\n","\n","# Load the contents of the mixer file to a JSON object.\n","json_text = !gsutil cat {json_file}\n","# Get a single string w/ newlines from the IPython.utils.text.SList\n","mixer = json.loads(json_text.nlstr)\n","pprint(mixer)"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"6xyzyPPJwpVI","colab_type":"text"},"source":["## Read the image files into a dataset\n","\n","You can feed the list of files (`imageFilesList`) directly to the `TFRecordDataset` constructor to make a combined dataset on which to perform inference. The input needs to be preprocessed differently than the training and testing. 
Mainly, this is because the pixels are written into records as patches, so we need to read the patches in as one big tensor (one patch for each band), then flatten them into lots of little tensors."]},{"cell_type":"code","metadata":{"id":"tn8Kj3VfwpiJ","colab_type":"code","cellView":"code","colab":{}},"source":["# Get relevant info from the JSON mixer file.\n","patch_width = mixer['patchDimensions'][0]\n","patch_height = mixer['patchDimensions'][1]\n","patches = mixer['totalPatches']\n","patch_dimensions_flat = [patch_width * patch_height, 1]\n","\n","# Note that the tensors are in the shape of a patch, one patch for each band.\n","image_columns = [\n"," tf.io.FixedLenFeature(shape=patch_dimensions_flat, dtype=tf.float32) \n"," for k in BANDS\n","]\n","\n","# Parsing dictionary.\n","image_features_dict = dict(zip(BANDS, image_columns))\n","\n","# Note that you can make one dataset from many files by specifying a list.\n","image_dataset = tf.data.TFRecordDataset(image_files_list, compression_type='GZIP')\n","\n","# Parsing function.\n","def parse_image(example_proto):\n"," return tf.io.parse_single_example(example_proto, image_features_dict)\n","\n","# Parse the data into tensors, one long tensor per patch.\n","image_dataset = image_dataset.map(parse_image, num_parallel_calls=5)\n","\n","# Break our long tensors into many little ones.\n","image_dataset = image_dataset.flat_map(\n"," lambda features: tf.data.Dataset.from_tensor_slices(features)\n",")\n","\n","# Add additional features (NDVI).\n","image_dataset = image_dataset.map(\n"," # Add NDVI to a feature that doesn't have a label.\n"," lambda features: add_NDVI(features, None)[0]\n",")\n","\n","# Turn the dictionary in each record into a tuple without a label.\n","image_dataset = image_dataset.map(\n"," lambda data_dict: (tf.transpose(list(data_dict.values())), )\n",")\n","\n","# Turn each patch into a batch.\n","image_dataset = image_dataset.batch(patch_width * patch_height)"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"_2sfRemRRDkV","colab_type":"text"},"source":["## Generate predictions for the image pixels\n","\n","To get predictions in each pixel, run the image dataset through the trained model using `model.predict()`. Print the first prediction to see that the output is a list of the three class probabilities for each pixel. Running all predictions might take a while."]},{"cell_type":"code","metadata":{"id":"8VGhmiP_REBP","colab_type":"code","colab":{}},"source":["# Run prediction in batches, with as many steps as there are patches.\n","predictions = model.predict(image_dataset, steps=patches, verbose=1)\n","\n","# Note that the predictions come as a numpy array. Check the first one.\n","print(predictions[0])"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"bPU2VlPOikAy","colab_type":"text"},"source":["## Write the predictions to a TFRecord file\n","\n","Now that there's a list of class probabilities in `predictions`, it's time to write them back into a file, optionally including a class label, which is simply the index of the maximum probability. We'll write directly from TensorFlow to a file in the output Cloud Storage bucket.\n","\n","Iterate over the list, compute the class label, and write the class and the probabilities in patches. Specifically, we need to write the pixels into the file as patches in the same order they came out. The records are written as serialized `tf.train.Example` protos.
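(A quick sanity check before writing anything: there should be exactly one prediction per exported pixel. A sketch, assuming `predictions`, `patches`, `patch_width`, and `patch_height` from the cells above:)

```python
# One prediction per pixel, so the total should be patches * patch_width * patch_height.
expected = patches * patch_width * patch_height
assert len(predictions) == expected, (len(predictions), expected)
```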
This might take a while."]},{"cell_type":"code","metadata":{"id":"AkorbsEHepzJ","colab_type":"code","colab":{}},"source":["print('Writing to file ' + OUTPUT_IMAGE_FILE)"],"execution_count":0,"outputs":[]},{"cell_type":"code","metadata":{"id":"kATMknHc0qeR","colab_type":"code","cellView":"code","colab":{}},"source":["# Instantiate the writer.\n","writer = tf.io.TFRecordWriter(OUTPUT_IMAGE_FILE)\n","\n","# Every patch-worth of predictions we'll dump an example into the output\n","# file with a single feature that holds our predictions. Since our predictions\n","# are already in the order of the exported data, the patches we create here\n","# will also be in the right order.\n","patch = [[], [], [], []]\n","cur_patch = 1\n","for prediction in predictions:\n"," patch[0].append(tf.argmax(prediction, 1))\n"," patch[1].append(prediction[0][0])\n"," patch[2].append(prediction[0][1])\n"," patch[3].append(prediction[0][2])\n"," # Once we've seen a patches-worth of class_ids...\n"," if (len(patch[0]) == patch_width * patch_height):\n"," print('Done with patch ' + str(cur_patch) + ' of ' + str(patches) + '...')\n"," # Create an example\n"," example = tf.train.Example(\n"," features=tf.train.Features(\n"," feature={\n"," 'prediction': tf.train.Feature(\n"," int64_list=tf.train.Int64List(\n"," value=patch[0])),\n"," 'bareProb': tf.train.Feature(\n"," float_list=tf.train.FloatList(\n"," value=patch[1])),\n"," 'vegProb': tf.train.Feature(\n"," float_list=tf.train.FloatList(\n"," value=patch[2])),\n"," 'waterProb': tf.train.Feature(\n"," float_list=tf.train.FloatList(\n"," value=patch[3])),\n"," }\n"," )\n"," )\n"," # Write the example to the file and clear our patch array so it's ready for\n"," # another batch of class ids\n"," writer.write(example.SerializeToString())\n"," patch = [[], [], [], []]\n"," cur_patch += 1\n","\n","writer.close()"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"1K_1hKs0aBdA","colab_type":"text"},"source":["# Upload the classifications to an Earth Engine asset"]},{"cell_type":"markdown","metadata":{"id":"M6sNZXWOSa82","colab_type":"text"},"source":["## Verify the existence of the predictions file\n","\n","At this stage, there should be a predictions TFRecord file sitting in the output Cloud Storage bucket. Use the `gsutil` command to verify that the predictions image (and associated mixer JSON) exist and have non-zero size."]},{"cell_type":"code","metadata":{"id":"6ZVWDPefUCgA","colab_type":"code","colab":{}},"source":["!gsutil ls -l {OUTPUT_IMAGE_FILE}"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"2ZyCo297Clcx","colab_type":"text"},"source":["## Upload the classified image to Earth Engine\n","\n","Upload the image to Earth Engine directly from the Cloud Storage bucket with the [`earthengine` command](https://developers.google.com/earth-engine/command_line#upload). 
Provide both the image TFRecord file and the JSON file as arguments to `earthengine upload`."]},{"cell_type":"code","metadata":{"id":"NXulMNl9lTDv","colab_type":"code","cellView":"code","colab":{}},"source":["print('Uploading to ' + OUTPUT_ASSET_ID)"],"execution_count":0,"outputs":[]},{"cell_type":"code","metadata":{"id":"V64tcVxsO5h6","colab_type":"code","colab":{}},"source":["# Start the upload.\n","!earthengine upload image --asset_id={OUTPUT_ASSET_ID} --pyramiding_policy=mode {OUTPUT_IMAGE_FILE} {json_file}"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"Yt4HyhUU_Bal","colab_type":"text"},"source":["## Check the status of the asset ingestion\n","\n","You can also use the Earth Engine API to check the status of your asset upload. It might take a while. The upload of the image is an asset ingestion task."]},{"cell_type":"code","metadata":{"id":"_vB-gwGhl_3C","colab_type":"code","cellView":"code","colab":{}},"source":["ee.batch.Task.list()"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"vvXvy9GDhM-p","colab_type":"text"},"source":["## View the ingested asset\n","\n","Display the vector of class probabilities as an RGB image with colors corresponding to the probability of bare, vegetation, water in a pixel. Also display the winning class using the same color palette."]},{"cell_type":"code","metadata":{"id":"kEkVxIyJiFd4","colab_type":"code","colab":{}},"source":["predictions_image = ee.Image(OUTPUT_ASSET_ID)\n","\n","prediction_vis = {\n"," 'bands': 'prediction',\n"," 'min': 0,\n"," 'max': 2,\n"," 'palette': ['red', 'green', 'blue']\n","}\n","probability_vis = {'bands': ['bareProb', 'vegProb', 'waterProb'], 'max': 0.5}\n","\n","prediction_map_id = predictions_image.getMapId(prediction_vis)\n","probability_map_id = predictions_image.getMapId(probability_vis)\n","\n","map = folium.Map(location=[37.6413, -122.2582])\n","folium.TileLayer(\n"," tiles=prediction_map_id['tile_fetcher'].url_format,\n"," attr='Map Data © Google Earth Engine',\n"," overlay=True,\n"," name='prediction',\n",").add_to(map)\n","folium.TileLayer(\n"," tiles=probability_map_id['tile_fetcher'].url_format,\n"," attr='Map Data © Google Earth Engine',\n"," overlay=True,\n"," name='probability',\n",").add_to(map)\n","map.add_child(folium.LayerControl())\n","map"],"execution_count":0,"outputs":[]}]} \ No newline at end of file +{"nbformat":4,"nbformat_minor":0,"metadata":{"colab":{"name":"TF_demo1_keras.ipynb","provenance":[],"private_outputs":true,"collapsed_sections":[],"toc_visible":true},"kernelspec":{"name":"python3","display_name":"Python 3"},"accelerator":"GPU"},"cells":[{"cell_type":"code","metadata":{"id":"fSIfBsgi8dNK","colab_type":"code","colab":{}},"source":["#@title Copyright 2020 Google LLC. { display-mode: \"form\" }\n","# Licensed under the Apache License, Version 2.0 (the \"License\");\n","# you may not use this file except in compliance with the License.\n","# You may obtain a copy of the License at\n","#\n","# https://www.apache.org/licenses/LICENSE-2.0\n","#\n","# Unless required by applicable law or agreed to in writing, software\n","# distributed under the License is distributed on an \"AS IS\" BASIS,\n","# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n","# See the License for the specific language governing permissions and\n","# limitations under the License."],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"aV1xZ1CPi3Nw","colab_type":"text"},"source":["
\n","\n"," Run in Google Colab\n","\n"," View source on GitHub
"]},{"cell_type":"markdown","metadata":{"id":"AC8adBmw-5m3","colab_type":"text"},"source":["# Introduction\n","\n","This is an Earth Engine <> TensorFlow demonstration notebook. Specifically, this notebook shows:\n","\n","1. Exporting training/testing data from Earth Engine in TFRecord format.\n","2. Preparing the data for use in a TensorFlow model.\n","2. Training and validating a simple model (Keras `Sequential` neural network) in TensorFlow.\n","3. Making predictions on image data exported from Earth Engine in TFRecord format.\n","4. Ingesting classified image data to Earth Engine in TFRecord format.\n","\n","This is intended to demonstrate a complete i/o pipeline. For a workflow that uses a [Google AI Platform](https://cloud.google.com/ai-platform) hosted model making predictions interactively, see [this example notebook](http://colab.research.google.com/github/google/earthengine-api/blob/master/python/examples/ipynb/Earth_Engine_TensorFlow_AI_Platform.ipynb)."]},{"cell_type":"markdown","metadata":{"id":"KiTyR3FNlv-O","colab_type":"text"},"source":["# Setup software libraries\n","\n","Import software libraries and/or authenticate as necessary."]},{"cell_type":"markdown","metadata":{"id":"dEM3FP4YakJg","colab_type":"text"},"source":["## Authenticate to Colab and Cloud\n","\n","To read/write from a Google Cloud Storage bucket to which you have access, it's necessary to authenticate (as yourself). *This should be the same account you use to login to Earth Engine*. When you run the code below, it will display a link in the output to an authentication page in your browser. Follow the link to a page that will let you grant permission to the Cloud SDK to access your resources. Copy the code from the permissions page back into this notebook and press return to complete the process.\n","\n","(You may need to run this again if you get a credentials error later.)"]},{"cell_type":"code","metadata":{"id":"sYyTIPLsvMWl","colab_type":"code","cellView":"code","colab":{}},"source":["from google.colab import auth\n","auth.authenticate_user()"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"Ejxa1MQjEGv9","colab_type":"text"},"source":["## Authenticate to Earth Engine\n","\n","Authenticate to Earth Engine the same way you did to the Colab notebook. Specifically, run the code to display a link to a permissions page. This gives you access to your Earth Engine account. *This should be the same account you used to login to Cloud previously*. Copy the code from the Earth Engine permissions page back into the notebook and press return to complete the process."]},{"cell_type":"code","metadata":{"id":"HzwiVqbcmJIX","colab_type":"code","cellView":"code","colab":{}},"source":["import ee\n","ee.Authenticate()\n","ee.Initialize()"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"iJ70EsoWND_0","colab_type":"text"},"source":["## Test the TensorFlow installation\n","\n","Import the TensorFlow library and check the version."]},{"cell_type":"code","metadata":{"id":"i1PrYRLaVw_g","colab_type":"code","cellView":"code","colab":{}},"source":["import tensorflow as tf\n","print(tf.__version__)"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"b8Xcvjp6cLOL","colab_type":"text"},"source":["## Test the Folium installation\n","\n","We will use the Folium library for visualization. 
Import the library and check the version."]},{"cell_type":"code","metadata":{"id":"YiVgOXzBZJSn","colab_type":"code","colab":{}},"source":["import folium\n","print(folium.__version__)"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"DrXLkJC2QJdP","colab_type":"text"},"source":["# Define variables\n","\n","This set of global variables will be used throughout. For this demo, you must have a Cloud Storage bucket into which you can write files. ([learn more about creating Cloud Storage buckets](https://cloud.google.com/storage/docs/creating-buckets)). You'll also need to specify your Earth Engine username, i.e. `users/USER_NAME` on the [Code Editor](https://code.earthengine.google.com/) Assets tab."]},{"cell_type":"code","metadata":{"id":"GHTOc5YLQZ5B","colab_type":"code","colab":{}},"source":["# Your Earth Engine username. This is used to import a classified image\n","# into your Earth Engine assets folder.\n","USER_NAME = 'username'\n","\n","# Cloud Storage bucket into which training, testing and prediction \n","# datasets will be written. You must be able to write into this bucket.\n","OUTPUT_BUCKET = 'your-bucket'\n","\n","# Use Landsat 8 surface reflectance data for predictors.\n","L8SR = ee.ImageCollection('LANDSAT/LC08/C01/T1_SR')\n","# Use these bands for prediction.\n","BANDS = ['B2', 'B3', 'B4', 'B5', 'B6', 'B7']\n","\n","# This is a training/testing dataset of points with known land cover labels.\n","LABEL_DATA = ee.FeatureCollection('projects/google/demo_landcover_labels')\n","# The labels, consecutive integer indices starting from zero, are stored in\n","# this property, set on each point.\n","LABEL = 'landcover'\n","# Number of label values, i.e. number of classes in the classification.\n","N_CLASSES = 3\n","\n","# These names are used to specify properties in the export of\n","# training/testing data and to define the mapping between names and data\n","# when reading into TensorFlow datasets.\n","FEATURE_NAMES = list(BANDS)\n","FEATURE_NAMES.append(LABEL)\n","\n","# File names for the training and testing datasets. These TFRecord files\n","# will be exported from Earth Engine into the Cloud Storage bucket.\n","TRAIN_FILE_PREFIX = 'Training_demo'\n","TEST_FILE_PREFIX = 'Testing_demo'\n","file_extension = '.tfrecord.gz'\n","TRAIN_FILE_PATH = 'gs://' + OUTPUT_BUCKET + '/' + TRAIN_FILE_PREFIX + file_extension\n","TEST_FILE_PATH = 'gs://' + OUTPUT_BUCKET + '/' + TEST_FILE_PREFIX + file_extension\n","\n","# File name for the prediction (image) dataset. The trained model will read\n","# this dataset and make predictions in each pixel.\n","IMAGE_FILE_PREFIX = 'Image_pixel_demo_'\n","\n","# The output path for the classified image (i.e. predictions) TFRecord file.\n","OUTPUT_IMAGE_FILE = 'gs://' + OUTPUT_BUCKET + '/Classified_pixel_demo.TFRecord'\n","# Export imagery in this region.\n","EXPORT_REGION = ee.Geometry.Rectangle([-122.7, 37.3, -121.8, 38.00])\n","# The name of the Earth Engine asset to be created by importing\n","# the classified image from the TFRecord file in Cloud Storage.\n","OUTPUT_ASSET_ID = 'users/' + USER_NAME + '/Classified_pixel_demo'"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"ZcjQnHH8zT4q","colab_type":"text"},"source":["# Get Training and Testing data from Earth Engine\n","\n","To get data for a classification model of three classes (bare, vegetation, water), we need labels and the value of predictor variables for each labeled example. We've already generated some labels in Earth Engine. 
Specifically, these are visually interpreted points labeled \"bare,\" \"vegetation,\" or \"water\" for a very simple classification demo ([example script](https://code.earthengine.google.com/?scriptPath=Examples%3ADemos%2FClassification)). For predictor variables, we'll use [Landsat 8 surface reflectance imagery](https://developers.google.com/earth-engine/datasets/catalog/LANDSAT_LC08_C01_T1_SR), bands 2-7."]},{"cell_type":"markdown","metadata":{"id":"0EJfjgelSOpN","colab_type":"text"},"source":["## Prepare Landsat 8 imagery\n","\n","First, make a cloud-masked median composite of Landsat 8 surface reflectance imagery from 2018. Check the composite by visualizing with folium."]},{"cell_type":"code","metadata":{"id":"DJYucYe3SPPr","colab_type":"code","colab":{}},"source":["# Cloud masking function.\n","def maskL8sr(image):\n"," cloudShadowBitMask = ee.Number(2).pow(3).int()\n"," cloudsBitMask = ee.Number(2).pow(5).int()\n"," qa = image.select('pixel_qa')\n"," mask = qa.bitwiseAnd(cloudShadowBitMask).eq(0).And(\n"," qa.bitwiseAnd(cloudsBitMask).eq(0))\n"," return image.updateMask(mask).select(BANDS).divide(10000)\n","\n","# The image input data is a 2018 cloud-masked median composite.\n","image = L8SR.filterDate('2018-01-01', '2018-12-31').map(maskL8sr).median()\n","\n","# Use folium to visualize the imagery.\n","mapid = image.getMapId({'bands': ['B4', 'B3', 'B2'], 'min': 0, 'max': 0.3})\n","map = folium.Map(location=[38., -122.5])\n","\n","folium.TileLayer(\n"," tiles=mapid['tile_fetcher'].url_format,\n"," attr='Map Data © Google Earth Engine',\n"," overlay=True,\n"," name='median composite',\n"," ).add_to(map)\n","map.add_child(folium.LayerControl())\n","map"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"UEeyPf3zSPct","colab_type":"text"},"source":["## Add pixel values of the composite to labeled points\n","\n","Some training labels have already been collected for you. Load the labeled points from an existing Earth Engine asset. Each point in this table has a property called `landcover` that stores the label, encoded as an integer. Here we overlay the points on imagery to get predictor variables along with labels."]},{"cell_type":"code","metadata":{"id":"iOedOKyRExHE","colab_type":"code","colab":{}},"source":["# Sample the image at the points and add a random column.\n","sample = image.sampleRegions(\n"," collection=LABEL_DATA, properties=[LABEL], scale=30).randomColumn()\n","\n","# Partition the sample approximately 70-30.\n","training = sample.filter(ee.Filter.lt('random', 0.7))\n","testing = sample.filter(ee.Filter.gte('random', 0.7))\n","\n","from pprint import pprint\n","\n","# Print the first couple points to verify.\n","pprint({'training': training.first().getInfo()})\n","pprint({'testing': testing.first().getInfo()})"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"uNc7a2nRR4MI","colab_type":"text"},"source":["## Export the training and testing data\n","\n","Now that there's training and testing data in Earth Engine and you've inspected a couple examples to ensure that the information you need is present, it's time to materialize the datasets in a place where the TensorFlow model has access to them. 
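(As a quick optional check before exporting, not required for the workflow, you can confirm the approximate sizes of the two splits; note that each `size().getInfo()` call costs a round trip to Earth Engine.)

```python
# Optional sanity check: confirm the approximate 70/30 split sizes.
print('Training examples:', training.size().getInfo())
print('Testing examples:', testing.size().getInfo())
```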
You can do that by exporting the training and testing datasets to tables in TFRecord format ([learn more about TFRecord format](https://www.tensorflow.org/tutorials/load_data/tf-records)) in your Cloud Storage bucket."]},{"cell_type":"code","metadata":{"id":"Pb-aPvQc0Xvp","colab_type":"code","colab":{}},"source":["# Make sure you can see the output bucket. You must have write access.\n","print('Found Cloud Storage bucket.' if tf.io.gfile.exists('gs://' + OUTPUT_BUCKET) \n"," else 'Can not find output Cloud Storage bucket.')"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"Wtoqj0Db1TmJ","colab_type":"text"},"source":["Once you've verified the existence of the intended output bucket, run the exports."]},{"cell_type":"code","metadata":{"id":"TfVNQzg8R6Wy","colab_type":"code","colab":{}},"source":["# Create the tasks.\n","training_task = ee.batch.Export.table.toCloudStorage(\n"," collection=training,\n"," description='Training Export',\n"," fileNamePrefix=TRAIN_FILE_PREFIX,\n"," bucket=OUTPUT_BUCKET,\n"," fileFormat='TFRecord',\n"," selectors=FEATURE_NAMES)\n","\n","testing_task = ee.batch.Export.table.toCloudStorage(\n"," collection=testing,\n"," description='Testing Export',\n"," fileNamePrefix=TEST_FILE_PREFIX,\n"," bucket=OUTPUT_BUCKET,\n"," fileFormat='TFRecord',\n"," selectors=FEATURE_NAMES)"],"execution_count":0,"outputs":[]},{"cell_type":"code","metadata":{"id":"QF4WGIekaS2s","colab_type":"code","colab":{}},"source":["# Start the tasks.\n","training_task.start()\n","testing_task.start()"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"q7nFLuySISeC","colab_type":"text"},"source":["### Monitor task progress\n","\n","You can see all your Earth Engine tasks by listing them. Make sure the training and testing tasks are completed before continuing."]},{"cell_type":"code","metadata":{"id":"oEWvS5ekcEq0","colab_type":"code","colab":{}},"source":["# Print all tasks.\n","pprint(ee.batch.Task.list())"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"43-c0JNFI_m6","colab_type":"text"},"source":["### Check existence of the exported files\n","\n","If you've seen the status of the export tasks change to `COMPLETED`, then check for the existence of the files in the output Cloud Storage bucket."]},{"cell_type":"code","metadata":{"id":"YDZfNl6yc0Kj","colab_type":"code","colab":{}},"source":["print('Found training file.' if tf.io.gfile.exists(TRAIN_FILE_PATH) \n"," else 'No training file found.')\n","print('Found testing file.' if tf.io.gfile.exists(TEST_FILE_PATH) \n"," else 'No testing file found.')"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"NA8QA8oQVo8V","colab_type":"text"},"source":["## Export the imagery\n","\n","You can also export imagery using TFRecord format. 
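(If you prefer to block until the table exports finish instead of re-listing tasks by hand, a minimal polling sketch follows; it uses the same pattern the notebook applies to the image export task later, and assumes the `training_task` and `testing_task` objects created above.)

```python
import time

# Block until both table export tasks leave the active state.
for task in [training_task, testing_task]:
    while task.active():
        print('Polling for task (id: {}).'.format(task.id))
        time.sleep(30)
print('Done with table exports.')
```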
Specifically, export whatever imagery you want to be classified by the trained model into the output Cloud Storage bucket."]},{"cell_type":"code","metadata":{"id":"tVNhJYacVpEw","colab_type":"code","colab":{}},"source":["# Specify patch and file dimensions.\n","image_export_options = {\n"," 'patchDimensions': [256, 256],\n"," 'maxFileSize': 104857600,\n"," 'compressed': True\n","}\n","\n","# Setup the task.\n","image_task = ee.batch.Export.image.toCloudStorage(\n"," image=image,\n"," description='Image Export',\n"," fileNamePrefix=IMAGE_FILE_PREFIX,\n"," bucket=OUTPUT_BUCKET,\n"," scale=30,\n"," fileFormat='TFRecord',\n"," region=EXPORT_REGION.toGeoJSON()['coordinates'],\n"," formatOptions=image_export_options,\n",")"],"execution_count":0,"outputs":[]},{"cell_type":"code","metadata":{"id":"6SweCkHDaNE3","colab_type":"code","colab":{}},"source":["# Start the task.\n","image_task.start()"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"JC8C53MRTG_E","colab_type":"text"},"source":["### Monitor task progress"]},{"cell_type":"code","metadata":{"id":"BmPHb779KOXm","colab_type":"code","colab":{}},"source":["# Print all tasks.\n","pprint(ee.batch.Task.list())"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"SrUhA1JKLONj","colab_type":"text"},"source":["It's also possible to monitor an individual task. Here we poll the task until it's done. If you do this, please put a `sleep()` in the loop to avoid making too many requests. Note that this will block until complete (you can always halt the execution of this cell)."]},{"cell_type":"code","metadata":{"id":"rKZeZswloP11","colab_type":"code","colab":{}},"source":["import time\n","\n","while image_task.active():\n"," print('Polling for task (id: {}).'.format(image_task.id))\n"," time.sleep(30)\n","print('Done with image export.')"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"9vWdH_wlZCEk","colab_type":"text"},"source":["# Data preparation and pre-processing\n","\n","Read data from the TFRecord file into a `tf.data.Dataset`. Pre-process the dataset to get it into a suitable format for input to the model."]},{"cell_type":"markdown","metadata":{"id":"LS4jGTrEfz-1","colab_type":"text"},"source":["## Read into a `tf.data.Dataset`\n","\n","Here we are going to read a file in Cloud Storage into a `tf.data.Dataset`. ([these TensorFlow docs](https://www.tensorflow.org/guide/data) explain more about reading data into a `Dataset`). Check that you can read examples from the file. The purpose here is to ensure that we can read from the file without an error. The actual content is not necessarily human readable.\n","\n"]},{"cell_type":"code","metadata":{"id":"T3PKyDQW8Vpx","colab_type":"code","cellView":"code","colab":{}},"source":["# Create a dataset from the TFRecord file in Cloud Storage.\n","train_dataset = tf.data.TFRecordDataset(TRAIN_FILE_PATH, compression_type='GZIP')\n","# Print the first record to check.\n","print(iter(train_dataset).next())"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"BrDYm-ibKR6t","colab_type":"text"},"source":["## Define the structure of your data\n","\n","For parsing the exported TFRecord files, `featuresDict` is a mapping between feature names (recall that `featureNames` contains the band and label names) and `float32` [`tf.io.FixedLenFeature`](https://www.tensorflow.org/api_docs/python/tf/io/FixedLenFeature) objects. This mapping is necessary for telling TensorFlow how to read data in a TFRecord file into tensors. 
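(If you are ever unsure what an exported file actually contains, an optional aside: decode a raw record as an `Example` proto and list its feature names. This assumes the `train_dataset` created above; expect the six band names plus `landcover`.)

```python
# Decode the first raw record as an Example proto and list its feature
# names; expect the six band names plus 'landcover'.
raw_record = next(iter(train_dataset))
example = tf.train.Example.FromString(raw_record.numpy())
print(sorted(example.features.feature.keys()))
```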
Specifically, **all numeric data exported from Earth Engine is exported as `float32`**.\n","\n","(Note: *features* in the TensorFlow context (i.e. [`tf.train.Feature`](https://www.tensorflow.org/api_docs/python/tf/train/Feature)) are not to be confused with Earth Engine features (i.e. [`ee.Feature`](https://developers.google.com/earth-engine/api_docs#eefeature)), where the former is a protocol message type for serialized data input to the model and the latter is a geometry-based geographic data structure.)"]},{"cell_type":"code","metadata":{"id":"-6JVQV5HKHMZ","colab_type":"code","cellView":"code","colab":{}},"source":["# List of fixed-length features, all of which are float32.\n","columns = [\n"," tf.io.FixedLenFeature(shape=[1], dtype=tf.float32) for k in FEATURE_NAMES\n","]\n","\n","# Dictionary with names as keys, features as values.\n","features_dict = dict(zip(FEATURE_NAMES, columns))\n","\n","pprint(features_dict)"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"QNfaUPbcjuCO","colab_type":"text"},"source":["## Parse the dataset\n","\n","Now we need to make a parsing function for the data in the TFRecord files. The data comes in flattened 2D arrays per record and we want to use the first part of the array for input to the model and the last element of the array as the class label. The parsing function reads data from a serialized [`Example` proto](https://www.tensorflow.org/api_docs/python/tf/train/Example) into a dictionary in which the keys are the feature names and the values are the tensors storing the value of the features for that example. ([These TensorFlow docs](https://www.tensorflow.org/tutorials/load_data/tfrecord) explain more about reading `Example` protos from TFRecord files)."]},{"cell_type":"code","metadata":{"id":"x2Q0g3fBj2kD","colab_type":"code","cellView":"code","colab":{}},"source":["def parse_tfrecord(example_proto):\n"," \"\"\"The parsing function.\n","\n"," Read a serialized example into the structure defined by featuresDict.\n","\n"," Args:\n"," example_proto: a serialized Example.\n","\n"," Returns:\n"," A tuple of the predictors dictionary and the label, cast to an `int32`.\n"," \"\"\"\n"," parsed_features = tf.io.parse_single_example(example_proto, features_dict)\n"," labels = parsed_features.pop(LABEL)\n"," return parsed_features, tf.cast(labels, tf.int32)\n","\n","# Map the function over the dataset.\n","parsed_dataset = train_dataset.map(parse_tfrecord, num_parallel_calls=5)\n","\n","# Print the first parsed record to check.\n","pprint(iter(parsed_dataset).next())"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"Nb8EyNT4Xnhb","colab_type":"text"},"source":["Note that each record of the parsed dataset contains a tuple. The first element of the tuple is a dictionary with bands for keys and the numeric value of the bands for values. The second element of the tuple is a class label."]},{"cell_type":"markdown","metadata":{"id":"xLCsxWOuEBmE","colab_type":"text"},"source":["## Create additional features\n","\n","Another thing we might want to do as part of the input process is to create new features, for example NDVI, a vegetation index computed from reflectance in two spectral bands. Here are some helper functions for that."]},{"cell_type":"code","metadata":{"id":"lT6v2RM_EB1E","colab_type":"code","cellView":"code","colab":{}},"source":["def normalized_difference(a, b):\n"," \"\"\"Compute normalized difference of two inputs.\n","\n"," Compute (a - b) / (a + b). 
If the denominator is zero, add a small delta.\n","\n"," Args:\n"," a: an input tensor with shape=[1]\n"," b: an input tensor with shape=[1]\n","\n"," Returns:\n"," The normalized difference as a tensor.\n"," \"\"\"\n"," nd = (a - b) / (a + b)\n"," nd_inf = (a - b) / (a + b + 0.000001)\n"," return tf.where(tf.math.is_finite(nd), nd, nd_inf)\n","\n","def add_NDVI(features, label):\n"," \"\"\"Add NDVI to the dataset.\n"," Args:\n"," features: a dictionary of input tensors keyed by feature name.\n"," label: the target label\n","\n"," Returns:\n"," A tuple of the input dictionary with an NDVI tensor added and the label.\n"," \"\"\"\n"," features['NDVI'] = normalized_difference(features['B5'], features['B4'])\n"," return features, label"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"nEx1RAXOZQkS","colab_type":"text"},"source":["# Model setup\n","\n","The basic workflow for classification in TensorFlow is:\n","\n","1. Create the model.\n","2. Train the model (i.e. `fit()`).\n","3. Use the trained model for inference (i.e. `predict()`).\n","\n","Here we'll create a `Sequential` neural network model using Keras. This simple model is inspired by examples in:\n","\n","* [The TensorFlow Get Started tutorial](https://www.tensorflow.org/tutorials/)\n","* [The TensorFlow Keras guide](https://www.tensorflow.org/guide/keras#build_a_simple_model)\n","* [The Keras `Sequential` model examples](https://keras.io/getting-started/sequential-model-guide/#multilayer-perceptron-mlp-for-multi-class-softmax-classification)\n","\n","Note that the model used here is purely for demonstration purposes and hasn't gone through any performance tuning."]},{"cell_type":"markdown","metadata":{"id":"t9pWa54oG-xl","colab_type":"text"},"source":["## Create the Keras model\n","\n","Before we create the model, there's still a wee bit of pre-processing to get the data into the right input shape and a format that can be used with cross-entropy loss. Specifically, Keras expects a list of inputs and a one-hot vector for the class. (See [the Keras loss function docs](https://keras.io/losses/), [the TensorFlow categorical identity docs](https://www.tensorflow.org/guide/feature_columns#categorical_identity_column) and [the `tf.one_hot` docs](https://www.tensorflow.org/api_docs/python/tf/one_hot) for details). \n","\n","Here we will use a simple neural network model with a 64-node hidden layer, a dropout layer and an output layer. Once the dataset has been prepared, define the model, compile it, and fit it to the training data. See [the Keras `Sequential` model guide](https://keras.io/getting-started/sequential-model-guide/) for more details."]},{"cell_type":"code","metadata":{"id":"OCZq3VNpG--G","colab_type":"code","cellView":"code","colab":{}},"source":["from tensorflow import keras\n","\n","# Add NDVI.\n","input_dataset = parsed_dataset.map(add_NDVI)\n","\n","# Keras requires inputs as a tuple. Note that the inputs must be in the\n","# right shape. 
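\n","# (As an optional aside, the normalized_difference helper defined above can\n","# be sanity-checked with constants: normalized_difference(tf.constant([0.5]),\n","# tf.constant([0.1])) is roughly 0.667, and equal zero inputs return 0.0 via\n","# the small-delta branch rather than NaN.)\n","# 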
Also note that to use the categorical_crossentropy loss,\n","# the label needs to be turned into a one-hot vector.\n","def to_tuple(inputs, label):\n"," return (tf.transpose(list(inputs.values())),\n"," tf.one_hot(indices=label, depth=N_CLASSES))\n","\n","# Map the to_tuple function, shuffle and batch.\n","input_dataset = input_dataset.map(to_tuple).batch(8)\n","\n","# Define the layers in the model.\n","model = tf.keras.models.Sequential([\n"," tf.keras.layers.Dense(64, activation=tf.nn.relu),\n"," tf.keras.layers.Dropout(0.2),\n"," tf.keras.layers.Dense(N_CLASSES, activation=tf.nn.softmax)\n","])\n","\n","# Compile the model with the specified loss function.\n","model.compile(optimizer=tf.keras.optimizers.Adam(),\n"," loss='categorical_crossentropy',\n"," metrics=['accuracy'])\n","\n","# Fit the model to the training data.\n","model.fit(x=input_dataset, epochs=10)"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"Pa4ex_4eKiyb","colab_type":"text"},"source":["## Check model accuracy on the test set\n","\n","Now that we have a trained model, we can evaluate it using the test dataset. To do that, read and prepare the test dataset in the same way as the training dataset. Here we specify a batch size of 1 so that each example in the test set is used exactly once to compute model accuracy. For model steps, just specify a number larger than the test dataset size (ignore the warning)."]},{"cell_type":"code","metadata":{"id":"tE6d7FsrMa1p","colab_type":"code","cellView":"code","colab":{}},"source":["test_dataset = (\n"," tf.data.TFRecordDataset(TEST_FILE_PATH, compression_type='GZIP')\n"," .map(parse_tfrecord, num_parallel_calls=5)\n"," .map(add_NDVI)\n"," .map(to_tuple)\n"," .batch(1))\n","\n","model.evaluate(test_dataset)"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"nhHrnv3VR0DU","colab_type":"text"},"source":["# Use the trained model to classify an image from Earth Engine\n","\n","Now it's time to classify the image that was exported from Earth Engine. If the exported image is large, it will be split into multiple TFRecord files in its destination folder. There will also be a JSON sidecar file called \"the mixer\" that describes the format and georeferencing of the image. Here we will find the image files and the mixer file, getting some info out of the mixer that will be useful during model inference."]},{"cell_type":"markdown","metadata":{"id":"nmTayDitZgQ5","colab_type":"text"},"source":["## Find the image files and JSON mixer file in Cloud Storage\n","\n","Use `gsutil` to locate the files of interest in the output Cloud Storage bucket. 
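(Since every file from the image export shares `IMAGE_FILE_PREFIX`, a wildcard listing is an equivalent alternative to the Python filtering used below.)

```python
# List only the image export outputs, using a gsutil wildcard.
!gsutil ls 'gs://'{OUTPUT_BUCKET}'/'{IMAGE_FILE_PREFIX}'*'
```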
Check to make sure your image export task finished before running the following."]},{"cell_type":"code","metadata":{"id":"oUv9WMpcVp8E","colab_type":"code","colab":{}},"source":["# Get a list of all the files in the output bucket.\n","files_list = !gsutil ls 'gs://'{OUTPUT_BUCKET}\n","# Get only the files generated by the image export.\n","exported_files_list = [s for s in files_list if IMAGE_FILE_PREFIX in s]\n","\n","# Get the list of image files and the JSON mixer file.\n","image_files_list = []\n","json_file = None\n","for f in exported_files_list:\n"," if f.endswith('.tfrecord.gz'):\n"," image_files_list.append(f)\n"," elif f.endswith('.json'):\n"," json_file = f\n","\n","# Make sure the files are in the right order.\n","image_files_list.sort()\n","\n","pprint(image_files_list)\n","print(json_file)"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"RcjYG9fk53xL","colab_type":"text"},"source":["## Read the JSON mixer file\n","\n","The mixer contains metadata and georeferencing information for the exported patches, each of which is in a different file. Read the mixer to get some information needed for prediction."]},{"cell_type":"code","metadata":{"id":"Gn7Dr0AAd93_","colab_type":"code","colab":{}},"source":["import json\n","\n","# Load the contents of the mixer file to a JSON object.\n","json_text = !gsutil cat {json_file}\n","# Get a single string w/ newlines from the IPython.utils.text.SList\n","mixer = json.loads(json_text.nlstr)\n","pprint(mixer)"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"6xyzyPPJwpVI","colab_type":"text"},"source":["## Read the image files into a dataset\n","\n","You can feed the list of files (`imageFilesList`) directly to the `TFRecordDataset` constructor to make a combined dataset on which to perform inference. The input needs to be preprocessed differently than the training and testing. 
Mainly, this is because the pixels are written into records as patches, so we need to read the patches in as one big tensor (one patch for each band) and then flatten them into lots of little per-pixel tensors."]},{"cell_type":"code","metadata":{"id":"tn8Kj3VfwpiJ","colab_type":"code","cellView":"code","colab":{}},"source":["# Get relevant info from the JSON mixer file.\n","patch_width = mixer['patchDimensions'][0]\n","patch_height = mixer['patchDimensions'][1]\n","patches = mixer['totalPatches']\n","patch_dimensions_flat = [patch_width * patch_height, 1]\n","\n","# Note that the tensors are in the shape of a patch, one patch for each band.\n","image_columns = [\n"," tf.io.FixedLenFeature(shape=patch_dimensions_flat, dtype=tf.float32) \n"," for k in BANDS\n","]\n","\n","# Parsing dictionary.\n","image_features_dict = dict(zip(BANDS, image_columns))\n","\n","# Note that you can make one dataset from many files by specifying a list.\n","image_dataset = tf.data.TFRecordDataset(image_files_list, compression_type='GZIP')\n","\n","# Parsing function.\n","def parse_image(example_proto):\n"," return tf.io.parse_single_example(example_proto, image_features_dict)\n","\n","# Parse the data into tensors, one long tensor per patch.\n","image_dataset = image_dataset.map(parse_image, num_parallel_calls=5)\n","\n","# Break our long tensors into many little ones.\n","image_dataset = image_dataset.flat_map(\n"," lambda features: tf.data.Dataset.from_tensor_slices(features)\n",")\n","\n","# Add additional features (NDVI).\n","image_dataset = image_dataset.map(\n"," # Add NDVI to a feature that doesn't have a label.\n"," lambda features: add_NDVI(features, None)[0]\n",")\n","\n","# Turn the dictionary in each record into a tuple without a label.\n","image_dataset = image_dataset.map(\n"," lambda data_dict: (tf.transpose(list(data_dict.values())), )\n",")\n","\n","# Turn each patch into a batch.\n","image_dataset = image_dataset.batch(patch_width * patch_height)"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"_2sfRemRRDkV","colab_type":"text"},"source":["## Generate predictions for the image pixels\n","\n","To get predictions in each pixel, run the image dataset through the trained model using `model.predict()`. Print the first prediction to see that the output is a list of the three class probabilities for each pixel. Running all predictions might take a while."]},{"cell_type":"code","metadata":{"id":"8VGhmiP_REBP","colab_type":"code","colab":{}},"source":["# Run prediction in batches, with as many steps as there are patches.\n","predictions = model.predict(image_dataset, steps=patches, verbose=1)\n","\n","# Note that the predictions come as a numpy array. Check the first one.\n","print(predictions[0])"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"bPU2VlPOikAy","colab_type":"text"},"source":["## Write the predictions to a TFRecord file\n","\n","Now that there's a list of class probabilities in `predictions`, it's time to write them back into a file, optionally including a class label, which is simply the index of the maximum probability. We'll write directly from TensorFlow to a file in the output Cloud Storage bucket.\n","\n","Iterate over the list, compute the class label, and write the class and the probabilities in patches. Specifically, we need to write the pixels into the file as patches in the same order they came out. The records are written as serialized `tf.train.Example` protos. 
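(Before writing, a quick consistency check, not part of the original flow, can catch shape surprises early by comparing row counts against the mixer values read earlier.)

```python
# Each patch contributes patch_width * patch_height pixels, so there
# should be patches * patch_width * patch_height predictions in total.
print('Expected predictions:', patches * patch_width * patch_height)
print('Actual predictions:', len(predictions))
```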
This might take a while."]},{"cell_type":"code","metadata":{"id":"AkorbsEHepzJ","colab_type":"code","colab":{}},"source":["print('Writing to file ' + OUTPUT_IMAGE_FILE)"],"execution_count":0,"outputs":[]},{"cell_type":"code","metadata":{"id":"kATMknHc0qeR","colab_type":"code","cellView":"code","colab":{}},"source":["# Instantiate the writer.\n","writer = tf.io.TFRecordWriter(OUTPUT_IMAGE_FILE)\n","\n","# Every patch-worth of predictions we'll dump an example into the output\n","# file with a single feature that holds our predictions. Since our predictions\n","# are already in the order of the exported data, the patches we create here\n","# will also be in the right order.\n","patch = [[], [], [], []]\n","cur_patch = 1\n","for prediction in predictions:\n"," patch[0].append(tf.argmax(prediction, 1))\n"," patch[1].append(prediction[0][0])\n"," patch[2].append(prediction[0][1])\n"," patch[3].append(prediction[0][2])\n"," # Once we've seen a patches-worth of class_ids...\n"," if (len(patch[0]) == patch_width * patch_height):\n"," print('Done with patch ' + str(cur_patch) + ' of ' + str(patches) + '...')\n"," # Create an example\n"," example = tf.train.Example(\n"," features=tf.train.Features(\n"," feature={\n"," 'prediction': tf.train.Feature(\n"," int64_list=tf.train.Int64List(\n"," value=patch[0])),\n"," 'bareProb': tf.train.Feature(\n"," float_list=tf.train.FloatList(\n"," value=patch[1])),\n"," 'vegProb': tf.train.Feature(\n"," float_list=tf.train.FloatList(\n"," value=patch[2])),\n"," 'waterProb': tf.train.Feature(\n"," float_list=tf.train.FloatList(\n"," value=patch[3])),\n"," }\n"," )\n"," )\n"," # Write the example to the file and clear our patch array so it's ready for\n"," # another batch of class ids\n"," writer.write(example.SerializeToString())\n"," patch = [[], [], [], []]\n"," cur_patch += 1\n","\n","writer.close()"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"1K_1hKs0aBdA","colab_type":"text"},"source":["# Upload the classifications to an Earth Engine asset"]},{"cell_type":"markdown","metadata":{"id":"M6sNZXWOSa82","colab_type":"text"},"source":["## Verify the existence of the predictions file\n","\n","At this stage, there should be a predictions TFRecord file sitting in the output Cloud Storage bucket. Use the `gsutil` command to verify that the predictions image (and associated mixer JSON) exist and have non-zero size."]},{"cell_type":"code","metadata":{"id":"6ZVWDPefUCgA","colab_type":"code","colab":{}},"source":["!gsutil ls -l {OUTPUT_IMAGE_FILE}"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"2ZyCo297Clcx","colab_type":"text"},"source":["## Upload the classified image to Earth Engine\n","\n","Upload the image to Earth Engine directly from the Cloud Storage bucket with the [`earthengine` command](https://developers.google.com/earth-engine/command_line#upload). 
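(Optionally, before uploading, you can read the file back to confirm it parses; this sketch is not part of the original pipeline, but the feature names and lengths mirror the writer above.)

```python
# Read the first record back from the predictions file to check it parses.
n_pixels = patch_width * patch_height
check_columns = {
    'prediction': tf.io.FixedLenFeature([n_pixels], tf.int64),
    'bareProb': tf.io.FixedLenFeature([n_pixels], tf.float32),
    'vegProb': tf.io.FixedLenFeature([n_pixels], tf.float32),
    'waterProb': tf.io.FixedLenFeature([n_pixels], tf.float32),
}
check_dataset = tf.data.TFRecordDataset(OUTPUT_IMAGE_FILE)
first = tf.io.parse_single_example(next(iter(check_dataset)), check_columns)
print({k: v.shape for k, v in first.items()})
```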
Provide both the image TFRecord file and the JSON file as arguments to `earthengine upload`."]},{"cell_type":"code","metadata":{"id":"NXulMNl9lTDv","colab_type":"code","cellView":"code","colab":{}},"source":["print('Uploading to ' + OUTPUT_ASSET_ID)"],"execution_count":0,"outputs":[]},{"cell_type":"code","metadata":{"id":"V64tcVxsO5h6","colab_type":"code","colab":{}},"source":["# Start the upload.\n","!earthengine upload image --asset_id={OUTPUT_ASSET_ID} --pyramiding_policy=mode {OUTPUT_IMAGE_FILE} {json_file}"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"Yt4HyhUU_Bal","colab_type":"text"},"source":["## Check the status of the asset ingestion\n","\n","You can also use the Earth Engine API to check the status of your asset upload. It might take a while. The upload of the image is an asset ingestion task."]},{"cell_type":"code","metadata":{"id":"_vB-gwGhl_3C","colab_type":"code","cellView":"code","colab":{}},"source":["ee.batch.Task.list()"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"vvXvy9GDhM-p","colab_type":"text"},"source":["## View the ingested asset\n","\n","Display the vector of class probabilities as an RGB image with colors corresponding to the probability of bare, vegetation, water in a pixel. Also display the winning class using the same color palette."]},{"cell_type":"code","metadata":{"id":"kEkVxIyJiFd4","colab_type":"code","colab":{}},"source":["predictions_image = ee.Image(OUTPUT_ASSET_ID)\n","\n","prediction_vis = {\n"," 'bands': 'prediction',\n"," 'min': 0,\n"," 'max': 2,\n"," 'palette': ['red', 'green', 'blue']\n","}\n","probability_vis = {'bands': ['bareProb', 'vegProb', 'waterProb'], 'max': 0.5}\n","\n","prediction_map_id = predictions_image.getMapId(prediction_vis)\n","probability_map_id = predictions_image.getMapId(probability_vis)\n","\n","map = folium.Map(location=[37.6413, -122.2582])\n","folium.TileLayer(\n"," tiles=prediction_map_id['tile_fetcher'].url_format,\n"," attr='Map Data © Google Earth Engine',\n"," overlay=True,\n"," name='prediction',\n",").add_to(map)\n","folium.TileLayer(\n"," tiles=probability_map_id['tile_fetcher'].url_format,\n"," attr='Map Data © Google Earth Engine',\n"," overlay=True,\n"," name='probability',\n",").add_to(map)\n","map.add_child(folium.LayerControl())\n","map"],"execution_count":0,"outputs":[]}]} \ No newline at end of file diff --git a/python/examples/ipynb/UNET_regression_demo.ipynb b/python/examples/ipynb/UNET_regression_demo.ipynb index 2f203e973..008c46805 100644 --- a/python/examples/ipynb/UNET_regression_demo.ipynb +++ b/python/examples/ipynb/UNET_regression_demo.ipynb @@ -1 +1 @@ -{"nbformat":4,"nbformat_minor":0,"metadata":{"colab":{"name":"UNET_regression_demo.ipynb","provenance":[{"file_id":"https://github.com/google/earthengine-api/blob/master/python/examples/ipynb/UNET_regression_demo.ipynb","timestamp":1586992475463}],"private_outputs":true,"collapsed_sections":[],"toc_visible":true,"machine_shape":"hm"},"kernelspec":{"name":"python3","display_name":"Python 3"},"accelerator":"GPU"},"cells":[{"cell_type":"code","metadata":{"id":"esIMGVxhDI0f","colab_type":"code","colab":{}},"source":["#@title Copyright 2020 Google LLC. 
{ display-mode: \"form\" }\n","# Licensed under the Apache License, Version 2.0 (the \"License\");\n","# you may not use this file except in compliance with the License.\n","# You may obtain a copy of the License at\n","#\n","# https://www.apache.org/licenses/LICENSE-2.0\n","#\n","# Unless required by applicable law or agreed to in writing, software\n","# distributed under the License is distributed on an \"AS IS\" BASIS,\n","# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n","# See the License for the specific language governing permissions and\n","# limitations under the License."],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"aV1xZ1CPi3Nw","colab_type":"text"},"source":["
\n","\n"," Run in Google Colab\n","\n"," View source on GitHub
"]},{"cell_type":"markdown","metadata":{"id":"_SHAc5qbiR8l","colab_type":"text"},"source":["# Introduction\n","\n","This is an Earth Engine <> TensorFlow demonstration notebook. Suppose you want to predict a continuous output (regression) from a stack of continuous inputs. In this example, the output is impervious surface area from [NLCD](https://www.mrlc.gov/data) and the input is a Landsat 8 composite. The model is a [fully convolutional neural network (FCNN)](https://www.cv-foundation.org/openaccess/content_cvpr_2015/papers/Long_Fully_Convolutional_Networks_2015_CVPR_paper.pdf), specifically [U-net](https://arxiv.org/abs/1505.04597). This notebook shows:\n","\n","1. Exporting training/testing patches from Earth Engine, suitable for training an FCNN model.\n","2. Preprocessing.\n","3. Training and validating an FCNN model.\n","4. Making predictions with the trained model and importing them to Earth Engine."]},{"cell_type":"markdown","metadata":{"id":"_MJ4kW1pEhwP","colab_type":"text"},"source":["# Setup software libraries\n","\n","Authenticate and import as necessary."]},{"cell_type":"code","metadata":{"id":"neIa46CpciXq","colab_type":"code","colab":{}},"source":["# Cloud authentication.\n","from google.colab import auth\n","auth.authenticate_user()"],"execution_count":0,"outputs":[]},{"cell_type":"code","metadata":{"id":"jat01FEoUMqg","colab_type":"code","colab":{}},"source":["# Import, authenticate and initialize the Earth Engine library.\n","import ee\n","ee.Authenticate()\n","ee.Initialize()"],"execution_count":0,"outputs":[]},{"cell_type":"code","metadata":{"id":"8RnZzcYhcpsQ","colab_type":"code","colab":{}},"source":["# Tensorflow setup.\n","import tensorflow as tf\n","print(tf.__version__)"],"execution_count":0,"outputs":[]},{"cell_type":"code","metadata":{"id":"n1hFdpBQfyhN","colab_type":"code","colab":{}},"source":["# Folium setup.\n","import folium\n","print(folium.__version__)"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"iT8ycmzClYwf","colab_type":"text"},"source":["# Variables\n","\n","Declare the variables that will be in use throughout the notebook."]},{"cell_type":"markdown","metadata":{"id":"qKs6HuxOzjMl","colab_type":"text"},"source":["## Specify your Cloud Storage Bucket\n","You must have write access to a bucket to run this demo! To run it read-only, use the demo bucket below, but note that writes to this bucket will not work."]},{"cell_type":"code","metadata":{"id":"obDDH1eDzsch","colab_type":"code","colab":{}},"source":["# INSERT YOUR BUCKET HERE:\n","BUCKET = 'your-bucket-name'"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"wmfKLl9XcnGJ","colab_type":"text"},"source":["## Set other global variables"]},{"cell_type":"code","metadata":{"id":"psz7wJKalaoj","colab_type":"code","colab":{}},"source":["# Specify names locations for outputs in Cloud Storage. 
\n","FOLDER = 'fcnn-demo'\n","TRAINING_BASE = 'training_patches'\n","EVAL_BASE = 'eval_patches'\n","\n","# Specify inputs (Landsat bands) to the model and the response variable.\n","opticalBands = ['B1', 'B2', 'B3', 'B4', 'B5', 'B6', 'B7']\n","thermalBands = ['B10', 'B11']\n","BANDS = opticalBands + thermalBands\n","RESPONSE = 'impervious'\n","FEATURES = BANDS + [RESPONSE]\n","\n","# Specify the size and shape of patches expected by the model.\n","KERNEL_SIZE = 256\n","KERNEL_SHAPE = [KERNEL_SIZE, KERNEL_SIZE]\n","COLUMNS = [\n"," tf.io.FixedLenFeature(shape=KERNEL_SHAPE, dtype=tf.float32) for k in FEATURES\n","]\n","FEATURES_DICT = dict(zip(FEATURES, COLUMNS))\n","\n","# Sizes of the training and evaluation datasets.\n","TRAIN_SIZE = 16000\n","EVAL_SIZE = 8000\n","\n","# Specify model training parameters.\n","BATCH_SIZE = 16\n","EPOCHS = 10\n","BUFFER_SIZE = 2000\n","OPTIMIZER = 'SGD'\n","LOSS = 'MeanSquaredError'\n","METRICS = ['RootMeanSquaredError']"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"hgoDc7Hilfc4","colab_type":"text"},"source":["# Imagery\n","\n","Gather and setup the imagery to use for inputs (predictors). This is a three-year, cloud-free, Landsat 8 composite. Display it in the notebook for a sanity check."]},{"cell_type":"code","metadata":{"id":"-IlgXu-vcUEY","colab_type":"code","colab":{}},"source":["# Use Landsat 8 surface reflectance data.\n","l8sr = ee.ImageCollection('LANDSAT/LC08/C01/T1_SR')\n","\n","# Cloud masking function.\n","def maskL8sr(image):\n"," cloudShadowBitMask = ee.Number(2).pow(3).int()\n"," cloudsBitMask = ee.Number(2).pow(5).int()\n"," qa = image.select('pixel_qa')\n"," mask1 = qa.bitwiseAnd(cloudShadowBitMask).eq(0).And(\n"," qa.bitwiseAnd(cloudsBitMask).eq(0))\n"," mask2 = image.mask().reduce('min')\n"," mask3 = image.select(opticalBands).gt(0).And(\n"," image.select(opticalBands).lt(10000)).reduce('min')\n"," mask = mask1.And(mask2).And(mask3)\n"," return image.select(opticalBands).divide(10000).addBands(\n"," image.select(thermalBands).divide(10).clamp(273.15, 373.15)\n"," .subtract(273.15).divide(100)).updateMask(mask)\n","\n","# The image input data is a cloud-masked median composite.\n","image = l8sr.filterDate('2015-01-01', '2017-12-31').map(maskL8sr).median()\n","\n","# Use folium to visualize the imagery.\n","mapid = image.getMapId({'bands': ['B4', 'B3', 'B2'], 'min': 0, 'max': 0.3})\n","map = folium.Map(location=[38., -122.5])\n","folium.TileLayer(\n"," tiles=mapid['tile_fetcher'].url_format,\n"," attr='Map Data © Google Earth Engine',\n"," overlay=True,\n"," name='median composite',\n"," ).add_to(map)\n","\n","mapid = image.getMapId({'bands': ['B10'], 'min': 0, 'max': 0.5})\n","folium.TileLayer(\n"," tiles=mapid['tile_fetcher'].url_format,\n"," attr='Map Data © Google Earth Engine',\n"," overlay=True,\n"," name='thermal',\n"," ).add_to(map)\n","map.add_child(folium.LayerControl())\n","map"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"gHznnctkJsZJ","colab_type":"text"},"source":["Prepare the response (what we want to predict). This is impervious surface area (in fraction of a pixel) from the 2016 NLCD dataset. 
Display to check."]},{"cell_type":"code","metadata":{"id":"e0wHDyxVirec","colab_type":"code","colab":{}},"source":["nlcd = ee.Image('USGS/NLCD/NLCD2016').select('impervious')\n","nlcd = nlcd.divide(100).float()\n","\n","mapid = nlcd.getMapId({'min': 0, 'max': 1})\n","map = folium.Map(location=[38., -122.5])\n","folium.TileLayer(\n"," tiles=mapid['tile_fetcher'].url_format,\n"," attr='Map Data © Google Earth Engine',\n"," overlay=True,\n"," name='nlcd impervious',\n"," ).add_to(map)\n","map.add_child(folium.LayerControl())\n","map"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"CTS7_ZzPDhhg","colab_type":"text"},"source":["Stack the 2D images (Landsat composite and NLCD impervious surface) to create a single image from which samples can be taken. Convert the image into an array image in which each pixel stores 256x256 patches of pixels for each band. This is a key step that bears emphasis: to export training patches, convert a multi-band image to [an array image](https://developers.google.com/earth-engine/arrays_array_images#array-images) using [`neighborhoodToArray()`](https://developers.google.com/earth-engine/api_docs#eeimageneighborhoodtoarray), then sample the image at points."]},{"cell_type":"code","metadata":{"id":"eGHYsdAOipa4","colab_type":"code","colab":{}},"source":["featureStack = ee.Image.cat([\n"," image.select(BANDS),\n"," nlcd.select(RESPONSE)\n","]).float()\n","\n","list = ee.List.repeat(1, KERNEL_SIZE)\n","lists = ee.List.repeat(list, KERNEL_SIZE)\n","kernel = ee.Kernel.fixed(KERNEL_SIZE, KERNEL_SIZE, lists)\n","\n","arrays = featureStack.neighborhoodToArray(kernel)"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"F4djSxBRG2el","colab_type":"text"},"source":["Use some pre-made geometries to sample the stack in strategic locations. Specifically, these are hand-made polygons in which to take the 256x256 samples. Display the sampling polygons on a map, red for training polygons, blue for evaluation."]},{"cell_type":"code","metadata":{"id":"ure_WaD0itQY","colab_type":"code","colab":{}},"source":["trainingPolys = ee.FeatureCollection('projects/google/DemoTrainingGeometries')\n","evalPolys = ee.FeatureCollection('projects/google/DemoEvalGeometries')\n","\n","polyImage = ee.Image(0).byte().paint(trainingPolys, 1).paint(evalPolys, 2)\n","polyImage = polyImage.updateMask(polyImage)\n","\n","mapid = polyImage.getMapId({'min': 1, 'max': 2, 'palette': ['red', 'blue']})\n","map = folium.Map(location=[38., -100.], zoom_start=5)\n","folium.TileLayer(\n"," tiles=mapid['tile_fetcher'].url_format,\n"," attr='Map Data © Google Earth Engine',\n"," overlay=True,\n"," name='training polygons',\n"," ).add_to(map)\n","map.add_child(folium.LayerControl())\n","map"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"ZV890gPHeZqz","colab_type":"text"},"source":["# Sampling\n","\n","The mapped data look reasonable so take a sample from each polygon and merge the results into a single export. The key step is sampling the array image at points, to get all the pixels in a 256x256 neighborhood at each point. It's worth noting that to build the training and testing data for the FCNN, you export a single TFRecord file that contains patches of pixel values in each record. You do NOT need to export each training/testing patch to a different image. 
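Each record is still sizable, though; a back-of-the-envelope estimate for this configuration (256x256 patches, nine input bands plus the response, stored as float32) is sketched below.

```python
# Rough uncompressed size of one exported patch record.
patch_pixels = 256 * 256
channels = 10        # 9 input bands + 1 response
bytes_per_value = 4  # float32
print(patch_pixels * channels * bytes_per_value / 1e6, 'MB')  # ~2.6 MB
```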
Since each record potentially contains a lot of data (especially with big patches or many input bands), some manual sharding of the computation is necessary to avoid the `computed value too large` error. Specifically, the following code takes multiple (smaller) samples within each geometry, merging the results to get a single export."]},{"cell_type":"code","metadata":{"id":"FyRpvwENxE-A","colab_type":"code","cellView":"both","colab":{}},"source":["# Convert the feature collections to lists for iteration.\n","trainingPolysList = trainingPolys.toList(trainingPolys.size())\n","evalPolysList = evalPolys.toList(evalPolys.size())\n","\n","# These numbers determined experimentally.\n","n = 200 # Number of shards in each polygon.\n","N = 2000 # Total sample size in each polygon.\n","\n","# Export all the training data (in many pieces), with one task \n","# per geometry.\n","for g in range(trainingPolys.size().getInfo()):\n"," geomSample = ee.FeatureCollection([])\n"," for i in range(n):\n"," sample = arrays.sample(\n"," region = ee.Feature(trainingPolysList.get(g)).geometry(), \n"," scale = 30,\n"," numPixels = N / n, # Size of the shard.\n"," seed = i,\n"," tileScale = 8\n"," )\n"," geomSample = geomSample.merge(sample)\n","\n"," desc = TRAINING_BASE + '_g' + str(g)\n"," task = ee.batch.Export.table.toCloudStorage(\n"," collection = geomSample,\n"," description = desc,\n"," bucket = BUCKET,\n"," fileNamePrefix = FOLDER + '/' + desc,\n"," fileFormat = 'TFRecord',\n"," selectors = BANDS + [RESPONSE]\n"," )\n"," task.start()\n","\n","# Export all the evaluation data.\n","for g in range(evalPolys.size().getInfo()):\n"," geomSample = ee.FeatureCollection([])\n"," for i in range(n):\n"," sample = arrays.sample(\n"," region = ee.Feature(evalPolysList.get(g)).geometry(), \n"," scale = 30,\n"," numPixels = N / n,\n"," seed = i,\n"," tileScale = 8\n"," )\n"," geomSample = geomSample.merge(sample)\n","\n"," desc = EVAL_BASE + '_g' + str(g)\n"," task = ee.batch.Export.table.toCloudStorage(\n"," collection = geomSample,\n"," description = desc,\n"," bucket = BUCKET,\n"," fileNamePrefix = FOLDER + '/' + desc,\n"," fileFormat = 'TFRecord',\n"," selectors = BANDS + [RESPONSE]\n"," )\n"," task.start()"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"rWXrvBE4607G","colab_type":"text"},"source":["# Training data\n","\n","Load the data exported from Earth Engine into a `tf.data.Dataset`. 
The following are helper functions for that."]},{"cell_type":"code","metadata":{"id":"WWZ0UXCVMyJP","colab_type":"code","colab":{}},"source":["def parse_tfrecord(example_proto):\n"," \"\"\"The parsing function.\n"," Read a serialized example into the structure defined by FEATURES_DICT.\n"," Args:\n"," example_proto: a serialized Example.\n"," Returns:\n"," A dictionary of tensors, keyed by feature name.\n"," \"\"\"\n"," return tf.io.parse_single_example(example_proto, FEATURES_DICT)\n","\n","\n","def to_tuple(inputs):\n"," \"\"\"Function to convert a dictionary of tensors to a tuple of (inputs, outputs).\n"," Turn the tensors returned by parse_tfrecord into a stack in HWC shape.\n"," Args:\n"," inputs: A dictionary of tensors, keyed by feature name.\n"," Returns:\n"," A tuple of (inputs, outputs).\n"," \"\"\"\n"," inputsList = [inputs.get(key) for key in FEATURES]\n"," stacked = tf.stack(inputsList, axis=0)\n"," # Convert from CHW to HWC\n"," stacked = tf.transpose(stacked, [1, 2, 0])\n"," return stacked[:,:,:len(BANDS)], stacked[:,:,len(BANDS):]\n","\n","\n","def get_dataset(pattern):\n"," \"\"\"Function to read, parse and format to tuple a set of input tfrecord files.\n"," Get all the files matching the pattern, parse and convert to tuple.\n"," Args:\n"," pattern: A file pattern to match in a Cloud Storage bucket.\n"," Returns:\n"," A tf.data.Dataset\n"," \"\"\"\n"," glob = tf.io.gfile.glob(pattern)\n"," dataset = tf.data.TFRecordDataset(glob, compression_type='GZIP')\n"," dataset = dataset.map(parse_tfrecord, num_parallel_calls=5)\n"," dataset = dataset.map(to_tuple, num_parallel_calls=5)\n"," return dataset"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"Xg1fa18336D2","colab_type":"text"},"source":["Use the helpers to read in the training dataset. Print the first record to check."]},{"cell_type":"code","metadata":{"id":"rm0qRF0fAYcC","colab_type":"code","colab":{}},"source":["def get_training_dataset():\n","\t\"\"\"Get the preprocessed training dataset\n"," Returns: \n"," A tf.data.Dataset of training data.\n"," \"\"\"\n","\tglob = 'gs://' + BUCKET + '/' + FOLDER + '/' + TRAINING_BASE + '*'\n","\tdataset = get_dataset(glob)\n","\tdataset = dataset.shuffle(BUFFER_SIZE).batch(BATCH_SIZE).repeat()\n","\treturn dataset\n","\n","training = get_training_dataset()\n","\n","print(iter(training.take(1)).next())"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"j-cQO5RL6vob","colab_type":"text"},"source":["# Evaluation data\n","\n","Now do the same thing to get an evaluation dataset. Note that unlike the training dataset, the evaluation dataset has a batch size of 1, is not repeated and is not shuffled."]},{"cell_type":"code","metadata":{"id":"ieKTCGiJ6xzo","colab_type":"code","colab":{}},"source":["def get_eval_dataset():\n","\t\"\"\"Get the preprocessed evaluation dataset\n"," Returns: \n"," A tf.data.Dataset of evaluation data.\n"," \"\"\"\n","\tglob = 'gs://' + BUCKET + '/' + FOLDER + '/' + EVAL_BASE + '*'\n","\tdataset = get_dataset(glob)\n","\tdataset = dataset.batch(1).repeat()\n","\treturn dataset\n","\n","evaluation = get_eval_dataset()"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"9JIE7Yl87lgU","colab_type":"text"},"source":["# Model\n","\n","Here we use the Keras implementation of the U-Net model. The U-Net model takes 256x256 pixel patches as input and outputs per-pixel class probability, label or a continuous output. 
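(Before building the model, it can be worth confirming that the input pipeline delivers the shapes the network expects; a quick check, with expected shapes based on the constants defined earlier:)

```python
# Expect inputs of shape (BATCH_SIZE, 256, 256, 9) and responses of
# shape (BATCH_SIZE, 256, 256, 1).
inputs, outputs = iter(training.take(1)).next()
print(inputs.shape, outputs.shape)
```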
We can implement the model essentially unmodified, but will use mean squared error loss on the sigmoidal output since we are treating this as a regression problem, rather than a classification problem. Since impervious surface fraction is constrained to [0,1], with many values close to zero or one, a saturating activation function is suitable here."]},{"cell_type":"code","metadata":{"id":"wsnnnz56yS3l","colab_type":"code","colab":{}},"source":["from tensorflow.python.keras import layers\n","from tensorflow.python.keras import losses\n","from tensorflow.python.keras import models\n","from tensorflow.python.keras import metrics\n","from tensorflow.python.keras import optimizers\n","\n","def conv_block(input_tensor, num_filters):\n","\tencoder = layers.Conv2D(num_filters, (3, 3), padding='same')(input_tensor)\n","\tencoder = layers.BatchNormalization()(encoder)\n","\tencoder = layers.Activation('relu')(encoder)\n","\tencoder = layers.Conv2D(num_filters, (3, 3), padding='same')(encoder)\n","\tencoder = layers.BatchNormalization()(encoder)\n","\tencoder = layers.Activation('relu')(encoder)\n","\treturn encoder\n","\n","def encoder_block(input_tensor, num_filters):\n","\tencoder = conv_block(input_tensor, num_filters)\n","\tencoder_pool = layers.MaxPooling2D((2, 2), strides=(2, 2))(encoder)\n","\treturn encoder_pool, encoder\n","\n","def decoder_block(input_tensor, concat_tensor, num_filters):\n","\tdecoder = layers.Conv2DTranspose(num_filters, (2, 2), strides=(2, 2), padding='same')(input_tensor)\n","\tdecoder = layers.concatenate([concat_tensor, decoder], axis=-1)\n","\tdecoder = layers.BatchNormalization()(decoder)\n","\tdecoder = layers.Activation('relu')(decoder)\n","\tdecoder = layers.Conv2D(num_filters, (3, 3), padding='same')(decoder)\n","\tdecoder = layers.BatchNormalization()(decoder)\n","\tdecoder = layers.Activation('relu')(decoder)\n","\tdecoder = layers.Conv2D(num_filters, (3, 3), padding='same')(decoder)\n","\tdecoder = layers.BatchNormalization()(decoder)\n","\tdecoder = layers.Activation('relu')(decoder)\n","\treturn decoder\n","\n","def get_model():\n","\tinputs = layers.Input(shape=[None, None, len(BANDS)]) # 256\n","\tencoder0_pool, encoder0 = encoder_block(inputs, 32) # 128\n","\tencoder1_pool, encoder1 = encoder_block(encoder0_pool, 64) # 64\n","\tencoder2_pool, encoder2 = encoder_block(encoder1_pool, 128) # 32\n","\tencoder3_pool, encoder3 = encoder_block(encoder2_pool, 256) # 16\n","\tencoder4_pool, encoder4 = encoder_block(encoder3_pool, 512) # 8\n","\tcenter = conv_block(encoder4_pool, 1024) # center\n","\tdecoder4 = decoder_block(center, encoder4, 512) # 16\n","\tdecoder3 = decoder_block(decoder4, encoder3, 256) # 32\n","\tdecoder2 = decoder_block(decoder3, encoder2, 128) # 64\n","\tdecoder1 = decoder_block(decoder2, encoder1, 64) # 128\n","\tdecoder0 = decoder_block(decoder1, encoder0, 32) # 256\n","\toutputs = layers.Conv2D(1, (1, 1), activation='sigmoid')(decoder0)\n","\n","\tmodel = models.Model(inputs=[inputs], outputs=[outputs])\n","\n","\tmodel.compile(\n","\t\toptimizer=optimizers.get(OPTIMIZER), \n","\t\tloss=losses.get(LOSS),\n","\t\tmetrics=[metrics.get(metric) for metric in METRICS])\n","\n","\treturn model"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"uu_E7OTDBCoS","colab_type":"text"},"source":["# Training the model\n","\n","You train a Keras model by calling `.fit()` on it. Here we're going to train for 10 epochs, which is suitable for demonstration purposes. 
For production use, you probably want to optimize this parameter, for example through [hyperparamter tuning](https://cloud.google.com/ml-engine/docs/tensorflow/using-hyperparameter-tuning)."]},{"cell_type":"code","metadata":{"id":"NzzaWxOhSxBy","colab_type":"code","colab":{}},"source":["m = get_model()\n","\n","m.fit(\n"," x=training, \n"," epochs=EPOCHS, \n"," steps_per_epoch=int(TRAIN_SIZE / BATCH_SIZE), \n"," validation_data=evaluation,\n"," validation_steps=EVAL_SIZE)"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"U2XrwZHp66j4","colab_type":"text"},"source":["Note that the notebook VM is sometimes not heavy-duty enough to get through a whole training job, especially if you have a large buffer size or a large number of epochs. You can still use this notebook for training, but may need to set up an alternative VM ([learn more](https://research.google.com/colaboratory/local-runtimes.html)) for production use. Alternatively, you can package your code for running large training jobs on Google's AI Platform [as described here](https://cloud.google.com/ml-engine/docs/tensorflow/trainer-considerations). The following code loads a pre-trained model, which you can use for predictions right away."]},{"cell_type":"code","metadata":{"id":"-RJpNfEUS1qp","colab_type":"code","colab":{}},"source":["# Load a trained model. 50 epochs. 25 hours. Final RMSE ~0.08.\n","MODEL_DIR = 'gs://ee-docs-demos/fcnn-demo/trainer/model'\n","m = tf.keras.models.load_model(MODEL_DIR)\n","m.summary()"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"J1ySNup0xCqN","colab_type":"text"},"source":["# Prediction\n","\n","The prediction pipeline is:\n","\n","1. Export imagery on which to do predictions from Earth Engine in TFRecord format to a Cloud Storge bucket.\n","2. Use the trained model to make the predictions.\n","3. Write the predictions to a TFRecord file in a Cloud Storage.\n","4. Upload the predictions TFRecord file to Earth Engine.\n","\n","The following functions handle this process. It's useful to separate the export from the predictions so that you can experiment with different models without running the export every time."]},{"cell_type":"code","metadata":{"id":"M3WDAa-RUpXP","colab_type":"code","colab":{}},"source":["def doExport(out_image_base, kernel_buffer, region):\n"," \"\"\"Run the image export task. 
Block until complete.\n"," \"\"\"\n"," task = ee.batch.Export.image.toCloudStorage(\n"," image = image.select(BANDS),\n"," description = out_image_base,\n"," bucket = BUCKET,\n"," fileNamePrefix = FOLDER + '/' + out_image_base,\n"," region = region.getInfo()['coordinates'],\n"," scale = 30,\n"," fileFormat = 'TFRecord',\n"," maxPixels = 1e10,\n"," formatOptions = {\n"," 'patchDimensions': KERNEL_SHAPE,\n"," 'kernelSize': kernel_buffer,\n"," 'compressed': True,\n"," 'maxFileSize': 104857600\n"," }\n"," )\n"," task.start()\n","\n"," # Block until the task completes.\n"," print('Running image export to Cloud Storage...')\n"," import time\n"," while task.active():\n"," time.sleep(30)\n","\n"," # Error condition\n"," if task.status()['state'] != 'COMPLETED':\n"," print('Error with image export.')\n"," else:\n"," print('Image export completed.')"],"execution_count":0,"outputs":[]},{"cell_type":"code","metadata":{"id":"zb_9_FflygVw","colab_type":"code","colab":{}},"source":["def doPrediction(out_image_base, user_folder, kernel_buffer, region):\n"," \"\"\"Perform inference on exported imagery, upload to Earth Engine.\n"," \"\"\"\n","\n"," print('Looking for TFRecord files...')\n","\n"," # Get a list of all the files in the output bucket.\n"," filesList = !gsutil ls 'gs://'{BUCKET}'/'{FOLDER}\n","\n"," # Get only the files generated by the image export.\n"," exportFilesList = [s for s in filesList if out_image_base in s]\n","\n"," # Get the list of image files and the JSON mixer file.\n"," imageFilesList = []\n"," jsonFile = None\n"," for f in exportFilesList:\n"," if f.endswith('.tfrecord.gz'):\n"," imageFilesList.append(f)\n"," elif f.endswith('.json'):\n"," jsonFile = f\n","\n"," # Make sure the files are in the right order.\n"," imageFilesList.sort()\n","\n"," from pprint import pprint\n"," pprint(imageFilesList)\n"," print(jsonFile)\n","\n"," import json\n"," # Load the contents of the mixer file to a JSON object.\n"," jsonText = !gsutil cat {jsonFile}\n"," # Get a single string w/ newlines from the IPython.utils.text.SList\n"," mixer = json.loads(jsonText.nlstr)\n"," pprint(mixer)\n"," patches = mixer['totalPatches']\n","\n"," # Get set up for prediction.\n"," x_buffer = int(kernel_buffer[0] / 2)\n"," y_buffer = int(kernel_buffer[1] / 2)\n","\n"," buffered_shape = [\n"," KERNEL_SHAPE[0] + kernel_buffer[0],\n"," KERNEL_SHAPE[1] + kernel_buffer[1]]\n","\n"," imageColumns = [\n"," tf.io.FixedLenFeature(shape=buffered_shape, dtype=tf.float32) \n"," for k in BANDS\n"," ]\n","\n"," imageFeaturesDict = dict(zip(BANDS, imageColumns))\n","\n"," def parse_image(example_proto):\n"," return tf.io.parse_single_example(example_proto, imageFeaturesDict)\n","\n"," def toTupleImage(inputs):\n"," inputsList = [inputs.get(key) for key in BANDS]\n"," stacked = tf.stack(inputsList, axis=0)\n"," stacked = tf.transpose(stacked, [1, 2, 0])\n"," return stacked\n","\n"," # Create a dataset from the TFRecord file(s) in Cloud Storage.\n"," imageDataset = tf.data.TFRecordDataset(imageFilesList, compression_type='GZIP')\n"," imageDataset = imageDataset.map(parse_image, num_parallel_calls=5)\n"," imageDataset = imageDataset.map(toTupleImage).batch(1)\n","\n"," # Perform inference.\n"," print('Running predictions...')\n"," predictions = m.predict(imageDataset, steps=patches, verbose=1)\n"," # print(predictions[0])\n","\n"," print('Writing predictions...')\n"," out_image_file = 'gs://' + BUCKET + '/' + FOLDER + '/' + out_image_base + '.TFRecord'\n"," writer = tf.io.TFRecordWriter(out_image_file)\n"," patches = 0\n"," for 
predictionPatch in predictions:\n"," print('Writing patch ' + str(patches) + '...')\n"," predictionPatch = predictionPatch[\n"," x_buffer:x_buffer+KERNEL_SIZE, y_buffer:y_buffer+KERNEL_SIZE]\n","\n"," # Create an example.\n"," example = tf.train.Example(\n"," features=tf.train.Features(\n"," feature={\n"," 'impervious': tf.train.Feature(\n"," float_list=tf.train.FloatList(\n"," value=predictionPatch.flatten()))\n"," }\n"," )\n"," )\n"," # Write the example.\n"," writer.write(example.SerializeToString())\n"," patches += 1\n","\n"," writer.close()\n","\n"," # Start the upload.\n"," out_image_asset = user_folder + '/' + out_image_base\n"," !earthengine upload image --asset_id={out_image_asset} {out_image_file} {jsonFile}"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"LZqlymOehnQO","colab_type":"text"},"source":["Now there's all the code needed to run the prediction pipeline, all that remains is to specify the output region in which to do the prediction, the names of the output files, where to put them, and the shape of the outputs. In terms of the shape, the model is trained on 256x256 patches, but can work (in theory) on any patch that's big enough with even dimensions ([reference](https://www.cv-foundation.org/openaccess/content_cvpr_2015/papers/Long_Fully_Convolutional_Networks_2015_CVPR_paper.pdf)). Because of tile boundary artifacts, give the model slightly larger patches for prediction, then clip out the middle 256x256 patch. This is controlled with a kernel buffer, half the size of which will extend beyond the kernel buffer. For example, specifying a 128x128 kernel will append 64 pixels on each side of the patch, to ensure that the pixels in the output are taken from inputs completely covered by the kernel."]},{"cell_type":"code","metadata":{"id":"FPANwc7B1-TS","colab_type":"code","colab":{}},"source":["# Output assets folder: YOUR FOLDER\n","user_folder = 'users/username' # INSERT YOUR FOLDER HERE.\n","\n","# Base file name to use for TFRecord files and assets.\n","bj_image_base = 'FCNN_demo_beijing_384_'\n","# Half this will extend on the sides of each patch.\n","bj_kernel_buffer = [128, 128]\n","# Beijing\n","bj_region = ee.Geometry.Polygon(\n"," [[[115.9662455210937, 40.121362012835235],\n"," [115.9662455210937, 39.64293313749715],\n"," [117.01818643906245, 39.64293313749715],\n"," [117.01818643906245, 40.121362012835235]]], None, False)"],"execution_count":0,"outputs":[]},{"cell_type":"code","metadata":{"id":"lLNEOLkXWvSi","colab_type":"code","colab":{}},"source":["# Run the export.\n","doExport(bj_image_base, bj_kernel_buffer, bj_region)"],"execution_count":0,"outputs":[]},{"cell_type":"code","metadata":{"id":"KxACnxKFrQ_J","colab_type":"code","colab":{}},"source":["# Run the prediction.\n","doPrediction(bj_image_base, user_folder, bj_kernel_buffer, bj_region)"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"uj_G9OZ1xH6K","colab_type":"text"},"source":["# Display the output\n","\n","One the data has been exported, the model has made predictions and the predictions have been written to a file, and the image imported to Earth Engine, it's possible to display the resultant Earth Engine asset. 
Here, display the impervious area predictions over Beijing, China."]},{"cell_type":"code","metadata":{"id":"Jgco6HJ4R5p2","colab_type":"code","colab":{}},"source":["out_image = ee.Image(user_folder + '/' + bj_image_base)\n","mapid = out_image.getMapId({'min': 0, 'max': 1})\n","map = folium.Map(location=[39.898, 116.5097])\n","folium.TileLayer(\n"," tiles=mapid['tile_fetcher'].url_format,\n"," attr='Map Data © Google Earth Engine',\n"," overlay=True,\n"," name='predicted impervious',\n"," ).add_to(map)\n","map.add_child(folium.LayerControl())\n","map"],"execution_count":0,"outputs":[]}]} \ No newline at end of file +{"nbformat":4,"nbformat_minor":0,"metadata":{"colab":{"name":"UNET_regression_demo.ipynb","provenance":[{"file_id":"https://github.com/google/earthengine-api/blob/master/python/examples/ipynb/UNET_regression_demo.ipynb","timestamp":1586992475463}],"private_outputs":true,"collapsed_sections":[],"toc_visible":true,"machine_shape":"hm"},"kernelspec":{"name":"python3","display_name":"Python 3"},"accelerator":"GPU"},"cells":[{"cell_type":"code","metadata":{"id":"esIMGVxhDI0f","colab_type":"code","colab":{}},"source":["#@title Copyright 2020 Google LLC. { display-mode: \"form\" }\n","# Licensed under the Apache License, Version 2.0 (the \"License\");\n","# you may not use this file except in compliance with the License.\n","# You may obtain a copy of the License at\n","#\n","# https://www.apache.org/licenses/LICENSE-2.0\n","#\n","# Unless required by applicable law or agreed to in writing, software\n","# distributed under the License is distributed on an \"AS IS\" BASIS,\n","# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n","# See the License for the specific language governing permissions and\n","# limitations under the License."],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"aV1xZ1CPi3Nw","colab_type":"text"},"source":["
\n","\n"," Run in Google Colab\n","\n"," View source on GitHub
"]},{"cell_type":"markdown","metadata":{"id":"_SHAc5qbiR8l","colab_type":"text"},"source":["# Introduction\n","\n","This is an Earth Engine <> TensorFlow demonstration notebook. Suppose you want to predict a continuous output (regression) from a stack of continuous inputs. In this example, the output is impervious surface area from [NLCD](https://www.mrlc.gov/data) and the input is a Landsat 8 composite. The model is a [fully convolutional neural network (FCNN)](https://www.cv-foundation.org/openaccess/content_cvpr_2015/papers/Long_Fully_Convolutional_Networks_2015_CVPR_paper.pdf), specifically [U-net](https://arxiv.org/abs/1505.04597). This notebook shows:\n","\n","1. Exporting training/testing patches from Earth Engine, suitable for training an FCNN model.\n","2. Preprocessing.\n","3. Training and validating an FCNN model.\n","4. Making predictions with the trained model and importing them to Earth Engine."]},{"cell_type":"markdown","metadata":{"id":"_MJ4kW1pEhwP","colab_type":"text"},"source":["# Setup software libraries\n","\n","Authenticate and import as necessary."]},{"cell_type":"code","metadata":{"id":"neIa46CpciXq","colab_type":"code","colab":{}},"source":["# Cloud authentication.\n","from google.colab import auth\n","auth.authenticate_user()"],"execution_count":0,"outputs":[]},{"cell_type":"code","metadata":{"id":"jat01FEoUMqg","colab_type":"code","colab":{}},"source":["# Import, authenticate and initialize the Earth Engine library.\n","import ee\n","ee.Authenticate()\n","ee.Initialize()"],"execution_count":0,"outputs":[]},{"cell_type":"code","metadata":{"id":"8RnZzcYhcpsQ","colab_type":"code","colab":{}},"source":["# Tensorflow setup.\n","import tensorflow as tf\n","print(tf.__version__)"],"execution_count":0,"outputs":[]},{"cell_type":"code","metadata":{"id":"n1hFdpBQfyhN","colab_type":"code","colab":{}},"source":["# Folium setup.\n","import folium\n","print(folium.__version__)"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"iT8ycmzClYwf","colab_type":"text"},"source":["# Variables\n","\n","Declare the variables that will be in use throughout the notebook."]},{"cell_type":"markdown","metadata":{"id":"qKs6HuxOzjMl","colab_type":"text"},"source":["## Specify your Cloud Storage Bucket\n","You must have write access to a bucket to run this demo! To run it read-only, use the demo bucket below, but note that writes to this bucket will not work."]},{"cell_type":"code","metadata":{"id":"obDDH1eDzsch","colab_type":"code","colab":{}},"source":["# INSERT YOUR BUCKET HERE:\n","BUCKET = 'your-bucket-name'"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"wmfKLl9XcnGJ","colab_type":"text"},"source":["## Set other global variables"]},{"cell_type":"code","metadata":{"id":"psz7wJKalaoj","colab_type":"code","colab":{}},"source":["# Specify names locations for outputs in Cloud Storage. 
\n","FOLDER = 'fcnn-demo'\n","TRAINING_BASE = 'training_patches'\n","EVAL_BASE = 'eval_patches'\n","\n","# Specify inputs (Landsat bands) to the model and the response variable.\n","opticalBands = ['B1', 'B2', 'B3', 'B4', 'B5', 'B6', 'B7']\n","thermalBands = ['B10', 'B11']\n","BANDS = opticalBands + thermalBands\n","RESPONSE = 'impervious'\n","FEATURES = BANDS + [RESPONSE]\n","\n","# Specify the size and shape of patches expected by the model.\n","KERNEL_SIZE = 256\n","KERNEL_SHAPE = [KERNEL_SIZE, KERNEL_SIZE]\n","COLUMNS = [\n"," tf.io.FixedLenFeature(shape=KERNEL_SHAPE, dtype=tf.float32) for k in FEATURES\n","]\n","FEATURES_DICT = dict(zip(FEATURES, COLUMNS))\n","\n","# Sizes of the training and evaluation datasets.\n","TRAIN_SIZE = 16000\n","EVAL_SIZE = 8000\n","\n","# Specify model training parameters.\n","BATCH_SIZE = 16\n","EPOCHS = 10\n","BUFFER_SIZE = 2000\n","OPTIMIZER = 'SGD'\n","LOSS = 'MeanSquaredError'\n","METRICS = ['RootMeanSquaredError']"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"hgoDc7Hilfc4","colab_type":"text"},"source":["# Imagery\n","\n","Gather and setup the imagery to use for inputs (predictors). This is a three-year, cloud-free, Landsat 8 composite. Display it in the notebook for a sanity check."]},{"cell_type":"code","metadata":{"id":"-IlgXu-vcUEY","colab_type":"code","colab":{}},"source":["# Use Landsat 8 surface reflectance data.\n","l8sr = ee.ImageCollection('LANDSAT/LC08/C01/T1_SR')\n","\n","# Cloud masking function.\n","def maskL8sr(image):\n"," cloudShadowBitMask = ee.Number(2).pow(3).int()\n"," cloudsBitMask = ee.Number(2).pow(5).int()\n"," qa = image.select('pixel_qa')\n"," mask1 = qa.bitwiseAnd(cloudShadowBitMask).eq(0).And(\n"," qa.bitwiseAnd(cloudsBitMask).eq(0))\n"," mask2 = image.mask().reduce('min')\n"," mask3 = image.select(opticalBands).gt(0).And(\n"," image.select(opticalBands).lt(10000)).reduce('min')\n"," mask = mask1.And(mask2).And(mask3)\n"," return image.select(opticalBands).divide(10000).addBands(\n"," image.select(thermalBands).divide(10).clamp(273.15, 373.15)\n"," .subtract(273.15).divide(100)).updateMask(mask)\n","\n","# The image input data is a cloud-masked median composite.\n","image = l8sr.filterDate('2015-01-01', '2017-12-31').map(maskL8sr).median()\n","\n","# Use folium to visualize the imagery.\n","mapid = image.getMapId({'bands': ['B4', 'B3', 'B2'], 'min': 0, 'max': 0.3})\n","map = folium.Map(location=[38., -122.5])\n","folium.TileLayer(\n"," tiles=mapid['tile_fetcher'].url_format,\n"," attr='Map Data © Google Earth Engine',\n"," overlay=True,\n"," name='median composite',\n"," ).add_to(map)\n","\n","mapid = image.getMapId({'bands': ['B10'], 'min': 0, 'max': 0.5})\n","folium.TileLayer(\n"," tiles=mapid['tile_fetcher'].url_format,\n"," attr='Map Data © Google Earth Engine',\n"," overlay=True,\n"," name='thermal',\n"," ).add_to(map)\n","map.add_child(folium.LayerControl())\n","map"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"gHznnctkJsZJ","colab_type":"text"},"source":["Prepare the response (what we want to predict). This is impervious surface area (in fraction of a pixel) from the 2016 NLCD dataset. 
Display to check."]},{"cell_type":"code","metadata":{"id":"e0wHDyxVirec","colab_type":"code","colab":{}},"source":["nlcd = ee.Image('USGS/NLCD/NLCD2016').select('impervious')\n","nlcd = nlcd.divide(100).float()\n","\n","mapid = nlcd.getMapId({'min': 0, 'max': 1})\n","map = folium.Map(location=[38., -122.5])\n","folium.TileLayer(\n"," tiles=mapid['tile_fetcher'].url_format,\n"," attr='Map Data © Google Earth Engine',\n"," overlay=True,\n"," name='nlcd impervious',\n"," ).add_to(map)\n","map.add_child(folium.LayerControl())\n","map"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"CTS7_ZzPDhhg","colab_type":"text"},"source":["Stack the 2D images (Landsat composite and NLCD impervious surface) to create a single image from which samples can be taken. Convert the image into an array image in which each pixel stores 256x256 patches of pixels for each band. This is a key step that bears emphasis: to export training patches, convert a multi-band image to [an array image](https://developers.google.com/earth-engine/arrays_array_images#array-images) using [`neighborhoodToArray()`](https://developers.google.com/earth-engine/api_docs#eeimageneighborhoodtoarray), then sample the image at points."]},{"cell_type":"code","metadata":{"id":"eGHYsdAOipa4","colab_type":"code","colab":{}},"source":["featureStack = ee.Image.cat([\n"," image.select(BANDS),\n"," nlcd.select(RESPONSE)\n","]).float()\n","\n","list = ee.List.repeat(1, KERNEL_SIZE)\n","lists = ee.List.repeat(list, KERNEL_SIZE)\n","kernel = ee.Kernel.fixed(KERNEL_SIZE, KERNEL_SIZE, lists)\n","\n","arrays = featureStack.neighborhoodToArray(kernel)"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"F4djSxBRG2el","colab_type":"text"},"source":["Use some pre-made geometries to sample the stack in strategic locations. Specifically, these are hand-made polygons in which to take the 256x256 samples. Display the sampling polygons on a map, red for training polygons, blue for evaluation."]},{"cell_type":"code","metadata":{"id":"ure_WaD0itQY","colab_type":"code","colab":{}},"source":["trainingPolys = ee.FeatureCollection('projects/google/DemoTrainingGeometries')\n","evalPolys = ee.FeatureCollection('projects/google/DemoEvalGeometries')\n","\n","polyImage = ee.Image(0).byte().paint(trainingPolys, 1).paint(evalPolys, 2)\n","polyImage = polyImage.updateMask(polyImage)\n","\n","mapid = polyImage.getMapId({'min': 1, 'max': 2, 'palette': ['red', 'blue']})\n","map = folium.Map(location=[38., -100.], zoom_start=5)\n","folium.TileLayer(\n"," tiles=mapid['tile_fetcher'].url_format,\n"," attr='Map Data © Google Earth Engine',\n"," overlay=True,\n"," name='training polygons',\n"," ).add_to(map)\n","map.add_child(folium.LayerControl())\n","map"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"ZV890gPHeZqz","colab_type":"text"},"source":["# Sampling\n","\n","The mapped data look reasonable so take a sample from each polygon and merge the results into a single export. The key step is sampling the array image at points, to get all the pixels in a 256x256 neighborhood at each point. It's worth noting that to build the training and testing data for the FCNN, you export a single TFRecord file that contains patches of pixel values in each record. You do NOT need to export each training/testing patch to a different image. 
Since each record potentially contains a lot of data (especially with big patches or many input bands), some manual sharding of the computation is necessary to avoid the `computed value too large` error. Specifically, the following code takes multiple (smaller) samples within each geometry, merging the results to get a single export."]},{"cell_type":"code","metadata":{"id":"FyRpvwENxE-A","colab_type":"code","cellView":"both","colab":{}},"source":["# Convert the feature collections to lists for iteration.\n","trainingPolysList = trainingPolys.toList(trainingPolys.size())\n","evalPolysList = evalPolys.toList(evalPolys.size())\n","\n","# These numbers determined experimentally.\n","n = 200 # Number of shards in each polygon.\n","N = 2000 # Total sample size in each polygon.\n","\n","# Export all the training data (in many pieces), with one task \n","# per geometry.\n","for g in range(trainingPolys.size().getInfo()):\n"," geomSample = ee.FeatureCollection([])\n"," for i in range(n):\n"," sample = arrays.sample(\n"," region = ee.Feature(trainingPolysList.get(g)).geometry(), \n"," scale = 30,\n"," numPixels = N / n, # Size of the shard.\n"," seed = i,\n"," tileScale = 8\n"," )\n"," geomSample = geomSample.merge(sample)\n","\n"," desc = TRAINING_BASE + '_g' + str(g)\n"," task = ee.batch.Export.table.toCloudStorage(\n"," collection = geomSample,\n"," description = desc,\n"," bucket = BUCKET,\n"," fileNamePrefix = FOLDER + '/' + desc,\n"," fileFormat = 'TFRecord',\n"," selectors = BANDS + [RESPONSE]\n"," )\n"," task.start()\n","\n","# Export all the evaluation data.\n","for g in range(evalPolys.size().getInfo()):\n"," geomSample = ee.FeatureCollection([])\n"," for i in range(n):\n"," sample = arrays.sample(\n"," region = ee.Feature(evalPolysList.get(g)).geometry(), \n"," scale = 30,\n"," numPixels = N / n,\n"," seed = i,\n"," tileScale = 8\n"," )\n"," geomSample = geomSample.merge(sample)\n","\n"," desc = EVAL_BASE + '_g' + str(g)\n"," task = ee.batch.Export.table.toCloudStorage(\n"," collection = geomSample,\n"," description = desc,\n"," bucket = BUCKET,\n"," fileNamePrefix = FOLDER + '/' + desc,\n"," fileFormat = 'TFRecord',\n"," selectors = BANDS + [RESPONSE]\n"," )\n"," task.start()"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"rWXrvBE4607G","colab_type":"text"},"source":["# Training data\n","\n","Load the data exported from Earth Engine into a `tf.data.Dataset`. 
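The loops above only start the export tasks, and those must finish before the data exists in the bucket. If you'd rather block in the notebook than watch the Tasks tab, here is a sketch using the `ee.batch.Task` API (assuming `ee` is initialized as above):

```python
import time

# Poll until no tasks owned by this user are still active.
while any(t.active() for t in ee.batch.Task.list()):
    print('Waiting on export tasks...')
    time.sleep(60)
print('No active tasks; check each state for COMPLETED vs. FAILED.')
```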
The following are helper functions for that."]},{"cell_type":"code","metadata":{"id":"WWZ0UXCVMyJP","colab_type":"code","colab":{}},"source":["def parse_tfrecord(example_proto):\n"," \"\"\"The parsing function.\n"," Read a serialized example into the structure defined by FEATURES_DICT.\n"," Args:\n"," example_proto: a serialized Example.\n"," Returns:\n"," A dictionary of tensors, keyed by feature name.\n"," \"\"\"\n"," return tf.io.parse_single_example(example_proto, FEATURES_DICT)\n","\n","\n","def to_tuple(inputs):\n"," \"\"\"Function to convert a dictionary of tensors to a tuple of (inputs, outputs).\n"," Turn the tensors returned by parse_tfrecord into a stack in HWC shape.\n"," Args:\n"," inputs: A dictionary of tensors, keyed by feature name.\n"," Returns:\n"," A tuple of (inputs, outputs).\n"," \"\"\"\n"," inputsList = [inputs.get(key) for key in FEATURES]\n"," stacked = tf.stack(inputsList, axis=0)\n"," # Convert from CHW to HWC\n"," stacked = tf.transpose(stacked, [1, 2, 0])\n"," return stacked[:,:,:len(BANDS)], stacked[:,:,len(BANDS):]\n","\n","\n","def get_dataset(pattern):\n"," \"\"\"Function to read, parse and format to tuple a set of input tfrecord files.\n"," Get all the files matching the pattern, parse and convert to tuple.\n"," Args:\n"," pattern: A file pattern to match in a Cloud Storage bucket.\n"," Returns:\n"," A tf.data.Dataset\n"," \"\"\"\n"," glob = tf.io.gfile.glob(pattern)\n"," dataset = tf.data.TFRecordDataset(glob, compression_type='GZIP')\n"," dataset = dataset.map(parse_tfrecord, num_parallel_calls=5)\n"," dataset = dataset.map(to_tuple, num_parallel_calls=5)\n"," return dataset"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"Xg1fa18336D2","colab_type":"text"},"source":["Use the helpers to read in the training dataset. Print the first record to check."]},{"cell_type":"code","metadata":{"id":"rm0qRF0fAYcC","colab_type":"code","colab":{}},"source":["def get_training_dataset():\n","\t\"\"\"Get the preprocessed training dataset\n"," Returns: \n"," A tf.data.Dataset of training data.\n"," \"\"\"\n","\tglob = 'gs://' + BUCKET + '/' + FOLDER + '/' + TRAINING_BASE + '*'\n","\tdataset = get_dataset(glob)\n","\tdataset = dataset.shuffle(BUFFER_SIZE).batch(BATCH_SIZE).repeat()\n","\treturn dataset\n","\n","training = get_training_dataset()\n","\n","print(iter(training.take(1)).next())"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"j-cQO5RL6vob","colab_type":"text"},"source":["# Evaluation data\n","\n","Now do the same thing to get an evaluation dataset. Note that unlike the training dataset, the evaluation dataset has a batch size of 1, is not repeated and is not shuffled."]},{"cell_type":"code","metadata":{"id":"ieKTCGiJ6xzo","colab_type":"code","colab":{}},"source":["def get_eval_dataset():\n","\t\"\"\"Get the preprocessed evaluation dataset\n"," Returns: \n"," A tf.data.Dataset of evaluation data.\n"," \"\"\"\n","\tglob = 'gs://' + BUCKET + '/' + FOLDER + '/' + EVAL_BASE + '*'\n","\tdataset = get_dataset(glob)\n","\tdataset = dataset.batch(1).repeat()\n","\treturn dataset\n","\n","evaluation = get_eval_dataset()"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"9JIE7Yl87lgU","colab_type":"text"},"source":["# Model\n","\n","Here we use the Keras implementation of the U-Net model. The U-Net model takes 256x256 pixel patches as input and outputs per-pixel class probability, label or a continuous output. 
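Before building the model, it can help to confirm the pipeline emits the shapes a 2D convolutional network expects. A quick check; the expected values follow from `BATCH_SIZE`, `KERNEL_SIZE`, and `BANDS` defined earlier:

```python
# Pull one batch from the training Dataset and inspect its shapes.
inputs_batch, targets_batch = next(iter(training))
print(inputs_batch.shape)   # expected: (16, 256, 256, 9)
print(targets_batch.shape)  # expected: (16, 256, 256, 1)
```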
We can implement the model essentially unmodified, but will use mean squared error loss on the sigmoidal output since we are treating this as a regression problem, rather than a classification problem. Since impervious surface fraction is constrained to [0,1], with many values close to zero or one, a saturating activation function is suitable here."]},{"cell_type":"code","metadata":{"id":"wsnnnz56yS3l","colab_type":"code","colab":{}},"source":["from tensorflow.python.keras import layers\n","from tensorflow.python.keras import losses\n","from tensorflow.python.keras import models\n","from tensorflow.python.keras import metrics\n","from tensorflow.python.keras import optimizers\n","\n","def conv_block(input_tensor, num_filters):\n","\tencoder = layers.Conv2D(num_filters, (3, 3), padding='same')(input_tensor)\n","\tencoder = layers.BatchNormalization()(encoder)\n","\tencoder = layers.Activation('relu')(encoder)\n","\tencoder = layers.Conv2D(num_filters, (3, 3), padding='same')(encoder)\n","\tencoder = layers.BatchNormalization()(encoder)\n","\tencoder = layers.Activation('relu')(encoder)\n","\treturn encoder\n","\n","def encoder_block(input_tensor, num_filters):\n","\tencoder = conv_block(input_tensor, num_filters)\n","\tencoder_pool = layers.MaxPooling2D((2, 2), strides=(2, 2))(encoder)\n","\treturn encoder_pool, encoder\n","\n","def decoder_block(input_tensor, concat_tensor, num_filters):\n","\tdecoder = layers.Conv2DTranspose(num_filters, (2, 2), strides=(2, 2), padding='same')(input_tensor)\n","\tdecoder = layers.concatenate([concat_tensor, decoder], axis=-1)\n","\tdecoder = layers.BatchNormalization()(decoder)\n","\tdecoder = layers.Activation('relu')(decoder)\n","\tdecoder = layers.Conv2D(num_filters, (3, 3), padding='same')(decoder)\n","\tdecoder = layers.BatchNormalization()(decoder)\n","\tdecoder = layers.Activation('relu')(decoder)\n","\tdecoder = layers.Conv2D(num_filters, (3, 3), padding='same')(decoder)\n","\tdecoder = layers.BatchNormalization()(decoder)\n","\tdecoder = layers.Activation('relu')(decoder)\n","\treturn decoder\n","\n","def get_model():\n","\tinputs = layers.Input(shape=[None, None, len(BANDS)]) # 256\n","\tencoder0_pool, encoder0 = encoder_block(inputs, 32) # 128\n","\tencoder1_pool, encoder1 = encoder_block(encoder0_pool, 64) # 64\n","\tencoder2_pool, encoder2 = encoder_block(encoder1_pool, 128) # 32\n","\tencoder3_pool, encoder3 = encoder_block(encoder2_pool, 256) # 16\n","\tencoder4_pool, encoder4 = encoder_block(encoder3_pool, 512) # 8\n","\tcenter = conv_block(encoder4_pool, 1024) # center\n","\tdecoder4 = decoder_block(center, encoder4, 512) # 16\n","\tdecoder3 = decoder_block(decoder4, encoder3, 256) # 32\n","\tdecoder2 = decoder_block(decoder3, encoder2, 128) # 64\n","\tdecoder1 = decoder_block(decoder2, encoder1, 64) # 128\n","\tdecoder0 = decoder_block(decoder1, encoder0, 32) # 256\n","\toutputs = layers.Conv2D(1, (1, 1), activation='sigmoid')(decoder0)\n","\n","\tmodel = models.Model(inputs=[inputs], outputs=[outputs])\n","\n","\tmodel.compile(\n","\t\toptimizer=optimizers.get(OPTIMIZER), \n","\t\tloss=losses.get(LOSS),\n","\t\tmetrics=[metrics.get(metric) for metric in METRICS])\n","\n","\treturn model"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"uu_E7OTDBCoS","colab_type":"text"},"source":["# Training the model\n","\n","You train a Keras model by calling `.fit()` on it. Here we're going to train for 10 epochs, which is suitable for demonstration purposes. 
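Before committing to a full training run, a one-batch smoke test on random data can confirm the network maps (H, W, 9) inputs to (H, W, 1) sigmoid outputs. This is illustrative only; the random array merely stands in for real patches:

```python
import numpy as np

m_test = get_model()
dummy = np.random.rand(1, 256, 256, 9).astype('float32')  # fake batch
pred = m_test.predict(dummy)
print(pred.shape)                            # (1, 256, 256, 1)
print(float(pred.min()), float(pred.max()))  # both within [0, 1]
```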
For production use, you probably want to optimize this parameter, for example through [hyperparameter tuning](https://cloud.google.com/ml-engine/docs/tensorflow/using-hyperparameter-tuning)."]},{"cell_type":"code","metadata":{"id":"NzzaWxOhSxBy","colab_type":"code","colab":{}},"source":["m = get_model()\n","\n","m.fit(\n"," x=training, \n"," epochs=EPOCHS, \n"," steps_per_epoch=int(TRAIN_SIZE / BATCH_SIZE), \n"," validation_data=evaluation,\n"," validation_steps=EVAL_SIZE)"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"U2XrwZHp66j4","colab_type":"text"},"source":["Note that the notebook VM is sometimes not heavy-duty enough to get through a whole training job, especially if you have a large buffer size or a large number of epochs. You can still use this notebook for training, but may need to set up an alternative VM ([learn more](https://research.google.com/colaboratory/local-runtimes.html)) for production use. Alternatively, you can package your code for running large training jobs on Google's AI Platform [as described here](https://cloud.google.com/ml-engine/docs/tensorflow/trainer-considerations). The following code loads a pre-trained model, which you can use for predictions right away."]},{"cell_type":"code","metadata":{"id":"-RJpNfEUS1qp","colab_type":"code","colab":{}},"source":["# Load a trained model. 50 epochs. 25 hours. Final RMSE ~0.08.\n","MODEL_DIR = 'gs://ee-docs-demos/fcnn-demo/trainer/model'\n","m = tf.keras.models.load_model(MODEL_DIR)\n","m.summary()"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"J1ySNup0xCqN","colab_type":"text"},"source":["# Prediction\n","\n","The prediction pipeline is:\n","\n","1. Export imagery on which to do predictions from Earth Engine in TFRecord format to a Cloud Storage bucket.\n","2. Use the trained model to make the predictions.\n","3. Write the predictions to a TFRecord file in a Cloud Storage bucket.\n","4. Upload the predictions TFRecord file to Earth Engine.\n","\n","The following functions handle this process. It's useful to separate the export from the predictions so that you can experiment with different models without running the export every time."]},{"cell_type":"code","metadata":{"id":"M3WDAa-RUpXP","colab_type":"code","colab":{}},"source":["def doExport(out_image_base, kernel_buffer, region):\n"," \"\"\"Run the image export task. 
Block until complete.\n"," \"\"\"\n"," task = ee.batch.Export.image.toCloudStorage(\n"," image = image.select(BANDS),\n"," description = out_image_base,\n"," bucket = BUCKET,\n"," fileNamePrefix = FOLDER + '/' + out_image_base,\n"," region = region.getInfo()['coordinates'],\n"," scale = 30,\n"," fileFormat = 'TFRecord',\n"," maxPixels = 1e10,\n"," formatOptions = {\n"," 'patchDimensions': KERNEL_SHAPE,\n"," 'kernelSize': kernel_buffer,\n"," 'compressed': True,\n"," 'maxFileSize': 104857600\n"," }\n"," )\n"," task.start()\n","\n"," # Block until the task completes.\n"," print('Running image export to Cloud Storage...')\n"," import time\n"," while task.active():\n"," time.sleep(30)\n","\n"," # Error condition\n"," if task.status()['state'] != 'COMPLETED':\n"," print('Error with image export.')\n"," else:\n"," print('Image export completed.')"],"execution_count":0,"outputs":[]},{"cell_type":"code","metadata":{"id":"zb_9_FflygVw","colab_type":"code","colab":{}},"source":["def doPrediction(out_image_base, user_folder, kernel_buffer, region):\n"," \"\"\"Perform inference on exported imagery, upload to Earth Engine.\n"," \"\"\"\n","\n"," print('Looking for TFRecord files...')\n","\n"," # Get a list of all the files in the output bucket.\n"," filesList = !gsutil ls 'gs://'{BUCKET}'/'{FOLDER}\n","\n"," # Get only the files generated by the image export.\n"," exportFilesList = [s for s in filesList if out_image_base in s]\n","\n"," # Get the list of image files and the JSON mixer file.\n"," imageFilesList = []\n"," jsonFile = None\n"," for f in exportFilesList:\n"," if f.endswith('.tfrecord.gz'):\n"," imageFilesList.append(f)\n"," elif f.endswith('.json'):\n"," jsonFile = f\n","\n"," # Make sure the files are in the right order.\n"," imageFilesList.sort()\n","\n"," from pprint import pprint\n"," pprint(imageFilesList)\n"," print(jsonFile)\n","\n"," import json\n"," # Load the contents of the mixer file to a JSON object.\n"," jsonText = !gsutil cat {jsonFile}\n"," # Get a single string w/ newlines from the IPython.utils.text.SList\n"," mixer = json.loads(jsonText.nlstr)\n"," pprint(mixer)\n"," patches = mixer['totalPatches']\n","\n"," # Get set up for prediction.\n"," x_buffer = int(kernel_buffer[0] / 2)\n"," y_buffer = int(kernel_buffer[1] / 2)\n","\n"," buffered_shape = [\n"," KERNEL_SHAPE[0] + kernel_buffer[0],\n"," KERNEL_SHAPE[1] + kernel_buffer[1]]\n","\n"," imageColumns = [\n"," tf.io.FixedLenFeature(shape=buffered_shape, dtype=tf.float32) \n"," for k in BANDS\n"," ]\n","\n"," imageFeaturesDict = dict(zip(BANDS, imageColumns))\n","\n"," def parse_image(example_proto):\n"," return tf.io.parse_single_example(example_proto, imageFeaturesDict)\n","\n"," def toTupleImage(inputs):\n"," inputsList = [inputs.get(key) for key in BANDS]\n"," stacked = tf.stack(inputsList, axis=0)\n"," stacked = tf.transpose(stacked, [1, 2, 0])\n"," return stacked\n","\n"," # Create a dataset from the TFRecord file(s) in Cloud Storage.\n"," imageDataset = tf.data.TFRecordDataset(imageFilesList, compression_type='GZIP')\n"," imageDataset = imageDataset.map(parse_image, num_parallel_calls=5)\n"," imageDataset = imageDataset.map(toTupleImage).batch(1)\n","\n"," # Perform inference.\n"," print('Running predictions...')\n"," predictions = m.predict(imageDataset, steps=patches, verbose=1)\n"," # print(predictions[0])\n","\n"," print('Writing predictions...')\n"," out_image_file = 'gs://' + BUCKET + '/' + FOLDER + '/' + out_image_base + '.TFRecord'\n"," writer = tf.io.TFRecordWriter(out_image_file)\n"," patches = 0\n"," for 
predictionPatch in predictions:\n"," print('Writing patch ' + str(patches) + '...')\n"," predictionPatch = predictionPatch[\n"," x_buffer:x_buffer+KERNEL_SIZE, y_buffer:y_buffer+KERNEL_SIZE]\n","\n"," # Create an example.\n"," example = tf.train.Example(\n"," features=tf.train.Features(\n"," feature={\n"," 'impervious': tf.train.Feature(\n"," float_list=tf.train.FloatList(\n"," value=predictionPatch.flatten()))\n"," }\n"," )\n"," )\n"," # Write the example.\n"," writer.write(example.SerializeToString())\n"," patches += 1\n","\n"," writer.close()\n","\n"," # Start the upload.\n"," out_image_asset = user_folder + '/' + out_image_base\n"," !earthengine upload image --asset_id={out_image_asset} {out_image_file} {jsonFile}"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"LZqlymOehnQO","colab_type":"text"},"source":["Now that all the code needed to run the prediction pipeline is in place, all that remains is to specify the output region in which to do the prediction, the names of the output files, where to put them, and the shape of the outputs. In terms of the shape, the model is trained on 256x256 patches, but can work (in theory) on any patch that's big enough with even dimensions ([reference](https://www.cv-foundation.org/openaccess/content_cvpr_2015/papers/Long_Fully_Convolutional_Networks_2015_CVPR_paper.pdf)). Because of tile boundary artifacts, give the model slightly larger patches for prediction, then clip out the middle 256x256 patch. This is controlled with a kernel buffer, half of which extends beyond the patch on each side. For example, specifying a 128x128 kernel buffer appends 64 pixels on each side of the 256x256 patch (so the model sees 384x384 buffered inputs), ensuring that the pixels in the output are taken from inputs completely covered by the kernel."]},{"cell_type":"code","metadata":{"id":"FPANwc7B1-TS","colab_type":"code","colab":{}},"source":["# Output assets folder: YOUR FOLDER\n","user_folder = 'users/username' # INSERT YOUR FOLDER HERE.\n","\n","# Base file name to use for TFRecord files and assets.\n","bj_image_base = 'FCNN_demo_beijing_384_'\n","# Half this will extend on the sides of each patch.\n","bj_kernel_buffer = [128, 128]\n","# Beijing\n","bj_region = ee.Geometry.Polygon(\n"," [[[115.9662455210937, 40.121362012835235],\n"," [115.9662455210937, 39.64293313749715],\n"," [117.01818643906245, 39.64293313749715],\n"," [117.01818643906245, 40.121362012835235]]], None, False)"],"execution_count":0,"outputs":[]},{"cell_type":"code","metadata":{"id":"lLNEOLkXWvSi","colab_type":"code","colab":{}},"source":["# Run the export.\n","doExport(bj_image_base, bj_kernel_buffer, bj_region)"],"execution_count":0,"outputs":[]},{"cell_type":"code","metadata":{"id":"KxACnxKFrQ_J","colab_type":"code","colab":{}},"source":["# Run the prediction.\n","doPrediction(bj_image_base, user_folder, bj_kernel_buffer, bj_region)"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"uj_G9OZ1xH6K","colab_type":"text"},"source":["# Display the output\n","\n","Once the data has been exported, the model has made predictions, the predictions have been written to a file, and the image has been imported to Earth Engine, it's possible to display the resultant Earth Engine asset. 
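If the map below comes up empty, the `earthengine upload` ingestion may still be running. A small, optional existence check (assuming `getInfo` raises when the asset isn't there yet):

```python
# Confirm the uploaded prediction asset exists before displaying it.
asset_id = user_folder + '/' + bj_image_base
try:
    ee.Image(asset_id).getInfo()
    print('Asset is ready:', asset_id)
except Exception:
    print('Asset not found yet; the upload task may still be running.')
```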
Here, display the impervious area predictions over Beijing, China."]},{"cell_type":"code","metadata":{"id":"Jgco6HJ4R5p2","colab_type":"code","colab":{}},"source":["out_image = ee.Image(user_folder + '/' + bj_image_base)\n","mapid = out_image.getMapId({'min': 0, 'max': 1})\n","map = folium.Map(location=[39.898, 116.5097])\n","folium.TileLayer(\n"," tiles=mapid['tile_fetcher'].url_format,\n"," attr='Map Data © Google Earth Engine',\n"," overlay=True,\n"," name='predicted impervious',\n"," ).add_to(map)\n","map.add_child(folium.LayerControl())\n","map"],"execution_count":0,"outputs":[]}]} \ No newline at end of file