diff --git a/.dockerignore b/.dockerignore index b885538d..91555831 100644 --- a/.dockerignore +++ b/.dockerignore @@ -5,4 +5,7 @@ Missions Missions_rel Missions_dev .env -**/.git \ No newline at end of file +**/.git +data +*DS_Store +*__pycache__ \ No newline at end of file diff --git a/.gitignore b/.gitignore index d75c3426..21fa8da7 100644 --- a/.gitignore +++ b/.gitignore @@ -17,6 +17,8 @@ /src/pre/tools.js /build/* +/data/* +*__pycache__ sessions diff --git a/API/Backend/Draw/models/published.js b/API/Backend/Draw/models/published.js index dd06cb91..064ebc34 100644 --- a/API/Backend/Draw/models/published.js +++ b/API/Backend/Draw/models/published.js @@ -25,39 +25,50 @@ const { sequelize } = require("../../../connection"); const attributes = { intent: { type: Sequelize.ENUM, - values: ["roi", "campaign", "campsite", "trail", "signpost"], + values: [ + "roi", + "campaign", + "campsite", + "trail", + "signpost", + "polygon", + "line", + "point", + "text", + "arrow", + ], allowNull: false, unique: false, - defaultValue: null + defaultValue: null, }, parent: { type: Sequelize.DataTypes.INTEGER, unique: false, - allowNull: true + allowNull: true, }, children: { type: Sequelize.DataTypes.ARRAY(Sequelize.DataTypes.INTEGER), unique: false, - allowNull: true + allowNull: true, }, level: { type: Sequelize.INTEGER, unique: false, - allowNull: false + allowNull: false, }, properties: { type: Sequelize.JSON, allowNull: true, - defaultValue: {} + defaultValue: {}, }, geom: { type: Sequelize.GEOMETRY, - allowNull: true - } + allowNull: true, + }, }; const options = { - timestamps: false + timestamps: false, }; var Published = sequelize.define("publisheds", attributes, options); diff --git a/API/Backend/Draw/routes/files.js b/API/Backend/Draw/routes/files.js index 79c9baf2..c8779b7c 100644 --- a/API/Backend/Draw/routes/files.js +++ b/API/Backend/Draw/routes/files.js @@ -113,7 +113,9 @@ router.post("/getfile", function (req, res, next) { (req.body.test === "true" ? "publisheds_test" : "publisheds") + "" + (req.body.intent && req.body.intent.length > 0 - ? " WHERE intent=:intent" + ? req.body.intent === "all" + ? 
" WHERE intent IN ('polygon', 'line', 'point', 'text', 'arrow')" + : " WHERE intent=:intent" : ""), { replacements: { @@ -698,7 +700,14 @@ const compile = function (req, res, callback) { where: { is_master: true, intent: { - [Sequelize.Op.in]: ["roi", "campaign", "campsite", "trail", "signpost"], + [Sequelize.Op.in]: [ + "roi", + "campaign", + "campsite", + "trail", + "signpost", + "all", + ], }, }, }).then((files) => { @@ -775,11 +784,11 @@ const compile = function (req, res, callback) { " " + "AND a.id != b.id" + " " + - "AND ((ST_OVERLAPS(a.geom, b.geom)" + + "AND ((ST_OVERLAPS(ST_BUFFER(a.geom, -0.000005, 'join=mitre'), b.geom)" + " " + "AND NOT ST_Touches(a.geom, b.geom))" + " " + - "OR ST_CROSSES(a.geom, b.geom))" + + "OR ST_CROSSES(ST_BUFFER(a.geom, -0.000005, 'join=mitre'), b.geom))" + " " + "UNION ALL" + " " + @@ -810,6 +819,7 @@ const compile = function (req, res, callback) { .spread((results) => { let hierarchy = []; let intentOrder = ["roi", "campaign", "campsite", "signpost"]; + let excludeIntents = ["polygon", "line", "point", "text", "arrow"]; let flatHierarchy = []; let issues = []; let changes = []; @@ -819,32 +829,36 @@ const compile = function (req, res, callback) { let intersects = []; let contains = []; let children = []; - for (let r = 0; r < results.length; r++) { - if (results[r].id == features[f].id) { - let childProps = JSON.parse(results[r].associated_properties); - if (results[r].association === "intersects") { - intersects.push({ - name: childProps.name, - uuid: childProps.uuid, - id: results[r].associated_id, - intent: results[r].associated_intent, - }); - } else if (results[r].association === "contains") { - contains.push({ - name: childProps.name, - uuid: childProps.uuid, - id: results[r].associated_id, - intent: results[r].associated_intent, - }); - children.push({ - name: childProps.name, - uuid: childProps.uuid, - id: results[r].associated_id, - intent: results[r].associated_intent, - }); + + if (!excludeIntents.includes(features[f].intent)) { + for (let r = 0; r < results.length; r++) { + if (results[r].id == features[f].id) { + let childProps = JSON.parse(results[r].associated_properties); + if (results[r].association === "intersects") { + intersects.push({ + name: childProps.name, + uuid: childProps.uuid, + id: results[r].associated_id, + intent: results[r].associated_intent, + }); + } else if (results[r].association === "contains") { + contains.push({ + name: childProps.name, + uuid: childProps.uuid, + id: results[r].associated_id, + intent: results[r].associated_intent, + }); + children.push({ + name: childProps.name, + uuid: childProps.uuid, + id: results[r].associated_id, + intent: results[r].associated_intent, + }); + } } } } + let featureProps = JSON.parse(features[f].properties); flatHierarchy.push({ feature: features[f], @@ -976,28 +990,32 @@ const compile = function (req, res, callback) { //Build the root of the trees for (let f = 0; f < features.length; f++) { let isCovered = false; - for (let r = 0; r < results.length; r++) { - if ( - results[r].association === "contains" && - results[r].associated_id == features[f].id - ) { - isCovered = true; - break; + if (!excludeIntents.includes(features[f].intent)) { + for (let r = 0; r < results.length; r++) { + if ( + !excludeIntents.includes(results[r].intent) && + results[r].association === "contains" && + results[r].associated_id == features[f].id + ) { + isCovered = true; + break; + } + } + + if (!isCovered) { + let featureProps = JSON.parse(features[f].properties); + hierarchy.push({ + intent: 
features[f].intent, + id: features[f].id, + name: featureProps.name, + uuid: featureProps.uuid, + children: { + intersects: [], + contains: [], + }, + }); + continue; } - } - if (!isCovered) { - let featureProps = JSON.parse(features[f].properties); - hierarchy.push({ - intent: features[f].intent, - id: features[f].id, - name: featureProps.name, - uuid: featureProps.uuid, - children: { - intersects: [], - contains: [], - }, - }); - continue; } } @@ -1099,7 +1117,7 @@ const compile = function (req, res, callback) { } } - //The Saviorng + //The Savioring depthTraversalB(hierarchy, 0); function depthTraversalB(node, depth) { for (let i = 0; i < node.length; i++) { @@ -1156,6 +1174,8 @@ const compile = function (req, res, callback) { let intent = node.intent; let props = JSON.parse(node.feature.properties); + if (excludeIntents.includes(intent)) continue; + //Check for duplicate uuids if (props.uuid == null) { issues.push({ @@ -1478,7 +1498,7 @@ router.post("/publish", function (req, res, next) { logger("error", "Failed to publish. " + message, req.originalUrl, req); res.send({ status: "failure", - message: "Failed to publish." + message, + message: "Failed to publish. " + message, body: {}, }); } @@ -1493,19 +1513,17 @@ router.post("/publish", function (req, res, next) { failureCallback ) { Table.findAll({ + limit: 1, where: { file_id: file_id, }, + order: [["history_id", "DESC"]], }) - .then((histories) => { - let maxHistoryId = -Infinity; - if (histories && histories.length > 0) { - for (let i = 0; i < histories.length; i++) { - maxHistoryId = Math.max(histories[i].history_id, maxHistoryId); - } + .then((lastHistory) => { + if (lastHistory && lastHistory.length > 0) { return { - historyIndex: maxHistoryId + 1, - history: histories[maxHistoryId].history, + historyIndex: lastHistory[0].history_id + 1, + history: lastHistory[0].history, }; } else return { historyIndex: 0, history: [] }; }) @@ -1580,7 +1598,14 @@ router.post("/publish", function (req, res, next) { cb(true); return null; }) - .catch(function (error) { + .catch(function (err) { + logger( + "error", + "Error adding to published.", + req.originalUrl, + req, + err + ); cb(false); return null; }); diff --git a/API/Backend/Draw/setup.js b/API/Backend/Draw/setup.js index bcd5c620..90df3c54 100644 --- a/API/Backend/Draw/setup.js +++ b/API/Backend/Draw/setup.js @@ -4,7 +4,7 @@ const routerDraw = require("./routes/draw").router; let setup = { //Once the app initializes - onceInit: s => { + onceInit: (s) => { s.app.use( "/API/files", s.ensureUser(), @@ -24,17 +24,18 @@ let setup = { ); }, //Once the server starts - onceStarted: s => {}, + onceStarted: (s) => {}, //Once all tables sync - onceSynced: s => { + onceSynced: (s) => { routeFiles.makeMasterFiles([ "roi", "campaign", "campsite", "trail", - "signpost" + "signpost", + "all", ]); - } + }, }; module.exports = setup; diff --git a/API/Backend/Geodatasets/routes/geodatasets.js b/API/Backend/Geodatasets/routes/geodatasets.js index 318f3661..be645aa2 100644 --- a/API/Backend/Geodatasets/routes/geodatasets.js +++ b/API/Backend/Geodatasets/routes/geodatasets.js @@ -13,10 +13,10 @@ const Geodatasets = geodatasets.Geodatasets; const makeNewGeodatasetTable = geodatasets.makeNewGeodatasetTable; //Returns a geodataset table as a geojson -router.post("/get", function(req, res, next) { +router.post("/get", function (req, res, next) { get("post", req, res, next); }); -router.get("/get", function(req, res, next) { +router.get("/get", function (req, res, next) { get("get", req, res, next); }); @@ -31,7 
+31,7 @@ function get(reqtype, req, res, next) { xyz = { x: parseInt(req.body.x), y: parseInt(req.body.y), - z: parseInt(req.body.z) + z: parseInt(req.body.z), }; } } else if (reqtype == "get") { @@ -41,13 +41,13 @@ function get(reqtype, req, res, next) { xyz = { x: parseInt(req.query.x), y: parseInt(req.query.y), - z: parseInt(req.query.z) + z: parseInt(req.query.z), }; } } //First Find the table name Geodatasets.findOne({ where: { name: layer } }) - .then(result => { + .then((result) => { if (result) { let table = result.dataValues.table; if (type == "geojson") { @@ -55,7 +55,7 @@ function get(reqtype, req, res, next) { .query( "SELECT properties, ST_AsGeoJSON(geom)" + " " + "FROM " + table ) - .spread(results => { + .spread((results) => { let geojson = { type: "FeatureCollection", features: [] }; for (let i = 0; i < results.length; i++) { let properties = results[i].properties; @@ -71,14 +71,14 @@ function get(reqtype, req, res, next) { if (reqtype == "post") { res.send({ status: "success", - body: geojson + body: geojson, }); } else { res.send(geojson); } return null; }) - .catch(error => { + .catch((error) => { res.send({ status: "failure", message: "a" }); }); } else if ( @@ -89,11 +89,11 @@ function get(reqtype, req, res, next) { ) { let ne = { lat: tile2Lat(xyz.y, xyz.z), - lng: tile2Lng(xyz.x + 1, xyz.z) + lng: tile2Lng(xyz.x + 1, xyz.z), }; let sw = { lat: tile2Lat(xyz.y + 1, xyz.z), - lng: tile2Lng(xyz.x, xyz.z) + lng: tile2Lng(xyz.x, xyz.z), }; //We make these slightly large bounds for our initial bounds of data, @@ -151,25 +151,25 @@ function get(reqtype, req, res, next) { ") AS q;", { replacements: { - table: table - } + table: table, + }, } ) - .spread(results => { + .spread((results) => { res.setHeader("Content-Type", "application/x-protobuf"); res.setHeader("Access-Control-Allow-Origin", "*"); if (reqtype == "post") { res.send({ status: "success", - body: results + body: results, }); } else { res.send(Buffer.from(results[0].st_asmvt, "binary")); } return null; }) - .catch(err => { + .catch((err) => { logger( "error", "Geodataset SQL error.", @@ -182,25 +182,25 @@ function get(reqtype, req, res, next) { } else { res.send({ status: "failure", - message: "Unknown type or missing xyz." 
+ message: "Unknown type or missing xyz.", }); } } else { - res.send({ status: "failure", message: "c" }); + res.send({ status: "failure", message: "Not Found" }); } return null; }) - .catch(err => { + .catch((err) => { logger("error", "Failure finding geodataset.", req.originalUrl, req, err); res.send({ status: "failure", message: "d" }); }); } //Returns a list of entries in the geodatasets table -router.post("/entries", function(req, res, next) { +router.post("/entries", function (req, res, next) { Geodatasets.findAll() - .then(sets => { + .then((sets) => { if (sets && sets.length > 0) { let entries = []; for (let i = 0; i < sets.length; i++) { @@ -208,15 +208,15 @@ router.post("/entries", function(req, res, next) { } res.send({ status: "success", - body: { entries: entries } + body: { entries: entries }, }); } else { res.send({ - status: "failure" + status: "failure", }); } }) - .catch(err => { + .catch((err) => { logger( "error", "Failure finding geodatasets.", @@ -225,7 +225,7 @@ router.post("/entries", function(req, res, next) { err ); res.send({ - status: "failure" + status: "failure", }); }); }); @@ -235,10 +235,10 @@ router.post("/entries", function(req, res, next) { * req.body.key * req.body.value */ -router.post("/search", function(req, res, next) { +router.post("/search", function (req, res, next) { //First Find the table name Geodatasets.findOne({ where: { name: req.body.layer } }) - .then(result => { + .then((result) => { if (result) { let table = result.dataValues.table; @@ -250,11 +250,11 @@ router.post("/search", function(req, res, next) { { replacements: { key: req.body.key, - value: req.body.value.replace(/[`;'"]/gi, "") - } + value: req.body.value.replace(/[`;'"]/gi, ""), + }, } ) - .spread(results => { + .spread((results) => { let r = []; for (let i = 0; i < results.length; i++) { let feature = JSON.parse(results[i].st_asgeojson); @@ -264,12 +264,12 @@ router.post("/search", function(req, res, next) { res.send({ status: "success", - body: r + body: r, }); return null; }) - .catch(err => { + .catch((err) => { logger( "error", "SQL error search through geodataset.", @@ -279,27 +279,27 @@ router.post("/search", function(req, res, next) { ); res.send({ status: "failure", - message: "SQL error." + message: "SQL error.", }); }); } else { res.send({ status: "failure", - message: "Layer not found." 
+ message: "Layer not found.", }); } return null; }) - .catch(err => { + .catch((err) => { logger("error", "Failure finding geodataset.", req.originalUrl, req, err); res.send({ - status: "failure" + status: "failure", }); }); }); -router.post("/recreate", function(req, res, next) { +router.post("/recreate", function (req, res, next) { let features = null; try { features = JSON.parse(req.body.geojson).features; @@ -308,19 +308,19 @@ router.post("/recreate", function(req, res, next) { res.send({ status: "failure", message: "Failure: Malformed file.", - body: {} + body: {}, }); } makeNewGeodatasetTable( req.body.name, - function(result) { + function (result) { let checkEnding = result.table.split("_"); if (checkEnding[checkEnding.length - 1] !== "geodatasets") { logger("error", "Malformed table name.", req.originalUrl, req); res.send({ status: "failed", - message: "Malformed table name" + message: "Malformed table name", }); return; } @@ -328,22 +328,26 @@ router.post("/recreate", function(req, res, next) { sequelize .query("TRUNCATE TABLE " + result.table + " RESTART IDENTITY") .then(() => { - populateGeodatasetTable(result.tableObj, features, function(success) { - res.send({ - status: success == true ? "success" : "failure", - message: "", - body: {} - }); - }); + populateGeodatasetTable( + result.tableObj, + features, + function (success) { + res.send({ + status: success == true ? "success" : "failure", + message: "", + body: {}, + }); + } + ); return null; }) - .catch(err => { + .catch((err) => { logger("error", "Recreation error.", req.originalUrl, req, err); res.send(result); }); }, - function(result) { + function (result) { res.send(result); } ); @@ -358,17 +362,17 @@ router.post("/recreate", function(req, res, next) { geom: { crs: { type: "name", properties: { name: "EPSG:4326" } }, type: features[i].geometry.type, - coordinates: features[i].geometry.coordinates - } + coordinates: features[i].geometry.coordinates, + }, }); } Table.bulkCreate(rows, { returning: true }) - .then(function(response) { + .then(function (response) { cb(true); return null; }) - .catch(function(err) { + .catch(function (err) { logger( "error", "Geodatasets: Failed to populate a geodataset table!", diff --git a/API/Backend/Shortener/routes/shortener.js b/API/Backend/Shortener/routes/shortener.js index de0b9427..4134a65c 100644 --- a/API/Backend/Shortener/routes/shortener.js +++ b/API/Backend/Shortener/routes/shortener.js @@ -30,7 +30,9 @@ router.post("/shorten", function (req, res, next) { shorten(); function shorten() { - var short = Math.random().toString(36).substr(2, 4); + var short = Math.random() + .toString(36) + .substr(2, 5 + loop); let newUrlShortened = { full: encodeURIComponent(req.body.url), diff --git a/CHANGELOG.md b/CHANGELOG.md new file mode 100644 index 00000000..3aa1b37d --- /dev/null +++ b/CHANGELOG.md @@ -0,0 +1,297 @@ +# MMGIS Changelog + +## 2.5.0 + +#### Summary + +This release contains the IsochroneTool, revives the Model layer type and includes a new Query layer type. Each vector layer can now be filtered by the user through the LayersTool, leads in the DrawTool can now draw and publish arrows and annotations, and the MeasureTool finally supports continuous elevation profiles. + +#### Added + +- Isochrone Tool! +- Model layer type! +- Query layer type! +- User filterable layers! 
+- More mmgisAPI functions
+- Deep linking 'centerPin' parameter
+- DrawTool lead Map file
+- DrawTool text rotation
+- Annotation and Arrows are now supported in regular (non-DrawTool) geojson
+- Configurable bearings, uncertainty ellipses, models and underlaid images for vector points
+- MeasureTool now supports a continuous profile
+- MeasureTool csv export includes 3D distance as well
+- LayersTool supports sublayer visibility toggles within a layer's settings menu
+- Python3 version of gdal2customtiles.py
+- More Coordinates configurations
+- Option in great_circle_calculator to calculate distance between points with Vincenty's formulae
+- CHANGELOG.md
+- Raw Variables Link has a new 'replace' section for modifying property values before injecting into a url
+
+#### Changed
+
+- LithoSphere 1.0.1 => 1.1.0 - [See LithoSphere Releases](https://github.com/NASA-AMMOS/LithoSphere/releases)
+- LayersTool, LegendTool and InfoTool panels are wider
+- The MMGIS M logo is now an svg
+- bulk_tiles.py's colormap is now optional
+- DrawTool's compile includes an inward buffer to allow for smaller drawn features to pass checks
+- InfoTool now lists all intersected polygons of a layer under a mouse click
+
+#### Fixed
+
+- Viewsheds play nicely with polar maps
+- Various improvements to the top search bar
+- Legend items wrap to new line instead of extending off screen
+- `colors` package fix
+- `globeLon` deep link not working
+- Uses `asHTML` for IdentifierTool again
+- `apt-get update` in Dockerfile now uses -y (yes to all) flag
+
+#### Removed
+
+- Excess Globe feature highlighting
+
+## 2.4.0
+
+#### Summary
+
+This release adds in the Viewshed Tool, time enabled layers, [LithoSphere](https://github.com/NASA-AMMOS/LithoSphere), WMS support, data layers, a JavaScript API, and more.
+
+#### Added
+
+- The Viewshed Tool!
+- Time enabled layers, configurations and a time UI component.
+- Full support for WMS layers and projections in 2D and 3D.
+- Data layer colorize shader enabling dynamic rendering of data.
+- An extensive window.mmgisAPI for interacting with MMGIS through an iframe.
+- Configuration for point marker shape.
+- Support for serving MMGIS at a subpath with the PUBLIC_URL environment variable.
+- bulk_tiles.py auxiliary script.
+- Features can be dehighlighted by clicking off on the map.
+- Measure Tool supports measurements in kilometers.
+- Ability to type in and go to a coordinate.
+- Elevation values on mouse over.
+- Configurable coordinates.
+- Draw Tool features behave like regular layer features when the Draw Tool is closed.
+
+#### Changed
+
+- The Globe has been refactored and made standalone in the npm library LithoSphere.
+- The Waypoint Kind now uses a top-down image of Perseverance.
+- Migrated from Python2 to Python3.
+
+#### Fixed
+
+- Documentation uses only relative links for resources now.
+- Issue with auth=none not working.
+- Draw Tool drawings now work at the meter level.
+- Draw Tool drawings now properly respect 0 valued styles.
+- Data layer names now support spaces.
+
+#### Removed
+
+- All PHP dependencies.
+
+---
+
+## 2.3.1
+
+#### Summary
+
+A point release to address bug fixes.
+
+#### Fixed
+
+- WMS layers now work for full polar projections
+- Raster layers obey order even if they're initially off
+- Draw Tool truly accepts .json files
+
+---
+
+## 2.3.0
+
+#### Summary
+
+The Draw Tool gets its own tag filtering system. The Measure Tool now uses great arcs and is way more accurate, and the map now fully supports WMS layers!
+
+#### Migration Details
+
+- The DrawTool tagging system change ideally needs more space in the `file_description` column. To increase it and not hit a tag or file description limit in drawing files, back up the MMGIS database and run the SQL command:
+
+```
+ALTER TABLE user_files ALTER COLUMN file_description TYPE VARCHAR(10000);
+```
+
+#### Added
+
+- Draw Tool files can now be searched for by user-defined tags/keywords
+- Draw Tool file options modal has been upgraded
+- Admins can pin preferred tags
+- Measure Tool now uses great arcs to compute measurements as well as for rendering lines
+- A docker-compose.yml
+- Fully functional WMS Map layers
+
+#### Removed
+
+#### Changed
+
+- Draw Tool requires a user to enter a file name before creating a file. (Instead of adding one as "New File")
+- Draw Tool now accepts uploads of .json geojson files. (From just .geojson and .shp)
+- Tools plugins are captured at build time! (You do not need to run `npm start` before building anymore)
+- Info Tool contents are condensed
+
+#### Fixed
+
+- Screenshot widget no longer captures the faint bottom bar in its images
+- Deep links to selected feature can now activate their info in the Info Tool
+- AUTH=local allows users to sign in again
+- Measure Tool profile download data is now accurate
+
+---
+
+## 2.0.0
+
+#### Migration Details
+
+- The environment variable `ALLOW_EMBED` has been replaced with `FRAME_ANCESTORS`
+- `npm install` is only needed in the root directory and not in the /API directory any more
+- Instead of `npm start`, use `npm run build` and then afterwards `npm run start:prod` to run the application.
+  _You will still need to run `npm start` before building the first time_
+
+#### Added
+
+- Webpack!
+- Production level builds
+- Babel
+- React support
+- Icons as markers
+- Configurable vector highlight color
+- Graticules
+- Configure page help buttons to docs
+
+#### Removed
+
+- Require.js
+- Unused libraries, tools and code
+- Swap widget
+- FORCE_CONFIGCONFIG environment variable removed
+
+#### Changed
+
+- Info Tool upgraded!
+- Measure Tool upgraded!
+- Top bar search
+- The environment variable ALLOW_EMBED has been replaced with FRAME_ANCESTORS
+- MMGIS2 splash screen
+- Various small UI changes
+- Improved configure look tab
+- Development logging is friendlier
+
+#### Fixed
+
+- Configure save warns of bad json
+- Removed unused configure globe projection option
+- Configure look tab colors work properly
+
+---
+
+## 1.3.5
+
+#### Added
+
+- ALLOW_EMBED environment variable
+- DISABLE_LINK_SHORTENER environment variable
+
+#### Fixed
+
+- Tweaked various UI elements
+- The Configure page Look tab now correctly reads in any existing `logourl` and `helpurl`
+- Configure page now warns of invalid raw variable JSON
+- Raw variable `info` values don't break when there's no text to replace in them
+- Configuration endpoints no longer assume SQL output is ordered
+
+---
+
+## 1.3.4
+
+#### Added:
+
+- WMS tile support for the Map (does not yet work on the Globe).
+- `AUTH` env can be set to "off" to disable user login entirely.
+- gdal2customtiles.py for tiling datasets with custom projections.
+ +--- + +## 1.3.3 + +#### Added: + +- Example docker-compose + +#### Fixed: + +- 3D Globe was rendering layers in depth order instead of breadth order +- Draw Tool publishing sometimes undid the last Lead Map edits +- Draw Tool styling options sometimes hidden in FireFox + +#### Changed: + +- New short URLs are one character longer +- Draw Tool publish overlap tolerance increased + +--- + +## 1.3.2 + +#### Fixed + +- Draw Tool history sql commands assumed rows would be returned in order which could completely break the tool. +- Draw Tool layers would get stuck due to automatic toggling when copying to files or turning the file you're drawing in off. +- The waypoint image links on the Test mission have been fixed. + +--- + +## 1.3.1 + +#### Fixed + +- Additional authorization headers prevented access to the configure login page. + +--- + +## 1.3.0 + +#### New Requirements + +- Node.js >= v10.10 + +#### New Features + +- Export vector layers as geojson from the Layers Tool +- Info Tool uses a JSON viewer +- Users can now split and merge features in the Draw Tool +- Rich application logging +- ENVs that end with \_HOST are pinged at start to test connections +- Ability to configure deep links to other sites based on properties of a selected feature +- Users can upload much larger files in the Draw Tool +- Missions can be configured to use any map projection +- Globe level of detail +- Globe space themed skysphere +- Tools and Backends are included by scanning a directory for setup files instead of editing code +- The Legend Tool supports color scales +- CSV files can be uploaded as datasets and can be queried on feature click +- Early API tokens that allow .csvs to be uploaded programmatically +- An optional top bar with search functionality +- Configurable page name and logo +- On screen Globe controls +- Support both TMS and WMS tilesets +- Layer Kinds for specialized interactions +- Better documentation in /docs +- Resources cache properly + +#### Fixed + +- All tables are properly created with just one start +- Failed layers no longer crash the application +- Infinite login bug +- Vectors disappearing with string weights +- Some endpoint calls began with home slashes that broke certain setups diff --git a/Dockerfile b/Dockerfile index 2107a850..cb388939 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,7 +1,7 @@ FROM node:12 # Install GDAL with Python bindings -RUN apt-get update +RUN apt-get -y update RUN apt-get install -y gdal-bin libgdal-dev python3-pip python3-gdal # Use Python3 for python diff --git a/auxiliary/bulk_tiles/bulk_tiles.py b/auxiliary/bulk_tiles/bulk_tiles.py index e099401b..67fac65e 100755 --- a/auxiliary/bulk_tiles/bulk_tiles.py +++ b/auxiliary/bulk_tiles/bulk_tiles.py @@ -28,8 +28,9 @@ def process_tiffs(input_dir, process_dir, colormap_dir, legends_dir, prefix=''): colormap_key = os.path.basename(colormap_file).split('_')[1].split('.')[0] colormap_dict[colormap_key] = colormap_file else: - print('Error: ' + colormap_dir + ' directory does not exist') - sys.exit() + print('Warning: ' + colormap_dir + ' directory does not exist') + print('Processing without colormap') + # sys.exit() if not os.path.exists(process_dir): os.makedirs(process_dir) for input_file in input_files: @@ -203,6 +204,10 @@ def create_configs(output_dirs, json_config, prefix): # Generate JSON layer configurations if specified if args.json_config is not None: - create_configs(output_dirs, args.json_config, args.prefix) + if args.prefix != '': + json_config = args.json_config.replace('.json', '_' + 
args.prefix + '.json') + else: + json_config = args.json_config + create_configs(output_dirs, json_config, args.prefix) sys.exit() diff --git a/auxiliary/gdal2customtiles/gdal2customtiles.py b/auxiliary/gdal2customtiles/gdal2customtiles.py index b41e4ebc..2e4c1655 100644 --- a/auxiliary/gdal2customtiles/gdal2customtiles.py +++ b/auxiliary/gdal2customtiles/gdal2customtiles.py @@ -49,11 +49,10 @@ def binary(num): # 1bto4b # 1bto4b - return ''.join(bin(ord(c)).replace('0b', '').rjust(8, '0') for c in struct.pack('!f', num)) + return ''.join(bin(c).replace('0b', '').rjust(8, '0') for c in struct.pack('!f', num)) # 1bto4b - def getTilePxBounds(self, tx, ty, tz, ds): querysize = self.tilesize @@ -1977,10 +1976,10 @@ def generate_base_tiles(self, tz): data3.append(int(f[16:24], 2)) data4.append(int(f[24:], 2)) - data1s = '' - data2s = '' - data3s = '' - data4s = '' + data1s = b'' + data2s = b'' + data3s = b'' + data4s = b'' indx = 0 for v in data1: data1s += struct.pack('B', data1[indx]) @@ -1988,7 +1987,6 @@ def generate_base_tiles(self, tz): data3s += struct.pack('B', data3[indx]) data4s += struct.pack('B', data4[indx]) indx += 1 - dstile.GetRasterBand(1).WriteRaster( wx, wy, wxsize + 1, wysize + 1, data1s, buf_type=gdal.GDT_Byte) dstile.GetRasterBand(2).WriteRaster( @@ -2023,10 +2021,10 @@ def generate_base_tiles(self, tz): data3.append(int(f[16:24], 2)) data4.append(int(f[24:], 2)) - data1s = '' - data2s = '' - data3s = '' - data4s = '' + data1s = b'' + data2s = b'' + data3s = b'' + data4s = b'' indx = 0 for v in data1: data1s += struct.pack('B', data1[indx]) diff --git a/auxiliary/gdal2customtiles/gdal2customtiles_py27.py b/auxiliary/gdal2customtiles/gdal2customtiles_py27.py new file mode 100644 index 00000000..8b6421d6 --- /dev/null +++ b/auxiliary/gdal2customtiles/gdal2customtiles_py27.py @@ -0,0 +1,3218 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# ****************************************************************************** +# $Id$ +# +# Project: Google Summer of Code 2007, 2008 (http://code.google.com/soc/) +# Support: BRGM (http://www.brgm.fr) +# Purpose: Convert a raster into TMS (Tile Map Service) tiles in a directory. +# - generate Google Earth metadata (KML SuperOverlay) +# - generate simple HTML viewer based on Google Maps and OpenLayers +# - support of global tiles (Spherical Mercator) for compatibility +# with interactive web maps a la Google Maps +# Author: Klokan Petr Pridal, klokan at klokan dot cz +# Web: http://www.klokan.cz/projects/gdal2tiles/ +# GUI: http://www.maptiler.org/ +# +############################################################################### +# Copyright (c) 2008, Klokan Petr Pridal +# Copyright (c) 2010-2013, Even Rouault +# +# Permission is hereby granted, free of charge, to any person obtaining a +# copy of this software and associated documentation files (the "Software"), +# to deal in the Software without restriction, including without limitation +# the rights to use, copy, modify, merge, publish, distribute, sublicense, +# and/or sell copies of the Software, and to permit persons to whom the +# Software is furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL +# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +# DEALINGS IN THE SOFTWARE. +# ****************************************************************************** + +import math +import os +import sys + +from osgeo import gdal +from osgeo import osr + +import struct # 1bto4b + + +def binary(num): # 1bto4b + # 1bto4b + return ''.join(bin(ord(c)).replace('0b', '').rjust(8, '0') for c in struct.pack('!f', num)) + +# 1bto4b + +def getTilePxBounds(self, tx, ty, tz, ds): + + querysize = self.tilesize + + if self.isRasterBounded: # 'raster' profile: + # tilesize in raster coordinates for actual zoom + tsize = int(self.tsize[tz]) + xsize = self.out_ds.fWorldXSize + ysize = self.out_ds.fWorldYSize + if tz >= self.tmaxz: + querysize = self.tilesize + + rx = (tx) * tsize - self.out_ds.fRasterXOriginWorld + #print("rx", rx) + rxsize = 0 + rxsize = tsize + + rysize = 0 + rysize = tsize + + ry = ysize - (ty * tsize) - rysize - \ + self.out_ds.fRasterYOriginWorld + + wx, wy = 0, 0 + wxsize = int(rxsize/float(tsize) * self.tilesize) + wysize = int(rysize/float(tsize) * self.tilesize) + if wysize != self.tilesize: + wy = self.tilesize - wysize + + if rx < 0: + rxsize = tsize + rx + wx = -rx + wxsize = int(rxsize/float(tsize) * self.tilesize) + rx = 0 + if ry < 0: + rysize = tsize + ry + wy = -ry + wysize = int(rysize/float(tsize) * self.tilesize) + ry = 0 + if rx + rxsize > self.out_ds.fRasterXSizeWorld: + rxsize = self.out_ds.fRasterXSizeWorld - rx + wxsize = int(rxsize/float(tsize) * self.tilesize) + if ry + rysize > self.out_ds.fRasterYSizeWorld: + rysize = self.out_ds.fRasterYSizeWorld - ry + wysize = int(rysize/float(tsize) * self.tilesize) + + # Convert rx, ry back to non-world coordinates + rx = int(float(self.out_ds.RasterXSize) * + (float(rx) / self.out_ds.fRasterXSizeWorld)) + ry = int(float(self.out_ds.RasterYSize) * + (float(ry) / self.out_ds.fRasterYSizeWorld)) + rxsize = int(float(self.out_ds.RasterXSize) * + (float(rxsize) / self.out_ds.fRasterXSizeWorld)) + rysize = int(float(self.out_ds.RasterYSize) * + (float(rysize) / self.out_ds.fRasterYSizeWorld)) + else: + b = self.mercator.TileBounds(tx, ty, tz) + rb, wb = self.geo_query( + ds, b[0], b[3], b[2], b[1], querysize=querysize) + rx, ry, rxsize, rysize = rb + wx, wy, wxsize, wysize = wb + + return [rx, ry, rxsize, rysize, wxsize, wysize] + + +try: + from PIL import Image + import numpy + import osgeo.gdal_array as gdalarray +except Exception: + # 'antialias' resampling is not available + pass + +__version__ = "$Id$" + +resampling_list = ('average', 'near', 'bilinear', 'cubic', + 'cubicspline', 'lanczos', 'antialias') +profile_list = ('mercator', 'geodetic', 'raster') +webviewer_list = ('all', 'google', 'openlayers', 'leaflet', 'none') + +# ============================================================================= +# ============================================================================= +# ============================================================================= + +__doc__globalmaptiles = """ +globalmaptiles.py + +Global Map Tiles as defined in Tile Map Service (TMS) Profiles +============================================================== + +Functions necessary for generation of global tiles used on the web. 
+It contains classes implementing coordinate conversions for: + + - GlobalMercator (based on EPSG:3857) + for Google Maps, Yahoo Maps, Bing Maps compatible tiles + - GlobalGeodetic (based on EPSG:4326) + for OpenLayers Base Map and Google Earth compatible tiles + +More info at: + +http://wiki.osgeo.org/wiki/Tile_Map_Service_Specification +http://wiki.osgeo.org/wiki/WMS_Tiling_Client_Recommendation +http://msdn.microsoft.com/en-us/library/bb259689.aspx +http://code.google.com/apis/maps/documentation/overlays.html#Google_Maps_Coordinates + +Created by Klokan Petr Pridal on 2008-07-03. +Google Summer of Code 2008, project GDAL2Tiles for OSGEO. + +In case you use this class in your product, translate it to another language +or find it useful for your project please let me know. +My email: klokan at klokan dot cz. +I would like to know where it was used. + +Class is available under the open-source GDAL license (www.gdal.org). +""" + +MAXZOOMLEVEL = 32 + + +class GlobalMercator(object): + r""" + TMS Global Mercator Profile + --------------------------- + + Functions necessary for generation of tiles in Spherical Mercator projection, + EPSG:3857. + + Such tiles are compatible with Google Maps, Bing Maps, Yahoo Maps, + UK Ordnance Survey OpenSpace API, ... + and you can overlay them on top of base maps of those web mapping applications. + + Pixel and tile coordinates are in TMS notation (origin [0,0] in bottom-left). + + What coordinate conversions do we need for TMS Global Mercator tiles:: + + LatLon <-> Meters <-> Pixels <-> Tile + + WGS84 coordinates Spherical Mercator Pixels in pyramid Tiles in pyramid + lat/lon XY in meters XY pixels Z zoom XYZ from TMS + EPSG:4326 EPSG:387 + .----. --------- -- TMS + / \ <-> | | <-> /----/ <-> Google + \ / | | /--------/ QuadTree + ----- --------- /------------/ + KML, public WebMapService Web Clients TileMapService + + What is the coordinate extent of Earth in EPSG:3857? + + [-20037508.342789244, -20037508.342789244, + 20037508.342789244, 20037508.342789244] + Constant 20037508.342789244 comes from the circumference of the Earth in meters, + which is 40 thousand kilometers, the coordinate origin is in the middle of extent. + In fact you can calculate the constant as: 2 * math.pi * 6378137 / 2.0 + $ echo 180 85 | gdaltransform -s_srs EPSG:4326 -t_srs EPSG:3857 + Polar areas with abs(latitude) bigger then 85.05112878 are clipped off. + + What are zoom level constants (pixels/meter) for pyramid with EPSG:3857? + + whole region is on top of pyramid (zoom=0) covered by 256x256 pixels tile, + every lower zoom level resolution is always divided by two + initialResolution = 20037508.342789244 * 2 / 256 = 156543.03392804062 + + What is the difference between TMS and Google Maps/QuadTree tile name convention? + + The tile raster itself is the same (equal extent, projection, pixel size), + there is just different identification of the same raster tile. + Tiles in TMS are counted from [0,0] in the bottom-left corner, id is XYZ. + Google placed the origin [0,0] to the top-left corner, reference is XYZ. + Microsoft is referencing tiles by a QuadTree name, defined on the website: + http://msdn2.microsoft.com/en-us/library/bb259689.aspx + + The lat/lon coordinates are using WGS84 datum, yes? + + Yes, all lat/lon we are mentioning should use WGS84 Geodetic Datum. + Well, the web clients like Google Maps are projecting those coordinates by + Spherical Mercator, so in fact lat/lon coordinates on sphere are treated as if + the were on the WGS84 ellipsoid. 
+ + From MSDN documentation: + To simplify the calculations, we use the spherical form of projection, not + the ellipsoidal form. Since the projection is used only for map display, + and not for displaying numeric coordinates, we don't need the extra precision + of an ellipsoidal projection. The spherical projection causes approximately + 0.33 percent scale distortion in the Y direction, which is not visually + noticeable. + + How do I create a raster in EPSG:3857 and convert coordinates with PROJ.4? + + You can use standard GIS tools like gdalwarp, cs2cs or gdaltransform. + All of the tools supports -t_srs 'epsg:3857'. + + For other GIS programs check the exact definition of the projection: + More info at http://spatialreference.org/ref/user/google-projection/ + The same projection is designated as EPSG:3857. WKT definition is in the + official EPSG database. + + Proj4 Text: + +proj=merc +a=6378137 +b=6378137 +lat_ts=0.0 +lon_0=0.0 +x_0=0.0 +y_0=0 + +k=1.0 +units=m +nadgrids=@null +no_defs + + Human readable WKT format of EPSG:3857: + PROJCS["Google Maps Global Mercator", + GEOGCS["WGS 84", + DATUM["WGS_1984", + SPHEROID["WGS 84",6378137,298.257223563, + AUTHORITY["EPSG","7030"]], + AUTHORITY["EPSG","6326"]], + PRIMEM["Greenwich",0], + UNIT["degree",0.0174532925199433], + AUTHORITY["EPSG","4326"]], + PROJECTION["Mercator_1SP"], + PARAMETER["central_meridian",0], + PARAMETER["scale_factor",1], + PARAMETER["false_easting",0], + PARAMETER["false_northing",0], + UNIT["metre",1, + AUTHORITY["EPSG","9001"]]] + """ + + def __init__(self, tileSize=256): + "Initialize the TMS Global Mercator pyramid" + self.tileSize = tileSize + self.initialResolution = 2 * math.pi * 6378137 / self.tileSize + # 156543.03392804062 for tileSize 256 pixels + self.originShift = 2 * math.pi * 6378137 / 2.0 + # 20037508.342789244 + + def LatLonToMeters(self, lat, lon): + "Converts given lat/lon in WGS84 Datum to XY in Spherical Mercator EPSG:3857" + + mx = lon * self.originShift / 180.0 + my = math.log(math.tan((90 + lat) * math.pi / 360.0)) / \ + (math.pi / 180.0) + + my = my * self.originShift / 180.0 + return mx, my + + def MetersToLatLon(self, mx, my): + "Converts XY point from Spherical Mercator EPSG:3857 to lat/lon in WGS84 Datum" + + lon = (mx / self.originShift) * 180.0 + lat = (my / self.originShift) * 180.0 + + lat = 180 / math.pi * \ + (2 * math.atan(math.exp(lat * math.pi / 180.0)) - math.pi / 2.0) + return lat, lon + + def PixelsToMeters(self, px, py, zoom): + "Converts pixel coordinates in given zoom level of pyramid to EPSG:3857" + + res = self.Resolution(zoom) + mx = px * res - self.originShift + my = py * res - self.originShift + return mx, my + + def MetersToPixels(self, mx, my, zoom): + "Converts EPSG:3857 to pyramid pixel coordinates in given zoom level" + + res = self.Resolution(zoom) + px = (mx + self.originShift) / res + py = (my + self.originShift) / res + return px, py + + def PixelsToTile(self, px, py): + "Returns a tile covering region in given pixel coordinates" + + tx = int(math.ceil(px / float(self.tileSize)) - 1) + ty = int(math.ceil(py / float(self.tileSize)) - 1) + return tx, ty + + def PixelsToRaster(self, px, py, zoom): + "Move the origin of pixel coordinates to top-left corner" + + mapSize = self.tileSize << zoom + return px, mapSize - py + + def MetersToTile(self, mx, my, zoom): + "Returns tile for given mercator coordinates" + + px, py = self.MetersToPixels(mx, my, zoom) + return self.PixelsToTile(px, py) + + def TileBounds(self, tx, ty, zoom): + "Returns bounds of the given tile in 
EPSG:3857 coordinates" + + minx, miny = self.PixelsToMeters( + tx*self.tileSize, ty*self.tileSize, zoom) + maxx, maxy = self.PixelsToMeters( + (tx+1)*self.tileSize, (ty+1)*self.tileSize, zoom) + return (minx, miny, maxx, maxy) + + def TileLatLonBounds(self, tx, ty, zoom): + "Returns bounds of the given tile in latitude/longitude using WGS84 datum" + + bounds = self.TileBounds(tx, ty, zoom) + minLat, minLon = self.MetersToLatLon(bounds[0], bounds[1]) + maxLat, maxLon = self.MetersToLatLon(bounds[2], bounds[3]) + + return (minLat, minLon, maxLat, maxLon) + + def Resolution(self, zoom): + "Resolution (meters/pixel) for given zoom level (measured at Equator)" + + # return (2 * math.pi * 6378137) / (self.tileSize * 2**zoom) + return self.initialResolution / (2**zoom) + + def ZoomForPixelSize(self, pixelSize): + "Maximal scaledown zoom of the pyramid closest to the pixelSize." + + for i in range(MAXZOOMLEVEL): + if pixelSize > self.Resolution(i): + if i != -1: + return i-1 + else: + return 0 # We don't want to scale up + + def GoogleTile(self, tx, ty, zoom): + "Converts TMS tile coordinates to Google Tile coordinates" + + # coordinate origin is moved from bottom-left to top-left corner of the extent + return tx, (2**zoom - 1) - ty + + def QuadTree(self, tx, ty, zoom): + "Converts TMS tile coordinates to Microsoft QuadTree" + + quadKey = "" + ty = (2**zoom - 1) - ty + for i in range(zoom, 0, -1): + digit = 0 + mask = 1 << (i-1) + if (tx & mask) != 0: + digit += 1 + if (ty & mask) != 0: + digit += 2 + quadKey += str(digit) + + return quadKey + + +class GlobalGeodetic(object): + r""" + TMS Global Geodetic Profile + --------------------------- + + Functions necessary for generation of global tiles in Plate Carre projection, + EPSG:4326, "unprojected profile". + + Such tiles are compatible with Google Earth (as any other EPSG:4326 rasters) + and you can overlay the tiles on top of OpenLayers base map. + + Pixel and tile coordinates are in TMS notation (origin [0,0] in bottom-left). + + What coordinate conversions do we need for TMS Global Geodetic tiles? + + Global Geodetic tiles are using geodetic coordinates (latitude,longitude) + directly as planar coordinates XY (it is also called Unprojected or Plate + Carre). We need only scaling to pixel pyramid and cutting to tiles. + Pyramid has on top level two tiles, so it is not square but rectangle. + Area [-180,-90,180,90] is scaled to 512x256 pixels. + TMS has coordinate origin (for pixels and tiles) in bottom-left corner. + Rasters are in EPSG:4326 and therefore are compatible with Google Earth. + + LatLon <-> Pixels <-> Tiles + + WGS84 coordinates Pixels in pyramid Tiles in pyramid + lat/lon XY pixels Z zoom XYZ from TMS + EPSG:4326 + .----. 
---- + / \ <-> /--------/ <-> TMS + \ / /--------------/ + ----- /--------------------/ + WMS, KML Web Clients, Google Earth TileMapService + """ + + def __init__(self, tmscompatible, tileSize=256): + self.tileSize = tileSize + if tmscompatible is not None: + # Defaults the resolution factor to 0.703125 (2 tiles @ level 0) + # Adhers to OSGeo TMS spec + # http://wiki.osgeo.org/wiki/Tile_Map_Service_Specification#global-geodetic + self.resFact = 180.0 / self.tileSize + else: + # Defaults the resolution factor to 1.40625 (1 tile @ level 0) + # Adheres OpenLayers, MapProxy, etc default resolution for WMTS + self.resFact = 360.0 / self.tileSize + + def LonLatToPixels(self, lon, lat, zoom): + "Converts lon/lat to pixel coordinates in given zoom of the EPSG:4326 pyramid" + + res = self.resFact / 2**zoom + px = (180 + lon) / res + py = (90 + lat) / res + return px, py + + def PixelsToTile(self, px, py): + "Returns coordinates of the tile covering region in pixel coordinates" + + tx = int(math.ceil(px / float(self.tileSize)) - 1) + ty = int(math.ceil(py / float(self.tileSize)) - 1) + return tx, ty + + def LonLatToTile(self, lon, lat, zoom): + "Returns the tile for zoom which covers given lon/lat coordinates" + + px, py = self.LonLatToPixels(lon, lat, zoom) + return self.PixelsToTile(px, py) + + def Resolution(self, zoom): + "Resolution (arc/pixel) for given zoom level (measured at Equator)" + + return self.resFact / 2**zoom + + def ZoomForPixelSize(self, pixelSize): + "Maximal scaledown zoom of the pyramid closest to the pixelSize." + + for i in range(MAXZOOMLEVEL): + if pixelSize > self.Resolution(i): + if i != 0: + return i-1 + else: + return 0 # We don't want to scale up + + def TileBounds(self, tx, ty, zoom): + "Returns bounds of the given tile" + res = self.resFact / 2**zoom + return ( + tx*self.tileSize*res - 180, + ty*self.tileSize*res - 90, + (tx+1)*self.tileSize*res - 180, + (ty+1)*self.tileSize*res - 90 + ) + + def TileLatLonBounds(self, tx, ty, zoom): + "Returns bounds of the given tile in the SWNE form" + b = self.TileBounds(tx, ty, zoom) + return (b[1], b[0], b[3], b[2]) + + +class Zoomify(object): + """ + Tiles compatible with the Zoomify viewer + ---------------------------------------- + """ + + def __init__(self, width, height, tilesize=256, tileformat='jpg'): + """Initialization of the Zoomify tile tree""" + + self.tilesize = tilesize + self.tileformat = tileformat + imagesize = (width, height) + tiles = (math.ceil(width / tilesize), math.ceil(height / tilesize)) + + # Size (in tiles) for each tier of pyramid. + self.tierSizeInTiles = [] + self.tierSizeInTiles.append(tiles) + + # Image size in pixels for each pyramid tierself + self.tierImageSize = [] + self.tierImageSize.append(imagesize) + + while (imagesize[0] > tilesize or imagesize[1] > tilesize): + imagesize = (math.floor( + imagesize[0] / 2), math.floor(imagesize[1] / 2)) + tiles = (math.ceil(imagesize[0] / tilesize), + math.ceil(imagesize[1] / tilesize)) + self.tierSizeInTiles.append(tiles) + self.tierImageSize.append(imagesize) + + self.tierSizeInTiles.reverse() + self.tierImageSize.reverse() + + # Depth of the Zoomify pyramid, number of tiers (zoom levels) + self.numberOfTiers = len(self.tierSizeInTiles) + + # Number of tiles up to the given tier of pyramid. 
+ self.tileCountUpToTier = [] + self.tileCountUpToTier[0] = 0 + for i in range(1, self.numberOfTiers+1): + self.tileCountUpToTier.append( + self.tierSizeInTiles[i-1][0] * self.tierSizeInTiles[i-1][1] + + self.tileCountUpToTier[i-1] + ) + + def tilefilename(self, x, y, z): + """Returns filename for tile with given coordinates""" + + tileIndex = x + y * \ + self.tierSizeInTiles[z][0] + self.tileCountUpToTier[z] + return os.path.join("TileGroup%.0f" % math.floor(tileIndex / 256), + "%s-%s-%s.%s" % (z, x, y, self.tileformat)) + + +class Gdal2TilesError(Exception): + pass + + +class GDAL2Tiles(object): + + def process(self): + """The main processing function, runs all the main steps of processing""" + + # Opening and preprocessing of the input file + self.open_input() + + # Generation of main metadata files and HTML viewers + self.generate_metadata() + + # 1bto4b + if self.isDEMtile: + for z in range(self.tminz, self.tmaxz + int(abs(math.log(self.tilesize, 2) - 8))): # 1bto4b + self.generate_base_tiles(z) + print(' Zoom ' + str(z) + ' tiles done!') + else: + # Generation of the lowest tiles + self.generate_base_tiles(self.tmaxz) + + # Generation of the overview tiles (higher in the pyramid) + self.generate_overview_tiles() + + def error(self, msg, details=""): + """Print an error message and stop the processing""" + if details: + self.parser.error(msg + "\n\n" + details) + else: + self.parser.error(msg) + + def progressbar(self, complete=0.0): + """Print progressbar for float value 0..1""" + gdal.TermProgress_nocb(complete) + + def gettempfilename(self, suffix): + """Returns a temporary filename""" + if '_' in os.environ: + # tempfile.mktemp() crashes on some Wine versions (the one of Ubuntu 12.04 particularly) + if os.environ['_'].find('wine') >= 0: + tmpdir = '.' + if 'TMP' in os.environ: + tmpdir = os.environ['TMP'] + import time + import random + random.seed(time.time()) + random_part = 'file%d' % random.randint(0, 1000000000) + return os.path.join(tmpdir, random_part + suffix) + + import tempfile + return tempfile.mktemp(suffix) + + def stop(self): + """Stop the rendering immediately""" + self.stopped = True + + def __init__(self, arguments): + """Constructor function - initialization""" + self.out_drv = None + self.mem_drv = None + self.in_ds = None + self.out_ds = None + self.out_srs = None + self.nativezoom = None + self.tminmax = None + self.tsize = None + self.mercator = None + self.geodetic = None + self.alphaband = None + self.dataBandsCount = None + self.out_gt = None + self.tileswne = None + self.swne = None + self.ominx = None + self.omaxx = None + self.omaxy = None + self.ominy = None + + # MMGIS + self.isRasterBounded = False + + # 1bto4b + self.isDEMtile = False + + # MMGIS + self.fminx = None + self.fmaxx = None + self.fminy = None + self.fmaxy = None + self.fPixelSize = None + + self.stopped = False + self.input = None + self.output = None + + # Tile format + self.tilesize = 256 + self.tiledriver = 'PNG' + self.tileext = 'png' + + # Should we read bigger window of the input raster and scale it down? + # Note: Modified later by open_input() + # Not for 'near' resampling + # Not for Wavelet based drivers (JPEG2000, ECW, MrSID) + # Not for 'raster' profile + self.scaledquery = True + # How big should be query window be for scaling down + # Later on reset according the chosen resampling algorightm + self.querysize = 4 * self.tilesize + + # Should we use Read on the input file for generating overview tiles? 
+ # Note: Modified later by open_input() + # Otherwise the overview tiles are generated from existing underlying tiles + self.overviewquery = False + + # RUN THE ARGUMENT PARSER: + + self.optparse_init() + self.options, self.args = self.parser.parse_args(args=arguments) + if not self.args: + self.error("No input file specified") + + # POSTPROCESSING OF PARSED ARGUMENTS: + + # Workaround for old versions of GDAL + try: + if ((self.options.verbose and self.options.resampling == 'near') or + gdal.TermProgress_nocb): + pass + except Exception: + self.error( + "This version of GDAL is not supported. Please upgrade to 1.6+.") + + # Is output directory the last argument? + + # Test output directory, if it doesn't exist + if (os.path.isdir(self.args[-1]) or + (len(self.args) > 1 and not os.path.exists(self.args[-1]))): + self.output = self.args[-1] + self.args = self.args[:-1] + + # More files on the input not directly supported yet + + if (len(self.args) > 1): + self.error("Processing of several input files is not supported.", + "Please first use a tool like gdal_vrtmerge.py or gdal_merge.py on the " + "files: gdal_vrtmerge.py -o merged.vrt %s" % " ".join(self.args)) + + self.input = self.args[0] + + # MMGIS + if self.options.extentworld: + extentworld = self.options.extentworld.split(",") + self.isRasterBounded = True + self.fminx = float(extentworld[0]) + self.fmaxx = float(extentworld[2]) + self.fminy = float(extentworld[3]) + self.fmaxy = float(extentworld[1]) + self.fPixelSize = float(extentworld[4]) + + # 1bto4b + if self.options.isDEMtile: + self.isDEMtile = True + self.tilesize = 32 + self.querysize = 4 * self.tilesize + + # Default values for not given options + + if not self.output: + # Directory with input filename without extension in actual directory + self.output = os.path.splitext(os.path.basename(self.input))[0] + + if not self.options.title: + self.options.title = os.path.basename(self.input) + + if self.options.url and not self.options.url.endswith('/'): + self.options.url += '/' + if self.options.url: + self.options.url += os.path.basename(self.output) + '/' + + # Supported options + + self.resampling = None + + if self.options.resampling == 'average': + try: + if gdal.RegenerateOverview: + pass + except Exception: + self.error("'average' resampling algorithm is not available.", + "Please use -r 'near' argument or upgrade to newer version of GDAL.") + + elif self.options.resampling == 'antialias': + try: + if numpy: # pylint:disable=W0125 + pass + except Exception: + self.error("'antialias' resampling algorithm is not available.", + "Install PIL (Python Imaging Library) and numpy.") + + elif self.options.resampling == 'near': + self.resampling = gdal.GRA_NearestNeighbour + self.querysize = self.tilesize + + elif self.options.resampling == 'bilinear': + self.resampling = gdal.GRA_Bilinear + self.querysize = self.tilesize * 2 + + elif self.options.resampling == 'cubic': + self.resampling = gdal.GRA_Cubic + + elif self.options.resampling == 'cubicspline': + self.resampling = gdal.GRA_CubicSpline + + elif self.options.resampling == 'lanczos': + self.resampling = gdal.GRA_Lanczos + + # User specified zoom levels + self.tminz = None + self.tmaxz = None + if self.options.zoom: + minmax = self.options.zoom.split('-', 1) + minmax.extend(['']) + zoom_min, zoom_max = minmax[:2] + self.tminz = int(zoom_min) + if zoom_max: + self.tmaxz = int(zoom_max) + else: + self.tmaxz = int(zoom_min) + + # KML generation + self.kml = self.options.kml + + # Check if the input filename is full ascii or 
not + try: + os.path.basename(self.input).encode('ascii') + except UnicodeEncodeError: + full_ascii = False + else: + full_ascii = True + + # LC_CTYPE check + if not full_ascii and 'UTF-8' not in os.environ.get("LC_CTYPE", ""): + if not self.options.quiet: + print("\nWARNING: " + "You are running gdal2tiles.py with a LC_CTYPE environment variable that is " + "not UTF-8 compatible, and your input file contains non-ascii characters. " + "The generated sample googlemaps, openlayers or " + "leaflet files might contain some invalid characters as a result\n") + + # Output the results + if self.options.verbose: + print("Options:", self.options) + print("Input:", self.input) + print("Output:", self.output) + print("Cache: %s MB" % (gdal.GetCacheMax() / 1024 / 1024)) + print('') + + def optparse_init(self): + """Prepare the option parser for input (argv)""" + + from optparse import OptionParser, OptionGroup + usage = "Usage: %prog [options] input_file(s) [output]" + p = OptionParser(usage, version="%prog " + __version__) + p.add_option("-p", "--profile", dest='profile', + type='choice', choices=profile_list, + help=("Tile cutting profile (%s) - default 'mercator' " + "(Google Maps compatible)" % ",".join(profile_list))) + p.add_option("-r", "--resampling", dest="resampling", + type='choice', choices=resampling_list, + help="Resampling method (%s) - default 'average'" % ",".join(resampling_list)) + p.add_option('-s', '--s_srs', dest="s_srs", metavar="SRS", + help="The spatial reference system used for the source input data") + p.add_option('-z', '--zoom', dest="zoom", + help="Zoom levels to render (format:'2-5' or '10').") + p.add_option('-e', '--resume', dest="resume", action="store_true", + help="Resume mode. Generate only missing files.") + p.add_option('-a', '--srcnodata', dest="srcnodata", metavar="NODATA", + help="NODATA transparency value to assign to the input data") + p.add_option('-d', '--tmscompatible', dest="tmscompatible", action="store_true", + help=("When using the geodetic profile, specifies the base resolution " + "as 0.703125 or 2 tiles at zoom level 0.")) + p.add_option("-v", "--verbose", + action="store_true", dest="verbose", + help="Print status messages to stdout") + p.add_option("-q", "--quiet", + action="store_true", dest="quiet", + help="Disable messages and status to stdout") + # MMGIS + p.add_option("-x", "--extentworld", dest="extentworld", + help="The full world meter extent (comma-separated as minx,maxx,miny,maxy,pixelsize) of an inner raster profile.") + # 1bto4b + p.add_option("-m", "--dem", action="store_true", dest="isDEMtile", + help="Indicate if the input is a Digital Elevation Model") + # KML options + g = OptionGroup(p, "KML (Google Earth) options", + "Options for generated Google Earth SuperOverlay metadata") + g.add_option("-k", "--force-kml", dest='kml', action="store_true", + help=("Generate KML for Google Earth - default for 'geodetic' profile and " + "'raster' in EPSG:4326. 
For a dataset with different projection use " + "with caution!")) + g.add_option("-n", "--no-kml", dest='kml', action="store_false", + help="Avoid automatic generation of KML files for EPSG:4326") + g.add_option("-u", "--url", dest='url', + help="URL address where the generated tiles are going to be published") + p.add_option_group(g) + + # HTML options + g = OptionGroup(p, "Web viewer options", + "Options for generated HTML viewers a la Google Maps") + g.add_option("-w", "--webviewer", dest='webviewer', type='choice', choices=webviewer_list, + help="Web viewer to generate (%s) - default 'all'" % ",".join(webviewer_list)) + g.add_option("-t", "--title", dest='title', + help="Title of the map") + g.add_option("-c", "--copyright", dest='copyright', + help="Copyright for the map") + g.add_option("-g", "--googlekey", dest='googlekey', + help="Google Maps API key from http://code.google.com/apis/maps/signup.html") + g.add_option("-b", "--bingkey", dest='bingkey', + help="Bing Maps API key from https://www.bingmapsportal.com/") + p.add_option_group(g) + + p.set_defaults(verbose=False, profile="mercator", kml=False, url='', + webviewer='all', copyright='', resampling='average', resume=False, + googlekey='INSERT_YOUR_KEY_HERE', bingkey='INSERT_YOUR_KEY_HERE') + + self.parser = p + + # ------------------------------------------------------------------------- + def open_input(self): + """Initialization of the input raster, reprojection if necessary""" + gdal.AllRegister() + + self.out_drv = gdal.GetDriverByName(self.tiledriver) + self.mem_drv = gdal.GetDriverByName('MEM') + + if not self.out_drv: + raise Exception("The '%s' driver was not found, is it available in this GDAL build?", + self.tiledriver) + if not self.mem_drv: + raise Exception( + "The 'MEM' driver was not found, is it available in this GDAL build?") + + # Open the input file + + if self.input: + self.in_ds = gdal.Open(self.input, gdal.GA_ReadOnly) + else: + raise Exception("No input file was specified") + + if self.options.verbose: + print("Input file:", + "( %sP x %sL - %s bands)" % (self.in_ds.RasterXSize, self.in_ds.RasterYSize, + self.in_ds.RasterCount)) + + if not self.in_ds: + # Note: GDAL prints the ERROR message too + self.error( + "It is not possible to open the input file '%s'." 
% self.input) + + # Read metadata from the input file + if self.in_ds.RasterCount == 0: + self.error("Input file '%s' has no raster band" % self.input) + + if self.in_ds.GetRasterBand(1).GetRasterColorTable(): + self.error("Please convert this file to RGB/RGBA and run gdal2tiles on the result.", + "From paletted file you can create RGBA file (temp.vrt) by:\n" + "gdal_translate -of vrt -expand rgba %s temp.vrt\n" + "then run:\n" + "gdal2tiles temp.vrt" % self.input) + + # Get NODATA value + in_nodata = [] + for i in range(1, self.in_ds.RasterCount+1): + if self.in_ds.GetRasterBand(i).GetNoDataValue() is not None: + in_nodata.append(self.in_ds.GetRasterBand(i).GetNoDataValue()) + if self.options.srcnodata: + nds = list(map(float, self.options.srcnodata.split(','))) + if len(nds) < self.in_ds.RasterCount: + in_nodata = ( + nds * self.in_ds.RasterCount)[:self.in_ds.RasterCount] + else: + in_nodata = nds + + if self.options.verbose: + print("NODATA: %s" % in_nodata) + + if self.options.verbose: + print("Preprocessed file:", + "( %sP x %sL - %s bands)" % (self.in_ds.RasterXSize, self.in_ds.RasterYSize, + self.in_ds.RasterCount)) + + in_srs = None + + if self.options.s_srs: + in_srs = osr.SpatialReference() + in_srs.SetFromUserInput(self.options.s_srs) + in_srs_wkt = in_srs.ExportToWkt() + else: + in_srs_wkt = self.in_ds.GetProjection() + if not in_srs_wkt and self.in_ds.GetGCPCount() != 0: + in_srs_wkt = self.in_ds.GetGCPProjection() + if in_srs_wkt: + in_srs = osr.SpatialReference() + in_srs.ImportFromWkt(in_srs_wkt) + + self.out_srs = osr.SpatialReference() + + if self.options.profile == 'mercator': + self.out_srs.ImportFromEPSG(3857) + elif self.options.profile == 'geodetic': + self.out_srs.ImportFromEPSG(4326) + else: + self.out_srs = in_srs + + # Are the reference systems the same? Reproject if necessary. + + self.out_ds = None + + if self.options.profile in ('mercator', 'geodetic'): + + if ((self.in_ds.GetGeoTransform() == (0.0, 1.0, 0.0, 0.0, 0.0, 1.0)) and + (self.in_ds.GetGCPCount() == 0)): + self.error("There is no georeference - neither affine transformation (worldfile) " + "nor GCPs. You can generate only 'raster' profile tiles.", + "Either gdal2tiles with parameter -p 'raster' or use another GIS " + "software for georeference e.g. gdal_transform -gcp / -a_ullr / -a_srs") + + if in_srs: + if ((in_srs.ExportToProj4() != self.out_srs.ExportToProj4()) or + (self.in_ds.GetGCPCount() != 0)): + # Generation of VRT dataset in tile projection, + # default 'nearest neighbour' warping + self.out_ds = gdal.AutoCreateWarpedVRT( + self.in_ds, in_srs_wkt, self.out_srs.ExportToWkt()) + + if self.options.verbose: + print("Warping of the raster by AutoCreateWarpedVRT " + "(result saved into 'tiles.vrt')") + self.out_ds.GetDriver().CreateCopy("tiles.vrt", self.out_ds) + + # Correction of AutoCreateWarpedVRT for NODATA values + if in_nodata != []: + tempfilename = self.gettempfilename('-gdal2tiles.vrt') + self.out_ds.GetDriver().CreateCopy(tempfilename, self.out_ds) + # open as a text file + s = open(tempfilename).read() + # Add the warping options + s = s.replace( + "", + """ + + + + """) + # replace BandMapping tag for NODATA bands.... 
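Note: the XML string literals in the two `s.replace(...)` calls above and in the band-mapping replace that follows appear to have been stripped in this view — only the `%i`/`0` format arguments survive. Judging from those arguments and from the stock gdal2tiles.py this script extends, the intent is to patch the warped VRT text so NODATA survives gdal.AutoCreateWarpedVRT(). A minimal reconstruction sketch, with the exact tag text treated as an assumption:

```python
def patch_warped_vrt_nodata(vrt_xml, in_nodata):
    """Assumed reconstruction of the stripped VRT text patching."""
    # Initialise destination pixels from NODATA and treat all source NODATA uniformly.
    vrt_xml = vrt_xml.replace(
        "<GDALWarpOptions>",
        '<GDALWarpOptions>\n'
        '  <Option name="INIT_DEST">NO_DATA</Option>\n'
        '  <Option name="UNIFIED_SRC_NODATA">YES</Option>')
    # Give every <BandMapping> explicit src/dst NODATA values, matching the
    # "%i ... 0 ... %i ... 0" format arguments that survive in the diff.
    for i, nodata in enumerate(in_nodata, start=1):
        vrt_xml = vrt_xml.replace(
            '<BandMapping src="%i" dst="%i"/>' % (i, i),
            '<BandMapping src="%i" dst="%i">\n'
            '  <SrcNoDataReal>%i</SrcNoDataReal>\n'
            '  <SrcNoDataImag>0</SrcNoDataImag>\n'
            '  <DstNoDataReal>%i</DstNoDataReal>\n'
            '  <DstNoDataImag>0</DstNoDataImag>\n'
            '</BandMapping>' % (i, i, nodata, nodata))
    return vrt_xml
```

The text-level surgery is presumably used because the older GDAL bindings targeted here expose no direct API for setting warp NODATA on an auto-created VRT; the surrounding code then writes the patched text to a temporary file and reopens it with gdal.Open().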
+ for i in range(len(in_nodata)): + s = s.replace( + '' % ( + (i+1), (i+1)), + """ + + %i + 0 + %i + 0 + + """ % ((i+1), (i+1), in_nodata[i], in_nodata[i])) + # save the corrected VRT + open(tempfilename, "w").write(s) + # open by GDAL as self.out_ds + self.out_ds = gdal.Open(tempfilename) + # delete the temporary file + os.unlink(tempfilename) + + # set NODATA_VALUE metadata + self.out_ds.SetMetadataItem( + 'NODATA_VALUES', ' '.join([str(i) for i in in_nodata])) + + if self.options.verbose: + print("Modified warping result saved into 'tiles1.vrt'") + open("tiles1.vrt", "w").write(s) + + # Correction of AutoCreateWarpedVRT for Mono (1 band) and RGB (3 bands) files + # without NODATA: + # equivalent of gdalwarp -dstalpha + if in_nodata == [] and self.out_ds.RasterCount in [1, 3]: + tempfilename = self.gettempfilename('-gdal2tiles.vrt') + self.out_ds.GetDriver().CreateCopy(tempfilename, self.out_ds) + # open as a text file + s = open(tempfilename).read() + # Add the warping options + s = s.replace( + "", + """ + + Alpha + + + """ % (self.out_ds.RasterCount + 1)) + s = s.replace( + "", + """ + %i + + """ % (self.out_ds.RasterCount + 1)) + s = s.replace( + "", + """ + + + """) + # save the corrected VRT + open(tempfilename, "w").write(s) + # open by GDAL as self.out_ds + self.out_ds = gdal.Open(tempfilename) + # delete the temporary file + os.unlink(tempfilename) + + if self.options.verbose: + print( + "Modified -dstalpha warping result saved into 'tiles1.vrt'") + open("tiles1.vrt", "w").write(s) + s = ''' + ''' + + else: + self.error("Input file has unknown SRS.", + "Use --s_srs ESPG:xyz (or similar) to provide source reference system.") + + if self.out_ds and self.options.verbose: + print("Projected file:", "tiles.vrt", "( %sP x %sL - %s bands)" % ( + self.out_ds.RasterXSize, self.out_ds.RasterYSize, self.out_ds.RasterCount)) + + if not self.out_ds: + self.out_ds = self.in_ds + + # + # Here we should have a raster (out_ds) in the correct Spatial Reference system + # + + # Get alpha band (either directly or from NODATA value) + self.alphaband = self.out_ds.GetRasterBand(1).GetMaskBand() + if ((self.alphaband.GetMaskFlags() & gdal.GMF_ALPHA) or + self.out_ds.RasterCount == 4 or + self.out_ds.RasterCount == 2): + self.dataBandsCount = self.out_ds.RasterCount - 1 + else: + self.dataBandsCount = self.out_ds.RasterCount + + # KML test + isepsg4326 = False + srs4326 = osr.SpatialReference() + srs4326.ImportFromEPSG(4326) + if self.out_srs and srs4326.ExportToProj4() == self.out_srs.ExportToProj4(): + self.kml = True + isepsg4326 = True + if self.options.verbose: + print("KML autotest OK!") + + # Read the georeference + self.out_gt = self.out_ds.GetGeoTransform() + + # Test the size of the pixel + + # Report error in case rotation/skew is in geotransform (possible only in 'raster' profile) + if (self.out_gt[2], self.out_gt[4]) != (0, 0): + self.error("Georeference of the raster contains rotation or skew. " + "Such raster is not supported. 
Please use gdalwarp first.") + + # Here we expect: pixel is square, no rotation on the raster + + # Output Bounds - coordinates in the output SRS + self.ominx = self.out_gt[0] + self.omaxx = self.out_gt[0] + self.out_ds.RasterXSize * self.out_gt[1] + self.omaxy = self.out_gt[3] + self.ominy = self.out_gt[3] - self.out_ds.RasterYSize * self.out_gt[1] + + # Note: maybe round(x, 14) to avoid the gdal_translate behaviour, when 0 becomes -1e-15 + + # MMGIS + def linearScale(domain, rang, value): + return ( + ((rang[1] - rang[0]) * (value - domain[0])) / + (domain[1] - domain[0]) + + rang[0] + ) + # MMGIS + self.out_ds.fRasterXSize = self.out_ds.RasterXSize + self.out_ds.fRasterYSize = self.out_ds.RasterYSize + self.out_ds.fRasterXOrigin = 0 + self.out_ds.fRasterYOrigin = 0 + self.out_ds.PixelSize = self.out_gt[1] + self.out_ds.fPixelSize = self.fPixelSize + # print("ominx", self.ominx, "omaxx", self.omaxx, "ominy", self.ominy, "omaxy", self.omaxy) + # print("fminx", self.fminx, "fmaxx", self.fmaxx, "fminy", self.fminy, "fmaxy", self.fmaxy) + if self.isRasterBounded: + self.out_ds.fRasterXSize = int(math.floor(self.out_ds.RasterXSize * (self.fmaxx - self.fminx) / ( + self.omaxx - self.ominx) * (self.out_ds.PixelSize / self.out_ds.fPixelSize))) + self.out_ds.fRasterYSize = int(math.ceil(self.out_ds.RasterYSize * (self.fmaxy - self.fminy) / ( + self.omaxy - self.ominy) * (self.out_ds.PixelSize / self.out_ds.fPixelSize))) + self.out_ds.fRasterXSizeRaw = int(math.floor( + self.out_ds.RasterXSize * (self.fmaxx - self.fminx) / (self.omaxx - self.ominx))) + self.out_ds.fRasterYSizeRaw = int(math.ceil( + self.out_ds.RasterYSize * (self.fmaxy - self.fminy) / (self.omaxy - self.ominy))) + # print("Full Raster Size: ", self.out_ds.fRasterXSize, self.out_ds.fRasterYSize ) + self.out_ds.fRasterXOrigin = int(math.floor(linearScale( + [self.fminx, self.fmaxx], [0, self.out_ds.fRasterXSize], self.out_gt[0]))) + self.out_ds.fRasterYOrigin = int(math.ceil(linearScale( + [self.fminy, self.fmaxy], [self.out_ds.fRasterYSize, 0], self.out_gt[3]))) + self.out_ds.fRasterXOriginRaw = int(math.floor(linearScale([self.fminx, self.fmaxx], [ + 0, self.out_ds.fRasterXSize], self.out_gt[0]) * (self.out_ds.fPixelSize / self.out_ds.PixelSize))) + self.out_ds.fRasterYOriginRaw = int(math.ceil(linearScale([self.fminy, self.fmaxy], [ + self.out_ds.fRasterYSize, 0], self.out_gt[3]) * (self.out_ds.fPixelSize / self.out_ds.PixelSize))) + self.out_ds.fRasterXWidth = int(math.floor(linearScale( + [self.fminx, self.fmaxx], [0, self.out_ds.fRasterXSize], self.omaxx))) - self.out_ds.fRasterXOrigin + self.out_ds.fRasterYHeight = int(math.ceil(linearScale( + [self.fminy, self.fmaxy], [0, self.out_ds.fRasterYSize], self.omaxy))) - self.out_ds.fRasterYOrigin + + if self.options.verbose: + print("Bounds (output srs):", round(self.ominx, 13), + self.ominy, self.omaxx, self.omaxy) + + # print("Input Raster Size: ", self.out_ds.RasterXSize, self.out_ds.RasterYSize) + # print("fmaxx-fminx", self.fmaxx - self.fminx, "omaxx-ominx", self.omaxx - self.ominx, "fmaxy-fminy", self.fmaxy - self.fminy, "omaxy-ominy", self.omaxy - self.ominy) + # print("Full Raster Size: ", self.out_ds.fRasterXSize, self.out_ds.fRasterYSize) + # print("Full Raster Size Raw: ", self.out_ds.fRasterXSizeRaw, self.out_ds.fRasterYSizeRaw) + # print("Raster Origin: ", self.out_ds.fRasterXOrigin, self.out_ds.fRasterYOrigin) + # print("Raster Origin Raw: ", self.out_ds.fRasterXOriginRaw, self.out_ds.fRasterYOriginRaw) + # print("Raster Width Height: ", self.out_ds.fRasterXWidth, 
self.out_ds.fRasterYHeight) + + # Calculating ranges for tiles in different zoom levels + if self.options.profile == 'mercator': + + self.mercator = GlobalMercator() + + # Function which generates SWNE in LatLong for given tile + self.tileswne = self.mercator.TileLatLonBounds + + # Generate table with min max tile coordinates for all zoomlevels + self.tminmax = list(range(0, 32)) + for tz in range(0, 32): + tminx, tminy = self.mercator.MetersToTile( + self.ominx, self.ominy, tz) + tmaxx, tmaxy = self.mercator.MetersToTile( + self.omaxx, self.omaxy, tz) + # crop tiles extending world limits (+-180,+-90) + tminx, tminy = max(0, tminx), max(0, tminy) + tmaxx, tmaxy = min(2**tz-1, tmaxx), min(2**tz-1, tmaxy) + self.tminmax[tz] = (tminx, tminy, tmaxx, tmaxy) + + # TODO: Maps crossing 180E (Alaska?) + + # Get the minimal zoom level (map covers area equivalent to one tile) + if self.tminz is None: + self.tminz = self.mercator.ZoomForPixelSize( + self.out_gt[1] * max(self.out_ds.RasterXSize, + self.out_ds.RasterYSize) / float(self.tilesize)) + + # Get the maximal zoom level + # (closest possible zoom level up on the resolution of raster) + if self.tmaxz is None: + self.tmaxz = self.mercator.ZoomForPixelSize(self.out_gt[1]) + + if self.options.verbose: + print("Bounds (latlong):", + self.mercator.MetersToLatLon(self.ominx, self.ominy), + self.mercator.MetersToLatLon(self.omaxx, self.omaxy)) + print('MinZoomLevel:', self.tminz) + print("MaxZoomLevel:", + self.tmaxz, + "(", + self.mercator.Resolution(self.tmaxz), + ")") + + if self.options.profile == 'geodetic': + + self.geodetic = GlobalGeodetic(self.options.tmscompatible) + + # Function which generates SWNE in LatLong for given tile + self.tileswne = self.geodetic.TileLatLonBounds + + # Generate table with min max tile coordinates for all zoomlevels + self.tminmax = list(range(0, 32)) + for tz in range(0, 32): + tminx, tminy = self.geodetic.LonLatToTile( + self.ominx, self.ominy, tz) + tmaxx, tmaxy = self.geodetic.LonLatToTile( + self.omaxx, self.omaxy, tz) + # crop tiles extending world limits (+-180,+-90) + tminx, tminy = max(0, tminx), max(0, tminy) + tmaxx, tmaxy = min(2**(tz+1)-1, tmaxx), min(2**tz-1, tmaxy) + self.tminmax[tz] = (tminx, tminy, tmaxx, tmaxy) + + # TODO: Maps crossing 180E (Alaska?) 
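The tminz/tmaxz defaults computed just below rely on each zoom level halving the resolution of the level above it. Below is a minimal sketch of that relationship, assuming the usual Web Mercator constants (256-px tiles, zoom-0 resolution 2·π·6378137/256 m/px). The GlobalMercator/GlobalGeodetic helpers used here implement the same idea; the geodetic grid works in degrees instead (0.703125°/px or 1.40625°/px at zoom 0, depending on -d).

```python
import math

TILE_SIZE = 256                                           # assumed tile size
INITIAL_RESOLUTION = 2 * math.pi * 6378137 / TILE_SIZE    # m/px at zoom 0 (Web Mercator)

def zoom_for_pixel_size(pixel_size):
    """Deepest zoom whose resolution is still >= pixel_size, i.e. the last
    level that never has to upsample the source raster."""
    for z in range(32):
        if pixel_size > INITIAL_RESOLUTION / 2 ** z:
            return max(z - 1, 0)   # never scale up past the native resolution
    return 31

# tmaxz ~ zoom matching the native pixel size of the raster;
# tminz ~ zoom where the whole raster fits in a single tile, obtained by
# scaling the pixel size by max(RasterXSize, RasterYSize) / TILE_SIZE.
```

For example, a 0.25 m/px mercator raster resolves to tmaxz = 19 under these constants (about 0.298 m/px at zoom 19 versus 0.149 m/px at zoom 20).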
+ + # Get the maximal zoom level + # (closest possible zoom level up on the resolution of raster) + if self.tminz is None: + self.tminz = self.geodetic.ZoomForPixelSize( + self.out_gt[1] * max(self.out_ds.RasterXSize, + self.out_ds.RasterYSize) / float(self.tilesize)) + + # Get the maximal zoom level + # (closest possible zoom level up on the resolution of raster) + if self.tmaxz is None: + self.tmaxz = self.geodetic.ZoomForPixelSize(self.out_gt[1]) + + if self.options.verbose: + print("Bounds (latlong):", self.ominx, + self.ominy, self.omaxx, self.omaxy) + + # MMGIS + if self.options.profile == 'raster' and self.isRasterBounded: + + def log2(x): + return math.log10(x) / math.log10(2) + + # MMGIS added 'f'* + self.nativezoom = int( + max(math.ceil(log2(self.out_ds.fRasterXSizeRaw/float(self.tilesize))), + math.ceil(log2(self.out_ds.fRasterYSizeRaw/float(self.tilesize))))) + + self.basenativezoom = int( + max(math.ceil(log2(self.out_ds.fRasterXSize/float(self.tilesize))), + math.ceil(log2(self.out_ds.fRasterYSize/float(self.tilesize))))) + + # MMGIS + self.out_ds.fWorldXSize = int( + float(self.out_ds.fRasterXSize) * (2**(self.nativezoom - self.basenativezoom))) + self.out_ds.fWorldYSize = int( + float(self.out_ds.fRasterYSize) * (2**(self.nativezoom - self.basenativezoom))) + self.out_ds.fRasterXOriginWorld = int(float( + self.out_ds.fWorldXSize) * (float(self.out_ds.fRasterXOrigin) / self.out_ds.fRasterXSize)) + self.out_ds.fRasterYOriginWorld = int(float( + self.out_ds.fWorldYSize) * (float(self.out_ds.fRasterYOrigin) / self.out_ds.fRasterYSize)) + self.out_ds.fRasterXSizeWorld = int(float( + self.out_ds.fWorldXSize) * (float(self.out_ds.fRasterXWidth) / self.out_ds.fRasterXSize)) + self.out_ds.fRasterYSizeWorld = int(float( + self.out_ds.RasterYSize) * (float(self.out_ds.fRasterXSizeWorld) / self.out_ds.RasterXSize)) + # print("World Size", self.out_ds.fWorldXSize, self.out_ds.fWorldYSize) + # print("Raster Origin World", self.out_ds.fRasterXOriginWorld, self.out_ds.fRasterYOriginWorld) + # print("Raster Size World", self.out_ds.fRasterXSizeWorld, self.out_ds.fRasterYSizeWorld) + + if self.options.verbose: + print("Native zoom of the raster:", self.nativezoom) + + # Get the minimal zoom level (whole raster in one tile) + if self.tminz is None: + self.tminz = 0 + + # Get the maximal zoom level (native resolution of the raster) + if self.tmaxz is None: + self.tmaxz = self.nativezoom + + # MMGIS added 'f'* + # Generate table with min max tile coordinates for all zoomlevels + self.tminmax = list(range(0, self.tmaxz+1)) + self.tsize = list(range(0, self.tmaxz+1)) + # print("Raster Size:", self.out_ds.RasterXSize,self.out_ds.RasterYSize) + # print("Pixel Size Ratio:", (self.out_ds.fPixelSize / self.out_ds.PixelSize)) + # print("nativezoom", self.nativezoom, "basenativezoom", self.basenativezoom, "tminz", self.tminz, "tmaxz", self.tmaxz) + for tz in range(0, self.tmaxz+1): + tsize = 2.0**(self.tmaxz-tz)*self.tilesize + toffsetx = int(math.floor( + 2.0**(tz) * self.out_ds.fRasterXOriginRaw / self.out_ds.fRasterXSizeRaw)) + toffsety = int(math.floor( + 2.0**(tz) * (self.out_ds.fRasterYOriginRaw) / self.out_ds.fRasterYSizeRaw)) + # print("tsize", tsize, "toffsetx", toffsetx, "toffsety", toffsety) + toffsetx = int(math.floor( + self.out_ds.fRasterXOriginWorld / tsize)) + toffsety = int(math.floor( + self.out_ds.fRasterYOriginWorld / tsize)) + # print("tsize", tsize, "toffsetx", toffsetx, "toffsety", toffsety) + tmaxx = int(math.floor( + self.out_ds.fRasterXSizeWorld / tsize)) + toffsetx + 1 + + 
tmaxy = int(math.floor( + self.out_ds.fRasterYSizeWorld / tsize)) + toffsety + 1 + self.tsize[tz] = math.ceil(tsize) + #tminx = toffsetx + tminx = int(tmaxx - ((tmaxx - toffsetx) / (0.75))) - 1 + tminy = int(tmaxy - ((tmaxy - toffsety) / (0.75))) - 1 + + self.tminmax[tz] = (tminx, tminy, tmaxx, tmaxy) + # print("tminx", tminx, "tminy", tminy, "tmaxx", tmaxx, "tmaxy", tmaxy, "tz", tz) + + elif self.options.profile == 'raster': + + def log2(x): + return math.log10(x) / math.log10(2) + self.nativezoom = int( + max(math.ceil(log2(self.out_ds.RasterXSize/float(self.tilesize))), + math.ceil(log2(self.out_ds.RasterYSize/float(self.tilesize))))) + + if self.options.verbose: + print("Native zoom of the raster:", self.nativezoom) + + # Get the minimal zoom level (whole raster in one tile) + if self.tminz is None: + self.tminz = 0 + + # Get the maximal zoom level (native resolution of the raster) + if self.tmaxz is None: + self.tmaxz = self.nativezoom + + # Generate table with min max tile coordinates for all zoomlevels + self.tminmax = list(range(0, self.tmaxz+1)) + self.tsize = list(range(0, self.tmaxz+1)) + for tz in range(0, self.tmaxz+1): + tsize = 2.0**(self.tmaxz-tz)*self.tilesize + tminx, tminy = 0, 0 + tmaxx = int(math.ceil(self.out_ds.RasterXSize / tsize)) - 1 + tmaxy = int(math.ceil(self.out_ds.RasterYSize / tsize)) - 1 + self.tsize[tz] = math.ceil(tsize) + self.tminmax[tz] = (tminx, tminy, tmaxx, tmaxy) + + # Function which generates SWNE in LatLong for given tile + if self.kml and in_srs_wkt: + ct = osr.CoordinateTransformation(in_srs, srs4326) + + def rastertileswne(x, y, z): + # X-pixel size in level + pixelsizex = (2**(self.tmaxz-z) * self.out_gt[1]) + west = self.out_gt[0] + x*self.tilesize*pixelsizex + east = west + self.tilesize*pixelsizex + south = self.ominy + y*self.tilesize*pixelsizex + north = south + self.tilesize*pixelsizex + if not isepsg4326: + # Transformation to EPSG:4326 (WGS84 datum) + west, south = ct.TransformPoint(west, south)[:2] + east, north = ct.TransformPoint(east, north)[:2] + return south, west, north, east + + self.tileswne = rastertileswne + else: + self.tileswne = lambda x, y, z: (0, 0, 0, 0) # noqa + + def generate_metadata(self): + """ + Generation of main metadata files and HTML viewers (metadata related to particular + tiles are generated during the tile processing). 
+ """ + + if not os.path.exists(self.output): + os.makedirs(self.output) + + if self.options.profile == 'mercator': + + south, west = self.mercator.MetersToLatLon(self.ominx, self.ominy) + north, east = self.mercator.MetersToLatLon(self.omaxx, self.omaxy) + south, west = max(-85.05112878, south), max(-180.0, west) + north, east = min(85.05112878, north), min(180.0, east) + self.swne = (south, west, north, east) + + # Generate googlemaps.html + if self.options.webviewer in ('all', 'google') and self.options.profile == 'mercator': + if (not self.options.resume or not + os.path.exists(os.path.join(self.output, 'googlemaps.html'))): + f = open(os.path.join(self.output, 'googlemaps.html'), 'wb') + f.write(self.generate_googlemaps().encode('utf-8')) + f.close() + + # Generate openlayers.html + if self.options.webviewer in ('all', 'openlayers'): + if (not self.options.resume or not + os.path.exists(os.path.join(self.output, 'openlayers.html'))): + f = open(os.path.join(self.output, 'openlayers.html'), 'wb') + f.write(self.generate_openlayers().encode('utf-8')) + f.close() + + # Generate leaflet.html + if self.options.webviewer in ('all', 'leaflet'): + if (not self.options.resume or not + os.path.exists(os.path.join(self.output, 'leaflet.html'))): + f = open(os.path.join(self.output, 'leaflet.html'), 'wb') + f.write(self.generate_leaflet().encode('utf-8')) + f.close() + + elif self.options.profile == 'geodetic': + + west, south = self.ominx, self.ominy + east, north = self.omaxx, self.omaxy + south, west = max(-90.0, south), max(-180.0, west) + north, east = min(90.0, north), min(180.0, east) + self.swne = (south, west, north, east) + + # Generate openlayers.html + if self.options.webviewer in ('all', 'openlayers'): + if (not self.options.resume or not + os.path.exists(os.path.join(self.output, 'openlayers.html'))): + f = open(os.path.join(self.output, 'openlayers.html'), 'wb') + f.write(self.generate_openlayers().encode('utf-8')) + f.close() + + elif self.options.profile == 'raster': + + west, south = self.ominx, self.ominy + east, north = self.omaxx, self.omaxy + + # MMGIS + if self.isRasterBounded: + west = self.fminx + east = self.fmaxx + south = self.fminy + north = self.fmaxy + + self.swne = (south, west, north, east) + + # Generate openlayers.html + if self.options.webviewer in ('all', 'openlayers'): + if (not self.options.resume or not + os.path.exists(os.path.join(self.output, 'openlayers.html'))): + f = open(os.path.join(self.output, 'openlayers.html'), 'wb') + f.write(self.generate_openlayers().encode('utf-8')) + f.close() + + # Generate tilemapresource.xml. 
+ if not self.options.resume or not os.path.exists(os.path.join(self.output, 'tilemapresource.xml')): + f = open(os.path.join(self.output, 'tilemapresource.xml'), 'wb') + f.write(self.generate_tilemapresource().encode('utf-8')) + f.close() + + if self.kml: + # TODO: Maybe problem for not automatically generated tminz + # The root KML should contain links to all tiles in the tminz level + children = [] + xmin, ymin, xmax, ymax = self.tminmax[self.tminz] + for x in range(xmin, xmax+1): + for y in range(ymin, ymax+1): + children.append([x, y, self.tminz]) + # Generate Root KML + if self.kml: + if (not self.options.resume or not + os.path.exists(os.path.join(self.output, 'doc.kml'))): + f = open(os.path.join(self.output, 'doc.kml'), 'wb') + f.write(self.generate_kml( + None, None, None, children).encode('utf-8')) + f.close() + + def generate_base_tiles(self, tz): + """ + Generation of the base tiles (the lowest in the pyramid) directly from the input raster + """ + + if self.isDEMtile: + print("Generating Tiles at Zoom " + str(tz) + ": ") + + if not self.options.quiet: + print("Generating Base Tiles:") + + if self.options.verbose: + print('') + print("Tiles generated from the max zoom level:") + print("----------------------------------------") + print('') + + ds = self.out_ds + + querysize = self.querysize + + # 1bto4b + if self.isDEMtile: + tilebands = 4 + querysize = self.tilesize + else: + tilebands = self.dataBandsCount + 1 + tz = self.tmaxz + + try: + self.tminmax[tz] + except IndexError: + print(" Won't make zoom level " + str(tz)) + return + + # Set the bounds + tminx, tminy, tmaxx, tmaxy = self.tminmax[tz] + + if self.options.verbose: + print("dataBandsCount: ", self.dataBandsCount) + print("tilebands: ", tilebands) + + tcount = (1+abs(tmaxx-tminx)) * (1+abs(tmaxy-tminy)) + ti = 0 + + for ty in range(tmaxy, tminy-1, -1): + for tx in range(tminx, tmaxx+1): + + if self.stopped: + break + ti += 1 + tilefilename = os.path.join( + self.output, str(tz), str(tx), "%s.%s" % (ty, self.tileext)) + if self.options.verbose: + print(ti, '/', tcount, tilefilename) + + if self.options.resume and os.path.exists(tilefilename): + if self.options.verbose: + print("Tile generation skipped because of --resume") + else: + self.progressbar(ti / float(tcount)) + continue + + # Create directories for the tile + if not os.path.exists(os.path.dirname(tilefilename)): + os.makedirs(os.path.dirname(tilefilename)) + + if self.options.profile == 'mercator': + # Tile bounds in EPSG:3857 + b = self.mercator.TileBounds(tx, ty, tz) + elif self.options.profile == 'geodetic': + b = self.geodetic.TileBounds(tx, ty, tz) + + # Don't scale up by nearest neighbour, better change the querysize + # to the native resolution (and return smaller query tile) for scaling + + if self.options.profile in ('mercator', 'geodetic'): + rb, wb = self.geo_query(ds, b[0], b[3], b[2], b[1]) + + # Pixel size in the raster covering query geo extent + nativesize = wb[0] + wb[2] + if self.options.verbose: + print("\tNative Extent (querysize", + nativesize, "): ", rb, wb) + + # Tile bounds in raster coordinates for ReadRaster query + rb, wb = self.geo_query( + ds, b[0], b[3], b[2], b[1], querysize=querysize) + + rx, ry, rxsize, rysize = rb + wx, wy, wxsize, wysize = wb + wxsize -= 1 # 1bto4b + wysize -= 1 # 1bto4b + + # MMGIS + elif self.isRasterBounded: # 'raster' profile: + + # tilesize in raster coordinates for actual zoom + tsize = int(self.tsize[tz]) + xsize = self.out_ds.fWorldXSize + ysize = self.out_ds.fWorldYSize + if tz >= self.tmaxz: + 
querysize = self.tilesize + + rx = (tx) * tsize - self.out_ds.fRasterXOriginWorld + #print("rx", rx) + rxsize = 0 + rxsize = tsize + + rysize = 0 + rysize = tsize + + ry = ysize - (ty * tsize) - rysize - \ + self.out_ds.fRasterYOriginWorld + + wx, wy = 0, 0 + wxsize = int(rxsize/float(tsize) * self.tilesize) + wysize = int(rysize/float(tsize) * self.tilesize) + if wysize != self.tilesize: + wy = self.tilesize - wysize + + if rx < 0: + rxsize = tsize + rx + wx = -rx + wxsize = int(rxsize/float(tsize) * self.tilesize) + rx = 0 + if ry < 0: + rysize = tsize + ry + wy = -ry + wysize = int(rysize/float(tsize) * self.tilesize) + ry = 0 + if rx + rxsize > self.out_ds.fRasterXSizeWorld: + rxsize = self.out_ds.fRasterXSizeWorld - rx + wxsize = int(rxsize/float(tsize) * self.tilesize) + if ry + rysize > self.out_ds.fRasterYSizeWorld: + rysize = self.out_ds.fRasterYSizeWorld - ry + wysize = int(rysize/float(tsize) * self.tilesize) + + # Convert rx, ry back to non-world coordinates + rx = int(float(self.out_ds.RasterXSize) * + (float(rx) / self.out_ds.fRasterXSizeWorld)) + ry = int(float(self.out_ds.RasterYSize) * + (float(ry) / self.out_ds.fRasterYSizeWorld)) + rxsize = int(float(self.out_ds.RasterXSize) * + (float(rxsize) / self.out_ds.fRasterXSizeWorld)) + rysize = int(float(self.out_ds.RasterYSize) * + (float(rysize) / self.out_ds.fRasterYSizeWorld)) + + wxsize -= 1 # 1bto4b + wysize -= 1 # 1bto4b + + #print("Extent: ", (tx, ty, tz, tsize), (rx, ry, rxsize, rysize), (wx, wy, wxsize, wysize), (self.out_ds.fRasterXOrigin, self.out_ds.fRasterYOrigin)) + else: # 'raster' profile: + # tilesize in raster coordinates for actual zoom + tsize = int(self.tsize[tz]) + xsize = self.out_ds.RasterXSize # size of the raster in pixels + ysize = self.out_ds.RasterYSize + if tz >= self.tmaxz: + querysize = self.tilesize + + rx = (tx) * tsize + rxsize = 0 + if tx == tmaxx: + rxsize = xsize % tsize + if rxsize == 0: + rxsize = tsize + + rysize = 0 + if ty == tmaxy: + rysize = ysize % tsize + if rysize == 0: + rysize = tsize + ry = ysize - (ty * tsize) - rysize + + wx, wy = 0, 0 + wxsize = int(rxsize/float(tsize) * self.tilesize) + wysize = int(rysize/float(tsize) * self.tilesize) + if wysize != self.tilesize: + wy = self.tilesize - wysize + + if self.options.verbose: + print("\tReadRaster Extent: ", + (rx, ry, rxsize, rysize), (wx, wy, wxsize, wysize)) + + # Query is in 'nearest neighbour' but can be bigger in then the tilesize + # We scale down the query to the tilesize by supplied algorithm. + + # Tile dataset in memory + + # 1bto4b + if self.isDEMtile: + dstile = self.mem_drv.Create( + '', self.tilesize, self.tilesize, tilebands, gdal.GDT_Byte) + else: + dstile = self.mem_drv.Create( + '', self.tilesize, self.tilesize, tilebands) + + data = alpha = None + # Read the source raster if anything is going inside the tile as per the computed + # geo_query + if rxsize != 0 and rysize != 0 and wxsize != 0 and wysize != 0: + # 1bto4b + if self.isDEMtile: + data = ds.GetRasterBand(1).ReadRaster( + rx, ry, rxsize, rysize, wxsize, wysize, buf_type=gdal.GDT_Float32) + else: + data = ds.ReadRaster(rx, ry, rxsize, rysize, wxsize, wysize, + band_list=list(range(1, self.dataBandsCount+1))) + alpha = self.alphaband.ReadRaster( + rx, ry, rxsize, rysize, wxsize, wysize) + + # The tile in memory is a transparent file by default. 
Write pixel values into it if + # any + if data: + # 1bto4b - both this full if and else + if self.isDEMtile: + if (wxsize * wysize) > 0: + data = struct.unpack('f' * wxsize * wysize, data) + else: + return + + if self.tilesize == querysize: + # Interpolate the values from four surrounding + + # This takes our 1d list of WxH data and pads it with a rect of none values + dataPad = list(data) + for i in reversed(range(1, wysize)): + dataPad.insert(wxsize * i, 0) + dataPad.insert(wxsize * i, 0) + for i in range(wxsize + 3): + dataPad.insert(0, 0) + for i in range(wxsize + 3): + dataPad.append(0) + + dataIn = [] + # Resample based on average of four + # averaging over: i, i + 1, i + wxsize, i + wxsize + 1 + for y in range(wysize+2 - 1): + for x in range(wxsize+2 - 1): + i = x+(y*(wxsize+2)) + nW = dataPad[i] + nE = dataPad[i+1] + sW = dataPad[i+(wxsize+2)] + sE = dataPad[i+(wxsize+2)+1] + dataIn.append((nW + nE + sW + sE)/float(4)) + + # Get the surrounding eight tiles + # Get NW + if tx - 1 >= tminx and ty + 1 <= tmaxy: + rxNW, ryNW, rxsizeNW, rysizeNW, wxsizeNW, wysizeNW = getTilePxBounds(self, + tx - 1, ty + 1, tz, ds) + wxsizeNW -= 1 + wysizeNW -= 1 + if wxsizeNW != 0 and wysizeNW != 0: + dataNW = ds.GetRasterBand(1).ReadRaster( + rxNW, ryNW, rxsizeNW, rysizeNW, wxsizeNW, wysizeNW, buf_type=gdal.GDT_Float32) + if dataNW is not None and (wxsizeNW * wysizeNW) > 0: + dataNW = struct.unpack( + 'f' * wxsizeNW * wysizeNW, dataNW) + else: + dataNW = None + else: + dataNW = None + + # Get N + if ty + 1 <= tmaxy: + rxN, ryN, rxsizeN, rysizeN, wxsizeN, wysizeN = getTilePxBounds( + self, tx, ty + 1, tz, ds) + wxsizeN -= 1 + wysizeN -= 1 + if wxsizeN != 0 and wysizeN != 0: + dataN = ds.GetRasterBand(1).ReadRaster( + rxN, ryN, rxsizeN, rysizeN, wxsizeN, wysizeN, buf_type=gdal.GDT_Float32) + if dataN is not None and (wxsizeN * wysizeN) > 0: + dataN = struct.unpack( + 'f' * wxsizeN * wysizeN, dataN) + else: + dataN = None + else: + dataN = None + # Get NE + if tx + 1 <= tmaxx and ty + 1 <= tmaxy: + rxNE, ryNE, rxsizeNE, rysizeNE, wxsizeNE, wysizeNE = getTilePxBounds( + self, tx + 1, ty + 1, tz, ds) + wxsizeNE -= 1 + wysizeNE -= 1 + if wxsizeNE != 0 and wysizeNE != 0: + dataNE = ds.GetRasterBand(1).ReadRaster( + rxNE, ryNE, rxsizeNE, rysizeNE, wxsizeNE, wysizeNE, buf_type=gdal.GDT_Float32) + if dataNE is not None and (wxsizeNE * wysizeNE) > 0: + dataNE = struct.unpack( + 'f' * wxsizeNE * wysizeNE, dataNE) + else: + dataNE = None + else: + dataNE = None + # Get E + if tx + 1 <= tmaxx: + rxE, ryE, rxsizeE, rysizeE, wxsizeE, wysizeE = getTilePxBounds( + self, tx + 1, ty, tz, ds) + wxsizeE -= 1 + wysizeE -= 1 + if wxsizeE != 0 and wysizeE != 0: + dataE = ds.GetRasterBand(1).ReadRaster( + rxE, ryE, rxsizeE, rysizeE, wxsizeE, wysizeE, buf_type=gdal.GDT_Float32) + if dataE is not None and (wxsizeE * wysizeE) > 0: + dataE = struct.unpack( + 'f' * wxsizeE * wysizeE, dataE) + else: + dataE = None + else: + dataE = None + # Get SE + if tx + 1 <= tmaxx and ty - 1 >= tminy: + rxSE, rySE, rxsizeSE, rysizeSE, wxsizeSE, wysizeSE = getTilePxBounds( + self, tx + 1, ty - 1, tz, ds) + wxsizeSE -= 1 + wysizeSE -= 1 + if wxsizeSE != 0 and wysizeSE != 0: + dataSE = ds.GetRasterBand(1).ReadRaster( + rxSE, rySE, rxsizeSE, rysizeSE, wxsizeSE, wysizeSE, buf_type=gdal.GDT_Float32) + if dataSE is not None and (wxsizeSE * wysizeSE) > 0: + dataSE = struct.unpack( + 'f' * wxsizeSE * wysizeSE, dataSE) + else: + dataSE = None + else: + dataSE = None + # Get S + if ty - 1 >= tminy: + rxS, ryS, rxsizeS, rysizeS, wxsizeS, wysizeS = 
getTilePxBounds( + self, tx, ty - 1, tz, ds) + wxsizeS -= 1 + wysizeS -= 1 + if wxsizeS != 0 and wysizeS != 0: + dataS = ds.GetRasterBand(1).ReadRaster( + rxS, ryS, rxsizeS, rysizeS, wxsizeS, wysizeS, buf_type=gdal.GDT_Float32) + if dataS is not None and (wxsizeS * wysizeS) > 0: + dataS = struct.unpack( + 'f' * wxsizeS * wysizeS, dataS) + else: + dataS = None + else: + dataS = None + # Get SW + if tx - 1 >= tminx and ty - 1 >= tminy: + rxSW, rySW, rxsizeSW, rysizeSW, wxsizeSW, wysizeSW = getTilePxBounds( + self, tx - 1, ty - 1, tz, ds) + wxsizeSW -= 1 + wysizeSW -= 1 + if wxsizeSW != 0 and wysizeSW != 0: + dataSW = ds.GetRasterBand(1).ReadRaster( + rxSW, rySW, rxsizeSW, rysizeSW, wxsizeSW, wysizeSW, buf_type=gdal.GDT_Float32) + if dataSW is not None and (wxsizeSW * wysizeSW) > 0: + dataSW = struct.unpack( + 'f' * wxsizeSW * wysizeSW, dataSW) + else: + dataSW = None + else: + dataSW = None + # Get W + if tx - 1 >= tminx: + rxW, ryW, rxsizeW, rysizeW, wxsizeW, wysizeW = getTilePxBounds( + self, tx - 1, ty, tz, ds) + wxsizeW -= 1 + wysizeW -= 1 + if wxsizeW != 0 and wysizeW != 0: + dataW = ds.GetRasterBand(1).ReadRaster( + rxW, ryW, rxsizeW, rysizeW, wxsizeW, wysizeW, buf_type=gdal.GDT_Float32) + if dataW is not None and (wxsizeW * wysizeW) > 0: + dataW = struct.unpack( + 'f' * wxsizeW * wysizeW, dataW) + else: + dataW = None + else: + dataW = None + + # NW (uses N, NW, W) + fN = fNE = fE = fSE = fS = fSW = fW = fNW = 0 + values = 1 + if dataN is not None: + fN = dataN[len(dataN)-wxsizeN] + values = values + 1 + if dataNW is not None: + fNW = dataNW[len(dataNW)-1] + values = values + 1 + if dataW is not None: + fW = dataW[wxsizeW-1] + values = values + 1 + dataIn[0] = ((dataIn[0]*4) + fN + + fNW + fW)/float(values) + + # NE (uses N, NE, E) + fN = fNE = fE = fSE = fS = fSW = fW = fNW = 0 + values = 1 + if dataN is not None: + fN = dataN[len(dataN)-1] + values = values + 1 + if dataNE is not None: + fNE = dataNE[len(dataNE)-wxsizeNE] + values = values + 1 + if dataE is not None: + fE = dataE[0] + values = values + 1 + dataIn[wxsize] = ( + (dataIn[wxsize]*4) + fN + fNE + fE)/float(values) + + # SE (uses S, SE, E) + fN = fNE = fE = fSE = fS = fSW = fW = fNW = 0 + values = 1 + if dataS is not None: + fS = dataS[wxsizeS-1] + values = values + 1 + if dataSE is not None: + fSE = dataSE[0] + values = values + 1 + if dataE is not None: + fE = dataE[len(dataE)-wxsizeE] + values = values + 1 + dataIn[len(dataIn)-1] = ((dataIn[len(dataIn)-1] + * 4) + fS + fSE + fE)/float(values) + + # SW (uses S, SW, W) + fN = fNE = fE = fSE = fS = fSW = fW = fNW = 0 + values = 1 + if dataS is not None: + fS = dataS[0] + values = values + 1 + if dataSW is not None: + fSW = dataSW[wxsizeSW-1] + values = values + 1 + if dataW is not None: + fW = dataW[len(dataW)-1] + values = values + 1 + dataIn[len( + dataIn)-wxsize-1] = ((dataIn[len(dataIn)-wxsize-1]*4) + fS + fSW + fW)/float(values) + + # Then the edges minus corners + # N + if dataN is not None: + for i in range(1, wxsize): + dataIn[i] = ( + (dataIn[i]*4) + dataN[len(dataN)-wxsizeN-1+i] + dataN[len(dataN)-wxsizeN-1+i+1])/float(4) + else: + for i in range(1, wxsize): + dataIn[i] = (dataIn[i]*4)/float(2) + + # E + if dataE is not None: + for i in range(1, wysize): + dataIn[((i+1)*(wxsize+1)-1)] = ((dataIn[((i+1)*(wxsize+1)-1)] + * 4) + dataE[(i-1)*wxsizeE] + dataE[i*wxsizeE])/float(4) + else: + for i in range(1, wysize): + dataIn[( + (i+1)*(wxsize+1)-1)] = (dataIn[((i+1)*(wxsize+1)-1)]*4)/float(2) + + # S + if dataS is not None: + for i in range(1, wxsize): + 
dataIn[len(dataIn)-wxsize-1+i] = ( + (dataIn[len(dataIn)-wxsize-1+i]*4) + dataS[i-1] + dataS[i])/float(4) + else: + for i in range(1, wxsize): + dataIn[len( + dataIn)-wxsize-1+i] = (dataIn[len(dataIn)-wxsize-1+i]*4)/float(2) + + # W + if dataW is not None: + for i in range(1, wysize): + dataIn[(i)*(wxsize+1)] = ((dataIn[(i)*(wxsize+1)]*4) + + dataW[i*wxsizeW-1] + dataW[(i+1)*wxsizeW-1])/float(4) + else: + for i in range(1, wysize): + dataIn[(i)*(wxsize+1)] = (dataIn[(i) + * (wxsize+1)]*4)/float(2) + + data1 = [] + data2 = [] + data3 = [] + data4 = [] + for f in dataIn: + f = str(binary(f)) + data1.append(int(f[:8], 2)) + data2.append(int(f[8:16], 2)) + data3.append(int(f[16:24], 2)) + data4.append(int(f[24:], 2)) + + data1s = '' + data2s = '' + data3s = '' + data4s = '' + indx = 0 + for v in data1: + data1s += struct.pack('B', data1[indx]) + data2s += struct.pack('B', data2[indx]) + data3s += struct.pack('B', data3[indx]) + data4s += struct.pack('B', data4[indx]) + indx += 1 + dstile.GetRasterBand(1).WriteRaster( + wx, wy, wxsize + 1, wysize + 1, data1s, buf_type=gdal.GDT_Byte) + dstile.GetRasterBand(2).WriteRaster( + wx, wy, wxsize + 1, wysize + 1, data2s, buf_type=gdal.GDT_Byte) + dstile.GetRasterBand(3).WriteRaster( + wx, wy, wxsize + 1, wysize + 1, data3s, buf_type=gdal.GDT_Byte) + dstile.GetRasterBand(4).WriteRaster( + wx, wy, wxsize + 1, wysize + 1, data4s, buf_type=gdal.GDT_Byte) + elif wxsize != 0 and wysize != 0: + # Big ReadRaster query in memory scaled to the tilesize - all but 'near' algo + dsquery = self.mem_drv.Create( + '', querysize, querysize, tilebands, gdal.GDT_Byte) # 1bto4b + # TODO: fill the null value in case a tile without alpha is produced (now only png tiles are supported) + # for i in range(1, tilebands+1): + # dsquery.GetRasterBand(1).Fill(tilenodata) + # dsquery.WriteRaster(wx, wy, wxsize, wysize, data, band_list=list(range(1,self.dataBandsCount+1)))###############1bto4b + # dsquery.WriteRaster(wx, wy, wxsize, wysize, alpha, band_list=[tilebands])###############################1bto4b + + # 1bto4b + data = ds.GetRasterBand(1).ReadRaster( + rx, ry, rxsize, rysize, wxsize, wysize, buf_type=gdal.GDT_Float32) + + data = struct.unpack('f' * wxsize * wysize, data) + data1 = [] + data2 = [] + data3 = [] + data4 = [] + for f in data: + f = str(binary(f)) + data1.append(int(f[:8], 2)) + data2.append(int(f[8:16], 2)) + data3.append(int(f[16:24], 2)) + data4.append(int(f[24:], 2)) + + data1s = '' + data2s = '' + data3s = '' + data4s = '' + indx = 0 + for v in data1: + data1s += struct.pack('B', data1[indx]) + data2s += struct.pack('B', data2[indx]) + data3s += struct.pack('B', data3[indx]) + data4s += struct.pack('B', data4[indx]) + indx += 1 + + dsquery.GetRasterBand(1).WriteRaster( + wx, wy, wxsize, wysize, data1s, buf_type=gdal.GDT_Byte) + dsquery.GetRasterBand(2).WriteRaster( + wx, wy, wxsize, wysize, data2s, buf_type=gdal.GDT_Byte) + dsquery.GetRasterBand(3).WriteRaster( + wx, wy, wxsize, wysize, data3s, buf_type=gdal.GDT_Byte) + dsquery.GetRasterBand(4).WriteRaster( + wx, wy, wxsize, wysize, data4s, buf_type=gdal.GDT_Byte) + # sys.exit('done') + # 1bto4b + + self.scale_query_to_tile( + dsquery, dstile, tilefilename) + del dsquery + + else: + if self.tilesize == querysize: + # Use the ReadRaster result directly in tiles ('nearest neighbour' query) + dstile.WriteRaster(wx, wy, wxsize, wysize, data, + band_list=list(range(1, self.dataBandsCount+1))) + dstile.WriteRaster( + wx, wy, wxsize, wysize, alpha, band_list=[tilebands]) + + # Note: For source drivers based on 
WaveLet compression (JPEG2000, ECW, + # MrSID) the ReadRaster function returns high-quality raster (not ugly + # nearest neighbour) + # TODO: Use directly 'near' for WaveLet files + else: + # Big ReadRaster query in memory scaled to the tilesize - all but 'near' + # algo + dsquery = self.mem_drv.Create( + '', querysize, querysize, tilebands) + # TODO: fill the null value in case a tile without alpha is produced (now + # only png tiles are supported) + dsquery.WriteRaster(wx, wy, wxsize, wysize, data, + band_list=list(range(1, self.dataBandsCount+1))) + dsquery.WriteRaster( + wx, wy, wxsize, wysize, alpha, band_list=[tilebands]) + + self.scale_query_to_tile( + dsquery, dstile, tilefilename) + del dsquery + + del data + + if self.options.resampling != 'antialias': + # Write a copy of tile to png/jpg + self.out_drv.CreateCopy(tilefilename, dstile, strict=0) + + del dstile + + # Create a KML file for this tile. + if self.kml: + kmlfilename = os.path.join( + self.output, str(tz), str(tx), '%d.kml' % ty) + if not self.options.resume or not os.path.exists(kmlfilename): + f = open(kmlfilename, 'wb') + f.write(self.generate_kml(tx, ty, tz).encode('utf-8')) + f.close() + + if not self.options.verbose and not self.options.quiet: + self.progressbar(ti / float(tcount)) + + def generate_overview_tiles(self): + """Generation of the overview tiles (higher in the pyramid) based on existing tiles""" + + if not self.options.quiet: + print("Generating Overview Tiles:") + + # 1bto4b + if self.isDEMtile: + tilebands = 4 + else: + tilebands = self.dataBandsCount + 1 + + # Usage of existing tiles: from 4 underlying tiles generate one as overview. + + tcount = 0 + for tz in range(self.tmaxz-1, self.tminz-1, -1): + tminx, tminy, tmaxx, tmaxy = self.tminmax[tz] + tcount += (1+abs(tmaxx-tminx)) * (1+abs(tmaxy-tminy)) + + ti = 0 + + for tz in range(self.tmaxz-1, self.tminz-1, -1): + tminx, tminy, tmaxx, tmaxy = self.tminmax[tz] + for ty in range(tmaxy, tminy-1, -1): + for tx in range(tminx, tmaxx+1): + + if self.stopped: + break + + ti += 1 + tilefilename = os.path.join(self.output, + str(tz), + str(tx), + "%s.%s" % (ty, self.tileext)) + + if self.options.verbose: + print(ti, '/', tcount, tilefilename) + + if self.options.resume and os.path.exists(tilefilename): + if self.options.verbose: + print("Tile generation skipped because of --resume") + else: + self.progressbar(ti / float(tcount)) + continue + + # Create directories for the tile + if not os.path.exists(os.path.dirname(tilefilename)): + os.makedirs(os.path.dirname(tilefilename)) + + dsquery = self.mem_drv.Create( + '', 2*self.tilesize, 2*self.tilesize, tilebands) + # TODO: fill the null value + dstile = self.mem_drv.Create( + '', self.tilesize, self.tilesize, tilebands) + + # TODO: Implement more clever walking on the tiles with cache functionality + # probably walk should start with reading of four tiles from top left corner + # Hilbert curve + + children = [] + # Read the tiles and write them to query window + for y in range(2*ty, 2*ty+2): + for x in range(2*tx, 2*tx+2): + minx, miny, maxx, maxy = self.tminmax[tz+1] + if x >= minx and x <= maxx and y >= miny and y <= maxy: + dsquerytile = gdal.Open( + os.path.join(self.output, str(tz+1), str(x), + "%s.%s" % (y, self.tileext)), + gdal.GA_ReadOnly) + if (ty == 0 and y == 1) or (ty != 0 and (y % (2*ty)) != 0): + tileposy = 0 + else: + tileposy = self.tilesize + if tx: + tileposx = x % (2*tx) * self.tilesize + elif tx == 0 and x == 1: + tileposx = self.tilesize + else: + tileposx = 0 + dsquery.WriteRaster( + 
tileposx, tileposy, self.tilesize, self.tilesize, + dsquerytile.ReadRaster( + 0, 0, self.tilesize, self.tilesize), + band_list=list(range(1, tilebands+1))) + children.append([x, y, tz+1]) + + self.scale_query_to_tile(dsquery, dstile, tilefilename) + # Write a copy of tile to png/jpg + if self.options.resampling != 'antialias': + # Write a copy of tile to png/jpg + self.out_drv.CreateCopy(tilefilename, dstile, strict=0) + + if self.options.verbose: + print("\tbuild from zoom", tz+1, + " tiles:", (2*tx, 2*ty), (2*tx+1, 2*ty), + (2*tx, 2*ty+1), (2*tx+1, 2*ty+1)) + + # Create a KML file for this tile. + if self.kml: + f = open(os.path.join( + self.output, '%d/%d/%d.kml' % (tz, tx, ty)), 'wb') + f.write(self.generate_kml( + tx, ty, tz, children).encode('utf-8')) + f.close() + + if not self.options.verbose and not self.options.quiet: + self.progressbar(ti / float(tcount)) + + def geo_query(self, ds, ulx, uly, lrx, lry, querysize=0): + """ + For given dataset and query in cartographic coordinates returns parameters for ReadRaster() + in raster coordinates and x/y shifts (for border tiles). If the querysize is not given, the + extent is returned in the native resolution of dataset ds. + + raises Gdal2TilesError if the dataset does not contain anything inside this geo_query + """ + geotran = ds.GetGeoTransform() + rx = int((ulx - geotran[0]) / geotran[1] + 0.001) + ry = int((uly - geotran[3]) / geotran[5] + 0.001) + rxsize = int((lrx - ulx) / geotran[1] + 0.5) + rysize = int((lry - uly) / geotran[5] + 0.5) + + if not querysize: + wxsize, wysize = rxsize, rysize + else: + wxsize, wysize = querysize, querysize + + # Coordinates should not go out of the bounds of the raster + wx = 0 + if rx < 0: + rxshift = abs(rx) + wx = int(wxsize * (float(rxshift) / rxsize)) + wxsize = wxsize - wx + rxsize = rxsize - int(rxsize * (float(rxshift) / rxsize)) + rx = 0 + if rx+rxsize > ds.RasterXSize: + wxsize = int(wxsize * (float(ds.RasterXSize - rx) / rxsize)) + rxsize = ds.RasterXSize - rx + + wy = 0 + if ry < 0: + ryshift = abs(ry) + wy = int(wysize * (float(ryshift) / rysize)) + wysize = wysize - wy + rysize = rysize - int(rysize * (float(ryshift) / rysize)) + ry = 0 + if ry+rysize > ds.RasterYSize: + wysize = int(wysize * (float(ds.RasterYSize - ry) / rysize)) + rysize = ds.RasterYSize - ry + + return (rx, ry, rxsize, rysize), (wx, wy, wxsize, wysize) + + def scale_query_to_tile(self, dsquery, dstile, tilefilename=''): + """Scales down query dataset to the tile dataset""" + + querysize = dsquery.RasterXSize + tilesize = dstile.RasterXSize + tilebands = dstile.RasterCount + + if self.options.resampling == 'average': + + # Function: gdal.RegenerateOverview() + for i in range(1, tilebands+1): + # Black border around NODATA + res = gdal.RegenerateOverview(dsquery.GetRasterBand(i), dstile.GetRasterBand(i), + 'average') + if res != 0: + self.error("RegenerateOverview() failed on %s, error %d" % ( + tilefilename, res)) + + elif self.options.resampling == 'antialias': + + # Scaling by PIL (Python Imaging Library) - improved Lanczos + array = numpy.zeros((querysize, querysize, tilebands), numpy.uint8) + for i in range(tilebands): + array[:, :, i] = gdalarray.BandReadAsArray(dsquery.GetRasterBand(i+1), + 0, 0, querysize, querysize) + im = Image.fromarray(array, 'RGBA') # Always four bands + im1 = im.resize((tilesize, tilesize), Image.ANTIALIAS) + if os.path.exists(tilefilename): + im0 = Image.open(tilefilename) + im1 = Image.composite(im1, im0, im1) + im1.save(tilefilename, self.tiledriver) + + else: + + # Other 
algorithms are implemented by gdal.ReprojectImage(). + dsquery.SetGeoTransform((0.0, tilesize / float(querysize), 0.0, 0.0, 0.0, + tilesize / float(querysize))) + dstile.SetGeoTransform((0.0, 1.0, 0.0, 0.0, 0.0, 1.0)) + + res = gdal.ReprojectImage( + dsquery, dstile, None, None, self.resampling) + if res != 0: + self.error("ReprojectImage() failed on %s, error %d" % + (tilefilename, res)) + + def generate_tilemapresource(self): + """ + Template for tilemapresource.xml. Returns filled string. Expected variables: + title, north, south, east, west, isepsg4326, projection, publishurl, + zoompixels, tilesize, tileformat, profile + """ + + args = {} + args['title'] = self.options.title + args['south'], args['west'], args['north'], args['east'] = self.swne + args['tilesize'] = self.tilesize + args['tileformat'] = self.tileext + args['publishurl'] = self.options.url + args['profile'] = self.options.profile + + if self.options.profile == 'mercator': + args['srs'] = "EPSG:3857" + elif self.options.profile == 'geodetic': + args['srs'] = "EPSG:4326" + elif self.options.s_srs: + args['srs'] = self.options.s_srs + elif self.out_srs: + args['srs'] = self.out_srs.ExportToWkt() + else: + args['srs'] = "" + + s = """ + + %(title)s + + %(srs)s + + + + +""" % args # noqa + for z in range(self.tminz, self.tmaxz+1): + if self.options.profile == 'raster': + s += """ \n""" % ( + args['publishurl'], z, (2**(self.nativezoom-z) * self.out_gt[1]), z) + elif self.options.profile == 'mercator': + s += """ \n""" % ( + args['publishurl'], z, 156543.0339/2**z, z) + elif self.options.profile == 'geodetic': + s += """ \n""" % ( + args['publishurl'], z, 0.703125/2**z, z) + s += """ + + """ + return s + + def generate_kml(self, tx, ty, tz, children=None, **args): + """ + Template for the KML. Returns filled string. + """ + if not children: + children = [] + + args['tx'], args['ty'], args['tz'] = tx, ty, tz + args['tileformat'] = self.tileext + if 'tilesize' not in args: + args['tilesize'] = self.tilesize + + if 'minlodpixels' not in args: + args['minlodpixels'] = int(args['tilesize'] / 2) + if 'maxlodpixels' not in args: + args['maxlodpixels'] = int(args['tilesize'] * 8) + if children == []: + args['maxlodpixels'] = -1 + + if tx is None: + tilekml = False + args['title'] = self.options.title + else: + tilekml = True + args['title'] = "%d/%d/%d.kml" % (tz, tx, ty) + args['south'], args['west'], args['north'], args['east'] = self.tileswne( + tx, ty, tz) + + if tx == 0: + args['drawOrder'] = 2 * tz + 1 + elif tx is not None: + args['drawOrder'] = 2 * tz + else: + args['drawOrder'] = 0 + + url = self.options.url + if not url: + if tilekml: + url = "../../" + else: + url = "" + + s = """ + + + %(title)s + + """ % args + if tilekml: + s += """ + + + %(north).14f + %(south).14f + %(east).14f + %(west).14f + + + %(minlodpixels)d + %(maxlodpixels)d + + + + %(drawOrder)d + + %(ty)d.%(tileformat)s + + + %(north).14f + %(south).14f + %(east).14f + %(west).14f + + + """ % args + + for cx, cy, cz in children: + csouth, cwest, cnorth, ceast = self.tileswne(cx, cy, cz) + s += """ + + %d/%d/%d.%s + + + %.14f + %.14f + %.14f + %.14f + + + %d + -1 + + + + %s%d/%d/%d.kml + onRegion + + + + """ % (cz, cx, cy, args['tileformat'], cnorth, csouth, ceast, cwest, + args['minlodpixels'], url, cz, cx, cy) + + s += """ + + """ + return s + + def generate_googlemaps(self): + """ + Template for googlemaps.html implementing Overlay of tiles for 'mercator' profile. + It returns filled string. 
Expected variables: + title, googlemapskey, north, south, east, west, minzoom, maxzoom, tilesize, tileformat, + publishurl + """ + args = {} + args['title'] = self.options.title + args['googlemapskey'] = self.options.googlekey + args['south'], args['west'], args['north'], args['east'] = self.swne + args['minzoom'] = self.tminz + args['maxzoom'] = self.tmaxz + args['tilesize'] = self.tilesize + args['tileformat'] = self.tileext + args['publishurl'] = self.options.url + args['copyright'] = self.options.copyright + + s = r""" + + + %(title)s + + + + + + + + +
Generated by GDAL2Tiles, Copyright © 2008 Klokan Petr Pridal, GDAL & OSGeo GSoC + +
+
+ + + """ % args # noqa + + return s + + def generate_leaflet(self): + """ + Template for leaflet.html implementing overlay of tiles for 'mercator' profile. + It returns filled string. Expected variables: + title, north, south, east, west, minzoom, maxzoom, tilesize, tileformat, publishurl + """ + + args = {} + args['title'] = self.options.title.replace('"', '\\"') + args['htmltitle'] = self.options.title + args['south'], args['west'], args['north'], args['east'] = self.swne + args['centerlon'] = (args['north'] + args['south']) / 2. + args['centerlat'] = (args['west'] + args['east']) / 2. + args['minzoom'] = self.tminz + args['maxzoom'] = self.tmaxz + args['beginzoom'] = self.tmaxz + args['tilesize'] = self.tilesize # not used + args['tileformat'] = self.tileext + args['publishurl'] = self.options.url # not used + args['copyright'] = self.options.copyright.replace('"', '\\"') + + s = """ + + + + + %(htmltitle)s + + + + + + + + + + +
+ + + + + + + """ % args # noqa + + return s + + def generate_openlayers(self): + """ + Template for openlayers.html implementing overlay of available Spherical Mercator layers. + + It returns filled string. Expected variables: + title, bingkey, north, south, east, west, minzoom, maxzoom, tilesize, tileformat, publishurl + """ + + args = {} + args['title'] = self.options.title + args['bingkey'] = self.options.bingkey + args['south'], args['west'], args['north'], args['east'] = self.swne + args['minzoom'] = self.tminz + args['maxzoom'] = self.tmaxz + args['tilesize'] = self.tilesize + args['tileformat'] = self.tileext + args['publishurl'] = self.options.url + args['copyright'] = self.options.copyright + if self.options.tmscompatible: + args['tmsoffset'] = "-1" + else: + args['tmsoffset'] = "" + if self.options.profile == 'raster': + args['rasterzoomlevels'] = self.tmaxz+1 + args['rastermaxresolution'] = 2**(self.nativezoom) * self.out_gt[1] + + s = r""" + + %(title)s + + """ % args # noqa + + if self.options.profile == 'mercator': + s += """ + + """ % args + + s += """ + + + + + +
Generated by GDAL2Tiles, Copyright © 2008 Klokan Petr Pridal, GDAL & OSGeo GSoC + +
+
+ + + """ % args # noqa + + return s + + +def main(): + argv = gdal.GeneralCmdLineProcessor(sys.argv) + if argv: + gdal2tiles = GDAL2Tiles(argv[1:]) + gdal2tiles.process() + + +if __name__ == '__main__': + main() + +# vim: set tabstop=4 shiftwidth=4 expandtab: diff --git a/auxiliary/rasterstotiles/rasterstotiles.py b/auxiliary/rasterstotiles/rasterstotiles.py index d714fd01..de8ee065 100644 --- a/auxiliary/rasterstotiles/rasterstotiles.py +++ b/auxiliary/rasterstotiles/rasterstotiles.py @@ -85,9 +85,9 @@ def tile(raster, outputDir=None): # reproject to EPSG:4326 if it's a projection we can't handle downstream projection = osr.SpatialReference(wkt=ds.GetProjection()).GetName() if projection == "unnamed": - gdal_warp = "gdalwarp -t_srs EPSG:4326 " + raster + " " + raster[:-4] + "_espg4326" + raster[-4:] + gdal_warp = "gdalwarp -t_srs EPSG:4326 " + raster + " " + raster[:-4] + "_epsg4326" + raster[-4:] print(gdal_warp) - raster = raster[:-4] + "_espg4326" + raster[-4:] + raster = raster[:-4] + "_epsg4326" + raster[-4:] gdal_warp_process = subprocess.Popen(gdal_warp, shell=True) gdal_warp_process.wait() ds = gdal.Open(raster) diff --git a/auxiliary/rastertolegend/color_relief_slope.txt b/auxiliary/rastertolegend/color_relief_slope.txt index 2cea1610..7f14ef60 100644 --- a/auxiliary/rastertolegend/color_relief_slope.txt +++ b/auxiliary/rastertolegend/color_relief_slope.txt @@ -1,11 +1,11 @@ -95 158 1 66 -85 213 62 79 -75 244 109 67 -65 253 174 97 -55 254 224 139 -45 230 245 152 -35 171 221 164 -25 102 194 165 -15 50 136 189 -5 94 79 162 +95% 158 1 66 +85% 213 62 79 +75% 244 109 67 +65% 253 174 97 +55% 254 224 139 +45% 230 245 152 +35% 171 221 164 +25% 102 194 165 +15% 50 136 189 +5% 94 79 162 nv 0 0 0 0 \ No newline at end of file diff --git a/config/js/config.js b/config/js/config.js index 9b61c9cf..59e7139c 100644 --- a/config/js/config.js +++ b/config/js/config.js @@ -451,6 +451,17 @@ function initialize() { true ); } + $("#tab_panels #panels_globeDemFallbackPath").val( + cData.panelSettings ? cData.panelSettings.demFallbackPath : "" + ); + $("#tab_panels #panels_globeDemFallbackFormat").val( + cData.panelSettings + ? cData.panelSettings.demFallbackFormat + : "" + ); + $("#tab_panels #panels_globeDemFallbackType").val( + cData.panelSettings ? 
cData.panelSettings.demFallbackType : "" + ); //time if (typeof cData.time != "undefined") { @@ -723,37 +734,39 @@ function makeLayerBarAndModal(d, level) { var nameEl = "block", kindEl = "block", typeEl = "block", urlEl = "block", demtileurlEl = "block", demparserEl = "block", controlledEl = "block", legendEl = "block", visEl = "block", viscutEl = "block", initOpacEl = "block", togwheadEl = "block", minzEl = "block", tileformatEl = "block", - visEl = "block", - viscutEl = "block", - togwheadEl = "block", - minzEl = "block", - modelLonEl = "block", - modelLatEl = "block", - modelElevEl = "block", - modelRotXEl = "block", - modelRotYEl = "block", - modelRotZEl = "block", - modelScaleEl = "block", - maxnzEl = "block", - maxzEl = "block", - strcolEl = "block", - filcolEl = "block", - weightEl = "block", - opacityEl = "block", - radiusEl = "block", - variableEl = "block", - xmlEl = "block", - bbEl = "block", - vtLayerEl = "block", - vtIdEl = "block", - vtKeyEl = "block", - vtLayerSetStylesEl = "block", - timeEl = "block", - timeTypeEl = "block", - timeFormatEl = "block", - timeRefreshEl = "none", - timeIncrementEl = "none"; - shapeEl = "none"; + visEl = "block", + viscutEl = "block", + togwheadEl = "block", + minzEl = "block", + modelLonEl = "block", + modelLatEl = "block", + modelElevEl = "block", + modelRotXEl = "block", + modelRotYEl = "block", + modelRotZEl = "block", + modelScaleEl = "block", + maxnzEl = "block", + maxzEl = "block", + strcolEl = "block", + filcolEl = "block", + weightEl = "block", + opacityEl = "block", + radiusEl = "block", + variableEl = "block", + xmlEl = "block", + bbEl = "block", + vtLayerEl = "block", + vtIdEl = "block", + vtKeyEl = "block", + vtLayerSetStylesEl = "block", + timeEl = "block", + timeTypeEl = "block", + timeFormatEl = "block", + timeRefreshEl = "none", + timeIncrementEl = "none", + shapeEl = "none", + queryEndpointEl = "none", + queryTypeEl = "none"; // prettier-ignore switch( d.type ) { @@ -767,6 +780,7 @@ function makeLayerBarAndModal(d, level) { weightEl = "none"; opacityEl = "none"; radiusEl = "none"; variableEl = "none"; xmlEl = "none"; bbEl = "none"; vtLayerEl = "none"; vtIdEl = "none"; vtKeyEl = "none"; vtLayerSetStylesEl = "none"; timeEl = "none"; timeTypeEl = "none"; timeFormatEl = "none"; timeRefreshEl = "none"; timeIncrementEl = "none"; shapeEl = "none"; + queryEndpointEl = "none"; queryTypeEl = "none"; break; case "tile": nameEl = "block"; kindEl = "none"; typeEl = "block"; urlEl = "block"; demtileurlEl = "block"; demparserEl = "block"; controlledEl = "none"; legendEl = "block"; @@ -778,6 +792,7 @@ function makeLayerBarAndModal(d, level) { weightEl = "none"; opacityEl = "none"; radiusEl = "none"; variableEl = "none"; xmlEl = "block"; bbEl = "block"; vtLayerEl = "none"; vtIdEl = "none"; vtKeyEl = "none"; vtLayerSetStylesEl = "none"; timeEl = "block"; timeTypeEl = "block"; timeFormatEl = "block"; timeRefreshEl = "none"; timeIncrementEl = "none"; shapeEl = "none"; + queryEndpointEl = "none"; queryTypeEl = "none"; break; case "vectortile": nameEl = "block"; kindEl = "block"; typeEl = "block"; urlEl = "block"; demtileurlEl = "block"; demparserEl = "block"; controlledEl = "none"; legendEl = "block"; @@ -789,6 +804,7 @@ function makeLayerBarAndModal(d, level) { weightEl = "none"; opacityEl = "none"; radiusEl = "none"; variableEl = "block"; xmlEl = "none"; bbEl = "none"; vtLayerEl = "block"; vtIdEl = "block"; vtKeyEl = "block"; vtLayerSetStylesEl = "block"; timeEl = "block"; timeTypeEl = "block"; timeFormatEl = "block"; timeRefreshEl = "none"; 
timeIncrementEl = "none"; shapeEl = "block"; + queryEndpointEl = "none"; queryTypeEl = "none"; break; case "data": nameEl = "block"; kindEl = "none"; typeEl = "block"; urlEl = "none"; demtileurlEl = "block"; demparserEl = "block"; controlledEl = "none"; legendEl = "block"; @@ -800,17 +816,19 @@ function makeLayerBarAndModal(d, level) { weightEl = "none"; opacityEl = "none"; radiusEl = "none"; variableEl = "block"; xmlEl = "block"; bbEl = "block"; vtLayerEl = "none"; vtIdEl = "none"; vtKeyEl = "none"; vtLayerSetStylesEl = "none"; timeEl = "block"; timeTypeEl = "block"; timeFormatEl = "block"; timeRefreshEl = "none"; timeIncrementEl = "none"; shapeEl = "none"; + queryEndpointEl = "none"; queryTypeEl = "none"; break; - case "point": - nameEl = "block"; kindEl = "block"; typeEl = "block"; urlEl = "block"; demtileurlEl = "none"; demparserEl = "none"; controlledEl = "none"; legendEl = "block"; - visEl = "block"; viscutEl = "block"; initOpacEl = "block"; togwheadEl = "block"; minzEl = "none"; + case "query": + nameEl = "block"; kindEl = "none"; typeEl = "block"; urlEl = "none"; demtileurlEl = "none"; demparserEl = "none"; controlledEl = "none"; legendEl = "none"; + visEl = "none"; viscutEl = "none"; initOpacEl = "none"; togwheadEl = "none"; minzEl = "none"; tileformatEl = "none"; modelLonEl = "none"; modelLatEl = "none"; modelElevEl = "none"; modelRotXEl = "none"; modelRotYEl = "none"; modelRotZEl = "none"; modelScaleEl = "none"; maxnzEl = "none"; maxzEl = "none"; strcolEl = "block"; filcolEl = "block"; weightEl = "block"; opacityEl = "block"; radiusEl = "block"; variableEl = "block"; xmlEl = "none"; bbEl = "none"; vtLayerEl = "none"; vtIdEl = "none"; vtKeyEl = "none"; vtLayerSetStylesEl = "none"; - timeEl = "block"; timeTypeEl = "block"; timeFormatEl = "block"; timeRefreshEl = "none"; timeIncrementEl = "none"; shapeEl = "none"; + timeEl = "none"; timeTypeEl = "none"; timeFormatEl = "none"; timeRefreshEl = "none"; timeIncrementEl = "none"; shapeEl = "none"; + queryEndpointEl = "block"; queryTypeEl = "block"; break; case "vector": nameEl = "block"; kindEl = "block"; typeEl = "block"; urlEl = "block"; controlledEl = "block"; demtileurlEl = "none"; demparserEl = "none"; legendEl = "block"; @@ -833,6 +851,7 @@ function makeLayerBarAndModal(d, level) { weightEl = "none"; opacityEl = "none"; radiusEl = "none"; variableEl = "none"; xmlEl = "none"; bbEl = "none"; vtLayerEl = "none"; vtIdEl = "none"; vtKeyEl = "none"; vtLayerSetStylesEl = "none"; timeEl = "block"; timeTypeEl = "block"; timeFormatEl = "block"; timeRefreshEl = "none"; timeIncrementEl = "none"; shapeEl = "none"; + queryEndpointEl = "none"; queryTypeEl = "none"; break; default: console.warn(`Unknown layer type: ${d.type}`) @@ -843,7 +862,7 @@ function makeLayerBarAndModal(d, level) { tileSel = "", vectortileSel = "", dataSel = "", - pointSel = "", + querySel = "", vectorSel = "", modelSel = ""; @@ -864,9 +883,9 @@ function makeLayerBarAndModal(d, level) { barColor = "rgb(189, 15, 50)"; dataSel = "selected"; break; - case "point": - barColor = "#892f45"; - pointSel = "selected"; + case "query": + barColor = "#0fbd4d"; + querySel = "selected"; break; case "vector": barColor = "rgb(15, 119, 189)"; @@ -880,6 +899,8 @@ function makeLayerBarAndModal(d, level) { console.warn(`Unknown layer type: ${d.type}`); } + var queryTypeESSel = "selected"; + var tileformatTMSSel = "", tileformatWMTSSel = "", tileformatWMSSel = ""; @@ -1069,7 +1090,7 @@ function makeLayerBarAndModal(d, level) { // prettier-ignore $( "#modal_divs" ).append( "