diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md new file mode 100644 index 000000000..c44e609d0 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/bug_report.md @@ -0,0 +1,43 @@ +--- +name: Bug Report +about: Report a bug to help us improve +title: '[Bug]: ' +labels: 'bug' +assignees: '' + +--- + +**Checked for duplicates** + +> Have you checked for duplicate issue tickets? + +- Ex. Yes - I've already checked +- Ex. No - I haven't checked + +**Describe the bug** + +> A clear and concise description of what the bug is. Plain-text snippets preferred but screenshots welcome. + +Ex. When I did [...] action, I noticed [...] + +**What did you expect?** + +> A clear and concise description of what you expect to happen + +Ex. I expected [...] + +**Reproducible steps** + +> How would we reproduce this bug? Please walk us through it step by step. Plain-text snippets preferred but screenshots welcome. + +1. +2. +3. + +**What is your environment?** + +> Include any computer hardware, operating system, framework, browser, time-of-day or other contextual information related to your issue + +- Ex. Version of this software [e.g. vX.Y.Z] +- Ex. Operating System: [e.g. MacOSX with Docker Desktop vX.Y] +- ... diff --git a/.github/ISSUE_TEMPLATE/feature_request.md b/.github/ISSUE_TEMPLATE/feature_request.md new file mode 100644 index 000000000..0b74a7aac --- /dev/null +++ b/.github/ISSUE_TEMPLATE/feature_request.md @@ -0,0 +1,34 @@ +--- +name: New Feature +about: Suggest a new feature for us to implement +title: '[New Feature]: ' +labels: 'enhancement' +assignees: '' + +--- + +**Checked for duplicates** + +> Have you checked for duplicate issue tickets? + +- Ex. Yes - I've already checked +- Ex. No - I haven't checked + +**Alternatives considered** + +> Have you considered alternative solutions to your feature request? + +- Ex. Yes - and alternatives don't suffice +- Ex. 
No - I haven't considered + +**Related problems** + +> Is your feature request related to any problems? Please help us understand if so, including linking to any other issue tickets. + +Ex. I'm frustrated when [...] happens as documented in issue-XYZ + +**Describe the feature request** + +> A clear and concise description of your request. + +Ex. I need or want [...] diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md index 161ffc86b..8ef73bbc1 100644 --- a/.github/PULL_REQUEST_TEMPLATE.md +++ b/.github/PULL_REQUEST_TEMPLATE.md @@ -10,4 +10,4 @@ ## Testing - Provide some proof you've tested your changes - Example: test results available at ... -- Example: tested on operating system ... \ No newline at end of file +- Example: tested on operating system ... diff --git a/.nvmrc b/.nvmrc new file mode 100644 index 000000000..23d9c36a1 --- /dev/null +++ b/.nvmrc @@ -0,0 +1 @@ +16.13.2 diff --git a/API/Backend/Config/models/config.js b/API/Backend/Config/models/config.js index a516407ed..75e9a36f1 100644 --- a/API/Backend/Config/models/config.js +++ b/API/Backend/Config/models/config.js @@ -11,22 +11,22 @@ var Config = sequelize.define( mission: { type: Sequelize.STRING, unique: false, - allowNull: false + allowNull: false, }, config: { type: Sequelize.JSON, allowNull: true, - defaultValue: {} + defaultValue: {}, }, version: { type: Sequelize.DataTypes.INTEGER, unique: false, - allowNull: false - } + allowNull: false, + }, }, { timestamps: true, - updatedAt: false + updatedAt: false, } ); diff --git a/API/Backend/Config/routes/configs.js b/API/Backend/Config/routes/configs.js index b7388d7b7..5b3ea1dab 100644 --- a/API/Backend/Config/routes/configs.js +++ b/API/Backend/Config/routes/configs.js @@ -6,16 +6,19 @@ require("dotenv").config(); const express = require("express"); const router = express.Router(); const execFile = require("child_process").execFile; +const Sequelize = require("sequelize"); +const { sequelize } = 
require("../../../connection"); const logger = require("../../../logger"); const Config = require("../models/config"); const config_template = require("../../../templates/config_template"); const validate = require("../validate"); +const populateUUIDs = require("../uuids"); const Utils = require("../../../utils.js"); const websocket = require("../../../websocket.js"); -const WebSocket = require('isomorphic-ws'); +const WebSocket = require("isomorphic-ws"); const fs = require("fs"); const deepmerge = require("deepmerge"); @@ -250,6 +253,13 @@ function upsert(req, res, next, cb, info) { let hasVersion = false; req.body = req.body || {}; + info = info || { + type: "upsert", + }; + info.route = "config"; + info.id = req.body.id; + info.mission = req.body.mission; + if (req.body.version != null) hasVersion = true; let versionConfig = null; @@ -294,6 +304,32 @@ function upsert(req, res, next, cb, info) { } } else configJSON = req.body.config; } + + const { newlyAddedUUIDs, allNewUUIDs } = populateUUIDs(configJSON); + + // Do not update the config if there are duplicate or bad UUIDs + const badUUIDs = newlyAddedUUIDs + .filter((i) => { + return "replacesBadUUID" in i; + }) + .map((i) => i.replacesBadUUID); + + if (badUUIDs.length > 0) { + if (cb) + cb({ + status: "failure", + message: "There are duplicate or bad UUIDs.", + badUUIDs, + }); + else + res.send({ + status: "failure", + message: "There are duplicate or bad UUIDs.", + badUUIDs, + }); + return; + } + const validation = validate(configJSON); if (!validation.valid) { @@ -332,20 +368,53 @@ function upsert(req, res, next, cb, info) { status: "success", mission: created.mission, version: created.version, + newlyAddedUUIDs: newlyAddedUUIDs, }); else res.send({ status: "success", mission: created.mission, version: created.version, + newlyAddedUUIDs: newlyAddedUUIDs, }); - openWebSocket(req.body, { - status: "success", - mission: created.mission, - version: created.version, - }, info, - forceClientUpdate - ); + + if 
(info && info.layerName) { + // Find the layer UUID instead of passing back the layer's display name + let isArray = true; + let infoLayerNames = info.layerName; + if (!Array.isArray(info.layerName)) { + infoLayerNames = []; + infoLayerNames.push(info.layerName); + isArray = false; + } + + for (let i in infoLayerNames) { + const found = allNewUUIDs.findIndex( + (x) => x.name == infoLayerNames[i] + ); + if (found > -1) { + const result = allNewUUIDs[found]; + infoLayerNames[i] = result.uuid; + allNewUUIDs.splice(found, 1); + } + } + + if (!isArray) { + info.layerName = infoLayerNames[0]; + } + } + + openWebSocket( + req.body, + { + status: "success", + mission: created.mission, + version: created.version, + newlyAddedUUIDs: newlyAddedUUIDs, + }, + info, + forceClientUpdate + ); return null; }) .catch((err) => { @@ -382,20 +451,36 @@ if (fullAccess) }); router.get("/missions", function (req, res, next) { - Config.aggregate("mission", "DISTINCT", { plain: false }) - .then((missions) => { - let allMissions = []; - for (let i = 0; i < missions.length; i++) - allMissions.push(missions[i].DISTINCT); - allMissions.sort(); - res.send({ status: "success", missions: allMissions }); - return null; - }) - .catch((err) => { - logger("error", "Failed to find missions.", req.originalUrl, req, err); - res.send({ status: "failure", message: "Failed to find missions." }); - return null; - }); + if (req.query.full === "true") { + sequelize + .query( + "SELECT DISTINCT ON (mission) mission, version, config FROM configs ORDER BY mission ASC" + ) + .spread((results) => { + res.send({ status: "success", missions: results }); + return null; + }) + .catch((err) => { + logger("error", "Failed to find missions.", req.originalUrl, req, err); + res.send({ status: "failure", message: "Failed to find missions." 
}); + return null; + }); + } else { + Config.aggregate("mission", "DISTINCT", { plain: false }) + .then((missions) => { + let allMissions = []; + for (let i = 0; i < missions.length; i++) + allMissions.push(missions[i].DISTINCT); + allMissions.sort(); + res.send({ status: "success", missions: allMissions }); + return null; + }) + .catch((err) => { + logger("error", "Failed to find missions.", req.originalUrl, req, err); + res.send({ status: "failure", message: "Failed to find missions." }); + return null; + }); + } return null; }); @@ -577,12 +662,15 @@ if (fullAccess) function openWebSocket(body, response, info, forceClientUpdate) { if ( !process.env.hasOwnProperty("ENABLE_MMGIS_WEBSOCKETS") || - process.env.ENABLE_MMGIS_WEBSOCKETS != "true") { - return + process.env.ENABLE_MMGIS_WEBSOCKETS != "true" + ) { + return; } const port = parseInt(process.env.PORT || "8888", 10); - const path = `ws://localhost:${port}/` + const path = `ws://localhost:${port}${ + process.env.WEBSOCKET_ROOT_PATH || process.env.ROOT_PATH || "" + }/`; const ws = new WebSocket(path); ws.onopen = function () { const data = { @@ -591,7 +679,7 @@ function openWebSocket(body, response, info, forceClientUpdate) { forceClientUpdate, }; ws.send(JSON.stringify(data)); - } + }; } // === Quick API Functions === @@ -599,7 +687,7 @@ function addLayer(req, res, next, cb, forceConfig, caller = "addLayer") { const exampleBody = { mission: "{mission_name}", layer: { - name: "{new_unique_layer_name}", + name: "{new_layer_name}", type: "header || vector || vectortile || query || model || tile || data", "more...": "...", }, @@ -683,18 +771,55 @@ function addLayer(req, res, next, cb, forceConfig, caller = "addLayer") { ); } - const didSet = Utils.setIn( - config.layers, - `${placementPath}${placementIndex}`, - req.body.layer, - true, - true - ); + let didSet = false; + + // Input can be object or array + if (Array.isArray(req.body.layer)) { + for (let i in req.body.layer) { + // This adds the proposed_uuid key to 
all of the new layers/sublayers to be added that have + // user defined UUIDs. We remove the proposed_uuid key after using it to check for unique UUIDs. + Utils.traverseLayers([req.body.layer[i]], (layer) => { + if (layer.uuid != null) { + layer.proposed_uuid = layer.uuid; + } + }); + + didSet = Utils.setIn( + config.layers, + `${placementPath}${placementIndex}`, + req.body.layer[i], + true, + true + ); + + placementIndex += 1; + } + } else { + // This adds the proposed_uuid key to all of the new layers/sublayers to be added that have + // user defined UUIDs. We remove the proposed_uuid key after using it to check for unique UUIDs. + Utils.traverseLayers([req.body.layer], (layer) => { + if (layer.uuid != null) { + layer.proposed_uuid = layer.uuid; + } + }); + + didSet = Utils.setIn( + config.layers, + `${placementPath}${placementIndex}`, + req.body.layer, + true, + true + ); + } if (didSet) { upsert( { - body: { mission: req.body.mission, config: config, forceClientUpdate: req.body.forceClientUpdate }, + body: { + mission: req.body.mission, + config: config, + forceClientUpdate: req.body.forceClientUpdate, + }, }, null, null, @@ -706,6 +831,7 @@ function addLayer(req, res, next, cb, forceConfig, caller = "addLayer") { message: `Added layer to the ${response.mission} mission. Configuration versioned ${response.version}.`, mission: response.mission, version: response.version, + newlyAddedUUIDs: response.newlyAddedUUIDs, }); } else { res.send({ @@ -713,6 +839,7 @@ function addLayer(req, res, next, cb, forceConfig, caller = "addLayer") { message: `Added layer to the ${response.mission} mission. Configuration versioned ${response.version}.`, mission: response.mission, version: response.version, + newlyAddedUUIDs: response.newlyAddedUUIDs, }); } } else { @@ -721,7 +848,9 @@ function addLayer(req, res, next, cb, forceConfig, caller = "addLayer") { }, { type: caller, - layerName: req.body.layer.name, + layerName: Array.isArray(req.body.layer) + ? 
req.body.layer.map((i) => i.name) + : req.body.layer.name, } ); } else if (cb) @@ -764,7 +893,7 @@ if (fullAccess) router.post("/updateLayer", function (req, res, next) { const exampleBody = { mission: "{mission_name}", - layerName: "{existing_layer_name}", + layerUUID: "{existing_layer_uuid}", layer: { "...": "...", }, @@ -785,10 +914,10 @@ if (fullAccess) }); return; } - if (req.body.layerName == null) { + if (req.body.layerUUID == null) { res.send({ status: "failure", - message: `Required parameter 'layerName' is unset. (a layer.name is not sufficient)`, + message: `Required parameter 'layerUUID' is unset. (a layer.uuid is not sufficient)`, example: exampleBody, }); return; @@ -821,7 +950,7 @@ if (fullAccess) let placementIndex = req.body.placement?.index; Utils.traverseLayers(config.layers, (layer, path, index) => { - if (layer.name === req.body.layerName) { + if (layer.uuid === req.body.layerUUID) { existingLayer = JSON.parse(JSON.stringify(layer)); if (placementPath == null) placementPath = path; if (placementIndex == null) placementIndex = index; @@ -832,7 +961,7 @@ if (fullAccess) if (existingLayer == null) { res.send({ status: "failure", - message: `Layer ${req.body.layerName} not found. Cannot update.`, + message: `Layer ${req.body.layerUUID} not found. Cannot update.`, }); return; } @@ -862,7 +991,7 @@ if (fullAccess) if (resp.status === "success") { res.send({ status: "success", - message: `Updated layer '${req.body.layerName}' in the ${resp.mission} mission. Configuration versioned ${resp.version}.`, + message: `Updated layer '${req.body.layerUUID}' in the ${resp.mission} mission. 
Configuration versioned ${resp.version}.`, }); } else { resp.message = `Update layer failed with: ${resp.message}`; @@ -876,14 +1005,14 @@ if (fullAccess) } catch (err) { logger( "error", - `Failed to update layer: ${req.body.layerName}.`, + `Failed to update layer: ${req.body.layerUUID}.`, req.originalUrl, req, err ); res.send({ status: "failure", - message: `Failed to update layer: ${req.body.layerName}. Uncaught reason.`, + message: `Failed to update layer: ${req.body.layerUUID}. Uncaught reason.`, }); } } @@ -894,7 +1023,7 @@ if (fullAccess) function removeLayer(req, res, next, cb) { const exampleBody = { mission: "{mission_name}", - layerName: "{existing_layer_name}", + layerUUID: "{existing_layer_uuid}", "forceClientUpdate?": "{true}; default false", }; @@ -906,10 +1035,10 @@ function removeLayer(req, res, next, cb) { }); return; } - if (req.body.layerName == null) { + if (req.body.layerUUID == null) { res.send({ status: "failure", - message: `Required parameter 'layerName' is unset.`, + message: `Required parameter 'layerUUID' is unset.`, example: exampleBody, }); return; @@ -928,14 +1057,29 @@ function removeLayer(req, res, next, cb) { res.send(config); } else { try { + let layerUUIDs = []; + + // Input can be object or array + if (!Array.isArray(req.body.layerUUID)) { + layerUUIDs.push(req.body.layerUUID); + } else { + layerUUIDs = [...req.body.layerUUID]; + } + let didRemove = false; - Utils.traverseLayers(config.layers, (layer, path, index) => { - if (layer.name === req.body.layerName) { - didRemove = true; - return "remove"; + const removedUUIDs = Utils.traverseLayers( + config.layers, + (layer, path, index) => { + if (layerUUIDs.includes(layer.uuid)) { + didRemove = true; + return "remove"; + } } - }); + ); + const unableToRemoveUUIDs = layerUUIDs.filter( + (i) => !removedUUIDs.map((x) => x.uuid).includes(i) + ); if (didRemove) { upsert( { @@ -951,24 +1095,36 @@ function removeLayer(req, res, next, cb) { if (resp.status === "success") { res.send({ 
status: "success", - message: `Successfully removed layer '${req.body.layerName}'.`, + message: `Successfully removed layer${ + removedUUIDs.length >= 1 ? "s" : "" + }. Configuration versioned ${resp.version}.`, + removedUUIDs: removedUUIDs, + unableToRemoveUUIDs: unableToRemoveUUIDs, }); } else { res.send({ status: "failure", - message: `Failed to remove layer '${req.body.layerName}': ${resp.message}`, + message: `Failed to remove layer${ + layerUUIDs.length >= 1 ? "s" : "" + }: ${resp.message}.`, + unableToRemoveUUIDs: layerUUIDs, }); } }, { - type: 'removeLayer', - layerName: req.body.layerName, + type: "removeLayer", + layerName: layerUUIDs.filter((i) => + removedUUIDs.map((x) => x.uuid).includes(i) + ), } ); } else { res.send({ status: "failure", - message: `Failed to remove layer '${req.body.layerName}'. Layer not found.`, + message: `Failed to remove layer${ + layerUUIDs.length >= 1 ? "s" : "" + }. Layer${layerUUIDs.length >= 1 ? "s" : ""} not found.`, + unableToRemoveUUIDs: layerUUIDs, }); } } catch (err) {} @@ -981,7 +1137,7 @@ if (fullAccess) * /removeLayer * body: { "mission": "", - "layerName": "" + "layerUUID": "" "forceClientUpdate?": true } */ diff --git a/API/Backend/Config/setup.js b/API/Backend/Config/setup.js index cc71f7a0d..ebff217dd 100644 --- a/API/Backend/Config/setup.js +++ b/API/Backend/Config/setup.js @@ -9,7 +9,7 @@ let setup = { process.env.HIDE_CONFIG != "true" ) { s.app.get( - "/configure", + s.ROOT_PATH + "/configure", s.ensureGroup(s.permissions.users), s.ensureAdmin(true), (req, res) => { @@ -18,13 +18,24 @@ let setup = { user: user, AUTH: process.env.AUTH, NODE_ENV: process.env.NODE_ENV, + PORT: process.env.PORT || "8888", + ENABLE_CONFIG_WEBSOCKETS: process.env.ENABLE_CONFIG_WEBSOCKETS, + ENABLE_CONFIG_OVERRIDE: process.env.ENABLE_CONFIG_OVERRIDE, + ROOT_PATH: + process.env.NODE_ENV === "development" + ? "" + : process.env.ROOT_PATH || "", + WEBSOCKET_ROOT_PATH: + process.env.NODE_ENV === "development" + ? 
"" + : process.env.WEBSOCKET_ROOT_PATH || "", }); } ); } s.app.use( - "/API/configure", + s.ROOT_PATH + "/API/configure", s.ensureAdmin(), s.checkHeadersCodeInjection, s.setContentType, diff --git a/API/Backend/Config/uuids.js b/API/Backend/Config/uuids.js new file mode 100644 index 000000000..0d761abf8 --- /dev/null +++ b/API/Backend/Config/uuids.js @@ -0,0 +1,61 @@ +const Utils = require("../../utils.js"); + +const { v4: uuidv4, validate: uuidValidate } = require("uuid"); + +const populateUUIDs = (config) => { + const newlyAddedUUIDs = []; + const definedUUIDs = []; + const allNewUUIDs = []; + + // Track of all of the previously defined UUIDs (i.e. ignore the UUIDs of the newly added layers) + Utils.traverseLayers(config.layers, (layer) => { + if (layer.uuid != null && !layer.proposed_uuid) { + definedUUIDs.push(layer.uuid); + } + }); + + Utils.traverseLayers(config.layers, (layer) => { + if (layer.uuid == null) { + layer.uuid = uuidv4(); + newlyAddedUUIDs.push({ + name: layer.name, + uuid: layer.uuid, + }); + allNewUUIDs.push({ + name: layer.name, + uuid: layer.uuid, + }); + } else if ( + !uuidValidate(layer.uuid) || + definedUUIDs.includes(layer.proposed_uuid) + ) { + const badUUID = layer.uuid; + layer.uuid = uuidv4(); + newlyAddedUUIDs.push({ + name: layer.name, + uuid: layer.uuid, + replacesBadUUID: badUUID, + }); + allNewUUIDs.push({ + name: layer.name, + uuid: layer.uuid, + }); + } else { + if (!definedUUIDs.includes(layer.uuid)) { + definedUUIDs.push(layer.uuid); + allNewUUIDs.push({ + name: layer.name, + uuid: layer.uuid, + }); + } + } + + if (layer.proposed_uuid) { + delete layer.proposed_uuid; + } + }); + + return { newlyAddedUUIDs, allNewUUIDs }; +}; + +module.exports = populateUUIDs; diff --git a/API/Backend/Config/validate.js b/API/Backend/Config/validate.js index 5df39c364..d1d11ffdc 100644 --- a/API/Backend/Config/validate.js +++ b/API/Backend/Config/validate.js @@ -39,6 +39,7 @@ const validateStructure = (config) => { const validateLayers = 
(config) => { let errs = []; + let existingUUIDs = []; Utils.traverseLayers(config.layers, (layer) => { // Check layer name const validNameErrs = isValidLayerName(layer.name); @@ -90,10 +91,17 @@ const validateLayers = (config) => { err(`Unknown layer type: '${layer.type}'`, ["layers[layer].type"]) ); } - }); - errs = errs.concat(hasDuplicateLayerNames(config)); - errs = errs.concat(hasNonHeaderWithSublayers(config)); + if (layer.uuid != null) { + if (existingUUIDs.includes(layer.uuid)) { + errs = errs.concat([ + err( + `Found a layer with duplicate uuid: ${layer.name} - ${layer.uuid}` + ), + ]); + } else existingUUIDs.push(layer.uuid); + } + }); return errs; }; @@ -291,34 +299,6 @@ const hasNonHeaderWithSublayers = (config) => { return errs; }; -const hasDuplicateLayerNames = (config) => { - let allNames = []; - - depthTraversal(config.layers, 0); - - function depthTraversal(node, depth) { - for (var i = 0; i < node.length; i++) { - allNames.push(node[i].name); - //Add other feature information while we're at it - if (node[i].sublayers != null && node[i].sublayers.length > 0) { - depthTraversal(node[i].sublayers, depth + 1); - } - } - } - - let unique = []; - const errs = []; - allNames.forEach((name) => { - if (!unique.includes(name)) unique.push(name); - else - errs.push( - err(`Found duplicate layer name: '${name}'`, ["layers[layer].name"]) - ); - }); - - return errs; -}; - const fillInMissingFieldsWithDefaults = (layer) => { if (layer.type != "header") { layer.initialOpacity = diff --git a/API/Backend/Datasets/models/datasets.js b/API/Backend/Datasets/models/datasets.js index d7ad863a7..4ee65c89f 100644 --- a/API/Backend/Datasets/models/datasets.js +++ b/API/Backend/Datasets/models/datasets.js @@ -9,17 +9,17 @@ const attributes = { name: { type: Sequelize.STRING, unique: true, - allowNull: false + allowNull: false, }, table: { type: Sequelize.STRING, unique: true, - allowNull: false - } + allowNull: false, + }, }; const options = { - timestamps: true + 
timestamps: true, }; // setup User model and its fields. @@ -30,20 +30,20 @@ function makeNewDatasetTable(name, columns, success, failure) { let attributes = {}; - columns.forEach(element => { + columns.forEach((element) => { attributes[element] = { type: Sequelize.STRING, unique: false, - allowNull: true + allowNull: true, }; }); const options = { - timestamps: false + timestamps: false, }; Datasets.findOne({ where: { name: name } }) - .then(result => { + .then((result) => { if (result) { let DatasetTable = sequelize.define( result.dataValues.table, @@ -54,16 +54,16 @@ function makeNewDatasetTable(name, columns, success, failure) { { updatedAt: new Date().toISOString() }, { where: { name: name }, silent: true } ) - .then(r => { + .then((r) => { success({ name: result.dataValues.name, table: result.dataValues.table, - tableObj: DatasetTable + tableObj: DatasetTable, }); return null; }) - .catch(err => { + .catch((err) => { logger( "error", "Failed to update datasets.", @@ -73,19 +73,19 @@ function makeNewDatasetTable(name, columns, success, failure) { ); failure({ status: "failure", - message: "Failed to update datasets" + message: "Failed to update datasets", }); }); } else { sequelize .query("SELECT COUNT(*) FROM datasets") - .spread(results => { + .spread((results) => { let newTable = "d" + (parseInt(results[0].count) + 1) + "_datasets"; Datasets.create({ name: name, - table: newTable + table: newTable, }) - .then(created => { + .then((created) => { let DatasetTable = sequelize.define( newTable, attributes, @@ -97,11 +97,11 @@ function makeNewDatasetTable(name, columns, success, failure) { success({ name: name, table: newTable, - tableObj: DatasetTable + tableObj: DatasetTable, }); return null; }) - .catch(err => { + .catch((err) => { logger( "error", "Failed to sync dataset table.", @@ -111,13 +111,13 @@ function makeNewDatasetTable(name, columns, success, failure) { ); failure({ status: "failure", - message: "Failed to sync" + message: "Failed to sync", }); 
}); return null; }) - .catch(err => { + .catch((err) => { logger( "error", "Failed to create dataset table.", @@ -127,12 +127,12 @@ function makeNewDatasetTable(name, columns, success, failure) { ); failure({ status: "failure", - message: "Failed to create" + message: "Failed to create", }); }); return null; }) - .catch(err => { + .catch((err) => { logger( "error", "Failed to count existing datasets.", @@ -142,14 +142,14 @@ function makeNewDatasetTable(name, columns, success, failure) { ); failure({ status: "failure", - message: "Failed to count existing datasets" + message: "Failed to count existing datasets", }); }); } return null; }) - .catch(err => { + .catch((err) => { logger( "error", "Failed to find existing datasets.", @@ -161,7 +161,7 @@ function makeNewDatasetTable(name, columns, success, failure) { status: "failure", message: "Failed to find existing datasets", error: error, - name: name + name: name, }); }); } @@ -169,5 +169,5 @@ function makeNewDatasetTable(name, columns, success, failure) { // export User model for use in other files. 
module.exports = { Datasets: Datasets, - makeNewDatasetTable: makeNewDatasetTable + makeNewDatasetTable: makeNewDatasetTable, }; diff --git a/API/Backend/Datasets/routes/datasets.js b/API/Backend/Datasets/routes/datasets.js index 46490473a..12e56f86f 100644 --- a/API/Backend/Datasets/routes/datasets.js +++ b/API/Backend/Datasets/routes/datasets.js @@ -17,7 +17,7 @@ const Datasets = datasets.Datasets; const makeNewDatasetTable = datasets.makeNewDatasetTable; //Returns dataset rows based on search -router.post("/get", function(req, res, next) { +router.post("/get", function (req, res, next) { get(req, res, next); }); function get(req, res, next) { @@ -30,13 +30,13 @@ function get(req, res, next) { if (i >= queries.length) { res.send({ status: "success", - body: results + body: results, }); return; } //First Find the table name Datasets.findOne({ where: { name: queries[i].dataset } }) - .then(result => { + .then((result) => { if (result) { const column = queries[i].column .replace(/[`~!@#$%^&*|+\-=?;:'",.<>\{\}\[\]\\\/]/gi, "") @@ -50,20 +50,20 @@ function get(req, res, next) { '"=:search ORDER BY id ASC LIMIT 100', { replacements: { - search: queries[i].search - } + search: queries[i].search, + }, } ) - .spread(r => { + .spread((r) => { results.push({ ...queries[i], table: result.dataValues.table, - results: r + results: r, }); loopedGet(i + 1); return null; }) - .catch(err => { + .catch((err) => { loopedGet(i + 1); return null; }); @@ -72,7 +72,7 @@ function get(req, res, next) { } return null; }) - .catch(err => { + .catch((err) => { loopedGet(i + 1); return null; }); @@ -80,9 +80,9 @@ function get(req, res, next) { } //Returns a list of entries in the datasets table -router.post("/entries", function(req, res, next) { +router.post("/entries", function (req, res, next) { Datasets.findAll() - .then(sets => { + .then((sets) => { if (sets && sets.length > 0) { let entries = []; for (let i = 0; i < sets.length; i++) { @@ -90,18 +90,18 @@ router.post("/entries", 
function(req, res, next) { } res.send({ status: "success", - body: { entries: entries } + body: { entries: entries }, }); } else { res.send({ - status: "failure" + status: "failure", }); } }) - .catch(err => { + .catch((err) => { logger("error", "Failure finding datasets.", req.originalUrl, req, err); res.send({ - status: "failure" + status: "failure", }); }); }); @@ -111,10 +111,10 @@ router.post("/entries", function(req, res, next) { * req.body.key * req.body.value */ -router.post("/search", function(req, res, next) { +router.post("/search", function (req, res, next) { //First Find the table name Datasets.findOne({ where: { name: req.body.layer } }) - .then(result => { + .then((result) => { if (result) { let table = result.dataValues.table; @@ -126,11 +126,11 @@ router.post("/search", function(req, res, next) { { replacements: { key: req.body.key, - value: req.body.value.replace(/[`;'"]/gi, "") - } + value: req.body.value.replace(/[`;'"]/gi, ""), + }, } ) - .spread(results => { + .spread((results) => { let r = []; for (let i = 0; i < results.length; i++) { let feature = JSON.parse(results[i].st_asgeojson); @@ -140,12 +140,12 @@ router.post("/search", function(req, res, next) { res.send({ status: "success", - body: r + body: r, }); return null; }) - .catch(err => { + .catch((err) => { logger( "error", "SQL error search through dataset.", @@ -155,34 +155,34 @@ router.post("/search", function(req, res, next) { ); res.send({ status: "failure", - message: "SQL error." + message: "SQL error.", }); }); } else { res.send({ status: "failure", - message: "Layer not found." 
+ message: "Layer not found.", }); } return null; }) - .catch(err => { + .catch((err) => { logger("error", "Failure finding dataset.", req.originalUrl, req, err); res.send({ - status: "failure" + status: "failure", }); }); }); -router.post("/upload", function(req, res, next) { +router.post("/upload", function (req, res, next) { // Disable timeout req.setTimeout(0); let fields = { name: null, header: null, - upsert: null + upsert: null, }; let tableName = null; @@ -196,26 +196,33 @@ router.post("/upload", function(req, res, next) { let uploaded = ""; let uploadFinished = false; const busboy = new Busboy({ headers: req.headers }); - busboy.on("file", function(fieldname, file, filename, encoding, mimetype) { - file.on("data", function(data) { + busboy.on("file", function (fieldname, file, filename, encoding, mimetype) { + file.on("data", function (data) { uploaded += data.toString("utf8"); populateNext(); }); - file.on("end", function() {}); + file.on("end", function () {}); }); - busboy.on("field", function( - fieldname, - val, - fieldnameTruncated, - valTruncated, - encoding, - mimetype - ) { - fields[fieldname] = val; - if (fields.name != null && fields.header != null && fields.upsert != null) - begin(); - }); - busboy.on("finish", function() { + busboy.on( + "field", + function ( + fieldname, + val, + fieldnameTruncated, + valTruncated, + encoding, + mimetype + ) { + fields[fieldname] = val; + if ( + fields.name != null && + fields.header != null && + fields.upsert != null + ) + begin(); + } + ); + busboy.on("finish", function () { uploadFinished = true; populateInterval = setInterval(populateNext, 100); }); @@ -231,7 +238,7 @@ router.post("/upload", function(req, res, next) { clearInterval(populateInterval); if (fields.upsert === "true") { let condition = ""; - fields.header.forEach(elm => { + fields.header.forEach((elm) => { elm = elm.replace(/[`~!@#$%^&*|+\-=?;:'",.<>\{\}\[\]\\\/]/gi, ""); condition += ' AND ( a."' + @@ -264,11 +271,11 @@ router.post("/upload", 
function(req, res, next) { " out of " + totalPopulates + " chunks successfully uploaded. Data has been upserted too.", - body: {} + body: {}, }); res.end(); }) - .catch(err => { + .catch((err) => { logger( "error", "Upload but failed to remove duplicated rows.", @@ -278,7 +285,7 @@ router.post("/upload", function(req, res, next) { ); res.send({ status: "failed", - message: "Upload but failed to remove duplicated rows." + message: "Upload but failed to remove duplicated rows.", }); }); } else { @@ -290,7 +297,7 @@ router.post("/upload", function(req, res, next) { " out of " + totalPopulates + " chunks successfully uploaded", - body: {} + body: {}, }); res.end(); } @@ -313,7 +320,7 @@ router.post("/upload", function(req, res, next) { csv.push(r); } totalPopulates++; - populateDatasetTable(tableObj, csv, function(success) { + populateDatasetTable(tableObj, csv, function (success) { working = false; if (success) successfulPopulates++; }); @@ -328,18 +335,18 @@ router.post("/upload", function(req, res, next) { } catch (err) { res.send({ status: "failed", - message: "The field 'header' is not valid json array." 
+ message: "The field 'header' is not valid json array.", }); res.end(); return; } - makeNewDatasetTable(fields.name, fields.header, function(result) { + makeNewDatasetTable(fields.name, fields.header, function (result) { let checkEnding = result.table.split("_"); if (checkEnding[checkEnding.length - 1] !== "datasets") { logger("error", "Malformed table name.", req.originalUrl, req); res.send({ status: "failed", - message: "Malformed table name" + message: "Malformed table name", }); return; } @@ -354,7 +361,7 @@ router.post("/upload", function(req, res, next) { .then(() => { tableObj = result.tableObj; }) - .catch(err => { + .catch((err) => { logger("error", "Recreation error.", req.originalUrl, req, err); res.send(result); }); @@ -364,12 +371,12 @@ router.post("/upload", function(req, res, next) { function populateDatasetTable(Table, csv, cb) { Table.bulkCreate(csv, { - returning: true + returning: true, }) - .then(function(response) { + .then(function (response) { cb(true); }) - .catch(function(err) { + .catch(function (err) { logger( "error", "Datasets: Failed to populate a dataset table!", @@ -383,20 +390,20 @@ router.post("/upload", function(req, res, next) { } }); -router.post("/recreate", function(req, res, next) { +router.post("/recreate", function (req, res, next) { // Disable timeout req.setTimeout(0); makeNewDatasetTable( req.body.name || req.fields.name, req.body.header || JSON.parse(req.fields.header), - function(result) { + function (result) { let checkEnding = result.table.split("_"); if (checkEnding[checkEnding.length - 1] !== "datasets") { logger("error", "Malformed table name.", req.originalUrl, req); res.send({ status: "failed", - message: "Malformed table name" + message: "Malformed table name", }); return; } @@ -408,18 +415,18 @@ router.post("/recreate", function(req, res, next) { populateDatasetTable( result.tableObj, JSON.parse(req.body.csv), - function(success) { + function (success) { res.send({ status: success == true ? 
"success" : "failure", message: "", - body: {} + body: {}, }); } ); return null; }) - .catch(err => { + .catch((err) => { logger("error", "Recreation error.", req.originalUrl, req, err); res.send(result); }); @@ -427,29 +434,29 @@ router.post("/recreate", function(req, res, next) { populateDatasetTable( result.tableObj, JSON.parse(req.body.csv), - function(success) { + function (success) { res.send({ status: success == true ? "success" : "failure", message: "", - body: {} + body: {}, }); } ); } }, - function(result) { + function (result) { res.send(result); } ); function populateDatasetTable(Table, csv, cb) { Table.bulkCreate(csv, { - returning: true + returning: true, }) - .then(function(response) { + .then(function (response) { cb(true); }) - .catch(function(err) { + .catch(function (err) { logger( "error", "Datasets: Failed to populate a dataset table!", diff --git a/API/Backend/Datasets/setup.js b/API/Backend/Datasets/setup.js index ef6a8b974..71d506d88 100644 --- a/API/Backend/Datasets/setup.js +++ b/API/Backend/Datasets/setup.js @@ -1,9 +1,9 @@ const router = require("./routes/datasets"); let setup = { //Once the app initializes - onceInit: s => { + onceInit: (s) => { s.app.use( - "/API/datasets", + s.ROOT_PATH + "/API/datasets", s.ensureAdmin(), s.checkHeadersCodeInjection, s.setContentType, @@ -11,9 +11,9 @@ let setup = { ); }, //Once the server starts - onceStarted: s => {}, + onceStarted: (s) => {}, //Once all tables sync - onceSynced: s => {} + onceSynced: (s) => {}, }; module.exports = setup; diff --git a/API/Backend/Draw/models/filehistories.js b/API/Backend/Draw/models/filehistories.js index dc5a01cb2..034d20b20 100644 --- a/API/Backend/Draw/models/filehistories.js +++ b/API/Backend/Draw/models/filehistories.js @@ -25,28 +25,33 @@ require("dotenv").config(); const attributes = { file_id: { type: Sequelize.INTEGER, - allowNull: false + allowNull: false, }, history_id: { type: Sequelize.INTEGER, - allowNull: false + allowNull: false, }, time: { type: 
Sequelize.BIGINT, - allowNull: false + allowNull: false, }, action_index: { type: Sequelize.INTEGER, - allowNull: false + allowNull: false, }, history: { type: Sequelize.DataTypes.ARRAY(Sequelize.DataTypes.INTEGER), - allowNull: true - } + allowNull: true, + }, + author: { + type: Sequelize.STRING, + unique: false, + allowNull: true, + }, }; const options = { - timestamps: false + timestamps: false, }; // setup Filehistories model and its fields. @@ -57,5 +62,27 @@ var FilehistoriesTEST = sequelize.define( options ); +// Adds to the table, never removes +const up = async () => { + // author column + await sequelize + .query( + `ALTER TABLE file_histories ADD COLUMN IF NOT EXISTS author varchar(255) NULL;` + ) + .then(() => { + return null; + }) + .catch((err) => { + logger( + "error", + `Failed to adding file_histories.author column. DB tables may be out of sync!`, + "file_histories", + null, + err + ); + return null; + }); +}; + // export Filehistories model for use in other files. -module.exports = { Filehistories, FilehistoriesTEST }; +module.exports = { Filehistories, FilehistoriesTEST, up }; diff --git a/API/Backend/Draw/models/publishedstore.js b/API/Backend/Draw/models/publishedstore.js index 60d4a8d1f..4a178577c 100644 --- a/API/Backend/Draw/models/publishedstore.js +++ b/API/Backend/Draw/models/publishedstore.js @@ -11,21 +11,21 @@ var PublishedStore = sequelize.define( name: { type: Sequelize.STRING, unique: false, - allowNull: false + allowNull: false, }, value: { type: Sequelize.TEXT, allowNull: true, - defaultValue: "" + defaultValue: "", }, time: { type: Sequelize.BIGINT, - allowNull: false - } + allowNull: false, + }, }, { timestamps: false, - updatedAt: false + updatedAt: false, } ); diff --git a/API/Backend/Draw/models/userfeatures.js b/API/Backend/Draw/models/userfeatures.js index 5580c0215..2bdf49dd2 100644 --- a/API/Backend/Draw/models/userfeatures.js +++ b/API/Backend/Draw/models/userfeatures.js @@ -25,12 +25,12 @@ require("dotenv").config(); 
const attributes = { file_id: { type: Sequelize.INTEGER, - allowNull: false + allowNull: false, }, level: { type: Sequelize.INTEGER, unique: false, - allowNull: false + allowNull: false, }, intent: { type: Sequelize.ENUM, @@ -44,24 +44,24 @@ const attributes = { "line", "point", "text", - "arrow" + "arrow", ], allowNull: true, - defaultValue: null + defaultValue: null, }, properties: { type: Sequelize.JSON, allowNull: true, - defaultValue: {} + defaultValue: {}, }, geom: { type: Sequelize.GEOMETRY, - allowNull: true - } + allowNull: true, + }, }; const options = { - timestamps: false + timestamps: false, }; var Userfeatures = sequelize.define("user_features", attributes, options); diff --git a/API/Backend/Draw/models/userfiles.js b/API/Backend/Draw/models/userfiles.js index c562c88e5..f701f7439 100644 --- a/API/Backend/Draw/models/userfiles.js +++ b/API/Backend/Draw/models/userfiles.js @@ -79,6 +79,21 @@ const attributes = { defaultValue: "0", unique: false, }, + template: { + type: Sequelize.JSON, + allowNull: true, + defaultValue: null, + }, + publicity_type: { + type: Sequelize.STRING, + unique: false, + allowNull: true, + }, + public_editors: { + type: Sequelize.ARRAY(Sequelize.TEXT), + unique: false, + allowNull: true, + }, }; const options = { @@ -128,5 +143,65 @@ const makeMasterFiles = (intents) => { } }; +// Adds to the table, never removes +const up = async () => { + // template column + await sequelize + .query( + `ALTER TABLE user_files ADD COLUMN IF NOT EXISTS template json NULL;` + ) + .then(() => { + return null; + }) + .catch((err) => { + logger( + "error", + `Failed to adding user_files.template column. 
DB tables may be out of sync!`, + "user_files", + null, + err + ); + return null; + }); + + // publicity_type column + await sequelize + .query( + `ALTER TABLE user_files ADD COLUMN IF NOT EXISTS publicity_type varchar(255) NULL;` + ) + .then(() => { + return null; + }) + .catch((err) => { + logger( + "error", + `Failed to adding user_files.publicity_type column. DB tables may be out of sync!`, + "user_files", + null, + err + ); + return null; + }); + + // public_editors column + await sequelize + .query( + `ALTER TABLE user_files ADD COLUMN IF NOT EXISTS public_editors text[] NULL;` + ) + .then(() => { + return null; + }) + .catch((err) => { + logger( + "error", + `Failed to adding user_files.public_editors column. DB tables may be out of sync!`, + "user_files", + null, + err + ); + return null; + }); +}; + // export User model for use in other files. -module.exports = { Userfiles, UserfilesTEST, makeMasterFiles }; +module.exports = { Userfiles, UserfilesTEST, makeMasterFiles, up }; diff --git a/API/Backend/Draw/routes/draw.js b/API/Backend/Draw/routes/draw.js index 903d2971b..c2dc8fab5 100644 --- a/API/Backend/Draw/routes/draw.js +++ b/API/Backend/Draw/routes/draw.js @@ -2,7 +2,7 @@ const express = require("express"); const logger = require("../../../logger"); const database = require("../../../database"); const Sequelize = require("sequelize"); -const uuidv4 = require("uuid/v4"); +const { v4: uuidv4 } = require("uuid"); const fhistories = require("../models/filehistories"); const Filehistories = fhistories.Filehistories; const FilehistoriesTEST = fhistories.FilehistoriesTEST; @@ -12,6 +12,10 @@ const UserfilesTEST = ufiles.UserfilesTEST; const uf = require("../models/userfeatures"); const Userfeatures = uf.Userfeatures; const UserfeaturesTEST = uf.UserfeaturesTEST; + +const filesutils = require("./filesutils"); +const getfile = filesutils.getfile; + const { sequelize } = require("../../../connection"); const router = express.Router(); @@ -49,6 +53,7 @@ const 
pushToHistory = ( time, undoToTime, action_index, + user, successCallback, failureCallback ) => { @@ -85,6 +90,7 @@ const pushToHistory = ( time: time, action_index: action_index, history: h, + author: user, }; // Insert new entry into the history table Table.create(newHistoryEntry) @@ -252,6 +258,7 @@ const clipOver = function ( time, null, 5, + req.user, () => { if (typeof successCallback === "function") successCallback(); }, @@ -392,6 +399,7 @@ const clipUnder = function ( time, null, 7, + req.user, () => { if (typeof successCallback === "function") successCallback(); }, @@ -440,6 +448,137 @@ const clipUnder = function ( failureCallback(err); }); }; + +const _templateConform = (req, from) => { + return new Promise((resolve, reject) => { + req.body.id = req.body.file_id; + + getfile(req, { + send: (r) => { + if (r.status === "success") { + const geojson = r.body.geojson; + const template = + r.body.file?.[0]?.dataValues?.template?.template || []; + const existingProperties = JSON.parse(req.body.properties || "{}"); + const templaterProperties = {}; + + template.forEach((t, idx) => { + switch (t.type) { + case "incrementer": + const nextIncrement = _getNextIncrement( + existingProperties[t.field], + t, + geojson.features, + existingProperties, + from + ); + if (nextIncrement.error != null) { + reject(nextIncrement.error); + return; + } else templaterProperties[t.field] = nextIncrement.newValue; + break; + default: + } + }); + + req.body.properties = JSON.stringify({ + ...existingProperties, + ...templaterProperties, + }); + } + resolve(); + return; + }, + }); + + function _getNextIncrement(value, t, layer, existingProperties) { + const response = { + newValue: value, + error: null, + }; + + let usedValues = []; + const split = (t._default || t.default).split("#"); + const start = split[0]; + const end = split[1]; + + for (let i = 0; i < layer.length; i++) { + if (layer[i] == null) continue; + let geojson = layer[i]; + if (geojson?.properties?.[t.field] != null) { 
+ let featuresVal = geojson?.properties?.[t.field]; + + featuresVal = featuresVal.replace(start, "").replace(end, ""); + + if (featuresVal !== "#") { + featuresVal = parseInt(featuresVal); + usedValues.push(featuresVal); + } + } + } + + if ((response.newValue || "").indexOf("#") !== -1) { + // Actually increment the incrementer for the first time + let bestVal = 0; + usedValues.sort(function (a, b) { + return a - b; + }); + usedValues = [...new Set(usedValues)]; // makes it unique + usedValues.forEach((v) => { + if (bestVal === v) bestVal++; + }); + response.newValue = response.newValue.replace("#", bestVal); + } else if (existingProperties) { + let numVal = response.newValue.replace(start, "").replace(end, ""); + if (numVal != "#") { + numVal = parseInt(numVal); + if (existingProperties[t.field] === response.newValue) { + // In case of a resave, make sure the id exists only once + let count = 0; + usedValues.forEach((v) => { + if (numVal === v) count++; + }); + if (count > 1) + response.error = `Incrementing field: '${t.field}' is not unique`; + } else { + // In case a manual change, make sure the id is unique + if (usedValues.indexOf(numVal) !== -1) + response.error = `Incrementing field: '${t.field}' is not unique`; + } + } + } + + // Check that the field still matches the surrounding string + const incRegex = new RegExp(`^${start}\\d+${end}$`); + if (incRegex.test(response.newValue) == false) { + response.error = `Incrementing field: '${t.field}' must follow syntax: '${start}{#}${end}'`; + } + + // Check that incrementer is unique + let numMatches = 0; + for (let i = 0; i < layer.length; i++) { + if (layer[i] == null) continue; + let geojson = layer[i]; + if (geojson?.properties?.[t.field] != null) { + let featuresVal = geojson?.properties?.[t.field]; + if ( + (value || "").indexOf("#") == -1 && + response.newValue === featuresVal && + geojson?.properties?.uuid != existingProperties.uuid + ) { + numMatches++; + } + } + } + // If we're are editing and the value 
did not change, allow a single match + if (numMatches > 0) { + response.error = `Incrementing field: '${t.field}' is not unique`; + } + + return response; + } + }); +}; /** * Adds a feature * { @@ -453,13 +592,23 @@ const clipUnder = function ( * geometry: (required) * } */ -const add = function ( +const add = async function ( req, res, successCallback, failureCallback1, failureCallback2 ) { + let failedTemplate = false; + await _templateConform(req, "add").catch((err) => { + failedTemplate = err; + }); + if (failedTemplate !== false) { + if (typeof failureCallback2 === "function") + failureCallback2(failedTemplate); + return; + } + let Files = req.body.test === "true" ? UserfilesTEST : Userfiles; let Features = req.body.test === "true" ? UserfeaturesTEST : Userfeatures; let Histories = req.body.test === "true" ? FilehistoriesTEST : Filehistories; @@ -475,13 +624,28 @@ const add = function ( Files.findOne({ where: { id: req.body.file_id, - [Sequelize.Op.or]: { - file_owner: req.user, - [Sequelize.Op.and]: { - file_owner: "group", - file_owner_group: { [Sequelize.Op.overlap]: groups }, + [Sequelize.Op.or]: [ + { file_owner: req.user }, + { + [Sequelize.Op.and]: { + file_owner: "group", + file_owner_group: { [Sequelize.Op.overlap]: groups }, + }, }, - }, + { + [Sequelize.Op.and]: { + public: "1", + publicity_type: "list_edit", + public_editors: { [Sequelize.Op.contains]: [req.user] }, + }, + }, + { + [Sequelize.Op.and]: { + public: "1", + publicity_type: "all_edit", + }, + }, + ], }, }).then((file) => { if (!file) { @@ -581,6 +745,7 @@ const add = function ( time, null, 0, + req.user, () => { if (typeof successCallback === "function") successCallback(created.id, created.intent); @@ -650,7 +815,16 @@ router.post("/add", function (req, res, next) { * geometry: (optional) * } */ -const edit = function (req, res, successCallback, failureCallback) { +const edit = async function (req, res, successCallback, failureCallback) { + let failedTemplate = false; + await 
_templateConform(req, "edit").catch((err) => { + failedTemplate = err; + }); + if (failedTemplate !== false) { + if (typeof failureCallback === "function") failureCallback(failedTemplate); + return; + } + let Files = req.body.test === "true" ? UserfilesTEST : Userfiles; let Features = req.body.test === "true" ? UserfeaturesTEST : Userfeatures; let Histories = req.body.test === "true" ? FilehistoriesTEST : Filehistories; @@ -665,13 +839,28 @@ const edit = function (req, res, successCallback, failureCallback) { Files.findOne({ where: { id: req.body.file_id, - [Sequelize.Op.or]: { - file_owner: req.user, - [Sequelize.Op.and]: { - file_owner: "group", - file_owner_group: { [Sequelize.Op.overlap]: groups }, + [Sequelize.Op.or]: [ + { file_owner: req.user }, + { + [Sequelize.Op.and]: { + file_owner: "group", + file_owner_group: { [Sequelize.Op.overlap]: groups }, + }, }, - }, + { + [Sequelize.Op.and]: { + public: "1", + publicity_type: "list_edit", + public_editors: { [Sequelize.Op.contains]: [req.user] }, + }, + }, + { + [Sequelize.Op.and]: { + public: "1", + publicity_type: "all_edit", + }, + }, + ], }, }) .then((file) => { @@ -749,6 +938,7 @@ const edit = function (req, res, successCallback, failureCallback) { time, null, 1, + req.user, () => { successCallback(createdId, createdUUID, createdIntent); }, @@ -796,7 +986,9 @@ router.post("/edit", function (req, res) { res.send({ status: "failure", message: "Failed to edit feature.", - body: {}, + body: { + error: err, + }, }); } ); @@ -822,13 +1014,28 @@ router.post("/remove", function (req, res, next) { Files.findOne({ where: { id: req.body.file_id, - [Sequelize.Op.or]: { - file_owner: req.user, - [Sequelize.Op.and]: { - file_owner: "group", - file_owner_group: { [Sequelize.Op.overlap]: groups }, + [Sequelize.Op.or]: [ + { file_owner: req.user }, + { + [Sequelize.Op.and]: { + file_owner: "group", + file_owner_group: { [Sequelize.Op.overlap]: groups }, + }, }, - }, + { + [Sequelize.Op.and]: { + public: "1", + 
publicity_type: "list_edit", + public_editors: { [Sequelize.Op.contains]: [req.user] }, + }, + }, + { + [Sequelize.Op.and]: { + public: "1", + publicity_type: "all_edit", + }, + }, + ], }, }).then((file) => { if (!file) { @@ -861,6 +1068,7 @@ router.post("/remove", function (req, res, next) { time, null, 2, + req.user, () => { logger("info", "Feature removed.", req.originalUrl, req); res.send({ @@ -927,13 +1135,28 @@ router.post("/undo", function (req, res, next) { Files.findOne({ where: { id: req.body.file_id, - [Sequelize.Op.or]: { - file_owner: req.user, - [Sequelize.Op.and]: { - file_owner: "group", - file_owner_group: { [Sequelize.Op.overlap]: groups }, + [Sequelize.Op.or]: [ + { file_owner: req.user }, + { + [Sequelize.Op.and]: { + file_owner: "group", + file_owner_group: { [Sequelize.Op.overlap]: groups }, + }, }, - }, + { + [Sequelize.Op.and]: { + public: "1", + publicity_type: "list_edit", + public_editors: { [Sequelize.Op.contains]: [req.user] }, + }, + }, + { + [Sequelize.Op.and]: { + public: "1", + publicity_type: "all_edit", + }, + }, + ], }, }).then((file) => { if (!file) { @@ -992,6 +1215,7 @@ router.post("/undo", function (req, res, next) { time, req.body.undo_time, 3, + req.user, () => { logger("info", "Undo successful.", req.originalUrl, req); res.send({ @@ -1052,13 +1276,28 @@ router.post("/merge", function (req, res, next) { Files.findOne({ where: { id: req.body.file_id, - [Sequelize.Op.or]: { - file_owner: req.user, - [Sequelize.Op.and]: { - file_owner: "group", - file_owner_group: { [Sequelize.Op.overlap]: groups }, + [Sequelize.Op.or]: [ + { file_owner: req.user }, + { + [Sequelize.Op.and]: { + file_owner: "group", + file_owner_group: { [Sequelize.Op.overlap]: groups }, + }, }, - }, + { + [Sequelize.Op.and]: { + public: "1", + publicity_type: "list_edit", + public_editors: { [Sequelize.Op.contains]: [req.user] }, + }, + }, + { + [Sequelize.Op.and]: { + public: "1", + publicity_type: "all_edit", + }, + }, + ], }, }).then((file) => { if (!file) 
{ @@ -1131,6 +1370,7 @@ router.post("/merge", function (req, res, next) { time, null, 6, + req.user, () => { logger( "info", @@ -1216,13 +1456,28 @@ router.post("/split", function (req, res, next) { Files.findOne({ where: { id: req.body.file_id, - [Sequelize.Op.or]: { - file_owner: req.user, - [Sequelize.Op.and]: { - file_owner: "group", - file_owner_group: { [Sequelize.Op.overlap]: groups }, + [Sequelize.Op.or]: [ + { file_owner: req.user }, + { + [Sequelize.Op.and]: { + file_owner: "group", + file_owner_group: { [Sequelize.Op.overlap]: groups }, + }, }, - }, + { + [Sequelize.Op.and]: { + public: "1", + publicity_type: "list_edit", + public_editors: { [Sequelize.Op.contains]: [req.user] }, + }, + }, + { + [Sequelize.Op.and]: { + public: "1", + publicity_type: "all_edit", + }, + }, + ], }, }) .then((file) => { @@ -1290,6 +1545,7 @@ router.post("/split", function (req, res, next) { time, null, 8, + req.user, () => { res.send({ status: "success", diff --git a/API/Backend/Draw/routes/files.js b/API/Backend/Draw/routes/files.js index 16c72689d..6262b169b 100644 --- a/API/Backend/Draw/routes/files.js +++ b/API/Backend/Draw/routes/files.js @@ -48,14 +48,25 @@ router.post("/", function (req, res, next) { router.post("/getfiles", function (req, res, next) { let Table = req.body.test === "true" ? UserfilesTEST : Userfiles; + const orWhere = [ + { + file_owner: req.user, + }, + { public: "1" }, + { + public: + req.leadGroupName != null && + req.groups != null && + req.groups[req.leadGroupName] === true + ? "0" + : "1", + }, + ]; Table.findAll({ where: { //file_owner is req.user or public is '0' hidden: "0", - [Sequelize.Op.or]: { - file_owner: req.user, - public: "1", - }, + [Sequelize.Op.or]: orWhere, }, }) .then((files) => { @@ -131,7 +142,9 @@ router.post("/make", function (req, res, next) { file_description: req.body.file_description, intent: req.body.intent, public: "1", + publicity_type: "read_only", hidden: "0", + template: req.body.template ? 
JSON.parse(req.body.template) : null, }; // Insert new userfile into the user_files table @@ -410,6 +423,9 @@ router.post("/restore", function (req, res, next) { * file_name: (optional) * file_description: (optional) * public: <0|1> (optional) + * template: (optional) + * publicity_type: (optional) + * public_editors: (optional) * } */ router.post("/change", function (req, res, next) { @@ -432,6 +448,29 @@ router.post("/change", function (req, res, next) { ) { toUpdateTo.public = req.body.public; } + if (req.body.hasOwnProperty("template") && req.body.template != null) { + try { + toUpdateTo.template = JSON.parse(req.body.template); + } catch (err) {} + } + if ( + req.body.hasOwnProperty("publicity_type") && + [null, "read_only", "list_edit", "all_edit"].includes( + req.body.publicity_type + ) + ) { + toUpdateTo.publicity_type = req.body.publicity_type; + } + if (req.body.hasOwnProperty("public_editors")) { + try { + let public_editors = null; + if (typeof req.body.public_editors === "string") + public_editors = req.body.public_editors + .split(",") + .map((e) => e.trim()); + toUpdateTo.public_editors = public_editors; + } catch (err) {} + } let updateObj = { where: { diff --git a/API/Backend/Draw/setup.js b/API/Backend/Draw/setup.js index 90df3c54a..d74efb198 100644 --- a/API/Backend/Draw/setup.js +++ b/API/Backend/Draw/setup.js @@ -1,12 +1,14 @@ const routeFiles = require("./routes/files"); const routerFiles = routeFiles.router; const routerDraw = require("./routes/draw").router; +const ufiles = require("./models/userfiles"); +const file_histories = require("./models/filehistories"); let setup = { //Once the app initializes onceInit: (s) => { s.app.use( - "/API/files", + s.ROOT_PATH + "/API/files", s.ensureUser(), s.checkHeadersCodeInjection, s.setContentType, @@ -15,7 +17,7 @@ let setup = { ); s.app.use( - "/API/draw", + s.ROOT_PATH + "/API/draw", s.ensureUser(), s.checkHeadersCodeInjection, s.setContentType, @@ -27,6 +29,12 @@ let setup = { onceStarted: (s) => 
{}, //Once all tables sync onceSynced: (s) => { + if (typeof file_histories.up === "function") { + file_histories.up(); + } + if (typeof ufiles.up === "function") { + ufiles.up(); + } routeFiles.makeMasterFiles([ "roi", "campaign", diff --git a/API/Backend/Geodatasets/setup.js b/API/Backend/Geodatasets/setup.js index 7320668cc..f0769561b 100644 --- a/API/Backend/Geodatasets/setup.js +++ b/API/Backend/Geodatasets/setup.js @@ -2,9 +2,9 @@ const router = require("./routes/geodatasets"); let setup = { //Once the app initializes - onceInit: s => { + onceInit: (s) => { s.app.use( - "/API/geodatasets", + s.ROOT_PATH + "/API/geodatasets", s.ensureAdmin(), s.checkHeadersCodeInjection, s.setContentType, @@ -12,9 +12,9 @@ let setup = { ); }, //Once the server starts - onceStarted: s => {}, + onceStarted: (s) => {}, //Once all tables sync - onceSynced: s => {} + onceSynced: (s) => {}, }; module.exports = setup; diff --git a/API/Backend/LongTermToken/setup.js b/API/Backend/LongTermToken/setup.js index 30f25272a..4eed35916 100644 --- a/API/Backend/LongTermToken/setup.js +++ b/API/Backend/LongTermToken/setup.js @@ -1,9 +1,9 @@ const router = require("./routes/longtermtokens"); let setup = { //Once the app initializes - onceInit: s => { + onceInit: (s) => { s.app.use( - "/API/longtermtoken", + s.ROOT_PATH + "/API/longtermtoken", s.ensureAdmin(false, true), s.checkHeadersCodeInjection, s.setContentType, @@ -11,9 +11,9 @@ let setup = { ); }, //Once the server starts - onceStarted: s => {}, + onceStarted: (s) => {}, //Once all tables sync - onceSynced: s => {} + onceSynced: (s) => {}, }; module.exports = setup; diff --git a/API/Backend/Shortener/setup.js b/API/Backend/Shortener/setup.js index 99890f50b..d7d248aeb 100644 --- a/API/Backend/Shortener/setup.js +++ b/API/Backend/Shortener/setup.js @@ -2,9 +2,9 @@ const router = require("./routes/shortener"); let setup = { //Once the app initializes - onceInit: s => { + onceInit: (s) => { s.app.use( - "/API/shortener", + s.ROOT_PATH + 
"/API/shortener", s.ensureUser(), s.checkHeadersCodeInjection, s.setContentType, @@ -12,9 +12,9 @@ let setup = { ); }, //Once the server starts - onceStarted: s => {}, + onceStarted: (s) => {}, //Once all tables sync - onceSynced: s => {} + onceSynced: (s) => {}, }; module.exports = setup; diff --git a/API/Backend/Users/models/user.js b/API/Backend/Users/models/user.js index 75902637b..81df9ff98 100644 --- a/API/Backend/Users/models/user.js +++ b/API/Backend/Users/models/user.js @@ -12,7 +12,7 @@ var User = sequelize.define( username: { type: Sequelize.STRING, unique: true, - allowNull: false + allowNull: false, }, email: { type: Sequelize.STRING, @@ -20,52 +20,52 @@ var User = sequelize.define( allowNull: true, validate: { isEmail: true, - isUnique: function(value, next) { + isUnique: function (value, next) { var self = this; User.findOne({ where: { email: value } }) - .then(function(user) { + .then(function (user) { // reject if a different user wants to use the same email if (user && self.id !== user.id) { return next("User exists!"); } return next(); }) - .catch(function(err) { + .catch(function (err) { return next(err); }); - } - } + }, + }, }, password: { type: Sequelize.STRING, - allowNull: false + allowNull: false, }, permission: { type: Sequelize.ENUM, values: ["000", "001", "010", "011", "100", "101", "110", "111"], allowNull: false, - defaultValue: "000" + defaultValue: "000", }, token: { type: Sequelize.DataTypes.STRING(2048), - allowNull: true - } + allowNull: true, + }, }, { hooks: { - beforeCreate: user => { + beforeCreate: (user) => { const salt = bcrypt.genSaltSync(); user.password = bcrypt.hashSync(user.password, salt); - } - } + }, + }, }, { - timestamps: true + timestamps: true, } ); // Instance Method for validating user's password -User.prototype.validPassword = function(password, user) { +User.prototype.validPassword = function (password, user) { return bcrypt.compareSync(password, user.password); }; diff --git 
a/API/Backend/Users/routes/users.js b/API/Backend/Users/routes/users.js index 6f7451f2c..cfae22392 100644 --- a/API/Backend/Users/routes/users.js +++ b/API/Backend/Users/routes/users.js @@ -85,51 +85,56 @@ router.post("/signup", function (req, res, next) { if (!user) { User.create(newUser) .then((created) => { - // Save the user's info in the session - req.session.user = created.username; - req.session.uid = created.id; - req.session.token = crypto.randomBytes(128).toString("hex"); + clearLoginSession(req); + req.session.regenerate((err) => { + // Save the user's info in the session + req.session.user = created.username; + req.session.uid = created.id; + req.session.token = crypto.randomBytes(128).toString("hex"); + req.session.permission = created.permission; - User.update( - { - token: req.session.token, - }, - { - where: { - id: created.id, - username: created.username, - }, - } - ) - .then(() => { - logger( - "info", - req.body.username + " signed up.", - req.originalUrl, - req - ); - res.send({ - status: "success", - username: created.username, + User.update( + { token: req.session.token, - groups: getUserGroups(created.username, req.leadGroupName), - }); - return null; - }) - .catch((err) => { - logger( - "error", - "Only partially signed up.", - req.originalUrl, - req, - err - ); - res.send({ - status: "failure", - message: "Only partially signed up. Try logging in.", + }, + { + where: { + id: created.id, + username: created.username, + }, + } + ) + .then(() => { + logger( + "info", + req.body.username + " signed up.", + req.originalUrl, + req + ); + res.send({ + status: "success", + username: created.username, + token: req.session.token, + groups: getUserGroups(created.username, req.leadGroupName), + }); + return null; + }) + .catch((err) => { + logger( + "error", + "Only partially signed up.", + req.originalUrl, + req, + err + ); + res.send({ + status: "failure", + message: "Only partially signed up. 
Try logging in.", + }); + return null; }); - return null; - }); + return null; + }); return null; }) .catch((err) => { @@ -153,104 +158,118 @@ router.post("/signup", function (req, res, next) { * User login */ router.post("/login", function (req, res) { - let MMGISUser = req.cookies.MMGISUser - ? JSON.parse(req.cookies.MMGISUser) - : false; - let username = req.body.username || (MMGISUser ? MMGISUser.username : null); + clearLoginSession(req); - if (username == null) { - res.send({ status: "failure", message: "No username provided." }); - return; - } + req.session.regenerate((err) => { + let MMGISUser = req.cookies.MMGISUser + ? JSON.parse(req.cookies.MMGISUser) + : false; + let username = req.body.username || (MMGISUser ? MMGISUser.username : null); - User.findOne({ - where: { - username: username, - }, - attributes: ["id", "username", "email", "password", "permission"], - }) - .then((user) => { - if (!user) { - res.send({ - status: "failure", - message: "Invalid username or password.", - }); - } else { - function pass(err, result, again) { - if (result) { - // Save the user's info in the session - req.session.user = user.username; - req.session.uid = user.id; - req.session.token = crypto.randomBytes(128).toString("hex"); - req.session.permission = user.permission; + if (username == null) { + res.send({ status: "failure", message: "No username provided." 
}); + return; + } - User.update( - { - token: req.session.token, - }, - { - where: { - id: user.id, - username: user.username, - }, - } - ) - .then(() => { - res.send({ - status: "success", - username: user.username, + User.findOne({ + where: { + username: username, + }, + attributes: ["id", "username", "email", "password", "permission"], + }) + .then((user) => { + if (!user) { + res.send({ + status: "failure", + message: "Invalid username or password.", + }); + } else { + function pass(err, result, again) { + if (result) { + // Save the user's info in the session + req.session.user = user.username; + req.session.uid = user.id; + req.session.token = crypto.randomBytes(128).toString("hex"); + req.session.permission = user.permission; + + User.update( + { token: req.session.token, - groups: getUserGroups(user.username, req.leadGroupName), + }, + { + where: { + id: user.id, + username: user.username, + }, + } + ) + .then(() => { + req.session.save(() => { + res.send({ + status: "success", + username: user.username, + token: req.session.token, + groups: getUserGroups(user.username, req.leadGroupName), + additional: + process.env.THIRD_PARTY_COOKIES === "true" + ? `; SameSite=None;${ + process.env.NODE_ENV === "production" + ? " Secure" + : "" + }` + : "", + }); + }); + return null; + }) + .catch((err) => { + res.send({ status: "failure", message: "Login failed." }); + return null; }); + return null; + } else { + res.send({ + status: "failure", + message: "Invalid username or password.", + }); + return null; + } + } + + if (req.body.useToken && MMGISUser) { + if (MMGISUser.token == null) { + res.send({ status: "failure", message: "Bad token." }); + return null; + } + User.findOne({ + where: { + username: MMGISUser.username, + token: MMGISUser.token, + }, + }) + .then((user) => { + if (!user) { + res.send({ status: "failure", message: "Bad token." 
}); + } else { + pass(null, true, true); + } return null; }) .catch((err) => { - res.send({ status: "failure", message: "Login failed." }); - return null; + res.send({ status: "failure", message: "Bad token." }); }); return null; } else { - res.send({ - status: "failure", - message: "Invalid username or password.", - }); - return null; - } - } - - if (req.body.useToken && MMGISUser) { - if (MMGISUser.token == null) { - res.send({ status: "failure", message: "Bad token." }); - return null; + bcrypt.compare(req.body.password, user.password, pass); } - User.findOne({ - where: { - username: MMGISUser.username, - token: MMGISUser.token, - }, - }) - .then((user) => { - if (!user) { - res.send({ status: "failure", message: "Bad token." }); - } else { - pass(null, true, true); - } - return null; - }) - .catch((err) => { - res.send({ status: "failure", message: "Bad token." }); - }); return null; - } else { - bcrypt.compare(req.body.password, user.password, pass); } return null; - } - return null; - }) - .catch((err) => { - res.send({ status: "failure", message: "Bad token." }); - }); + }) + .catch((err) => { + res.send({ status: "failure", message: "Bad token." }); + }); + }); return null; }); @@ -259,10 +278,7 @@ router.post("/logout", function (req, res) { ? JSON.parse(req.cookies.MMGISUser) : false; - req.session.user = "guest"; - req.session.uid = null; - req.session.token = null; - req.session.permission = null; + clearLoginSession(req); if (MMGISUser == false) { res.send({ status: "failure", message: "No user." 
}); @@ -279,7 +295,11 @@ router.post("/logout", function (req, res) { } ) .then(() => { - res.send({ status: "success" }); + req.session.save(() => { + req.session.regenerate((err) => { + res.send({ status: "success" }); + }); + }); return null; }) .catch((err) => { @@ -290,6 +310,30 @@ router.post("/logout", function (req, res) { } }); +router.get("/logged_in", function (req, res) { + if ( + typeof req.session.permission === "string" && + req.session.permission[req.session.permission.length - 1] === "1" + ) + res.send({ + status: "success", + message: `'${req.session.user}' is logged in to this session.`, + body: { + loggedIn: true, + user: req.session.user, + }, + }); + else + res.send({ + status: "failure", + message: `No user is logged in to this session.`, + body: { + loggedIn: false, + user: null, + }, + }); +}); + function getUserGroups(user, leadGroupName) { let leads = process.env.LEADS ? JSON.parse(process.env.LEADS) : []; let groups = {}; @@ -299,4 +343,11 @@ function getUserGroups(user, leadGroupName) { return Object.keys(groups); } +function clearLoginSession(req) { + req.session.user = "guest"; + req.session.uid = null; + req.session.token = null; + req.session.permission = null; +} + module.exports = router; diff --git a/API/Backend/Users/setup.js b/API/Backend/Users/setup.js index 761549c34..19167d992 100644 --- a/API/Backend/Users/setup.js +++ b/API/Backend/Users/setup.js @@ -3,7 +3,7 @@ const router = require("./routes/users"); let setup = { //Once the app initializes onceInit: (s) => { - s.app.use("/API/users", s.checkHeadersCodeInjection, router); + s.app.use(s.ROOT_PATH + "/API/users", s.checkHeadersCodeInjection, router); }, //Once the server starts onceStarted: (s) => {}, diff --git a/API/Backend/Utils/routes/utils.js b/API/Backend/Utils/routes/utils.js index 04051eab1..065812c34 100644 --- a/API/Backend/Utils/routes/utils.js +++ b/API/Backend/Utils/routes/utils.js @@ -87,7 +87,8 @@ router.get("/queryTilesetTimes", function (req, res, next) { 
const split = name.split("Z-"); let t = split.shift(); const n = split.join(""); - t = t.replace(/_/g, ":") + "Z"; + t = t.replace(/_/g, ":"); + if (t[t.length - 1] !== "Z") t += "Z"; dirStore[relUrlSplit[0]].dirs.push({ t: t, n: n }); }); diff --git a/API/Backend/Utils/setup.js b/API/Backend/Utils/setup.js index 8cbd4281b..344d9d1cb 100644 --- a/API/Backend/Utils/setup.js +++ b/API/Backend/Utils/setup.js @@ -2,7 +2,12 @@ const router = require("./routes/utils"); let setup = { //Once the app initializes onceInit: (s) => { - s.app.use("/API/utils", s.ensureUser(), s.setContentType, router); + s.app.use( + s.ROOT_PATH + "/API/utils", + s.ensureUser(), + s.setContentType, + router + ); }, //Once the server starts onceStarted: (s) => {}, diff --git a/API/Backend/Webhooks/processes/triggerwebhooks.js b/API/Backend/Webhooks/processes/triggerwebhooks.js index 152aa630c..77135b45d 100644 --- a/API/Backend/Webhooks/processes/triggerwebhooks.js +++ b/API/Backend/Webhooks/processes/triggerwebhooks.js @@ -80,16 +80,18 @@ function drawFileUpdate(webhook, payload) { }, }; - var response = {}; + const response = {}; response.send = function (res) { - var webhookHeader = JSON.parse(webhook.header); - var webhookBody = JSON.parse(webhook.body); - var file_name = res.body?.file[0]?.file_name || null; - var geojson = res.body.geojson; + const webhookHeader = JSON.parse(webhook.header); + const webhookBody = JSON.parse(webhook.body); + const file_name = res.body?.file[0]?.file_name || null; + const file_owner = res.body?.file[0]?.file_owner || null; + const geojson = res.body.geojson; const injectableVariables = { file_id, file_name, + file_owner, geojson, }; @@ -97,7 +99,7 @@ function drawFileUpdate(webhook, payload) { buildBody(webhookBody, injectableVariables); // Build the url - var url = buildUrl(webhook.url, injectableVariables); + const url = buildUrl(webhook.url, injectableVariables); // Push to the remote webhook pushToRemote(url, webhook.type, webhookHeader, webhookBody); @@ 
-128,8 +130,8 @@ function buildBody(webhookBody, injectableVariables) { } function drawFileDelete(webhook, payload) { - var file_id = payload.id; - var data = { + const file_id = payload.id; + const data = { body: { id: payload.id, quick_published: false, @@ -141,23 +143,29 @@ function drawFileDelete(webhook, payload) { }, }; - var response = {}; + const response = {}; response.send = function (res) { - var webhookHeader = JSON.parse(webhook.header); - var geojson = res.body.geojson; - var file_name = res.body?.file[0]?.file_name || null; + const webhookHeader = JSON.parse(webhook.header); + const webhookBody = JSON.parse(webhook.body); + const geojson = res.body.geojson; + const file_name = res.body?.file[0]?.file_name || null; + const file_owner = res.body?.file[0]?.file_owner || null; const injectableVariables = { file_id, file_name, + file_owner, geojson, }; + // Build the body + buildBody(webhookBody, injectableVariables); + // Build the url - var url = buildUrl(webhook.url, injectableVariables); + const url = buildUrl(webhook.url, injectableVariables); // Push to the remote webhook - pushToRemote(url, webhook.type, webhookHeader, {}); + pushToRemote(url, webhook.type, webhookHeader, webhookBody); }; getfile(data, response); diff --git a/API/Backend/Webhooks/setup.js b/API/Backend/Webhooks/setup.js index 54a8a3c04..3991e0153 100644 --- a/API/Backend/Webhooks/setup.js +++ b/API/Backend/Webhooks/setup.js @@ -6,10 +6,14 @@ const routerTestWebhooks = require("./routes/testwebhooks"); let setup = { //Once the app initializes onceInit: (s) => { - s.app.use("/API/webhooks", s.checkHeadersCodeInjection, routerWebhooks); + s.app.use( + s.ROOT_PATH + "/API/webhooks", + s.checkHeadersCodeInjection, + routerWebhooks + ); if (process.env.NODE_ENV === "development") { s.app.use( - "/API/testwebhooks", + s.ROOT_PATH + "/API/testwebhooks", s.checkHeadersCodeInjection, routerTestWebhooks ); diff --git a/API/Backend/setupTemplate.js b/API/Backend/setupTemplate.js index 
4b9a60904..59f567a5d 100644 --- a/API/Backend/setupTemplate.js +++ b/API/Backend/setupTemplate.js @@ -2,12 +2,12 @@ const router = require("./routes/your_router"); let setup = { //Once the app initializes - onceInit: s => {}, + onceInit: (s) => {}, //Once the server starts - onceStarted: s => {}, + onceStarted: (s) => {}, //Once all tables sync - onceSynced: s => {}, - envs: [{ name: "ENV_VAR", description: "", required: false, private: false }] + onceSynced: (s) => {}, + envs: [{ name: "ENV_VAR", description: "", required: false, private: false }], }; module.exports = setup; diff --git a/API/connection.js b/API/connection.js index a5fd9aeb1..b2f1522c8 100644 --- a/API/connection.js +++ b/API/connection.js @@ -15,8 +15,8 @@ const sequelize = new Sequelize( max: 10, min: 0, acquire: 30000, - idle: 10000 - } + idle: 10000, + }, } ); @@ -30,7 +30,7 @@ sequelize "connection" ); }) - .catch(err => { + .catch((err) => { logger( "infrastructure_error", "Unable to connect to the database.", diff --git a/API/templates/config_template.js b/API/templates/config_template.js index 0d841a7c2..5552331d2 100644 --- a/API/templates/config_template.js +++ b/API/templates/config_template.js @@ -24,8 +24,13 @@ module.exports = { look: { pagename: "MMGIS", minimalist: false, + topbar: true, + toolbar: true, + scalebar: true, + coordinates: true, zoomcontrol: false, graticule: false, + miscellaneous: true, bodycolor: "", topbarcolor: "", toolbarcolor: "", @@ -39,7 +44,7 @@ module.exports = { helpurl: "", }, panels: ["viewer", "map", "globe"], - time:["enabled"], + time: ["enabled"], tools: [ { name: "Layers", diff --git a/API/utils.js b/API/utils.js index a9898c52b..4cd526976 100644 --- a/API/utils.js +++ b/API/utils.js @@ -59,13 +59,18 @@ const Utils = { return true; }, traverseLayers: function (layers, onLayer) { + let removedUUIDs = []; depthTraversal(layers, 0, []); function depthTraversal(node, depth, path) { for (var i = 0; i < node.length; i++) { const ret = onLayer(node[i], 
path, i); - if (ret === "remove") { - node.splice(i, 1); + const removed = node.splice(i, 1); + if (removed.length > 0) { + // Find and store the UUIDs of the sublayers of the removed layer + const removedSubLayerUUIDs = Utils.findSubLayerUUIDs(removed); + removedUUIDs = removedUUIDs.concat(removedSubLayerUUIDs); + } i--; } //Add other feature information while we're at it @@ -82,6 +87,41 @@ const Utils = { } } } + + // Returns array of removed layer UUIDs, including all removed sublayer UUIDs + return removedUUIDs; + }, + findSubLayerUUIDs: function (layers) { + const UUIDs = []; + Utils.traverseLayers(layers, (layer) => { + UUIDs.push({ name: layer.name, uuid: layer.uuid }); + return; + }); + return UUIDs; + }, + // From https://javascript.plainenglish.io/4-ways-to-compare-objects-in-javascript-97fe9b2a949c + isEqual(obj1, obj2, isSimple) { + if (isSimple) { + return JSON.stringify(obj1) === JSON.stringify(obj2); + } else { + let props1 = Object.getOwnPropertyNames(obj1); + let props2 = Object.getOwnPropertyNames(obj2); + if (props1.length != props2.length) { + return false; + } + for (let i = 0; i < props1.length; i++) { + let prop = props1[i]; + let bothAreObjects = + typeof obj1[prop] === "object" && typeof obj2[prop] === "object"; + if ( + (!bothAreObjects && obj1[prop] !== obj2[prop]) || + (bothAreObjects && !Utils.isEqual(obj1[prop], obj2[prop])) + ) { + return false; + } + } + return true; + } }, }; diff --git a/API/websocket.js b/API/websocket.js index 103e741e2..55a3cb644 100644 --- a/API/websocket.js +++ b/API/websocket.js @@ -1,66 +1,71 @@ -const WebSocket = require('isomorphic-ws'); +const WebSocket = require("isomorphic-ws"); const logger = require("./logger"); const websocket = { - wss: null, - init: function (server) { - logger( - "info", - "Trying to init websocket...", - "websocket", - null, - "" - ); + wss: null, + init: function (server) { + logger("info", "Trying to init websocket...", "websocket", null, ""); - if (!server === null) { - 
logger( - "websocket_error", - "server parameter not defined.", - "error", - null, - "" - ); - return null - } + if (!server === null) { + logger( + "websocket_error", + "server parameter not defined.", + "error", + null, + "" + ); + return null; + } - logger( - "info", - "Server is valid so still trying to init websocket...", - "websocket", - null, - "" - ); + logger( + "info", + "Server is valid so still trying to init websocket...", + "websocket", + null, + "" + ); - const wss = new WebSocket.Server({ server }); - websocket.wss = wss; + const wss = new WebSocket.Server({ noServer: true }); + websocket.wss = wss; - // Broadcast to all clients - wss.broadcast = function broadcast(data, isBinary) { - wss.clients.forEach(client => { - if (client.readyState === WebSocket.OPEN && data !== undefined) { - client.send(data, { binary: isBinary }); - } - }); - }; + // Broadcast to all clients + wss.broadcast = function broadcast(data, isBinary) { + wss.clients.forEach((client) => { + if (client.readyState === WebSocket.OPEN && data !== undefined) { + client.send(data, { binary: isBinary }); + } + }); + }; - wss.on('connection', (ws) => { - ws.on('message', (message) => { - wss.broadcast(message); - }); - }); + wss.on("connection", (ws) => { + ws.on("message", (message) => { + wss.broadcast(message); + }); + }); - wss.on('close', () => { - logger( - "info", - "Websocket disconnected...", - "websocket", - null, - "" - ); - websocket.wss = null - }); - } -} + server.on("upgrade", function upgrade(request, socket, head) { + const pathname = request.url; + try { + if ( + pathname === + (process.env.WEBSOCKET_ROOT_PATH || process.env.ROOT_PATH || "") + "/" + ) { + wss.handleUpgrade(request, socket, head, function done(ws) { + wss.emit("connection", ws, request); + }); + } else { + socket.destroy(); + } + } catch (err) { + socket.destroy(); + } + }); + wss.on("close", () => { + logger("info", "Websocket disconnected...", "websocket", null, ""); + websocket.wss = null; + }); + }, 
+}; module.exports = { websocket }; diff --git a/CHANGELOG.md b/CHANGELOG.md index 73fe2eae5..fb130d611 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,479 +1,13 @@ -# MMGIS Changelog - -## 2.8.0 - -_Nov 14, 2022_ - -#### Summary - -#### Added - -- The Viewer Panel supports gltf models -- The configuration raw variable `markerAttachment.model` now has a `mtlProp` parameter for relative obj material files and can support multiple models per layer -- The DrawTool now supports drawing Circles and Rectangles -- The entire Configuration object has an API and can be updated via curl commands. See [ConfigureAPI](https://nasa-ammos.github.io/MMGIS/apis/configure) -- The MeasureTool now supports multiple DEMs -- Many additions to the `mmgisAPI`. See [JavaScriptAPI](https://nasa-ammos.github.io/MMGIS/apis/javascript) -- Adds an extended GeoJSON format to support per coordinate properties. See [Enhanced GeoJSON](https://nasa-ammos.github.io/MMGIS/configure/formats/enhanced-geojson) -- Deep Links now stores the layer order (if users rearranged them) -- The ability to define a primary coordinate system through a reworked [Coordinates Tab](https://nasa-ammos.github.io/MMGIS/configure/tabs/coordinates) -- DrawTool Layers can be added as regular layers by using a url of the form `api:drawn:` -- The ViewshedTool supports target heights -- The MeasureTool include line-of-sight -- Users can individually hide features through the InfoTool -- Layers can now be tagged and assigned a markdown description. Users can filter layers based on these fields as well. -- Added websockets that can notify users in real-time that a configuration has updated. 
Enabled with the env: `ENABLE_MMGIS_SOCKETS=true` -- Automatic labels can be rendered on features by configuring a raw variables `layerAttachments.labels` -- Added a `gdal2tiles_3.5.2.py` script to tile dem tiles with multi-processing support and tiling on top of an existing tileset -- The colorize Data Shader can now exclude up to three no data values -- Tilesets served from the MMGIS Missions directory now accept an optional `{t}` path directory to search and served tiles based on time -- The TimeUI and the bottom of the screen has been significantly reworked - -#### Changed - -- Increase GeoJSON math and export precision from `6` to `10` decimal places -- The DrawTool's tagging system has been expanded and it UI resembles folder structures -- Photosphere has better damping and rotates with "panning" directions. - -#### Fixed - -- Dragging a header in the LayersTool now drags the entire group -- Various fixes to how Time works (through the mmgisAPI, in parameterizing WMS layers, ...) -- Error if Data Layers had a space in their name -- Various mmgisAPI function fixes - ---- - -## 2.7.0 - -_Jun 9, 2022_ - -#### Summary - -This release adds geologic mapping to the DrawTool, layer reordering to the LayersTool and 3D annotations to the Globe. 
- -#### Added - -- [FGDC](https://ngmdb.usgs.gov/fgdc_gds/geolsymstd/download.php) Geologic patterns, linework and symbols in the DrawTool -- Annotation in the Globe View for both standard layers and drawn layers -- Ability for users to reorder layers in the LayersTool -- The Globe has 3d controls once again -- Visibility ranges can be added per feature with `minZoom` and `maxZoom` attributes under a feature's `properties.style` -- A true documentation site at https://nasa-ammos.github.io/MMGIS/ - -#### Changed - -- The vector layer `Visibility Cutoff` configuration has been deprecated (though it still works) in favor of `Minimum Zoom` and `Maximum Zoom` -- Improved the screenshot function -- Layer color indicators are more muted -- Default color scheme is a lighter black -- Lithosphere 1.3.0 => 1.5.1 - [See LithoSphere Releases](https://github.com/NASA-AMMOS/LithoSphere/releases) - -#### Fixed - -- Issue where `onLoaded` would fire multiple times -- Default MMGIS login fields are now removed from the DOM when not in use (merely hiding caused some annoyances with password extensions) -- Issue where deep link didn't position the camera in the Globe correctly -- Issue where some vector points the use DivIcons were not clickable - ---- - -## 2.6.0 - -_Mar 16, 2022_ - -#### New Requirements - -- Node.js >= v14.9.0 - -#### Summary - -This release adds a webhook manager to the configure page and improves documentation, the mmgisAPI, projection support, as well as synchronicity between the Map and Globe. - -#### Added - -- Configurable webhook manager. 
-- Access to a settings modal in the bottom left toolbar to toggle various UI elements' visibilities as well as the radius of tiles to query for the 3D Globe -- Raster effects (brightness, contrast, saturation, blend-mode) now apply in 3D as well -- Controlled layers can now utilized sublayers/marker-attachments -- Marker attachments, such as uncertainty ellipses, properly work for any projection -- 3D uncertainty ellipses -- Documentation for using remote virtual layers via GDAL -- PUBLIC_URL can be specified at build now in the Dockerfile -- mmgisAPI functions apply to the 3D Globe too now -- mmgisAPI can trim LineString features at the coordinates level - -#### Changed - -- LithoSphere 1.1.0 => 1.3.0 - [See LithoSphere Releases](https://github.com/NASA-AMMOS/LithoSphere/releases) -- Users can now pan the map while in the DrawTool's draw mode without placing a point -- Time controlled layers can now default to the current time for initial queries - -#### Fixed - -- Some media paths in the /configure path not working when MMGIS is served under a subdomain with PUBLIC_URL - ---- - -## 2.5.0 - -_Jan 10, 2022_ - -#### Summary - -This release contains the IsochroneTool, revives the Model layer type and includes a new Query layer type. Each vector layer can now be filtered by the user through the LayersTool, leads in the DrawTool can now draw and publish arrows and annotations, and the MeasureTool finally supports continuous elevation profiles. - -#### Added - -- Isochrone Tool! -- Model layer type! -- Query layer type! -- User filterable layers! 
-- More mmgisAPI functions -- Deep linking 'centerPin' parameter -- DrawTool lead Map file -- DrawTool text rotation -- Annotation and Arrows are now supported in regular (non-DrawTool) geojson -- Configurable bearings, uncertainty ellipses, models and underlaid images for vector points -- MeasureTool now supports a continuous profile -- MeasureTool csv export includes 3D distance as well -- LayersTool support sublayer visibility toggles within a layer's settings menu -- Python3 version of gdal2customtiles.py -- More Coordinates configurations -- Option in great_circle_calculator to calculate distance between points with Vincenty's formulae -- CHANGELOG.md -- Raw Variables Link has a new 'replace' section for modifying property values before injecting into a url - -#### Changed - -- LithoSphere 1.0.1 => 1.1.0 - [See LithoSphere Releases](https://github.com/NASA-AMMOS/LithoSphere/releases) -- LayersTool, LegendTool and InfoTool panels are wider -- The MMGIS M logo is now an svg -- bulk_tiles.py's colormap is now optional -- DrawTool's compile includes an inward buffer to allow for smaller drawn features to pass checks -- InfoTool now lists all intersected polygons of a layer under a mouse click - -#### Fixed - -- Viewsheds play nicely with polar maps -- Various improvements to the top search bar -- Legend items wrap to new line instead of extending off screen -- `colors` package fix -- `globeLon` deep link not working -- Uses `asHTML` for IdentifierTool again -- `apt-get update` in Dockerfile now uses -y (yes to all) flag - -#### Removed - -- Excess Globe feature highlighting - -## 2.4.0 - -_Aug 06, 2021_ - -#### Summary - -This release adds in the Viewshed Tool, time enabled layers, [LithoSphere](https://github.com/NASA-AMMOS/LithoSphere), WMS support, data layers, a JavaScript API, and more. - -#### Added - -- The Viewshed Tool! -- Time enabled layers, configurations and a time UI component. -- Full support for WMS layers and projections in 2D and 3D. 
-- Data layer colorize shader enabling dynamic rendering of data. -- An extensive window.mmgisAPI for interacting with MMGIS through an iframe. -- Configuration for point marker shape. -- Support for serving MMGIS at a subpath with the PUBLIC_URL environment variable. -- bulk_tiles.py auxiliary script. -- Features can be dehighlighted by clicking off on the map. -- Measure Tool supports measurements in kilometers. -- Ability to type in and go to a coordinate. -- Elevation values on mouse over. -- Configurable coordinates. -- Draw Tool features behave like regular layer features when the Draw Tool is closed. - -#### Changed - -- The Globe has been refactored and made standalone in the npm library LithoSphere. -- The Waypoint Kind now uses a top-down image of Perseverance. -- Migrated from Python2 to Python3. - -#### Fixed - -- Documentation uses only relative links for resources now. -- Issue with auth=none not working. -- Draw Tool drawings now work at the meter level. -- Draw Tool drawings now properly respect 0 valued styles. -- Data layer names now support spaces. - -#### Removed - -- All PHP dependencies. - ---- - -## 2.3.1 - -_Apr 22, 2021_ - -#### Summary - -A point release to address bug fixes. - -#### Fixed - -- WMS layers now work for full polar projections -- Raster layers obey order even if they're initially off -- Draw Tool truly accepts .json files - ---- - -## 2.3.0 - -_Apr 14, 2021_ - -#### Summary - -The Draw Tool gets its own tag filtering system. The Measure Tool now uses great arcs and is way more accurate and the map now fully supports WMS layers! - -#### Migration Details - -- The DrawTool tagging system change ideally needs more space in the `file_description` column. 
To increase it and not hit a tag or file description limit in drawing files, back-up the MMGIS database and run the SQL command: - -``` -ALTER TABLE user_files ALTER COLUMN file_description TYPE VARCHAR(10000); -``` - -#### Added - -- Draw Tool files can now be search for by user defined tags/keyword -- Draw Tool file options modal has been upgraded -- Admins can pin preferred tags -- Measure Tool now uses great arcs to compute measurements as well as for rendering lines -- A docker-compose.yml -- Fully functional WMS Map layers - -#### Removed - -#### Changed - -- Draw Tool requires a user to enter a file name before creating a file. (Instead of adding one as "New File") -- Draw Tool now accepts uploads of .json geojson files. (From just .geojson and .shp) -- Tools plugins are captured at build time! (You do not need to run `npm start` before building anymore) -- Info Tool contents are condensed - -#### Fixed - -- Screenshot widget no longer captures the faint bottom bar in its images -- Deep links to selected feature can now activate their info in the Info Tool -- AUTH=local allows users to sign in again -- Measure Tool profile download data is now accurate - ---- - -## 2.0.0 - -_Jan 14, 2021_ - -#### Migration Details - -- The environment variable `ALLOW_EMBED` has been replaced with `FRAME_ANCESTORS` -- `npm install` is only needed in the root directory and not in the /API directory any more -- Instead of `npm start`, use `npm run build` and then afterwards `npm run start:prod` to run the application. - _You will still need to run `npm start` before building the first time_ - -#### Added - -- Webpack! -- Production level builds -- Babel -- React support -- Icons as markers -- Configurable vector highlight color -- Graticules -- Configure page help buttons to docs - -#### Removed - -- Require.js -- Unused libraries, tools and code -- Swap widget -- FORCE_CONFIGCONFIG environment variable removed - -#### Changed - -- Info Tool upgraded! -- Measure Tool upgraded! 
-- Top bar search -- The environment variable ALLOW_EMBED has been replaced with FRAME_ANCESTORS -- MMGIS2 splash screen -- Various small UI changes -- Improved configure look tab -- Development logging is friendlier - -#### Fixed - -- Configure save warns of bad json -- Removed unused configure globe projection option -- Configure look tab colors work properly - ---- - -## 1.3.5 - -_Oct 19, 2020_ - -#### Added - -- ALLOW_EMBED environment variable -- DISABLE_LINK_SHORTENER environment variable - -#### Fixed - -- Tweaked various UI elements -- The Configure page Look tab now correctly reads in any existing `logourl` and `helpurl` -- Configure page now warns of invalid raw variable JSON -- Raw variable `info` values don't break when there's no text to replace in them -- Configuration endpoints no longer assume SQL output is ordered - ---- - -## 1.3.4 - -_Oct 06, 2020_ - -#### Added: - -- WMS tile support for the Map (does not yet work on the Globe). -- `AUTH` env can be set to "off" to disable user login entirely. -- gdal2customtiles.py for tiling datasets with custom projections. - ---- - -## 1.3.3 - -_Aug 07, 2020_ - -#### Added: - -- Example docker-compose - -#### Fixed: - -- 3D Globe was rendering layers in depth order instead of breadth order -- Draw Tool publishing sometimes undid the last Lead Map edits -- Draw Tool styling options sometimes hidden in FireFox - -#### Changed: - -- New short URLs are one character longer -- Draw Tool publish overlap tolerance increased - ---- - -## 1.3.2 - -_Jul 06, 2020_ - -#### Fixed - -- Draw Tool history sql commands assumed rows would be returned in order which could completely break the tool. -- Draw Tool layers would get stuck due to automatic toggling when copying to files or turning the file you're drawing in off. -- The waypoint image links on the Test mission have been fixed. - ---- - -## 1.3.1 - -_May 13, 2020_ - -#### Fixed - -- Additional authorization headers prevented access to the configure login page. 
- ---- - -## 1.3.0 - -_Apr 16, 2020_ - -#### New Requirements - -- Node.js >= v10.10 - -#### New Features - -- Export vector layers as geojson from the Layers Tool -- Info Tool uses a JSON viewer -- Users can now split and merge features in the Draw Tool -- Rich application logging -- ENVs that end with \_HOST are pinged at start to test connections -- Ability to configure deep links to other sites based on properties of a selected feature -- Users can upload much larger files in the Draw Tool -- Missions can be configured to use any map projection -- Globe level of detail -- Globe space themed skysphere -- Tools and Backends are included by scanning a directory for setup files instead of editing code -- The Legend Tool supports color scales -- CSV files can be uploaded as datasets and can be queried on feature click -- Early API tokens that allow .csvs to be uploaded programmatically -- An optional top bar with search functionality -- Configurable page name and logo -- On screen Globe controls -- Support both TMS and WMS tilesets -- Layer Kinds for specialized interactions -- Better documentation in /docs -- Resources cache properly - -#### Fixed - -- All tables are properly created with just one start -- Failed layers no longer crash the application -- Infinite login bug -- Vectors disappearing with string weights -- Some endpoint calls began with home slashes that broke certain setups - ---- - -## 1.2 - -_Nov 06, 2019_ - -#### Added - -- Limit access to the entire site with .env's `AUTH=local` -- Vector Tile Layers -- Store features within Postgres by uploading them with /configure's `Manage Geodatasets`. Point to them by setting the layer URL to `geodatasets:{name}`. Can serve both geojson and vector tiles. - ---- - -## 1.1.1 - -_Oct 25, 2019_ - -#### Fixed - -- Creating a new mission on the 'configure' page failed to make the appropriate mission directories (e.g. Layers). 
- ---- - -## 1.1 - -_Oct 02, 2019_ - -#### Summary - -MMGIS update with the Campaign Analysis Mapping and Planning (CAMP) tool. The software now runs fully in a node environment. Various other bug fixes and minor updates have been made to the code. - ---- - -## Open Source Release - -_Jun 06, 2019_ - -#### Summary - -This represents the initial release of the Multi-Mission Geographic Information System (MMGIS) software, developed under NASA-AMMOS. - -Dr. Fred J, Calef III & Tariq K. Soliman -NASA-JPL/Caltech +# Changelog + +All notable changes to this project will be documented in this file. + +The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), +and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). + +## [X.Y.Z](https://github.com/NASA-AMMOS//releases/tag/X.Y.Z) - 2023-MM-DD + +### Added + +- + diff --git a/CODE_OF_CONDUCT.md b/CODE_OF_CONDUCT.md new file mode 100644 index 000000000..211738d94 --- /dev/null +++ b/CODE_OF_CONDUCT.md @@ -0,0 +1,135 @@ + +# Code of Conduct + +## Our Pledge + +We as members, contributors, and leaders pledge to make participation in our +community a harassment-free experience for everyone, regardless of age, body +size, visible or invisible disability, ethnicity, sex characteristics, gender +identity and expression, level of experience, education, socio-economic status, +nationality, personal appearance, race, caste, color, religion, or sexual +identity and orientation. + +We pledge to act and interact in ways that contribute to an open, welcoming, +diverse, inclusive, and healthy community. 
+ +## Our Standards + +Examples of behavior that contributes to a positive environment for our +community include: + +* Demonstrating empathy and kindness toward other people +* Being respectful of differing opinions, viewpoints, and experiences +* Giving and gracefully accepting constructive feedback +* Accepting responsibility and apologizing to those affected by our mistakes, + and learning from the experience +* Focusing on what is best not just for us as individuals, but for the overall + community + +Examples of unacceptable behavior include: + +* The use of sexualized language or imagery, and sexual attention or advances of + any kind +* Trolling, insulting or derogatory comments, and personal or political attacks +* Public or private harassment +* Publishing others' private information, such as a physical or email address, + without their explicit permission +* Other conduct which could reasonably be considered inappropriate in a + professional setting + +## Enforcement Responsibilities + +Community leaders are responsible for clarifying and enforcing our standards of +acceptable behavior and will take appropriate and fair corrective action in +response to any behavior that they deem inappropriate, threatening, offensive, +or harmful. + +Community leaders have the right and responsibility to remove, edit, or reject +comments, commits, code, wiki edits, issues, and other contributions that are +not aligned to this Code of Conduct, and will communicate reasons for moderation +decisions when appropriate. + +## Scope + +This Code of Conduct applies within all community spaces, and also applies when +an individual is officially representing the community in public spaces. +Examples of representing our community include using an official e-mail address, +posting via an official social media account, or acting as an appointed +representative at an online or offline event. 
+ +## Enforcement + +Instances of abusive, harassing, or otherwise unacceptable behavior may be +reported to the community administrators responsible for enforcement at +[@NikJPL (Aniwat.Nik.Sheurpukdi@jpl.nasa.gov)](https://github.com/NikJPL), +[@snnjpl (Shawn.N.Nguyen@jpl.nasa.gov)](https://github.com/snnjpl) or +[@sharimayer — Configuration Manager](https://github.com/sharimayer). +All complaints will be reviewed and investigated promptly and fairly. + +All community leaders are obligated to respect the privacy and security of the +reporter of any incident. + +## Enforcement Guidelines + +Community leaders will follow these Community Impact Guidelines in determining +the consequences for any action they deem in violation of this Code of Conduct: + +### 1. Correction + +**Community Impact**: Use of inappropriate language or other behavior deemed +unprofessional or unwelcome in the community. + +**Consequence**: A private, written warning from community leaders, providing +clarity around the nature of the violation and an explanation of why the +behavior was inappropriate. A public apology may be requested. + +### 2. Warning + +**Community Impact**: A violation through a single incident or series of +actions. + +**Consequence**: A warning with consequences for continued behavior. No +interaction with the people involved, including unsolicited interaction with +those enforcing the Code of Conduct, for a specified period of time. This +includes avoiding interactions in community spaces as well as external channels +like social media. Violating these terms may lead to a temporary or permanent +ban. + +### 3. Temporary Ban + +**Community Impact**: A serious violation of community standards, including +sustained inappropriate behavior. + +**Consequence**: A temporary ban from any sort of interaction or public +communication with the community for a specified period of time. 
No public or +private interaction with the people involved, including unsolicited interaction +with those enforcing the Code of Conduct, is allowed during this period. +Violating these terms may lead to a permanent ban. + +### 4. Permanent Ban + +**Community Impact**: Demonstrating a pattern of violation of community +standards, including sustained inappropriate behavior, harassment of an +individual, or aggression toward or disparagement of classes of individuals. + +**Consequence**: A permanent ban from any sort of public interaction within the +community. + +## Attribution + +This Code of Conduct is adapted from the [Contributor Covenant][homepage], +version 2.1, available at +[https://www.contributor-covenant.org/version/2/1/code_of_conduct.html][v2.1]. + +Community Impact Guidelines were inspired by +[Mozilla's code of conduct enforcement ladder][Mozilla CoC]. + +For answers to common questions about this code of conduct, see the FAQ at +[https://www.contributor-covenant.org/faq][FAQ]. Translations are available at +[https://www.contributor-covenant.org/translations][translations]. + +[homepage]: https://www.contributor-covenant.org +[v2.1]: https://www.contributor-covenant.org/version/2/1/code_of_conduct.html +[Mozilla CoC]: https://github.com/mozilla/diversity +[FAQ]: https://www.contributor-covenant.org/faq +[translations]: https://www.contributor-covenant.org/translations diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md new file mode 100644 index 000000000..e2e802d13 --- /dev/null +++ b/CONTRIBUTING.md @@ -0,0 +1,350 @@ +# Contributing to MMGIS + +Thanks for taking the time to consider contributing! We very much appreciate your time and effort. This document outlines the many ways you can contribute to our project, and provides detailed guidance on best practices. We look forward to your help! + +## Prerequisites + +Before you begin contributing to our project, it'll be a good idea to ensure you've satisfied the below pre-requisites. 
+ +### Developer Certificate of Origin (DCO) + +To accept your contributions for any kind of patch, you'll want to: +1. Understand what a Developer Certificate of Origin is. See [this](https://wiki.linuxfoundation.org/dco) guide. +2. Read and agree to the [terms](https://developercertificate.org) of the Developer Certificate of Origin. +3. Remember to add your Sign-Off for each patch contribution you submit to our project via either: + 1. By using the `-s` flag if using Git. See [these](https://git-scm.com/docs/git-commit#Documentation/git-commit.txt--s) instructions. + 2. By including the following text in your patch contributions (i.e. pull requests) + ``` + Signed-off-by: Full Name + ``` + +Reviewers reviewing your patch will look for the sign-off before deciding to accept your contribution. + +### License + +Our project has our licensing terms, including rules governing redistribution, documented in our [LICENSE](LICENSE) file. Please take a look at that file and ensure you understand the terms. This will impact how we, or others, use your contributions. + +### Code of Conduct + +Our Code of Conduct helps facilitate a positive interaction environment for everyone involved with the team, and provides guidance on what to do if you experience problematic behavior. Read more in our [CODE_OF_CONDUCT.md](CODE_OF_CONDUCT.md), and make sure you agree to its terms. + +### Governance Model + +Our Governance model helps outline our project's decision making and roles-based expectations. Read more in our [GOVERNANCE.md](GOVERNANCE.md). + +### Developer Environment + +For patch contributions, see our [Developer Documentation](https://github.com/NASA-AMMOS/MMGIS/wiki) for more details on how to set up your local environment, to best contribute to our project. + +At a minimum however to submit patches (if using Git), you'll want to ensure you have: +1. An account on the Version Control System our project uses (i.e. GitHub). +2. The Version Control System client (i.e. 
Git) installed on your local machine. +3. The ability to edit, build, and test our project on your local machine. Again, see our [README.md](README.md) or detailed developer guide for more details + +### Communication Channels + +Before contributing changes to our project, it's a great idea to be familiar with our communication channels and to socialize your potential contributions to get feedback early. This will help give you context for your contributions, no matter their form. + +Our communication channels are: +- [Issue tracking system](https://github.com/NASA-AMMOS/MMGIS/issues) - a regularly monitored area to report issues with our software or propose changes +- [Discussion board](https://github.com/NASA-AMMOS/MMGIS/discussions) - a permanently archived place to hold conversations related to our project, and to propose as well as show+tell topics to the contributor team. This resource can be searched for old discussions. + +## Our Development Process + +Our project integrates contributions from many people, and so we'd like to outline a process you can use to visualize how your contributions may be integrated if you provide something. + +```mermaid +flowchart TD + repo_proj[(Our Repository)]-->|Fork|repo_fork[(Your Forked Repository)] + repo_fork-->|Make|patch(Your Changes) + patch-->|Submit|pr(Pull Request) + pr==>|Approved|repo_proj + pr-->|Changes Requested|repo_fork +``` + +### Fork our Repository + +Forking our repository, as opposed to directly committing to a branch is the preferred way to propose changes. + +See [this GitHub guide](https://docs.github.com/en/get-started/quickstart/fork-a-repo) on forking for information specific to GitHub.com + +#### Find or File an Issue + +Make sure people are aware you're working on a patch! Check out our [issue tracking system](https://github.com/NASA-AMMOS/MMGIS/issues) and find an open issue you'd like to work against, or alternatively file a new issue and mention you're working on a patch. 
+ +#### Choose the Right Branch to Fork + +Our project typically has the following branches available, make sure to fork either the default branch or a branch someone else already tagged with a particular issue ticket you're working with. +- `develop` — default branch, used for new code development +- `main` — release branch + +### Make your Modifications + +Within your local development environment, this is the stage at which you'll propose your changes, and commit those changes back to version control. See the [README.md](README.md) or development guide for more specifics on what you'll need as prerequisites to setup your local development environment. + +#### Commit Messages + +Commit messages to version control should reference a ticket in their title / summary line: + +``` +Issue #248 - Show an example commit message title +``` + +This makes sure that tickets are updated on GitHub with references to commits that are related to them. + +Commits should always be atomic. Keep solutions isolated whenever possible. Filler commits such as "clean up white space" or "fix typo" should be merged together before making a pull request, and significant sub-feature branches should be [rebased](https://www.youtube.com/results?search_query=git+rebase) to preserve commit history. Please ensure your commit history is clean and meaningful! + +Additionally, remember to "Sign-Off" on your commits to align with our [Developer Certificate of Origin (DCO) policy](#developer-certificate-of-origin-dco). + +### Submit a Pull Request + +Pull requests are the core way our project will receive your patch contributions. Navigate to your branch on your own fork within the version control system, and submit a pull request or submit the patch text to our project. + +Please make sure to provide a meaningful text description to your pull requests, whenever submitted. Our pull-request template will be auto-generated for you when you create your pull-request. 
See the template [here](.github/PULL_REQUEST_TEMPLATE.md). + +**Working on your first Pull Request?** See guide: [How to Contribute to an Open Source Project on GitHub](https://kcd.im/pull-request) + +### Reviewing your Pull Request + +Reviewing pull-requests, or any kinds of proposed patch changes, is an art. That being said, we follow the following best practices: +- **Intent** - is the purpose of your pull-request clearly stated? +- **Solution** - is your pull-request doing what you want it to? +- **Correctness** - is your pull-request doing what you want it to *correctly*? +- **Small Patches** - is your patch of a level of complexity and brevity that it can actually be reviewed by a human being? Or does it involve too much content for one pull request? +- **Coding best practices** - are you following best practices in the coding / contribution language being used? +- **Readability** - is your patch readable, and ultimately maintainable, by others? +- **Reproducibility** - is your patch reproducible by others? +- **Tests** - do you have or have conducted meaningful tests? + +## Ways to Contribute + +### ⚠️ Issue Tickets + +> *Do you like to talk about new features, changes, requests?* + +Issue tickets are a very simple way to get involved in our project. It also helps new contributors get an understanding of the project more comprehensively. This is a great place to get started with the project if you're not sure where to start. + +See our list of issues at: https://github.com/NASA-AMMOS/MMGIS/issues + +#### Cleaning up Duplicate Issues + +Often we receive duplicate issues that can confuse project members on *which* issue ticket to hold conversations upon. + +Here's how you can help: +1. Scan the list of *open* issue tickets for duplicate titles, or internal wording +2. If you find duplicates, copy / paste the below message on the conversation thread of the issue ticket *that has less participants* involved + +``` +This is a duplicate issue. 
Please migrate conversations over to [issue-XYZ](hyperlink to issue) +``` + +#### Good First Issues + +Issue tickets can vary in complexity, and issues labeled with `good first issue` labels are often a great way to get started with the project as a newcomer. + +Take a look at our [issue tracking system](https://github.com/NASA-AMMOS/MMGIS/issues), and filter by `good first issue` for issues that are low-complexity, and that will help you get familiar with our issue tracking and patch submission process. + +#### Suggesting New Issue Labels + +Labels within our [issue tracking system](https://github.com/NASA-AMMOS/MMGIS/issues) are a great way to quickly sort through tickets. The project may not yet have labels to cover the full variety of issue tickets. Take a look through our list of issues, and if you notice a set of issue tickets that seem similar but are not categorized with an existing label, go ahead submit a request within one of the issues you've looked at with the following text: + +``` +I've noticed several other issues that are of the same category as this issue. Shall we make a new label for these types of issues? +``` + +#### Submitting Bug Issues + +Resolving bugs is a priority for our project. We welcome bug reports. However, please make sure to do the following prior to submitting a bug report: +- **Check for duplicates** - there may be a bug report already describing your issue, so check the [issue tracking system](https://github.com/NASA-AMMOS/MMGIS/issues) first. + +Here's some guidance on submitting a bug issue: +1. Navigate to our [issue tracking system](https://github.com/NASA-AMMOS/MMGIS/issues) and file a new issue +2. Select a bug template (if available) for your issue + 1. Fill out the template fields to the best of your ability, including output snippets or screenshots where applicable +3. Follow the general guidelines below for extra information about your bug + 1. Include a code snippet if you have it showcasing the bug + 2. 
Provide reproducible steps of how to recreate the bug + 3. If the bug triggers an exception or error message, include the *full message* or *stacktrace* + 4. Provide information about your operating system and the version of our project you're using + +#### Submitting New Feature Issues + +We welcome new feature requests to help grow our project. However, please make sure to do the following prior to submitting a new feature request: +- **Check for duplicates** - there may be a new feature issue already describing your issue, so check the [issue tracking system](https://github.com/NASA-AMMOS/MMGIS/issues) first +- **Consider alternatives** - is your feature really needed? Or is there a feature within our project or with a third-party that may help you achieve what you want? + +Here's some guidance on submitting a new feature issue: +1. Navigate to our [issue tracking system](https://github.com/NASA-AMMOS/MMGIS/issues) and file a new issue +2. Select a new feature template (if available) for your issue + 1. Fill out the template fields to the best of your ability + +#### Submitting Security Vulnerability Issues + +Security vulnerabilities should **not** be filed to the regular issue tracking system. + +Report your security vulnerabilities to (see contact links): [@jl-0](https://github.com/jl-0) or [@jeffreypon](https://github.com/jeffreypon) + +Please be sure to: +* Indicate the severity of the vulnerability +* Provide any workarounds, if you know them +* Provide return-contact information to follow-up with you if needed + +#### Reviewing Pull Requests + +Reviewing others' contributions is a great way to learn about best practices in both contributions as well as software. + +Take a look at our [pull requests tracking system](https://github.com/NASA-AMMOS/MMGIS/pulls), and try the following options for providing a review: +1. Read the code / patch associated with the pull-request, and take note of any coding, bug, or documentation issues if found +2. 
Try to recreate the pull-request patch on your local machine, and report if it has issues with your system in particular +3. Scan over suggested feedback from other contributors, and provide feedback if necessary + +### 💻 Code + +⚠️ It's **highly** advised that you take a look at our [issue tracking system](https://github.com/NASA-AMMOS/MMGIS/issues) before considering any code contributions. Here's some guidelines: +1. Check if any duplicate issues exist that cover your code contribution idea / task, and add comments to those tickets with your thoughts. +2. If no duplicates exist, create a new issue ticket and get a conversation started before making code changes using our [communication channels](#communication-channels). + +Once you have a solid issue ticket in hand and are ready to work on code, you'll want to: +1. Ensure you have development [prerequisites](#prerequisites) cleared. +2. Have your [development environment](#developer-environment) set up properly. +3. Go through our [development process](#our-development-process), including proposing changes to our project. + +Some guidelines for code-specific contributions: +- **Do your homework** - read-up on necessary documentation, like `README.md`s, developer documentation, and pre-existing code to see the intention and context necessary to make your contribution a success. It's important to _communicate_ what you're working on through our project [communication channels](#communication-channels) and get buy-in from frequent contributors - this will help the project be more receptive to your contributions! +- **Ask questions** - it's important to ask questions while you work on your contributions, to check-in with frequent contributors on the style and the set of expectations to make your code contribution work well with pre-existing project code. 
Use our [communication channels](#communication-channels) +- **Keep positive** - code contributions, by their nature, have direct impacts on the output and functionality of the project. Keep a positive spirit as your code is reviewed, and take it in stride if core contributors take time to review, give you suggestions for your code or respectfully decline your contribution. This is all part of the process for quality open source development. +- **Comments** - include *useful* comments throughout your code that explain the intention of a code block, not a step-by-step analysis. See our [inline code documentation](#inline-code-documentation) section for specifics. + + +### 📖 Documentation + +Documentation is the core way our users and contributors learn about the project. We place a high value on the quality, thoroughness, and readability of our documentation. Writing or editing documentation is an excellent way to contribute to our project without performing active coding. + +⚠️ It's **highly** advised that you take a look at our [issue-tracking system](https://github.com/NASA-AMMOS/MMGIS/issues) before considering any documentation contributions. Here's some guidelines: +1. Check if any duplicate issues exist that cover your documentation contribution idea / task, and add comments to those tickets with your thoughts. +2. If no duplicates exist, create a new issue ticket and get a conversation started before making documentation changes. 
+ +Some guidelines for documentation best practices (summarized from Google's [excellent documentation guide](https://google.github.io/styleguide/docguide/best_practices.html)): +- **Minimum viable docs** - don't do less documentation than your users / developers need, but also don't do more +- **Changed code = changed docs** - if your code has changed, remember to update your documentation +- **Delete old docs** - continually clean your documentation tree, and remove outdated docs regularly + +#### Documentation Organization + +The overall structure of our project documentation is as follows: +- Source-controlled documentation + - [README.md](README.md) - top-level information about how to run, build, and contribute to the project + - [CODE_OF_CONDUCT.md](CODE_OF_CONDUCT.md) - best practices and guidance on how to work well with other people in the project, and suggestions on dealing with interpersonal issues + - [CONTRIBUTING.md](CONTRIBUTING.md) - guidance on contributing to the project + - `*.java` - inline documentation available inside code files +- Hosted documentation + - [Wiki Main-page](https://github.com/NASA-AMMOS/MMGIS/wiki) +- Discussion Boards + - [Discussions](https://github.com/NASA-AMMOS/MMGIS/discussions) + + + +For directions on contributing to our source-controlled documentation: +1. Ensure you have development [prerequisites](#prerequisites) cleared. +2. Have your [development environment](#developer-environment) set up properly. +3. Go through our [development process](#our-development-process), including proposing changes to our project. + + + +#### Writing Style + +To ensure documentation is readable and consistent by newcomers and experts alike, here are some suggestions on writing style for English: +- Use gender neutral pronouns (they/their/them) instead of he/she/his/her +- Avoid qualifiers that minimize the difficulty of a task at hand, e.g. avoid words like “easily”, “simply”, “just”, “merely”, “straightforward”, etc. 
Readers' expertise may not match your own, and qualifying complexity may deter some readers if the task does not match their level of experience. That being said, if a particular task is difficult or complex, do mention that. + +#### Common Wording + +Below are some commonly used words you'll want to leverage in your documentation contributions: +- **AMMOS** - Advanced Multi-Mission Operations System tool set. +- **Artifactory** - Lab-based artifact repository that stores compiled builds for reuse. +- **GitHub** - One word with capitalization of the 'G' and 'H'. +- **Jenkins** - Continuous Integration (CI) server that hosts many Lab builds. +- **MIPL** - Multimission Instrument Processing Lab which hosts the project containing this repository. +- **VICAR Java Build** - The build system that collects and compiles related code. + +#### Inline Code Documentation + +For language-specific guidance on code documentation, including style guides, see [Google's list of language style guides](https://google.github.io/styleguide/) for a variety of languages. + +Additionally, take a look at Google's recommendations on [inline code documentation](https://google.github.io/styleguide/docguide/best_practices.html#documentation-is-the-story-of-your-code) for best practices. + +#### Media + +Media, such as images, videos, sound files, etc., are an excellent way to explain documentation to a wider audience more easily. Include media in your contributions as often as possible. + +When including media into our version-control system, it is recommended to use formats such as: +- Diagrams: [Mermaid](https://mermaid-js.github.io/mermaid/#/) format +- Images: JPEG format +- Videos: H264 MPEG format +- Sounds: MP3 format + + +### ❓ Questions + +Answering questions is an excellent way to learn more about our project, as well as get better known in our project community. 
+ +Here are just a few ways you can help answer questions for our project: +- Answer open questions in our [discussion forum](https://github.com/NASA-AMMOS/MMGIS/discussions) +- Answer open questions mentioned in our [issue tracking system](https://github.com/NASA-AMMOS/MMGIS/issues) + + +When answering questions, keep the following in mind: +- Be polite and friendly. See our [Code of Conduct](CODE_OF_CONDUCT.md) recommendations as you interact with others in the team. +- Repeat the specific question you are answering, followed by your suggestion. +- If suggesting code, repeat the line of code that needs to be altered, followed by your alteration +- Include any post-steps or checks to verify your answer can be reproduced + +### 🎨 Design + +Design files can help to guide new features and new areas of expansion for our project. We welcome these kinds of contributions. + +Here are just a few ways you can help provide design recommendations for our project: +- Create visual mockups or diagrams to increase usability of our project applications. This can apply to user interfaces, documentation structuring, or even code architecture diagrams. +- Conduct user research to understand user needs better. Save your findings within spreadsheets that the project team / contributors can review. +- Create art, such as logos or icons, to support the user experience for the project + +Each of the above can be contributed directly to repository code, and you should use our [development process](#our-development-process) to contribute your additions. + +### 🎟️ Meetups + +A great way to contribute towards our project goals is to socialize and encourage people to meet and learn more about each other. 
Consider ideas like: +- Propose workshops or meetups regarding some topic within our project +- Help point project contributors and community members to conferences and publications where they may socialize their unique innovations +- Schedule in-person or virtual happy-hours to help create a more social atmosphere within the project community + +For the above ideas, use our [communication channels](#communication-channels) to propose get-togethers. diff --git a/LICENSE b/LICENSE index ffd058277..17c357d72 100644 --- a/LICENSE +++ b/LICENSE @@ -175,18 +175,33 @@ END OF TERMS AND CONDITIONS - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright 2020 California Institute of Technology + Copyright (c) 2022 California Institute of Technology ("Caltech"). + U.S. Government sponsorship acknowledged. + + All rights reserved. + + Redistribution and use in source and binary forms, with or without modification, are permitted provided + that the following conditions are met: + * Redistributions of source code must retain the above copyright notice, this list of conditions and + the following disclaimer. + * Redistributions in binary form must reproduce the above copyright notice, this list of conditions + and the following disclaimer in the documentation and/or other materials provided with the + distribution. 
+ * Neither the name of Caltech nor its operating division, the Jet Propulsion Laboratory, nor the + names of its contributors may be used to endorse or promote products derived from this software + without specific prior written permission. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS + IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, + THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR + PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR + CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, + EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, + PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; + OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, + WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR + OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF + ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. diff --git a/README.md b/README.md index 8308fe3a4..ce1daab4b 100644 --- a/README.md +++ b/README.md @@ -35,20 +35,13 @@ ### System Requirements -1. Install the latest version of [Node.js v14.9.0+](https://nodejs.org/en/download/). Developed with v16.13.2. Note that v18+ of Node.js fails to build. +1. Install the latest version of [Node.js v16.13.2+](https://nodejs.org/en/download/). 1. Install [PostgreSQL v10.14+](https://www.enterprisedb.com/downloads/postgres-postgresql-downloads). Detailed [install instructions](https://www.postgresqltutorial.com/postgresql-getting-started/) for all platforms. 1. Install [PostGIS 2.5+](https://postgis.net/install/). From the above install, you can use the 'Application Stack Builder' to install PostGIS or the default [PostGIS install instructions](https://postgis.net/install/) for all platforms. 1. 
Make a new PostgreSQL database and remember the user, password and database name. Use 'psql' or the 'SQL Shell' to log into Postgres. It will prompt you for the username and password made during the install. - Issue the following commands: - `CREATE DATABASE mmgis;` - `\c mmgis` - `CREATE EXTENSION postgis;` - `exit` - In the above `\c` attaches to the database and `CREATE EXTENSION` enables PostGIS by creating a spatial reference table within that database. - 1. GDAL and Python are weaker dependencies (desirable but, without them, not everything will work) - GDAL [2.+](https://gdal.org/download.html) with Python bindings (Windows users may find [these](https://www.lfd.uci.edu/~gohlke/pythonlibs/#gdal) helpful) @@ -150,7 +143,7 @@ This repo contains a `docker-compose.yml` file that defines a service for the ap ### License: Apache 2.0 (https://www.apache.org/licenses/LICENSE-2.0) -Copyright (c) 2022, California Institute of Technology ("Caltech"). U.S. Government sponsorship acknowledged. +Copyright (c) 2023, California Institute of Technology ("Caltech"). U.S. Government sponsorship acknowledged. All rights reserved. 
diff --git a/auxiliary/gdal2customtiles/gdal2customtiles.py b/auxiliary/gdal2customtiles/gdal2customtiles.py index 2e4c1655a..c07f77267 100644 --- a/auxiliary/gdal2customtiles/gdal2customtiles.py +++ b/auxiliary/gdal2customtiles/gdal2customtiles.py @@ -1,4 +1,4 @@ -#!/usr/bin/env python +#!/usr/bin/env python3 # -*- coding: utf-8 -*- # ****************************************************************************** # $Id$ @@ -11,12 +11,11 @@ # - support of global tiles (Spherical Mercator) for compatibility # with interactive web maps a la Google Maps # Author: Klokan Petr Pridal, klokan at klokan dot cz -# Web: http://www.klokan.cz/projects/gdal2tiles/ -# GUI: http://www.maptiler.org/ # ############################################################################### # Copyright (c) 2008, Klokan Petr Pridal -# Copyright (c) 2010-2013, Even Rouault +# Copyright (c) 2010-2013, Even Rouault +# Copyright (c) 2021, Idan Miara # # Permission is hereby granted, free of charge, to any person obtaining a # copy of this software and associated documentation files (the "Software"), @@ -37,25 +36,36 @@ # DEALINGS IN THE SOFTWARE. 
# ****************************************************************************** +from __future__ import division, print_function +import struct + +import contextlib +import glob +import json import math +import optparse import os +import shutil +import stat import sys +import tempfile +import threading +from functools import partial +from typing import Any, List, NoReturn, Optional, Tuple +from uuid import uuid4 +from xml.etree import ElementTree -from osgeo import gdal -from osgeo import osr - -import struct # 1bto4b +from osgeo import gdal, osr +Options = Any -def binary(num): # 1bto4b - # 1bto4b +# MMGIS +def binary(num): return ''.join(bin(c).replace('0b', '').rjust(8, '0') for c in struct.pack('!f', num)) -# 1bto4b - def getTilePxBounds(self, tx, ty, tz, ds): - querysize = self.tilesize + querysize = self.tile_size if self.isRasterBounded: # 'raster' profile: # tilesize in raster coordinates for actual zoom @@ -63,7 +73,7 @@ def getTilePxBounds(self, tx, ty, tz, ds): xsize = self.out_ds.fWorldXSize ysize = self.out_ds.fWorldYSize if tz >= self.tmaxz: - querysize = self.tilesize + querysize = self.tile_size rx = (tx) * tsize - self.out_ds.fRasterXOriginWorld #print("rx", rx) @@ -77,27 +87,27 @@ def getTilePxBounds(self, tx, ty, tz, ds): self.out_ds.fRasterYOriginWorld wx, wy = 0, 0 - wxsize = int(rxsize/float(tsize) * self.tilesize) - wysize = int(rysize/float(tsize) * self.tilesize) - if wysize != self.tilesize: - wy = self.tilesize - wysize + wxsize = int(rxsize/float(tsize) * self.tile_size) + wysize = int(rysize/float(tsize) * self.tile_size) + if wysize != self.tile_size: + wy = self.tile_size - wysize if rx < 0: rxsize = tsize + rx wx = -rx - wxsize = int(rxsize/float(tsize) * self.tilesize) + wxsize = int(rxsize/float(tsize) * self.tile_size) rx = 0 if ry < 0: rysize = tsize + ry wy = -ry - wysize = int(rysize/float(tsize) * self.tilesize) + wysize = int(rysize/float(tsize) * self.tile_size) ry = 0 if rx + rxsize > self.out_ds.fRasterXSizeWorld: rxsize 
= self.out_ds.fRasterXSizeWorld - rx - wxsize = int(rxsize/float(tsize) * self.tilesize) + wxsize = int(rxsize/float(tsize) * self.tile_size) if ry + rysize > self.out_ds.fRasterYSizeWorld: rysize = self.out_ds.fRasterYSizeWorld - ry - wysize = int(rysize/float(tsize) * self.tilesize) + wysize = int(rysize/float(tsize) * self.tile_size) # Convert rx, ry back to non-world coordinates rx = int(float(self.out_ds.RasterXSize) * @@ -119,19 +129,246 @@ try: - from PIL import Image import numpy + from PIL import Image + import osgeo.gdal_array as gdalarray -except Exception: + + numpy_available = True +except ImportError: # 'antialias' resampling is not available - pass + numpy_available = False __version__ = "$Id$" -resampling_list = ('average', 'near', 'bilinear', 'cubic', - 'cubicspline', 'lanczos', 'antialias') -profile_list = ('mercator', 'geodetic', 'raster') -webviewer_list = ('all', 'google', 'openlayers', 'leaflet', 'none') +resampling_list = ( + "average", + "near", + "near-composite", + "bilinear", + "cubic", + "cubicspline", + "lanczos", + "antialias", + "mode", + "max", + "min", + "med", + "q1", + "q3", +) +webviewer_list = ("all", "google", "openlayers", "leaflet", "mapml", "none") + + +def makedirs(path): + """Wrapper for os.makedirs() that can work with /vsi files too""" + if path.startswith("/vsi"): + if gdal.MkdirRecursive(path, 0o755) != 0: + raise Exception(f"Cannot create {path}") + else: + os.makedirs(path, exist_ok=True) + + +def isfile(path): + """Wrapper for os.path.isfile() that can work with /vsi files too""" + if path.startswith("/vsi"): + stat_res = gdal.VSIStatL(path) + if stat_res is None: + return False + return stat.S_ISREG(stat_res.mode) + else: + return os.path.isfile(path) + + +class VSIFile: + """Expose a simplistic file-like API for a /vsi file""" + + def __init__(self, filename, f): + self.filename = filename + self.f = f + + def write(self, content): + if gdal.VSIFWriteL(content, 1, 
len(content), self.f) != len(content): + raise Exception("Error while writing into %s" % self.filename) + + +@contextlib.contextmanager +def my_open(filename, mode): + """Wrapper for open() built-in method that can work with /vsi files too""" + if filename.startswith("/vsi"): + f = gdal.VSIFOpenL(filename, mode) + if f is None: + raise Exception(f"Cannot open {filename} in {mode}") + try: + yield VSIFile(filename, f) + finally: + if gdal.VSIFCloseL(f) != 0: + raise Exception(f"Cannot close {filename}") + else: + yield open(filename, mode) + + +class UnsupportedTileMatrixSet(Exception): + pass + + +class TileMatrixSet(object): + def __init__(self) -> None: + self.identifier = None + self.srs = None + self.topleft_x = None + self.topleft_y = None + self.matrix_width = None # at zoom 0 + self.matrix_height = None # at zoom 0 + self.tile_size = None + self.resolution = None # at zoom 0 + self.level_count = None + + def GeorefCoordToTileCoord(self, x, y, z, overriden_tile_size): + res = self.resolution * self.tile_size / overriden_tile_size / (2**z) + tx = int((x - self.topleft_x) / (res * overriden_tile_size)) + # In default mode, we use a bottom-y origin + ty = int( + ( + y + - ( + self.topleft_y + - self.matrix_height * self.tile_size * self.resolution + ) + ) + / (res * overriden_tile_size) + ) + return tx, ty + + def ZoomForPixelSize(self, pixelSize, overriden_tile_size): + "Maximal scaledown zoom of the pyramid closest to the pixelSize." 
+ + for i in range(self.level_count): + res = self.resolution * self.tile_size / \ + overriden_tile_size / (2**i) + if pixelSize > res: + return max(0, i - 1) # We don't want to scale up + return self.level_count - 1 + + def PixelsToMeters(self, px, py, zoom, overriden_tile_size): + "Converts pixel coordinates in given zoom level of pyramid to EPSG:3857" + + res = self.resolution * self.tile_size / \ + overriden_tile_size / (2**zoom) + mx = px * res + self.topleft_x + my = py * res + ( + self.topleft_y - self.matrix_height * self.tile_size * self.resolution + ) + return mx, my + + def TileBounds(self, tx, ty, zoom, overriden_tile_size): + "Returns bounds of the given tile in georef coordinates" + + minx, miny = self.PixelsToMeters( + tx * overriden_tile_size, + ty * overriden_tile_size, + zoom, + overriden_tile_size, + ) + maxx, maxy = self.PixelsToMeters( + (tx + 1) * overriden_tile_size, + (ty + 1) * overriden_tile_size, + zoom, + overriden_tile_size, + ) + return (minx, miny, maxx, maxy) + + @staticmethod + def parse(j: dict) -> "TileMatrixSet": + assert "identifier" in j + assert "supportedCRS" in j + assert "tileMatrix" in j + assert isinstance(j["tileMatrix"], list) + srs = osr.SpatialReference() + assert srs.SetFromUserInput(str(j["supportedCRS"])) == 0 + swapaxis = srs.EPSGTreatsAsLatLong() or srs.EPSGTreatsAsNorthingEasting() + metersPerUnit = 1.0 + if srs.IsProjected(): + metersPerUnit = srs.GetLinearUnits() + elif srs.IsGeographic(): + metersPerUnit = srs.GetSemiMajor() * math.pi / 180 + tms = TileMatrixSet() + tms.srs = srs + tms.identifier = str(j["identifier"]) + for i, tileMatrix in enumerate(j["tileMatrix"]): + assert "topLeftCorner" in tileMatrix + assert isinstance(tileMatrix["topLeftCorner"], list) + topLeftCorner = tileMatrix["topLeftCorner"] + assert len(topLeftCorner) == 2 + assert "scaleDenominator" in tileMatrix + assert "tileWidth" in tileMatrix + assert "tileHeight" in tileMatrix + + topleft_x = topLeftCorner[0] + topleft_y = 
topLeftCorner[1] + tileWidth = tileMatrix["tileWidth"] + tileHeight = tileMatrix["tileHeight"] + if tileWidth != tileHeight: + raise UnsupportedTileMatrixSet("Only square tiles supported") + # Convention in OGC TileMatrixSet definition. See gcore/tilematrixset.cpp + resolution = tileMatrix["scaleDenominator"] * \ + 0.28e-3 / metersPerUnit + if swapaxis: + topleft_x, topleft_y = topleft_y, topleft_x + if i == 0: + tms.topleft_x = topleft_x + tms.topleft_y = topleft_y + tms.resolution = resolution + tms.tile_size = tileWidth + + assert "matrixWidth" in tileMatrix + assert "matrixHeight" in tileMatrix + tms.matrix_width = tileMatrix["matrixWidth"] + tms.matrix_height = tileMatrix["matrixHeight"] + else: + if topleft_x != tms.topleft_x or topleft_y != tms.topleft_y: + raise UnsupportedTileMatrixSet( + "All levels should have same origin") + if abs(tms.resolution / (1 << i) - resolution) > 1e-8 * resolution: + raise UnsupportedTileMatrixSet( + "Only resolutions varying as power-of-two supported" + ) + if tileWidth != tms.tile_size: + raise UnsupportedTileMatrixSet( + "All levels should have same tile size" + ) + tms.level_count = len(j["tileMatrix"]) + return tms + + +tmsMap = {} + +profile_list = ["mercator", "geodetic", "raster"] + +# Read additional tile matrix sets from GDAL data directory +filename = gdal.FindFile("gdal", "tms_MapML_APSTILE.json") +if filename and False: + dirname = os.path.dirname(filename) + for tmsfilename in glob.glob(os.path.join(dirname, "tms_*.json")): + data = open(tmsfilename, "rb").read() + try: + j = json.loads(data.decode("utf-8")) + except Exception: + j = None + if j is None: + print("Cannot parse " + tmsfilename) + continue + try: + tms = TileMatrixSet.parse(j) + except UnsupportedTileMatrixSet: + continue + except Exception: + print("Cannot parse " + tmsfilename) + continue + tmsMap[tms.identifier] = tms + profile_list.append(tms.identifier) + +threadLocal = threading.local() # 
============================================================================= # ============================================================================= @@ -201,8 +438,7 @@ class GlobalMercator(object): What is the coordinate extent of Earth in EPSG:3857? - [-20037508.342789244, -20037508.342789244, - 20037508.342789244, 20037508.342789244] + [-20037508.342789244, -20037508.342789244, 20037508.342789244, 20037508.342789244] Constant 20037508.342789244 comes from the circumference of the Earth in meters, which is 40 thousand kilometers, the coordinate origin is in the middle of extent. In fact you can calculate the constant as: 2 * math.pi * 6378137 / 2.0 @@ -272,11 +508,11 @@ class GlobalMercator(object): AUTHORITY["EPSG","9001"]]] """ - def __init__(self, tileSize=256): + def __init__(self, tile_size: int = 256) -> None: "Initialize the TMS Global Mercator pyramid" - self.tileSize = tileSize - self.initialResolution = 2 * math.pi * 6378137 / self.tileSize - # 156543.03392804062 for tileSize 256 pixels + self.tile_size = tile_size + self.initialResolution = 2 * math.pi * 6378137 / self.tile_size + # 156543.03392804062 for tile_size 256 pixels self.originShift = 2 * math.pi * 6378137 / 2.0 # 20037508.342789244 @@ -296,8 +532,11 @@ def MetersToLatLon(self, mx, my): lon = (mx / self.originShift) * 180.0 lat = (my / self.originShift) * 180.0 - lat = 180 / math.pi * \ - (2 * math.atan(math.exp(lat * math.pi / 180.0)) - math.pi / 2.0) + lat = ( + 180 + / math.pi + * (2 * math.atan(math.exp(lat * math.pi / 180.0)) - math.pi / 2.0) + ) return lat, lon def PixelsToMeters(self, px, py, zoom): @@ -319,14 +558,14 @@ def MetersToPixels(self, mx, my, zoom): def PixelsToTile(self, px, py): "Returns a tile covering region in given pixel coordinates" - tx = int(math.ceil(px / float(self.tileSize)) - 1) - ty = int(math.ceil(py / float(self.tileSize)) - 1) + tx = int(math.ceil(px / float(self.tile_size)) - 1) + ty = int(math.ceil(py / float(self.tile_size)) - 1) return tx, ty 
def PixelsToRaster(self, px, py, zoom): "Move the origin of pixel coordinates to top-left corner" - mapSize = self.tileSize << zoom + mapSize = self.tile_size << zoom return px, mapSize - py def MetersToTile(self, mx, my, zoom): @@ -339,9 +578,10 @@ def TileBounds(self, tx, ty, zoom): "Returns bounds of the given tile in EPSG:3857 coordinates" minx, miny = self.PixelsToMeters( - tx*self.tileSize, ty*self.tileSize, zoom) + tx * self.tile_size, ty * self.tile_size, zoom) maxx, maxy = self.PixelsToMeters( - (tx+1)*self.tileSize, (ty+1)*self.tileSize, zoom) + (tx + 1) * self.tile_size, (ty + 1) * self.tile_size, zoom + ) return (minx, miny, maxx, maxy) def TileLatLonBounds(self, tx, ty, zoom): @@ -356,7 +596,7 @@ def TileLatLonBounds(self, tx, ty, zoom): def Resolution(self, zoom): "Resolution (meters/pixel) for given zoom level (measured at Equator)" - # return (2 * math.pi * 6378137) / (self.tileSize * 2**zoom) + # return (2 * math.pi * 6378137) / (self.tile_size * 2**zoom) return self.initialResolution / (2**zoom) def ZoomForPixelSize(self, pixelSize): @@ -364,10 +604,8 @@ def ZoomForPixelSize(self, pixelSize): for i in range(MAXZOOMLEVEL): if pixelSize > self.Resolution(i): - if i != -1: - return i-1 - else: - return 0 # We don't want to scale up + return max(0, i - 1) # We don't want to scale up + return MAXZOOMLEVEL - 1 def GoogleTile(self, tx, ty, zoom): "Converts TMS tile coordinates to Google Tile coordinates" @@ -382,7 +620,7 @@ def QuadTree(self, tx, ty, zoom): ty = (2**zoom - 1) - ty for i in range(zoom, 0, -1): digit = 0 - mask = 1 << (i-1) + mask = 1 << (i - 1) if (tx & mask) != 0: digit += 1 if (ty & mask) != 0: @@ -427,17 +665,17 @@ class GlobalGeodetic(object): WMS, KML Web Clients, Google Earth TileMapService """ - def __init__(self, tmscompatible, tileSize=256): - self.tileSize = tileSize - if tmscompatible is not None: + def __init__(self, tmscompatible: Optional[bool], tile_size: int = 256) -> None: + self.tile_size = tile_size + if tmscompatible: 
# Defaults the resolution factor to 0.703125 (2 tiles @ level 0) # Adhers to OSGeo TMS spec # http://wiki.osgeo.org/wiki/Tile_Map_Service_Specification#global-geodetic - self.resFact = 180.0 / self.tileSize + self.resFact = 180.0 / self.tile_size else: # Defaults the resolution factor to 1.40625 (1 tile @ level 0) # Adheres OpenLayers, MapProxy, etc default resolution for WMTS - self.resFact = 360.0 / self.tileSize + self.resFact = 360.0 / self.tile_size def LonLatToPixels(self, lon, lat, zoom): "Converts lon/lat to pixel coordinates in given zoom of the EPSG:4326 pyramid" @@ -450,8 +688,8 @@ def LonLatToPixels(self, lon, lat, zoom): def PixelsToTile(self, px, py): "Returns coordinates of the tile covering region in pixel coordinates" - tx = int(math.ceil(px / float(self.tileSize)) - 1) - ty = int(math.ceil(py / float(self.tileSize)) - 1) + tx = int(math.ceil(px / float(self.tile_size)) - 1) + ty = int(math.ceil(py / float(self.tile_size)) - 1) return tx, ty def LonLatToTile(self, lon, lat, zoom): @@ -470,19 +708,17 @@ def ZoomForPixelSize(self, pixelSize): for i in range(MAXZOOMLEVEL): if pixelSize > self.Resolution(i): - if i != 0: - return i-1 - else: - return 0 # We don't want to scale up + return max(0, i - 1) # We don't want to scale up + return MAXZOOMLEVEL - 1 def TileBounds(self, tx, ty, zoom): "Returns bounds of the given tile" res = self.resFact / 2**zoom return ( - tx*self.tileSize*res - 180, - ty*self.tileSize*res - 90, - (tx+1)*self.tileSize*res - 180, - (ty+1)*self.tileSize*res - 90 + tx * self.tile_size * res - 180, + ty * self.tile_size * res - 90, + (tx + 1) * self.tile_size * res - 180, + (ty + 1) * self.tile_size * res - 90, ) def TileLatLonBounds(self, tx, ty, zoom): @@ -497,13 +733,13 @@ class Zoomify(object): ---------------------------------------- """ - def __init__(self, width, height, tilesize=256, tileformat='jpg'): + def __init__(self, width, height, tile_size=256, tileformat="jpg"): """Initialization of the Zoomify tile tree""" - 
self.tilesize = tilesize + self.tile_size = tile_size self.tileformat = tileformat imagesize = (width, height) - tiles = (math.ceil(width / tilesize), math.ceil(height / tilesize)) + tiles = (math.ceil(width / tile_size), math.ceil(height / tile_size)) # Size (in tiles) for each tier of pyramid. self.tierSizeInTiles = [] @@ -513,11 +749,13 @@ def __init__(self, width, height, tilesize=256, tileformat='jpg'): self.tierImageSize = [] self.tierImageSize.append(imagesize) - while (imagesize[0] > tilesize or imagesize[1] > tilesize): + while imagesize[0] > tile_size or imagesize[1] > tile_size: imagesize = (math.floor( imagesize[0] / 2), math.floor(imagesize[1] / 2)) - tiles = (math.ceil(imagesize[0] / tilesize), - math.ceil(imagesize[1] / tilesize)) + tiles = ( + math.ceil(imagesize[0] / tile_size), + math.ceil(imagesize[1] / tile_size), + ) self.tierSizeInTiles.append(tiles) self.tierImageSize.append(imagesize) @@ -530,10 +768,10 @@ def __init__(self, width, height, tilesize=256, tileformat='jpg'): # Number of tiles up to the given tier of pyramid. 
self.tileCountUpToTier = [] self.tileCountUpToTier[0] = 0 - for i in range(1, self.numberOfTiers+1): + for i in range(1, self.numberOfTiers + 1): self.tileCountUpToTier.append( - self.tierSizeInTiles[i-1][0] * self.tierSizeInTiles[i-1][1] + - self.tileCountUpToTier[i-1] + self.tierSizeInTiles[i - 1][0] * self.tierSizeInTiles[i - 1][1] + + self.tileCountUpToTier[i - 1] ) def tilefilename(self, x, y, z): @@ -541,759 +779,1961 @@ def tilefilename(self, x, y, z): tileIndex = x + y * \ self.tierSizeInTiles[z][0] + self.tileCountUpToTier[z] - return os.path.join("TileGroup%.0f" % math.floor(tileIndex / 256), - "%s-%s-%s.%s" % (z, x, y, self.tileformat)) + return os.path.join( + "TileGroup%.0f" % math.floor(tileIndex / 256), + "%s-%s-%s.%s" % (z, x, y, self.tileformat), + ) -class Gdal2TilesError(Exception): +class GDALError(Exception): pass -class GDAL2Tiles(object): +def exit_with_error(message: str, details: str = "") -> NoReturn: + # Message printing and exit code kept from the way it worked using the OptionParser (in case + # someone parses the error output) + sys.stderr.write("Usage: gdal2tiles.py [options] input_file [output]\n\n") + sys.stderr.write("gdal2tiles.py: error: %s\n" % message) + if details: + sys.stderr.write("\n\n%s\n" % details) - def process(self): - """The main processing function, runs all the main steps of processing""" + sys.exit(2) - # Opening and preprocessing of the input file - self.open_input() - # Generation of main metadata files and HTML viewers - self.generate_metadata() +def set_cache_max(cache_in_bytes: int) -> None: + # We set the maximum using `SetCacheMax` and `GDAL_CACHEMAX` to support both fork and spawn as multiprocessing start methods. 
+ # https://github.com/OSGeo/gdal/pull/2112 + os.environ["GDAL_CACHEMAX"] = "%d" % int(cache_in_bytes / 1024 / 1024) + gdal.SetCacheMax(cache_in_bytes) - # 1bto4b - if self.isDEMtile: - for z in range(self.tminz, self.tmaxz + int(abs(math.log(self.tilesize, 2) - 8))): # 1bto4b - self.generate_base_tiles(z) - print(' Zoom ' + str(z) + ' tiles done!') - else: - # Generation of the lowest tiles - self.generate_base_tiles(self.tmaxz) - # Generation of the overview tiles (higher in the pyramid) - self.generate_overview_tiles() +def generate_kml( + tx, ty, tz, tileext, tile_size, tileswne, options, children=None, **args +): + """ + Template for the KML. Returns filled string. + """ + if not children: + children = [] + + args["tx"], args["ty"], args["tz"] = tx, ty, tz + args["tileformat"] = tileext + if "tile_size" not in args: + args["tile_size"] = tile_size + + if "minlodpixels" not in args: + args["minlodpixels"] = int(args["tile_size"] / 2) + if "maxlodpixels" not in args: + args["maxlodpixels"] = int(args["tile_size"] * 8) + if children == []: + args["maxlodpixels"] = -1 + + if tx is None: + tilekml = False + args["xml_escaped_title"] = gdal.EscapeString( + options.title, gdal.CPLES_XML) + else: + tilekml = True + args["realtiley"] = GDAL2Tiles.getYTile(ty, tz, options) + args["xml_escaped_title"] = "%d/%d/%d.kml" % ( + tz, tx, args["realtiley"]) + args["south"], args["west"], args["north"], args["east"] = tileswne( + tx, ty, tz) + + if tx == 0: + args["drawOrder"] = 2 * tz + 1 + elif tx is not None: + args["drawOrder"] = 2 * tz + else: + args["drawOrder"] = 0 - def error(self, msg, details=""): - """Print an error message and stop the processing""" - if details: - self.parser.error(msg + "\n\n" + details) + url = options.url + if not url: + if tilekml: + url = "../../" else: - self.parser.error(msg) - - def progressbar(self, complete=0.0): - """Print progressbar for float value 0..1""" - gdal.TermProgress_nocb(complete) - - def gettempfilename(self, suffix): - 
"""Returns a temporary filename""" - if '_' in os.environ: - # tempfile.mktemp() crashes on some Wine versions (the one of Ubuntu 12.04 particularly) - if os.environ['_'].find('wine') >= 0: - tmpdir = '.' - if 'TMP' in os.environ: - tmpdir = os.environ['TMP'] - import time - import random - random.seed(time.time()) - random_part = 'file%d' % random.randint(0, 1000000000) - return os.path.join(tmpdir, random_part + suffix) - - import tempfile - return tempfile.mktemp(suffix) - - def stop(self): - """Stop the rendering immediately""" - self.stopped = True - - def __init__(self, arguments): - """Constructor function - initialization""" - self.out_drv = None - self.mem_drv = None - self.in_ds = None - self.out_ds = None - self.out_srs = None - self.nativezoom = None - self.tminmax = None - self.tsize = None - self.mercator = None - self.geodetic = None - self.alphaband = None - self.dataBandsCount = None - self.out_gt = None - self.tileswne = None - self.swne = None - self.ominx = None - self.omaxx = None - self.omaxy = None - self.ominy = None - - # MMGIS - self.isRasterBounded = False - - # 1bto4b - self.isDEMtile = False + url = "" + + s = ( + """ + + + %(xml_escaped_title)s + + """ + % args + ) + if tilekml: + s += ( + """ + + + %(north).14f + %(south).14f + %(east).14f + %(west).14f + + + %(minlodpixels)d + %(maxlodpixels)d + + + + %(drawOrder)d + + %(realtiley)d.%(tileformat)s + + + %(north).14f + %(south).14f + %(east).14f + %(west).14f + + +""" + % args + ) - # MMGIS - self.fminx = None - self.fmaxx = None - self.fminy = None - self.fmaxy = None - self.fPixelSize = None + for cx, cy, cz in children: + csouth, cwest, cnorth, ceast = tileswne(cx, cy, cz) + ytile = GDAL2Tiles.getYTile(cy, cz, options) + s += """ + + %d/%d/%d.%s + + + %.14f + %.14f + %.14f + %.14f + + + %d + -1 + + + + %s%d/%d/%d.kml + onRegion + + + + """ % ( + cz, + cx, + ytile, + args["tileformat"], + cnorth, + csouth, + ceast, + cwest, + args["minlodpixels"], + url, + cz, + cx, + ytile, + ) - 
self.stopped = False - self.input = None - self.output = None + s += """ + + """ + return s - # Tile format - self.tilesize = 256 - self.tiledriver = 'PNG' - self.tileext = 'png' - # Should we read bigger window of the input raster and scale it down? - # Note: Modified later by open_input() - # Not for 'near' resampling - # Not for Wavelet based drivers (JPEG2000, ECW, MrSID) - # Not for 'raster' profile - self.scaledquery = True - # How big should be query window be for scaling down - # Later on reset according the chosen resampling algorightm - self.querysize = 4 * self.tilesize +def scale_query_to_tile(dsquery, dstile, options, tilefilename=""): + """Scales down query dataset to the tile dataset""" - # Should we use Read on the input file for generating overview tiles? - # Note: Modified later by open_input() - # Otherwise the overview tiles are generated from existing underlying tiles - self.overviewquery = False + querysize = dsquery.RasterXSize + tile_size = dstile.RasterXSize + tilebands = dstile.RasterCount - # RUN THE ARGUMENT PARSER: + if options.resampling == "average": - self.optparse_init() - self.options, self.args = self.parser.parse_args(args=arguments) - if not self.args: - self.error("No input file specified") + # Function: gdal.RegenerateOverview() + for i in range(1, tilebands + 1): + # Black border around NODATA + res = gdal.RegenerateOverview( + dsquery.GetRasterBand(i), dstile.GetRasterBand(i), "average" + ) + if res != 0: + exit_with_error( + "RegenerateOverview() failed on %s, error %d" % (tilefilename, res) + ) - # POSTPROCESSING OF PARSED ARGUMENTS: + elif options.resampling == "antialias" and numpy_available: - # Workaround for old versions of GDAL - try: - if ((self.options.verbose and self.options.resampling == 'near') or - gdal.TermProgress_nocb): - pass - except Exception: - self.error( - "This version of GDAL is not supported. 
Please upgrade to 1.6+.") + if tilefilename.startswith("/vsi"): + raise Exception( + "Outputting to /vsi file systems with antialias mode is not supported" + ) - # Is output directory the last argument? + # Scaling by PIL (Python Imaging Library) - improved Lanczos + array = numpy.zeros((querysize, querysize, tilebands), numpy.uint8) + for i in range(tilebands): + array[:, :, i] = gdalarray.BandReadAsArray( + dsquery.GetRasterBand(i + 1), 0, 0, querysize, querysize + ) + im = Image.fromarray(array, "RGBA") # Always four bands + im1 = im.resize((tile_size, tile_size), Image.LANCZOS) + if os.path.exists(tilefilename): + im0 = Image.open(tilefilename) + im1 = Image.composite(im1, im0, im1) + + params = {} + if options.tiledriver == "WEBP": + if options.webp_lossless: + params["lossless"] = True + else: + params["quality"] = options.webp_quality + im1.save(tilefilename, options.tiledriver, **params) - # Test output directory, if it doesn't exist - if (os.path.isdir(self.args[-1]) or - (len(self.args) > 1 and not os.path.exists(self.args[-1]))): - self.output = self.args[-1] - self.args = self.args[:-1] + # MMGIS + elif options.resampling == "near-composite" and numpy_available: - # More files on the input not directly supported yet + if tilefilename.startswith("/vsi"): + raise Exception( + "Outputting to /vsi file systems with near-composite mode is not supported" + ) - if (len(self.args) > 1): - self.error("Processing of several input files is not supported.", - "Please first use a tool like gdal_vrtmerge.py or gdal_merge.py on the " - "files: gdal_vrtmerge.py -o merged.vrt %s" % " ".join(self.args)) + # Scaling by PIL (Python Imaging Library) - nearest + array = numpy.zeros((querysize, querysize, tilebands), numpy.uint8) + for i in range(tilebands): + array[:, :, i] = gdalarray.BandReadAsArray( + dsquery.GetRasterBand(i + 1), 0, 0, querysize, querysize + ) + im = Image.fromarray(array, "RGBA") # Always four bands + im1 = im.resize((tile_size, tile_size), 
Image.NEAREST) + + if os.path.exists(tilefilename): + im0 = Image.open(tilefilename) + + # Make mask, nodatas to alpha + nodataPixel = False + if options.srcnodata is not None: + f = str(binary(float(options.srcnodata))) + nodataPixel = [int(f[:8], 2), int(f[8:16], 2), + (int(f[16:24], 2)), int(f[24:], 2)] + + if nodataPixel is not False: + for rowI in range(len(array)): + for pixelI in range(len(array[rowI])): + if array[rowI][pixelI][0] == nodataPixel[0] and array[rowI][pixelI][1] == nodataPixel[1] and array[rowI][pixelI][2] == nodataPixel[2] and array[rowI][pixelI][3] == nodataPixel[3]: + array[rowI][pixelI] = [0, 0, 0, 0] + elif array[rowI][pixelI][0] == 0 and array[rowI][pixelI][1] == 0 and array[rowI][pixelI][2] == 0 and array[rowI][pixelI][3] == 0: + array[rowI][pixelI] = [0, 0, 0, 0] + else: + array[rowI][pixelI] = [255, 255, 255, 255] - self.input = self.args[0] + imMask = Image.fromarray(array, "RGBA") # Always four bands + im1Mask = imMask.resize((tile_size, tile_size), Image.NEAREST) - # MMGIS - if self.options.extentworld: - extentworld = self.options.extentworld.split(",") - self.isRasterBounded = True - self.fminx = float(extentworld[0]) - self.fmaxx = float(extentworld[2]) - self.fminy = float(extentworld[3]) - self.fmaxy = float(extentworld[1]) - self.fPixelSize = float(extentworld[4]) + im1 = Image.composite(im1, im0, im1Mask) - # 1bto4b - if self.options.isDEMtile: - self.isDEMtile = True - self.tilesize = 32 - self.querysize = 4 * self.tilesize - - # Default values for not given options - - if not self.output: - # Directory with input filename without extension in actual directory - self.output = os.path.splitext(os.path.basename(self.input))[0] - - if not self.options.title: - self.options.title = os.path.basename(self.input) - - if self.options.url and not self.options.url.endswith('/'): - self.options.url += '/' - if self.options.url: - self.options.url += os.path.basename(self.output) + '/' - - # Supported options - - self.resampling = None 
- - if self.options.resampling == 'average': - try: - if gdal.RegenerateOverview: - pass - except Exception: - self.error("'average' resampling algorithm is not available.", - "Please use -r 'near' argument or upgrade to newer version of GDAL.") - - elif self.options.resampling == 'antialias': - try: - if numpy: # pylint:disable=W0125 - pass - except Exception: - self.error("'antialias' resampling algorithm is not available.", - "Install PIL (Python Imaging Library) and numpy.") - - elif self.options.resampling == 'near': - self.resampling = gdal.GRA_NearestNeighbour - self.querysize = self.tilesize - - elif self.options.resampling == 'bilinear': - self.resampling = gdal.GRA_Bilinear - self.querysize = self.tilesize * 2 - - elif self.options.resampling == 'cubic': - self.resampling = gdal.GRA_Cubic - - elif self.options.resampling == 'cubicspline': - self.resampling = gdal.GRA_CubicSpline - - elif self.options.resampling == 'lanczos': - self.resampling = gdal.GRA_Lanczos - - # User specified zoom levels - self.tminz = None - self.tmaxz = None - if self.options.zoom: - minmax = self.options.zoom.split('-', 1) - minmax.extend(['']) - zoom_min, zoom_max = minmax[:2] - self.tminz = int(zoom_min) - if zoom_max: - self.tmaxz = int(zoom_max) + params = {} + if options.tiledriver == "WEBP": + if options.webp_lossless: + params["lossless"] = True else: - self.tmaxz = int(zoom_min) + params["quality"] = options.webp_quality + im1.save(tilefilename, options.tiledriver, **params) - # KML generation - self.kml = self.options.kml + else: + if options.resampling == "near": + gdal_resampling = gdal.GRA_NearestNeighbour - # Check if the input filename is full ascii or not - try: - os.path.basename(self.input).encode('ascii') - except UnicodeEncodeError: - full_ascii = False - else: - full_ascii = True - - # LC_CTYPE check - if not full_ascii and 'UTF-8' not in os.environ.get("LC_CTYPE", ""): - if not self.options.quiet: - print("\nWARNING: " - "You are running gdal2tiles.py with a 
LC_CTYPE environment variable that is " - "not UTF-8 compatible, and your input file contains non-ascii characters. " - "The generated sample googlemaps, openlayers or " - "leaflet files might contain some invalid characters as a result\n") - - # Output the results - if self.options.verbose: - print("Options:", self.options) - print("Input:", self.input) - print("Output:", self.output) - print("Cache: %s MB" % (gdal.GetCacheMax() / 1024 / 1024)) - print('') - - def optparse_init(self): - """Prepare the option parser for input (argv)""" - - from optparse import OptionParser, OptionGroup - usage = "Usage: %prog [options] input_file(s) [output]" - p = OptionParser(usage, version="%prog " + __version__) - p.add_option("-p", "--profile", dest='profile', - type='choice', choices=profile_list, - help=("Tile cutting profile (%s) - default 'mercator' " - "(Google Maps compatible)" % ",".join(profile_list))) - p.add_option("-r", "--resampling", dest="resampling", - type='choice', choices=resampling_list, - help="Resampling method (%s) - default 'average'" % ",".join(resampling_list)) - p.add_option('-s', '--s_srs', dest="s_srs", metavar="SRS", - help="The spatial reference system used for the source input data") - p.add_option('-z', '--zoom', dest="zoom", - help="Zoom levels to render (format:'2-5' or '10').") - p.add_option('-e', '--resume', dest="resume", action="store_true", - help="Resume mode. 
Generate only missing files.") - p.add_option('-a', '--srcnodata', dest="srcnodata", metavar="NODATA", - help="NODATA transparency value to assign to the input data") - p.add_option('-d', '--tmscompatible', dest="tmscompatible", action="store_true", - help=("When using the geodetic profile, specifies the base resolution " - "as 0.703125 or 2 tiles at zoom level 0.")) - p.add_option("-v", "--verbose", - action="store_true", dest="verbose", - help="Print status messages to stdout") - p.add_option("-q", "--quiet", - action="store_true", dest="quiet", - help="Disable messages and status to stdout") - # MMGIS - p.add_option("-x", "--extentworld", dest="extentworld", - help="The full world meter extent (comma-separated as minx,maxx,miny,maxy,pixelsize) of an inner raster profile.") - # 1bto4b - p.add_option("-m", "--dem", action="store_true", dest="isDEMtile", - help="Indicate if the input is a Digital Elevation Model") - # KML options - g = OptionGroup(p, "KML (Google Earth) options", - "Options for generated Google Earth SuperOverlay metadata") - g.add_option("-k", "--force-kml", dest='kml', action="store_true", - help=("Generate KML for Google Earth - default for 'geodetic' profile and " - "'raster' in EPSG:4326. 
For a dataset with different projection use " - "with caution!")) - g.add_option("-n", "--no-kml", dest='kml', action="store_false", - help="Avoid automatic generation of KML files for EPSG:4326") - g.add_option("-u", "--url", dest='url', - help="URL address where the generated tiles are going to be published") - p.add_option_group(g) - - # HTML options - g = OptionGroup(p, "Web viewer options", - "Options for generated HTML viewers a la Google Maps") - g.add_option("-w", "--webviewer", dest='webviewer', type='choice', choices=webviewer_list, - help="Web viewer to generate (%s) - default 'all'" % ",".join(webviewer_list)) - g.add_option("-t", "--title", dest='title', - help="Title of the map") - g.add_option("-c", "--copyright", dest='copyright', - help="Copyright for the map") - g.add_option("-g", "--googlekey", dest='googlekey', - help="Google Maps API key from http://code.google.com/apis/maps/signup.html") - g.add_option("-b", "--bingkey", dest='bingkey', - help="Bing Maps API key from https://www.bingmapsportal.com/") - p.add_option_group(g) - - p.set_defaults(verbose=False, profile="mercator", kml=False, url='', - webviewer='all', copyright='', resampling='average', resume=False, - googlekey='INSERT_YOUR_KEY_HERE', bingkey='INSERT_YOUR_KEY_HERE') - - self.parser = p + elif options.resampling == "bilinear": + gdal_resampling = gdal.GRA_Bilinear - # ------------------------------------------------------------------------- - def open_input(self): - """Initialization of the input raster, reprojection if necessary""" - gdal.AllRegister() + elif options.resampling == "cubic": + gdal_resampling = gdal.GRA_Cubic - self.out_drv = gdal.GetDriverByName(self.tiledriver) - self.mem_drv = gdal.GetDriverByName('MEM') + elif options.resampling == "cubicspline": + gdal_resampling = gdal.GRA_CubicSpline - if not self.out_drv: - raise Exception("The '%s' driver was not found, is it available in this GDAL build?", - self.tiledriver) - if not self.mem_drv: - raise Exception( - 
"The 'MEM' driver was not found, is it available in this GDAL build?") + elif options.resampling == "lanczos": + gdal_resampling = gdal.GRA_Lanczos - # Open the input file + elif options.resampling == "mode": + gdal_resampling = gdal.GRA_Mode - if self.input: - self.in_ds = gdal.Open(self.input, gdal.GA_ReadOnly) - else: - raise Exception("No input file was specified") + elif options.resampling == "max": + gdal_resampling = gdal.GRA_Max - if self.options.verbose: - print("Input file:", - "( %sP x %sL - %s bands)" % (self.in_ds.RasterXSize, self.in_ds.RasterYSize, - self.in_ds.RasterCount)) + elif options.resampling == "min": + gdal_resampling = gdal.GRA_Min - if not self.in_ds: - # Note: GDAL prints the ERROR message too - self.error( - "It is not possible to open the input file '%s'." % self.input) + elif options.resampling == "med": + gdal_resampling = gdal.GRA_Med - # Read metadata from the input file - if self.in_ds.RasterCount == 0: - self.error("Input file '%s' has no raster band" % self.input) - - if self.in_ds.GetRasterBand(1).GetRasterColorTable(): - self.error("Please convert this file to RGB/RGBA and run gdal2tiles on the result.", - "From paletted file you can create RGBA file (temp.vrt) by:\n" - "gdal_translate -of vrt -expand rgba %s temp.vrt\n" - "then run:\n" - "gdal2tiles temp.vrt" % self.input) - - # Get NODATA value - in_nodata = [] - for i in range(1, self.in_ds.RasterCount+1): - if self.in_ds.GetRasterBand(i).GetNoDataValue() is not None: - in_nodata.append(self.in_ds.GetRasterBand(i).GetNoDataValue()) - if self.options.srcnodata: - nds = list(map(float, self.options.srcnodata.split(','))) - if len(nds) < self.in_ds.RasterCount: - in_nodata = ( - nds * self.in_ds.RasterCount)[:self.in_ds.RasterCount] - else: - in_nodata = nds + elif options.resampling == "q1": + gdal_resampling = gdal.GRA_Q1 - if self.options.verbose: - print("NODATA: %s" % in_nodata) + elif options.resampling == "q3": + gdal_resampling = gdal.GRA_Q3 - if self.options.verbose: 
- print("Preprocessed file:", - "( %sP x %sL - %s bands)" % (self.in_ds.RasterXSize, self.in_ds.RasterYSize, - self.in_ds.RasterCount)) + # Other algorithms are implemented by gdal.ReprojectImage(). + dsquery.SetGeoTransform( + ( + 0.0, + tile_size / float(querysize), + 0.0, + 0.0, + 0.0, + tile_size / float(querysize), + ) + ) + dstile.SetGeoTransform((0.0, 1.0, 0.0, 0.0, 0.0, 1.0)) - in_srs = None + res = gdal.ReprojectImage(dsquery, dstile, None, None, gdal_resampling) + if res != 0: + exit_with_error( + "ReprojectImage() failed on %s, error %d" % (tilefilename, res) + ) - if self.options.s_srs: - in_srs = osr.SpatialReference() - in_srs.SetFromUserInput(self.options.s_srs) - in_srs_wkt = in_srs.ExportToWkt() - else: - in_srs_wkt = self.in_ds.GetProjection() - if not in_srs_wkt and self.in_ds.GetGCPCount() != 0: - in_srs_wkt = self.in_ds.GetGCPProjection() - if in_srs_wkt: - in_srs = osr.SpatialReference() - in_srs.ImportFromWkt(in_srs_wkt) - - self.out_srs = osr.SpatialReference() - - if self.options.profile == 'mercator': - self.out_srs.ImportFromEPSG(3857) - elif self.options.profile == 'geodetic': - self.out_srs.ImportFromEPSG(4326) + +def setup_no_data_values(input_dataset: gdal.Dataset, options: Options) -> List[float]: + """ + Extract the NODATA values from the dataset or use the passed arguments as override if any + """ + in_nodata = [] + if options.srcnodata: + nds = list(map(float, options.srcnodata.split(","))) + if len(nds) < input_dataset.RasterCount: + in_nodata = ( + nds * input_dataset.RasterCount)[: input_dataset.RasterCount] else: - self.out_srs = in_srs + in_nodata = nds + else: + for i in range(1, input_dataset.RasterCount + 1): + band = input_dataset.GetRasterBand(i) + raster_no_data = band.GetNoDataValue() + if raster_no_data is not None: + # Ignore nodata values that are not in the range of the band data type (see https://github.com/OSGeo/gdal/pull/2299) + if band.DataType == gdal.GDT_Byte and ( + raster_no_data != int(raster_no_data) + or 
raster_no_data < 0 + or raster_no_data > 255 + ): + # We should possibly do similar check for other data types + in_nodata = [] + break + in_nodata.append(raster_no_data) - # Are the reference systems the same? Reproject if necessary. + if options.verbose: + print("NODATA: %s" % in_nodata) - self.out_ds = None + return in_nodata - if self.options.profile in ('mercator', 'geodetic'): - if ((self.in_ds.GetGeoTransform() == (0.0, 1.0, 0.0, 0.0, 0.0, 1.0)) and - (self.in_ds.GetGCPCount() == 0)): - self.error("There is no georeference - neither affine transformation (worldfile) " - "nor GCPs. You can generate only 'raster' profile tiles.", - "Either gdal2tiles with parameter -p 'raster' or use another GIS " - "software for georeference e.g. gdal_transform -gcp / -a_ullr / -a_srs") +def setup_input_srs( + input_dataset: gdal.Dataset, options: Options +) -> Tuple[Optional[osr.SpatialReference], Optional[str]]: + """ + Determines and returns the Input Spatial Reference System (SRS) as an osr object and as a + WKT representation - if in_srs: - if ((in_srs.ExportToProj4() != self.out_srs.ExportToProj4()) or - (self.in_ds.GetGCPCount() != 0)): - # Generation of VRT dataset in tile projection, - # default 'nearest neighbour' warping - self.out_ds = gdal.AutoCreateWarpedVRT( - self.in_ds, in_srs_wkt, self.out_srs.ExportToWkt()) + Uses in priority the one passed in the command line arguments. 
If None, tries to extract them + from the input dataset + """ - if self.options.verbose: - print("Warping of the raster by AutoCreateWarpedVRT " - "(result saved into 'tiles.vrt')") - self.out_ds.GetDriver().CreateCopy("tiles.vrt", self.out_ds) - - # Correction of AutoCreateWarpedVRT for NODATA values - if in_nodata != []: - tempfilename = self.gettempfilename('-gdal2tiles.vrt') - self.out_ds.GetDriver().CreateCopy(tempfilename, self.out_ds) - # open as a text file - s = open(tempfilename).read() - # Add the warping options - s = s.replace( - "", - """ - - - - """) - # replace BandMapping tag for NODATA bands.... - for i in range(len(in_nodata)): - s = s.replace( - '' % ( - (i+1), (i+1)), - """ - - %i - 0 - %i - 0 - - """ % ((i+1), (i+1), in_nodata[i], in_nodata[i])) - # save the corrected VRT - open(tempfilename, "w").write(s) - # open by GDAL as self.out_ds - self.out_ds = gdal.Open(tempfilename) - # delete the temporary file - os.unlink(tempfilename) - - # set NODATA_VALUE metadata - self.out_ds.SetMetadataItem( - 'NODATA_VALUES', ' '.join([str(i) for i in in_nodata])) - - if self.options.verbose: - print("Modified warping result saved into 'tiles1.vrt'") - open("tiles1.vrt", "w").write(s) - - # Correction of AutoCreateWarpedVRT for Mono (1 band) and RGB (3 bands) files - # without NODATA: - # equivalent of gdalwarp -dstalpha - if in_nodata == [] and self.out_ds.RasterCount in [1, 3]: - tempfilename = self.gettempfilename('-gdal2tiles.vrt') - self.out_ds.GetDriver().CreateCopy(tempfilename, self.out_ds) - # open as a text file - s = open(tempfilename).read() - # Add the warping options - s = s.replace( - "", - """ - - Alpha - - - """ % (self.out_ds.RasterCount + 1)) - s = s.replace( - "", - """ - %i - - """ % (self.out_ds.RasterCount + 1)) - s = s.replace( - "", - """ - - - """) - # save the corrected VRT - open(tempfilename, "w").write(s) - # open by GDAL as self.out_ds - self.out_ds = gdal.Open(tempfilename) - # delete the temporary file - 
os.unlink(tempfilename) - - if self.options.verbose: - print( - "Modified -dstalpha warping result saved into 'tiles1.vrt'") - open("tiles1.vrt", "w").write(s) - s = ''' - ''' + input_srs = None + input_srs_wkt = None - else: - self.error("Input file has unknown SRS.", - "Use --s_srs ESPG:xyz (or similar) to provide source reference system.") - - if self.out_ds and self.options.verbose: - print("Projected file:", "tiles.vrt", "( %sP x %sL - %s bands)" % ( - self.out_ds.RasterXSize, self.out_ds.RasterYSize, self.out_ds.RasterCount)) - - if not self.out_ds: - self.out_ds = self.in_ds - - # - # Here we should have a raster (out_ds) in the correct Spatial Reference system - # - - # Get alpha band (either directly or from NODATA value) - self.alphaband = self.out_ds.GetRasterBand(1).GetMaskBand() - if ((self.alphaband.GetMaskFlags() & gdal.GMF_ALPHA) or - self.out_ds.RasterCount == 4 or - self.out_ds.RasterCount == 2): - self.dataBandsCount = self.out_ds.RasterCount - 1 - else: - self.dataBandsCount = self.out_ds.RasterCount + if options.s_srs: + input_srs = osr.SpatialReference() + input_srs.SetFromUserInput(options.s_srs) + input_srs_wkt = input_srs.ExportToWkt() + else: + input_srs_wkt = input_dataset.GetProjection() + if not input_srs_wkt and input_dataset.GetGCPCount() != 0: + input_srs_wkt = input_dataset.GetGCPProjection() + if input_srs_wkt: + input_srs = osr.SpatialReference() + input_srs.ImportFromWkt(input_srs_wkt) - # KML test - isepsg4326 = False - srs4326 = osr.SpatialReference() - srs4326.ImportFromEPSG(4326) - if self.out_srs and srs4326.ExportToProj4() == self.out_srs.ExportToProj4(): - self.kml = True - isepsg4326 = True - if self.options.verbose: - print("KML autotest OK!") + if input_srs is not None: + input_srs.SetAxisMappingStrategy(osr.OAMS_TRADITIONAL_GIS_ORDER) - # Read the georeference - self.out_gt = self.out_ds.GetGeoTransform() + return input_srs, input_srs_wkt - # Test the size of the pixel - # Report error in case rotation/skew is in 
geotransform (possible only in 'raster' profile) - if (self.out_gt[2], self.out_gt[4]) != (0, 0): - self.error("Georeference of the raster contains rotation or skew. " - "Such raster is not supported. Please use gdalwarp first.") +def setup_output_srs( + input_srs: Optional[osr.SpatialReference], options: Options +) -> Optional[osr.SpatialReference]: + """ + Setup the desired SRS (based on options) + """ + output_srs = osr.SpatialReference() + + if options.profile == "mercator": + output_srs.ImportFromEPSG(3857) + elif options.profile == "geodetic": + output_srs.ImportFromEPSG(4326) + elif options.profile == "raster": + output_srs = input_srs + else: + output_srs = tmsMap[options.profile].srs.Clone() - # Here we expect: pixel is square, no rotation on the raster + if output_srs: + output_srs.SetAxisMappingStrategy(osr.OAMS_TRADITIONAL_GIS_ORDER) - # Output Bounds - coordinates in the output SRS - self.ominx = self.out_gt[0] - self.omaxx = self.out_gt[0] + self.out_ds.RasterXSize * self.out_gt[1] - self.omaxy = self.out_gt[3] - self.ominy = self.out_gt[3] - self.out_ds.RasterYSize * self.out_gt[1] + return output_srs - # Note: maybe round(x, 14) to avoid the gdal_translate behaviour, when 0 becomes -1e-15 - # MMGIS - def linearScale(domain, rang, value): - return ( - ((rang[1] - rang[0]) * (value - domain[0])) / - (domain[1] - domain[0]) + - rang[0] - ) - # MMGIS - self.out_ds.fRasterXSize = self.out_ds.RasterXSize - self.out_ds.fRasterYSize = self.out_ds.RasterYSize - self.out_ds.fRasterXOrigin = 0 - self.out_ds.fRasterYOrigin = 0 - self.out_ds.PixelSize = self.out_gt[1] - self.out_ds.fPixelSize = self.fPixelSize - # print("ominx", self.ominx, "omaxx", self.omaxx, "ominy", self.ominy, "omaxy", self.omaxy) - # print("fminx", self.fminx, "fmaxx", self.fmaxx, "fminy", self.fminy, "fmaxy", self.fmaxy) - if self.isRasterBounded: - self.out_ds.fRasterXSize = int(math.floor(self.out_ds.RasterXSize * (self.fmaxx - self.fminx) / ( - self.omaxx - self.ominx) * 
(self.out_ds.PixelSize / self.out_ds.fPixelSize))) - self.out_ds.fRasterYSize = int(math.ceil(self.out_ds.RasterYSize * (self.fmaxy - self.fminy) / ( - self.omaxy - self.ominy) * (self.out_ds.PixelSize / self.out_ds.fPixelSize))) - self.out_ds.fRasterXSizeRaw = int(math.floor( - self.out_ds.RasterXSize * (self.fmaxx - self.fminx) / (self.omaxx - self.ominx))) - self.out_ds.fRasterYSizeRaw = int(math.ceil( - self.out_ds.RasterYSize * (self.fmaxy - self.fminy) / (self.omaxy - self.ominy))) - # print("Full Raster Size: ", self.out_ds.fRasterXSize, self.out_ds.fRasterYSize ) - self.out_ds.fRasterXOrigin = int(math.floor(linearScale( - [self.fminx, self.fmaxx], [0, self.out_ds.fRasterXSize], self.out_gt[0]))) - self.out_ds.fRasterYOrigin = int(math.ceil(linearScale( - [self.fminy, self.fmaxy], [self.out_ds.fRasterYSize, 0], self.out_gt[3]))) - self.out_ds.fRasterXOriginRaw = int(math.floor(linearScale([self.fminx, self.fmaxx], [ - 0, self.out_ds.fRasterXSize], self.out_gt[0]) * (self.out_ds.fPixelSize / self.out_ds.PixelSize))) - self.out_ds.fRasterYOriginRaw = int(math.ceil(linearScale([self.fminy, self.fmaxy], [ - self.out_ds.fRasterYSize, 0], self.out_gt[3]) * (self.out_ds.fPixelSize / self.out_ds.PixelSize))) - self.out_ds.fRasterXWidth = int(math.floor(linearScale( - [self.fminx, self.fmaxx], [0, self.out_ds.fRasterXSize], self.omaxx))) - self.out_ds.fRasterXOrigin - self.out_ds.fRasterYHeight = int(math.ceil(linearScale( - [self.fminy, self.fmaxy], [0, self.out_ds.fRasterYSize], self.omaxy))) - self.out_ds.fRasterYOrigin +def has_georeference(dataset: gdal.Dataset) -> bool: + return ( + dataset.GetGeoTransform() != (0.0, 1.0, 0.0, 0.0, 0.0, 1.0) + or dataset.GetGCPCount() != 0 + ) - if self.options.verbose: - print("Bounds (output srs):", round(self.ominx, 13), - self.ominy, self.omaxx, self.omaxy) - # print("Input Raster Size: ", self.out_ds.RasterXSize, self.out_ds.RasterYSize) - # print("fmaxx-fminx", self.fmaxx - self.fminx, "omaxx-ominx", self.omaxx - 
self.ominx, "fmaxy-fminy", self.fmaxy - self.fminy, "omaxy-ominy", self.omaxy - self.ominy) - # print("Full Raster Size: ", self.out_ds.fRasterXSize, self.out_ds.fRasterYSize) - # print("Full Raster Size Raw: ", self.out_ds.fRasterXSizeRaw, self.out_ds.fRasterYSizeRaw) - # print("Raster Origin: ", self.out_ds.fRasterXOrigin, self.out_ds.fRasterYOrigin) - # print("Raster Origin Raw: ", self.out_ds.fRasterXOriginRaw, self.out_ds.fRasterYOriginRaw) - # print("Raster Width Height: ", self.out_ds.fRasterXWidth, self.out_ds.fRasterYHeight) +def reproject_dataset( + from_dataset: gdal.Dataset, + from_srs: Optional[osr.SpatialReference], + to_srs: Optional[osr.SpatialReference], + options: Optional[Options] = None, +) -> gdal.Dataset: + """ + Returns the input dataset in the expected "destination" SRS. + If the dataset is already in the correct SRS, returns it unmodified + """ + if not from_srs or not to_srs: + raise GDALError( + "from and to SRS must be defined to reproject the dataset") + + if (from_srs.ExportToProj4() != to_srs.ExportToProj4()) or ( + from_dataset.GetGCPCount() != 0 + ): + + if ( + from_srs.IsGeographic() + and to_srs.GetAuthorityName(None) == "EPSG" + and to_srs.GetAuthorityCode(None) == "3857" + ): + from_gt = from_dataset.GetGeoTransform(can_return_null=True) + if from_gt and from_gt[2] == 0 and from_gt[4] == 0 and from_gt[5] < 0: + maxlat = from_gt[3] + minlat = from_gt[3] + from_dataset.RasterYSize * from_gt[5] + MAX_LAT = 85.0511287798066 + adjustBounds = False + if maxlat > MAX_LAT: + maxlat = MAX_LAT + adjustBounds = True + if minlat < -MAX_LAT: + minlat = -MAX_LAT + adjustBounds = True + if adjustBounds: + ct = osr.CoordinateTransformation(from_srs, to_srs) + west, south = ct.TransformPoint(from_gt[0], minlat)[:2] + east, north = ct.TransformPoint( + from_gt[0] + from_dataset.RasterXSize * + from_gt[1], maxlat + )[:2] + return gdal.Warp( + "", + from_dataset, + format="VRT", + outputBounds=[west, south, east, north], + dstSRS="EPSG:3857", + ) + 
+ to_dataset = gdal.AutoCreateWarpedVRT( + from_dataset, from_srs.ExportToWkt(), to_srs.ExportToWkt() + ) - # Calculating ranges for tiles in different zoom levels - if self.options.profile == 'mercator': + if options and options.verbose: + print( + "Warping of the raster by AutoCreateWarpedVRT (result saved into 'tiles.vrt')" + ) + to_dataset.GetDriver().CreateCopy("tiles.vrt", to_dataset) - self.mercator = GlobalMercator() + return to_dataset + else: + return from_dataset - # Function which generates SWNE in LatLong for given tile - self.tileswne = self.mercator.TileLatLonBounds - # Generate table with min max tile coordinates for all zoomlevels - self.tminmax = list(range(0, 32)) - for tz in range(0, 32): - tminx, tminy = self.mercator.MetersToTile( - self.ominx, self.ominy, tz) - tmaxx, tmaxy = self.mercator.MetersToTile( - self.omaxx, self.omaxy, tz) - # crop tiles extending world limits (+-180,+-90) - tminx, tminy = max(0, tminx), max(0, tminy) - tmaxx, tmaxy = min(2**tz-1, tmaxx), min(2**tz-1, tmaxy) - self.tminmax[tz] = (tminx, tminy, tmaxx, tmaxy) +def add_gdal_warp_options_to_string(vrt_string, warp_options): + if not warp_options: + return vrt_string - # TODO: Maps crossing 180E (Alaska?) 
+ vrt_root = ElementTree.fromstring(vrt_string) + options = vrt_root.find("GDALWarpOptions") - # Get the minimal zoom level (map covers area equivalent to one tile) - if self.tminz is None: - self.tminz = self.mercator.ZoomForPixelSize( - self.out_gt[1] * max(self.out_ds.RasterXSize, - self.out_ds.RasterYSize) / float(self.tilesize)) + if options is None: + return vrt_string - # Get the maximal zoom level - # (closest possible zoom level up on the resolution of raster) - if self.tmaxz is None: - self.tmaxz = self.mercator.ZoomForPixelSize(self.out_gt[1]) + for key, value in warp_options.items(): + tb = ElementTree.TreeBuilder() + tb.start("Option", {"name": key}) + tb.data(value) + tb.end("Option") + elem = tb.close() + options.insert(0, elem) - if self.options.verbose: - print("Bounds (latlong):", - self.mercator.MetersToLatLon(self.ominx, self.ominy), - self.mercator.MetersToLatLon(self.omaxx, self.omaxy)) - print('MinZoomLevel:', self.tminz) - print("MaxZoomLevel:", - self.tmaxz, - "(", - self.mercator.Resolution(self.tmaxz), - ")") + return ElementTree.tostring(vrt_root).decode() - if self.options.profile == 'geodetic': - self.geodetic = GlobalGeodetic(self.options.tmscompatible) +def update_no_data_values( + warped_vrt_dataset: gdal.Dataset, + nodata_values: List[float], + options: Optional[Options] = None, +) -> gdal.Dataset: + """ + Takes an array of NODATA values and forces them on the WarpedVRT file dataset passed + """ + # TODO: gbataille - Seems that I forgot tests there + assert nodata_values != [] + + vrt_string = warped_vrt_dataset.GetMetadata("xml:VRT")[0] + + vrt_string = add_gdal_warp_options_to_string( + vrt_string, {"INIT_DEST": "NO_DATA", "UNIFIED_SRC_NODATA": "YES"} + ) + + # TODO: gbataille - check the need for this replacement. Seems to work without + # # replace BandMapping tag for NODATA bands.... 
+ # for i in range(len(nodata_values)): + # s = s.replace( + # '' % ((i+1), (i+1)), + # """ + # + # %i + # 0 + # %i + # 0 + # + # """ % ((i+1), (i+1), nodata_values[i], nodata_values[i])) + + corrected_dataset = gdal.Open(vrt_string) + + # set NODATA_VALUE metadata + corrected_dataset.SetMetadataItem( + "NODATA_VALUES", " ".join([str(i) for i in nodata_values]) + ) + + if options and options.verbose: + print("Modified warping result saved into 'tiles1.vrt'") + + with open("tiles1.vrt", "w") as f: + f.write(corrected_dataset.GetMetadata("xml:VRT")[0]) + + return corrected_dataset + + +def add_alpha_band_to_string_vrt(vrt_string: str) -> str: + # TODO: gbataille - Old code speak of this being equivalent to gdalwarp -dstalpha + # To be checked + + vrt_root = ElementTree.fromstring(vrt_string) + + index = 0 + nb_bands = 0 + for subelem in list(vrt_root): + if subelem.tag == "VRTRasterBand": + nb_bands += 1 + color_node = subelem.find("./ColorInterp") + if color_node is not None and color_node.text == "Alpha": + raise Exception("Alpha band already present") + else: + if nb_bands: + # This means that we are one element after the Band definitions + break + + index += 1 + + tb = ElementTree.TreeBuilder() + tb.start( + "VRTRasterBand", + { + "dataType": "Byte", + "band": str(nb_bands + 1), + "subClass": "VRTWarpedRasterBand", + }, + ) + tb.start("ColorInterp", {}) + tb.data("Alpha") + tb.end("ColorInterp") + tb.end("VRTRasterBand") + elem = tb.close() + + vrt_root.insert(index, elem) + + warp_options = vrt_root.find(".//GDALWarpOptions") + tb = ElementTree.TreeBuilder() + tb.start("DstAlphaBand", {}) + tb.data(str(nb_bands + 1)) + tb.end("DstAlphaBand") + elem = tb.close() + warp_options.append(elem) + + # TODO: gbataille - this is a GDALWarpOptions. Why put it in a specific place? 
+ tb = ElementTree.TreeBuilder() + tb.start("Option", {"name": "INIT_DEST"}) + tb.data("0") + tb.end("Option") + elem = tb.close() + warp_options.append(elem) + + return ElementTree.tostring(vrt_root).decode() + + +def update_alpha_value_for_non_alpha_inputs( + warped_vrt_dataset: gdal.Dataset, options: Optional[Options] = None +) -> gdal.Dataset: + """ + Handles dataset with 1 or 3 bands, i.e. without alpha channel, in the case the nodata value has + not been forced by options + """ + if warped_vrt_dataset.RasterCount in [1, 3]: - # Function which generates SWNE in LatLong for given tile - self.tileswne = self.geodetic.TileLatLonBounds + vrt_string = warped_vrt_dataset.GetMetadata("xml:VRT")[0] - # Generate table with min max tile coordinates for all zoomlevels - self.tminmax = list(range(0, 32)) - for tz in range(0, 32): - tminx, tminy = self.geodetic.LonLatToTile( - self.ominx, self.ominy, tz) - tmaxx, tmaxy = self.geodetic.LonLatToTile( - self.omaxx, self.omaxy, tz) - # crop tiles extending world limits (+-180,+-90) - tminx, tminy = max(0, tminx), max(0, tminy) - tmaxx, tmaxy = min(2**(tz+1)-1, tmaxx), min(2**tz-1, tmaxy) - self.tminmax[tz] = (tminx, tminy, tmaxx, tmaxy) + vrt_string = add_alpha_band_to_string_vrt(vrt_string) - # TODO: Maps crossing 180E (Alaska?) 
+ warped_vrt_dataset = gdal.Open(vrt_string) - # Get the maximal zoom level - # (closest possible zoom level up on the resolution of raster) - if self.tminz is None: - self.tminz = self.geodetic.ZoomForPixelSize( - self.out_gt[1] * max(self.out_ds.RasterXSize, - self.out_ds.RasterYSize) / float(self.tilesize)) + if options and options.verbose: + print("Modified -dstalpha warping result saved into 'tiles1.vrt'") - # Get the maximal zoom level - # (closest possible zoom level up on the resolution of raster) - if self.tmaxz is None: - self.tmaxz = self.geodetic.ZoomForPixelSize(self.out_gt[1]) + with open("tiles1.vrt", "w") as f: + f.write(warped_vrt_dataset.GetMetadata("xml:VRT")[0]) - if self.options.verbose: - print("Bounds (latlong):", self.ominx, - self.ominy, self.omaxx, self.omaxy) + return warped_vrt_dataset - # MMGIS - if self.options.profile == 'raster' and self.isRasterBounded: - def log2(x): - return math.log10(x) / math.log10(2) +def nb_data_bands(dataset: gdal.Dataset) -> int: + """ + Return the number of data (non-alpha) bands of a gdal dataset + """ + alphaband = dataset.GetRasterBand(1).GetMaskBand() + if ( + (alphaband.GetMaskFlags() & gdal.GMF_ALPHA) + or dataset.RasterCount == 4 + or dataset.RasterCount == 2 + ): + return dataset.RasterCount - 1 + return dataset.RasterCount + + +def _get_creation_options(options): + copts = [] + if options.tiledriver == "WEBP": + if options.webp_lossless: + copts = ["LOSSLESS=True"] + else: + copts = ["QUALITY=" + str(options.webp_quality)] + return copts - # MMGIS added 'f'* - self.nativezoom = int( - max(math.ceil(log2(self.out_ds.fRasterXSizeRaw/float(self.tilesize))), - math.ceil(log2(self.out_ds.fRasterYSizeRaw/float(self.tilesize))))) - self.basenativezoom = int( - max(math.ceil(log2(self.out_ds.fRasterXSize/float(self.tilesize))), - math.ceil(log2(self.out_ds.fRasterYSize/float(self.tilesize))))) +def create_base_tile(tile_job_info: "TileJobInfo", tile_detail: "TileDetail") -> None: + + dataBandsCount = 
tile_job_info.nb_data_bands + output = tile_job_info.output_file_path + tileext = tile_job_info.tile_extension + tile_size = tile_job_info.tile_size + options = tile_job_info.options + + cached_ds = getattr(threadLocal, "cached_ds", None) + if cached_ds and cached_ds.GetDescription() == tile_job_info.src_file: + ds = cached_ds + else: + ds = gdal.Open(tile_job_info.src_file, gdal.GA_ReadOnly) + threadLocal.cached_ds = ds + + mem_drv = gdal.GetDriverByName("MEM") + out_drv = gdal.GetDriverByName(tile_job_info.tile_driver) + alphaband = ds.GetRasterBand(1).GetMaskBand() + + tx = tile_detail.tx + ty = tile_detail.ty + tz = tile_detail.tz + rx = tile_detail.rx + ry = tile_detail.ry + rxsize = tile_detail.rxsize + rysize = tile_detail.rysize + wx = tile_detail.wx + wy = tile_detail.wy + wxsize = tile_detail.wxsize + wysize = tile_detail.wysize + querysize = tile_detail.querysize + + # MMGIS + isDEMtile = tile_detail.isDEMtile + + tilebands = dataBandsCount + 1 + + # MMGIS + if isDEMtile == True: + tilebands = 4 + + # Tile dataset in memory + tilefilename = os.path.join(output, str( + tz), str(tx), "%s.%s" % (ty, tileext)) + dstile = mem_drv.Create("", tile_size, tile_size, tilebands) + + data = alpha = None + + if options.verbose: + print( + "\tReadRaster Extent: ", (rx, ry, rxsize, + rysize), (wx, wy, wxsize, wysize) + ) + + # Query is in 'nearest neighbour' but can be bigger in then the tile_size + # We scale down the query to the tile_size by supplied algorithm. 
+ if rxsize != 0 and rysize != 0 and wxsize != 0 and wysize != 0: + try: + alpha = alphaband.ReadRaster(rx, ry, rxsize, rysize, wxsize, wysize) + + # Detect totally transparent tile and skip its creation + if tile_job_info.exclude_transparent and len(alpha) == alpha.count( + "\x00".encode("ascii") + ): + return + + data = ds.ReadRaster( + rx, + ry, + rxsize, + rysize, + wxsize, + wysize, + band_list=list(range(1, dataBandsCount + 1)), + ) + except: + pass + + # The tile in memory is a transparent file by default. Write pixel values into it if + # any + if data: + # MMGIS + if isDEMtile: + dsquery = mem_drv.Create( + '', querysize, querysize, tilebands, gdal.GDT_Byte) # 1bto4b + + data = ds.GetRasterBand(1).ReadRaster( + rx, ry, rxsize, rysize, wxsize, wysize, buf_type=gdal.GDT_Float32) + + data = struct.unpack('f' * wxsize * wysize, data) + data1 = [] + data2 = [] + data3 = [] + data4 = [] + for f in data: + # Because 0 is a valid value in many datasets yet still special in images being fully transparent, + # we're going to encode zero's as 2^31 (2147483648) (79, 0, 0, 0) and have the reader parse it back to 0 + if f == 0: + f = pow(2, 31) + f = str(binary(f)) + data1.append(int(f[:8], 2)) + data2.append(int(f[8:16], 2)) + data3.append(int(f[16:24], 2)) + data4.append(int(f[24:], 2)) + + data1s = b'' + data2s = b'' + data3s = b'' + data4s = b'' + indx = 0 + for v in data1: + data1s += struct.pack('B', data1[indx]) + data2s += struct.pack('B', data2[indx]) + data3s += struct.pack('B', data3[indx]) + data4s += struct.pack('B', data4[indx]) + indx += 1 + + dsquery.WriteRaster( + wx, wy, wxsize, wysize, data1s, band_list=[1], buf_type=gdal.GDT_Byte) + dsquery.WriteRaster( + wx, wy, wxsize, wysize, data2s, band_list=[2], buf_type=gdal.GDT_Byte) + dsquery.WriteRaster( + wx, wy, wxsize, wysize, data3s, band_list=[3], buf_type=gdal.GDT_Byte) + dsquery.WriteRaster( + wx, wy, wxsize, wysize, data4s, band_list=[4], buf_type=gdal.GDT_Byte) + # sys.exit('done') + + 
scale_query_to_tile( + dsquery, dstile, options, tilefilename=tilefilename) + del dsquery + elif tile_size == querysize: + # Use the ReadRaster result directly in tiles ('nearest neighbour' query) + dstile.WriteRaster( + wx, + wy, + wxsize, + wysize, + data, + band_list=list(range(1, dataBandsCount + 1)), + ) + dstile.WriteRaster(wx, wy, wxsize, wysize, + alpha, band_list=[tilebands]) + + # Note: For source drivers based on WaveLet compression (JPEG2000, ECW, + # MrSID) the ReadRaster function returns high-quality raster (not ugly + # nearest neighbour) + # TODO: Use directly 'near' for WaveLet files + else: + # Big ReadRaster query in memory scaled to the tile_size - all but 'near' + # algo + dsquery = mem_drv.Create("", querysize, querysize, tilebands) + # TODO: fill the null value in case a tile without alpha is produced (now + # only png tiles are supported) + dsquery.WriteRaster( + wx, + wy, + wxsize, + wysize, + data, + band_list=list(range(1, dataBandsCount + 1)), + ) + dsquery.WriteRaster(wx, wy, wxsize, wysize, + alpha, band_list=[tilebands]) + + scale_query_to_tile(dsquery, dstile, options, + tilefilename=tilefilename) + del dsquery + + del data + + if options.resampling != "antialias" and options.resampling != "near-composite": + # Write a copy of tile to png/jpg + out_drv.CreateCopy( + tilefilename, dstile, strict=0, options=_get_creation_options(options) + ) + + del dstile + + # Create a KML file for this tile. 
+ if tile_job_info.kml: + swne = get_tile_swne(tile_job_info, options) + if swne is not None: + kmlfilename = os.path.join( + output, + str(tz), + str(tx), + "%d.kml" % GDAL2Tiles.getYTile(ty, tz, options), + ) + if not options.resume or not isfile(kmlfilename): + with my_open(kmlfilename, "wb") as f: + f.write( + generate_kml( + tx, + ty, + tz, + tile_job_info.tile_extension, + tile_job_info.tile_size, + swne, + tile_job_info.options, + ).encode("utf-8") + ) + + +def create_overview_tile( + base_tz: int, + base_tiles: List[Tuple[int, int]], + output_folder: str, + tile_job_info: "TileJobInfo", + options: Options, +): + """Generating an overview tile from no more than 4 underlying tiles(base tiles)""" + overview_tz = base_tz - 1 + overview_tx = base_tiles[0][0] >> 1 + overview_ty = base_tiles[0][1] >> 1 + overview_ty_real = GDAL2Tiles.getYTile(overview_ty, overview_tz, options) + + tilefilename = os.path.join( + output_folder, + str(overview_tz), + str(overview_tx), + "%s.%s" % (overview_ty_real, tile_job_info.tile_extension), + ) + if options.verbose: + print(tilefilename) + if options.resume and isfile(tilefilename): + if options.verbose: + print("Tile generation skipped because of --resume") + return + + mem_driver = gdal.GetDriverByName("MEM") + tile_driver = tile_job_info.tile_driver + out_driver = gdal.GetDriverByName(tile_driver) + + tilebands = tile_job_info.nb_data_bands + 1 + + # MMGIS + if options.isDEMtile == True: + tilebands = 4 + + dsquery = mem_driver.Create( + "", 2 * tile_job_info.tile_size, 2 * tile_job_info.tile_size, tilebands + ) + # TODO: fill the null value + dstile = mem_driver.Create( + "", tile_job_info.tile_size, tile_job_info.tile_size, tilebands + ) + + usable_base_tiles = [] + + for base_tile in base_tiles: + base_tx = base_tile[0] + base_ty = base_tile[1] + base_ty_real = GDAL2Tiles.getYTile(base_ty, base_tz, options) + + base_tile_path = os.path.join( + output_folder, + str(base_tz), + str(base_tx), + "%s.%s" % (base_ty_real, 
tile_job_info.tile_extension), + ) + if not isfile(base_tile_path): + if options.verbose: + print("\tNo usable base tiles at path", base_tile_path) + continue + + dsquerytile = gdal.Open(base_tile_path, gdal.GA_ReadOnly) + + if base_tx % 2 == 0: + tileposx = 0 + else: + tileposx = tile_job_info.tile_size + + if options.xyz and options.profile == "raster": + if base_ty % 2 == 0: + tileposy = 0 + else: + tileposy = tile_job_info.tile_size + else: + if base_ty % 2 == 0: + tileposy = tile_job_info.tile_size + else: + tileposy = 0 + + if dsquerytile.RasterCount == tilebands - 1: + # assume that the alpha band is missing and add it + tmp_ds = mem_driver.CreateCopy("", dsquerytile, 0) + tmp_ds.AddBand() + mask = bytearray( + [255] * (tile_job_info.tile_size * tile_job_info.tile_size) + ) + tmp_ds.WriteRaster( + 0, + 0, + tile_job_info.tile_size, + tile_job_info.tile_size, + mask, + band_list=[tilebands], + ) + dsquerytile = tmp_ds + elif dsquerytile.RasterCount != tilebands: + raise Exception("Unexpected number of bands in base tile") + + base_data = dsquerytile.ReadRaster( + 0, 0, tile_job_info.tile_size, tile_job_info.tile_size + ) + + dsquery.WriteRaster( + tileposx, + tileposy, + tile_job_info.tile_size, + tile_job_info.tile_size, + base_data, + band_list=list(range(1, tilebands + 1)), + ) + + usable_base_tiles.append(base_tile) + + if not usable_base_tiles: + if options.verbose: + print("\tNo usable base tiles for overview zoom", base_tz, ", base tiles:", *base_tiles) + return + + scale_query_to_tile(dsquery, dstile, options, tilefilename=tilefilename) + # Write a copy of tile to png/jpg + if options.resampling != "antialias" and options.resampling != "near-composite": + # Write a copy of tile to png/jpg + out_driver.CreateCopy( + tilefilename, dstile, strict=0, options=_get_creation_options(options) + ) + # Remove useless side car file + aux_xml = tilefilename + ".aux.xml" + if gdal.VSIStatL(aux_xml) is not None: + gdal.Unlink(aux_xml) + + if options.verbose: + 
print("\tbuild from zoom", base_tz, " tiles:", *base_tiles) + + # Create a KML file for this tile. + if tile_job_info.kml: + swne = get_tile_swne(tile_job_info, options) + if swne is not None: + with my_open( + os.path.join( + output_folder, + "%d/%d/%d.kml" % (overview_tz, overview_tx, + overview_ty_real), + ), + "wb", + ) as f: + f.write( + generate_kml( + overview_tx, + overview_ty, + overview_tz, + tile_job_info.tile_extension, + tile_job_info.tile_size, + swne, + options, + [(t[0], t[1], base_tz) for t in base_tiles], + ).encode("utf-8") + ) + + +def group_overview_base_tiles( + base_tz: int, output_folder: str, tile_job_info: "TileJobInfo" +) -> List[List[Tuple[int, int]]]: + """Group base tiles that belong to the same overview tile""" + + overview_to_bases = {} + tminx, tminy, tmaxx, tmaxy = tile_job_info.tminmax[base_tz] + for ty in range(tmaxy, tminy - 1, -1): + overview_ty = ty >> 1 + for tx in range(tminx, tmaxx + 1): + overview_tx = tx >> 1 + base_tile = (tx, ty) + overview_tile = (overview_tx, overview_ty) + + if overview_tile not in overview_to_bases: + overview_to_bases[overview_tile] = [] + + overview_to_bases[overview_tile].append(base_tile) + + # Create directories for the tiles + overview_tz = base_tz - 1 + for tx in range(tminx, tmaxx + 1): + overview_tx = tx >> 1 + tiledirname = os.path.join( + output_folder, str(overview_tz), str(overview_tx)) + makedirs(tiledirname) + + return list(overview_to_bases.values()) + + +def count_overview_tiles(tile_job_info: "TileJobInfo") -> int: + tile_number = 0 + for tz in range(tile_job_info.tmaxz - 1, tile_job_info.tminz - 1, -1): + tminx, tminy, tmaxx, tmaxy = tile_job_info.tminmax[tz] + tile_number += (1 + abs(tmaxx - tminx)) * (1 + abs(tmaxy - tminy)) + + return tile_number + + +def optparse_init() -> optparse.OptionParser: + """Prepare the option parser for input (argv)""" + + usage = "Usage: %prog [options] input_file [output]" + p = optparse.OptionParser(usage, version="%prog " + __version__) + 
p.add_option( + "-p", + "--profile", + dest="profile", + type="choice", + choices=profile_list, + help=( + "Tile cutting profile (%s) - default 'mercator' " + "(Google Maps compatible)" % ",".join(profile_list) + ), + ) + p.add_option( + "-r", + "--resampling", + dest="resampling", + type="choice", + choices=resampling_list, + help="Resampling method (%s) - default 'average'" % ",".join(resampling_list), + ) + p.add_option( + "-s", + "--s_srs", + dest="s_srs", + metavar="SRS", + help="The spatial reference system used for the source input data", + ) + p.add_option( + "-z", + "--zoom", + dest="zoom", + help="Zoom levels to render (format:'2-5', '10-' or '10').", + ) + p.add_option( + "-e", + "--resume", + dest="resume", + action="store_true", + help="Resume mode. Generate only missing files.", + ) + p.add_option( + "-a", + "--srcnodata", + dest="srcnodata", + metavar="NODATA", + help="Value in the input dataset considered as transparent", + ) + p.add_option( + "-d", + "--tmscompatible", + dest="tmscompatible", + action="store_true", + help=( + "When using the geodetic profile, specifies the base resolution " + "as 0.703125 or 2 tiles at zoom level 0." 
+ ), + ) + p.add_option( + "--xyz", + action="store_true", + dest="xyz", + help="Use XYZ tile numbering (OSM Slippy Map tiles) instead of TMS", + ) + p.add_option( + "-v", + "--verbose", + action="store_true", + dest="verbose", + help="Print status messages to stdout", + ) + p.add_option( + "-x", + "--exclude", + action="store_true", + dest="exclude_transparent", + help="Exclude transparent tiles from result tileset", + ) + p.add_option( + "-q", + "--quiet", + action="store_true", + dest="quiet", + help="Disable messages and status to stdout", + ) + # MMGIS + p.add_option("--extentworld", dest="extentworld", + help="The full world meter extent (comma-separated as minx,maxx,miny,maxy,pixelsize) of an inner raster profile.") + # MMGIS + p.add_option("--dem", action="store_true", dest="isDEMtile", + help="Indicate if the input is a Digital Elevation Model") + p.add_option( + "--processes", + dest="nb_processes", + type="int", + help="Number of processes to use for tiling", + ) + p.add_option( + "--mpi", + action="store_true", + dest="mpi", + help="Assume launched by mpiexec and ignore --processes. " + "User should set GDAL_CACHEMAX to size per process.", + ) + p.add_option( + "--tilesize", + dest="tilesize", + metavar="PIXELS", + type="int", + help="Width and height in pixel of a tile", + ) + p.add_option( + "--tiledriver", + dest="tiledriver", + choices=["PNG", "WEBP"], + default="PNG", + type="choice", + help="which tile driver to use for the tiles", + ) + + # KML options + g = optparse.OptionGroup( + p, + "KML (Google Earth) options", + "Options for generated Google Earth SuperOverlay metadata", + ) + g.add_option( + "-k", + "--force-kml", + dest="kml", + action="store_true", + help=( + "Generate KML for Google Earth - default for 'geodetic' profile and " + "'raster' in EPSG:4326. For a dataset with different projection use " + "with caution!" 
+ ), + ) + g.add_option( + "-n", + "--no-kml", + dest="kml", + action="store_false", + help="Avoid automatic generation of KML files for EPSG:4326", + ) + g.add_option( + "-u", + "--url", + dest="url", + help="URL address where the generated tiles are going to be published", + ) + p.add_option_group(g) + + # HTML options + g = optparse.OptionGroup( + p, "Web viewer options", "Options for generated HTML viewers a la Google Maps" + ) + g.add_option( + "-w", + "--webviewer", + dest="webviewer", + type="choice", + choices=webviewer_list, + help="Web viewer to generate (%s) - default 'all'" % ",".join( + webviewer_list), + ) + g.add_option("-t", "--title", dest="title", help="Title of the map") + g.add_option("-c", "--copyright", dest="copyright", + help="Copyright for the map") + g.add_option( + "-g", + "--googlekey", + dest="googlekey", + help="Google Maps API key from https://developers.google.com/maps/faq?csw=1#using-google-maps-apis", + ) + g.add_option( + "-b", + "--bingkey", + dest="bingkey", + help="Bing Maps API key from https://www.bingmapsportal.com/", + ) + p.add_option_group(g) + + # MapML options + g = optparse.OptionGroup( + p, "MapML options", "Options for generated MapML file") + g.add_option( + "--mapml-template", + dest="mapml_template", + action="store_true", + help=( + "Filename of a template mapml file where variables will " + "be substituted. 
If not specified, the generic " + "template_tiles.mapml file from GDAL data resources " + "will be used" + ), + ) + p.add_option_group(g) + + # Webp options + g = optparse.OptionGroup(p, "WEBP options", "Options for WEBP tiledriver") + g.add_option( + "--webp-quality", + dest="webp_quality", + type=int, + default=75, + help="quality of webp image, integer between 1 and 100, default is 75", + ) + g.add_option( + "--webp-lossless", + dest="webp_lossless", + action="store_true", + help="use lossless compression for the webp image", + ) + p.add_option_group(g) + + p.set_defaults( + verbose=False, + profile="mercator", + kml=None, + url="", + webviewer="all", + copyright="", + resampling="average", + resume=False, + googlekey="INSERT_YOUR_KEY_HERE", + bingkey="INSERT_YOUR_KEY_HERE", + processes=1, + ) + + return p + + +def process_args(argv: List[str]) -> Tuple[str, str, Options]: + parser = optparse_init() + options, args = parser.parse_args(args=argv) + + # Args should be either an input file OR an input file and an output folder + if not args: + exit_with_error( + "You need to specify at least an input file as argument to the script" + ) + if len(args) > 2: + exit_with_error( + "Processing of several input files is not supported.", + "Please first use a tool like gdal_vrtmerge.py or gdal_merge.py on the " + "files: gdal_vrtmerge.py -o merged.vrt %s" % " ".join(args), + ) + + input_file = args[0] + if not isfile(input_file): + exit_with_error( + "The provided input file %s does not exist or is not a file" % input_file + ) + + if len(args) == 2: + output_folder = args[1] + else: + # Directory with input filename without extension in actual directory + output_folder = os.path.splitext(os.path.basename(input_file))[0] + + if options.webviewer == "mapml": + options.xyz = True + if options.profile == "geodetic": + options.tmscompatible = True + + options = options_post_processing(options, input_file, output_folder) + + return input_file, output_folder, options + + +def 
options_post_processing( + options: Options, input_file: str, output_folder: str +) -> Options: + if not options.title: + options.title = os.path.basename(input_file) + + # User specified zoom levels + tminz = None + tmaxz = None + if hasattr(options, "zoom") and options.zoom and isinstance(options.zoom, str): + minmax = options.zoom.split("-", 1) + zoom_min = minmax[0] + tminz = int(zoom_min) + + if len(minmax) == 2: + # Min-max zoom value + zoom_max = minmax[1] + if zoom_max: + # User-specified (non-automatically calculated) + tmaxz = int(zoom_max) + if tmaxz < tminz: + raise Exception( + "max zoom (%d) less than min zoom (%d)" % ( + tmaxz, tminz) + ) + else: + # Single zoom value (min = max) + tmaxz = tminz + options.zoom = [tminz, tmaxz] + + if options.url and not options.url.endswith("/"): + options.url += "/" + if options.url: + out_path = output_folder + if out_path.endswith("/"): + out_path = out_path[:-1] + options.url += os.path.basename(out_path) + "/" + + # Supported options + if options.resampling == "antialias" and not numpy_available: + exit_with_error( + "'antialias' resampling algorithm is not available.", + "Install PIL (Python Imaging Library) and numpy.", + ) + + if options.resampling == "near-composite" and not numpy_available: + exit_with_error( + "'near-composite' resampling algorithm is not available.", + "Install PIL (Python Imaging Library) and numpy.", + ) + + if options.tiledriver == "WEBP": + if gdal.GetDriverByName(options.tiledriver) is None: + exit_with_error("WEBP driver is not available") + + if not options.webp_lossless: + if options.webp_quality <= 0 or options.webp_quality > 100: + exit_with_error("webp_quality should be in the range [1-100]") + options.webp_quality = int(options.webp_quality) + + # Output the results + if options.verbose: + print("Options:", options) + print("Input:", input_file) + print("Output:", output_folder) + print("Cache: %s MB" % (gdal.GetCacheMax() / 1024 / 1024)) + print("") + + return options + + 
+class TileDetail(object): + tx = 0 + ty = 0 + tz = 0 + rx = 0 + ry = 0 + rxsize = 0 + rysize = 0 + wx = 0 + wy = 0 + wxsize = 0 + wysize = 0 + querysize = 0 + isDEMtile = False + + def __init__(self, **kwargs): + for key in kwargs: + if hasattr(self, key): + setattr(self, key, kwargs[key]) + + def __unicode__(self): + return "TileDetail %s\n%s\n%s\n" % (self.tx, self.ty, self.tz) + + def __str__(self): + return "TileDetail %s\n%s\n%s\n" % (self.tx, self.ty, self.tz) + + def __repr__(self): + return "TileDetail %s\n%s\n%s\n" % (self.tx, self.ty, self.tz) + + +class TileJobInfo(object): + """ + Plain object to hold tile job configuration for a dataset + """ + + src_file = "" + nb_data_bands = 0 + output_file_path = "" + tile_extension = "" + tile_size = 0 + tile_driver = None + kml = False + tminmax = [] + tminz = 0 + tmaxz = 0 + in_srs_wkt = 0 + out_geo_trans = [] + ominy = 0 + is_epsg_4326 = False + options = None + exclude_transparent = False + + def __init__(self, **kwargs): + for key in kwargs: + if hasattr(self, key): + setattr(self, key, kwargs[key]) + + def __unicode__(self): + return "TileJobInfo %s\n" % (self.src_file) + + def __str__(self): + return "TileJobInfo %s\n" % (self.src_file) + + def __repr__(self): + return "TileJobInfo %s\n" % (self.src_file) + + +class Gdal2TilesError(Exception): + pass + + +class GDAL2Tiles(object): + def __init__(self, input_file: str, output_folder: str, options: Options) -> None: + """Constructor function - initialization""" + self.out_drv = None + self.mem_drv = None + self.warped_input_dataset = None + self.out_srs = None + self.nativezoom = None + self.tminmax = None + self.tsize = None + self.mercator = None + self.geodetic = None + self.dataBandsCount = None + self.out_gt = None + self.tileswne = None + self.swne = None + self.ominx = None + self.omaxx = None + self.omaxy = None + self.ominy = None + + # MMGIS + self.isRasterBounded = False + self.isDEMtile = False + self.fminx = None + self.fmaxx = None + self.fminy 
= None + self.fmaxy = None + self.fPixelSize = None + + self.input_file = None + self.output_folder = None + + self.isepsg4326 = None + self.in_srs = None + self.in_srs_wkt = None + + # Tile format + self.tile_size = 256 + if options.isDEMtile: + self.tile_size = 32 + if options.tilesize: + self.tile_size = options.tilesize + + self.tiledriver = options.tiledriver + if options.tiledriver == "PNG": + self.tileext = "png" + else: + self.tileext = "webp" + if options.mpi: + makedirs(output_folder) + self.tmp_dir = tempfile.mkdtemp(dir=output_folder) + else: + self.tmp_dir = tempfile.mkdtemp() + self.tmp_vrt_filename = os.path.join( + self.tmp_dir, str(uuid4()) + ".vrt") + + # Should we read bigger window of the input raster and scale it down? + # Note: Modified later by open_input() + # Not for 'near' resampling + # Not for Wavelet based drivers (JPEG2000, ECW, MrSID) + # Not for 'raster' profile + self.scaledquery = True + # How big should be query window be for scaling down + # Later on reset according the chosen resampling algorithm + self.querysize = 4 * self.tile_size + + # Should we use Read on the input file for generating overview tiles? 
+ # Note: Modified later by open_input() + # Otherwise the overview tiles are generated from existing underlying tiles + self.overviewquery = False + + self.input_file = input_file + self.output_folder = output_folder + self.options = options + + # MMGIS + if self.options.extentworld: + extentworld = self.options.extentworld.split(",") + self.isRasterBounded = True + self.fminx = float(extentworld[0]) + self.fmaxx = float(extentworld[2]) + self.fminy = float(extentworld[3]) + self.fmaxy = float(extentworld[1]) + self.fPixelSize = float(extentworld[4]) + + if self.options.resampling == "near": + self.querysize = self.tile_size + + elif self.options.resampling == "bilinear": + self.querysize = self.tile_size * 2 + + self.tminz, self.tmaxz = self.options.zoom + + # MMGIS + if self.options.isDEMtile: + self.isDEMtile = True + + # KML generation + self.kml = self.options.kml + + # ------------------------------------------------------------------------- + def open_input(self) -> None: + """Initialization of the input raster, reprojection if necessary""" + gdal.AllRegister() + + self.out_drv = gdal.GetDriverByName(self.tiledriver) + self.mem_drv = gdal.GetDriverByName("MEM") + + if not self.out_drv: + raise Exception( + "The '%s' driver was not found, is it available in this GDAL build?" + % self.tiledriver + ) + if not self.mem_drv: + raise Exception( + "The 'MEM' driver was not found, is it available in this GDAL build?" + ) + + # Open the input file + + if self.input_file: + input_dataset: gdal.Dataset = gdal.Open( + self.input_file, gdal.GA_ReadOnly) + else: + raise Exception("No input file was specified") + + if self.options.verbose: + print( + "Input file:", + "( %sP x %sL - %s bands)" + % ( + input_dataset.RasterXSize, + input_dataset.RasterYSize, + input_dataset.RasterCount, + ), + ) + + if not input_dataset: + # Note: GDAL prints the ERROR message too + exit_with_error( + "It is not possible to open the input file '%s'." 
% self.input_file + ) + + # Read metadata from the input file + if input_dataset.RasterCount == 0: + exit_with_error("Input file '%s' has no raster band" % + self.input_file) + + if input_dataset.GetRasterBand(1).GetRasterColorTable(): + exit_with_error( + "Please convert this file to RGB/RGBA and run gdal2tiles on the result.", + "From paletted file you can create RGBA file (temp.vrt) by:\n" + "gdal_translate -of vrt -expand rgba %s temp.vrt\n" + "then run:\n" + "gdal2tiles temp.vrt" % self.input_file, + ) + + if self.isDEMtile != True and input_dataset.GetRasterBand(1).DataType != gdal.GDT_Byte: + exit_with_error( + "Please convert this file to 8-bit and run gdal2tiles on the result.", + "To scale pixel values you can use:\n" + "gdal_translate -of VRT -ot Byte -scale %s temp.vrt\n" + "then run:\n" + "gdal2tiles temp.vrt" % self.input_file, + ) + + if self.isDEMtile == True and input_dataset.GetRasterBand(1).DataType != gdal.GDT_Float32: + exit_with_error( + "Please convert this file to 32-bit for its first band and run gdal2tiles on the result.", + ) + + in_nodata = setup_no_data_values(input_dataset, self.options) + + if self.options.verbose: + print( + "Preprocessed file:", + "( %sP x %sL - %s bands)" + % ( + input_dataset.RasterXSize, + input_dataset.RasterYSize, + input_dataset.RasterCount, + ), + ) + + self.in_srs, self.in_srs_wkt = setup_input_srs( + input_dataset, self.options) + + self.out_srs = setup_output_srs(self.in_srs, self.options) + + # If input and output reference systems are different, we reproject the input dataset into + # the output reference system for easier manipulation + + self.warped_input_dataset = None + + if self.options.profile != "raster": + + if not self.in_srs: + exit_with_error( + "Input file has unknown SRS.", + "Use --s_srs EPSG:xyz (or similar) to provide source reference system.", + ) + + if not has_georeference(input_dataset): + exit_with_error( + "There is no georeference - neither affine transformation (worldfile) " + 
"nor GCPs. You can generate only 'raster' profile tiles.", + "Either gdal2tiles with parameter -p 'raster' or use another GIS " + "software for georeference e.g. gdal_transform -gcp / -a_ullr / -a_srs", + ) + + if (self.in_srs.ExportToProj4() != self.out_srs.ExportToProj4()) or ( + input_dataset.GetGCPCount() != 0 + ): + self.warped_input_dataset = reproject_dataset( + input_dataset, self.in_srs, self.out_srs + ) + + if in_nodata: + self.warped_input_dataset = update_no_data_values( + self.warped_input_dataset, in_nodata, options=self.options + ) + else: + self.warped_input_dataset = update_alpha_value_for_non_alpha_inputs( + self.warped_input_dataset, options=self.options + ) + + if self.warped_input_dataset and self.options.verbose: + print( + "Projected file:", + "tiles.vrt", + "( %sP x %sL - %s bands)" + % ( + self.warped_input_dataset.RasterXSize, + self.warped_input_dataset.RasterYSize, + self.warped_input_dataset.RasterCount, + ), + ) + + if not self.warped_input_dataset: + self.warped_input_dataset = input_dataset + + gdal.GetDriverByName("VRT").CreateCopy( + self.tmp_vrt_filename, self.warped_input_dataset + ) + + self.dataBandsCount = nb_data_bands(self.warped_input_dataset) + + # KML test + self.isepsg4326 = False + srs4326 = osr.SpatialReference() + srs4326.ImportFromEPSG(4326) + srs4326.SetAxisMappingStrategy(osr.OAMS_TRADITIONAL_GIS_ORDER) + if self.out_srs and srs4326.ExportToProj4() == self.out_srs.ExportToProj4(): + self.isepsg4326 = True + if self.kml is None: + self.kml = True + if self.kml and self.options.verbose: + print("KML autotest OK!") + + if self.kml is None: + self.kml = False + + # Read the georeference + self.out_gt = self.warped_input_dataset.GetGeoTransform() + + # Test the size of the pixel + + # Report error in case rotation/skew is in geotransform (possible only in 'raster' profile) + if (self.out_gt[2], self.out_gt[4]) != (0, 0): + exit_with_error( + "Georeference of the raster contains rotation or skew. 
" + "Such raster is not supported. Please use gdalwarp first." + ) + + # Here we expect: pixel is square, no rotation on the raster + + # Output Bounds - coordinates in the output SRS + self.ominx = self.out_gt[0] + self.omaxx = ( + self.out_gt[0] + + self.warped_input_dataset.RasterXSize * self.out_gt[1] + ) + self.omaxy = self.out_gt[3] + self.ominy = ( + self.out_gt[3] - + self.warped_input_dataset.RasterYSize * self.out_gt[1] + ) + # Note: maybe round(x, 14) to avoid the gdal_translate behavior, when 0 becomes -1e-15 + + # MMGIS + def linearScale(domain, rang, value): + return ( + ((rang[1] - rang[0]) * (value - domain[0])) / + (domain[1] - domain[0]) + + rang[0] + ) + # MMGIS + self.warped_input_dataset.fRasterXSize = self.warped_input_dataset.RasterXSize + self.warped_input_dataset.fRasterYSize = self.warped_input_dataset.RasterYSize + self.warped_input_dataset.fRasterXOrigin = 0 + self.warped_input_dataset.fRasterYOrigin = 0 + self.warped_input_dataset.PixelSize = self.out_gt[1] + self.warped_input_dataset.fPixelSize = self.fPixelSize + + + if self.isRasterBounded: + self.warped_input_dataset.fRasterXSize = self.warped_input_dataset.RasterXSize * (self.fmaxx - self.fminx) / ( + self.omaxx - self.ominx) * (self.warped_input_dataset.PixelSize / self.warped_input_dataset.fPixelSize) + self.warped_input_dataset.fRasterYSize = self.warped_input_dataset.RasterYSize * (self.fmaxy - self.fminy) / ( + self.omaxy - self.ominy) * (self.warped_input_dataset.PixelSize / self.warped_input_dataset.fPixelSize) + self.warped_input_dataset.fRasterXSizeRaw = self.warped_input_dataset.RasterXSize * (self.fmaxx - self.fminx) / (self.omaxx - self.ominx) + self.warped_input_dataset.fRasterYSizeRaw = self.warped_input_dataset.RasterYSize * (self.fmaxy - self.fminy) / (self.omaxy - self.ominy) + self.warped_input_dataset.fRasterXOrigin = linearScale( + [self.fminx, self.fmaxx], [0, self.warped_input_dataset.fRasterXSize], self.out_gt[0]) + self.warped_input_dataset.fRasterYOrigin = 
linearScale( + [self.fminy, self.fmaxy], [self.warped_input_dataset.fRasterYSize, 0], self.out_gt[3]) + self.warped_input_dataset.fRasterXOriginRaw = linearScale([self.fminx, self.fmaxx], [ + 0, self.warped_input_dataset.fRasterXSize], self.out_gt[0]) * (self.warped_input_dataset.fPixelSize / self.warped_input_dataset.PixelSize) + self.warped_input_dataset.fRasterYOriginRaw = linearScale([self.fminy, self.fmaxy], [ + self.warped_input_dataset.fRasterYSize, 0], self.out_gt[3]) * (self.warped_input_dataset.fPixelSize / self.warped_input_dataset.PixelSize) + self.warped_input_dataset.fRasterXWidth = linearScale( + [self.fminx, self.fmaxx], [0, self.warped_input_dataset.fRasterXSize], self.omaxx) - linearScale( + [self.fminx, self.fmaxx], [0, self.warped_input_dataset.fRasterXSize], self.ominx) + self.warped_input_dataset.fRasterYHeight = linearScale( + [self.fminy, self.fmaxy], [0, self.warped_input_dataset.fRasterYSize], self.omaxy) - linearScale( + [self.fminy, self.fmaxy], [0, self.warped_input_dataset.fRasterYSize], self.ominy) + if self.options.verbose: + print("ominx", self.ominx, "omaxx", self.omaxx, "ominy", self.ominy, "omaxy", self.omaxy) + print("fminx", self.fminx, "fmaxx", self.fmaxx, "fminy", self.fminy, "fmaxy", self.fmaxy) + print("px_size", self.warped_input_dataset.PixelSize, "fpx_size", self.warped_input_dataset.fPixelSize) + print("Orig Raster Size: ", self.warped_input_dataset.RasterXSize, self.warped_input_dataset.RasterYSize ) + print("Full Raster Size: ", self.warped_input_dataset.fRasterXSize, self.warped_input_dataset.fRasterYSize ) + print("Full Raster Size Raw: ", self.warped_input_dataset.fRasterXSizeRaw, self.warped_input_dataset.fRasterYSizeRaw ) + print("Full Raster XY Origin: ", self.warped_input_dataset.fRasterXOrigin, self.warped_input_dataset.fRasterYOrigin ) + print("Full fRasterXYOriginRaw", self.warped_input_dataset.fRasterXOriginRaw, self.warped_input_dataset.fRasterYOriginRaw) + print("Full fRasterXWidth/Height", 
self.warped_input_dataset.fRasterXWidth, self.warped_input_dataset.fRasterYHeight) + print( + "Bounds (output srs):", + round(self.ominx, 13), + self.ominy, + self.omaxx, + self.omaxy, + ) + + # Calculating ranges for tiles in different zoom levels + if self.options.profile == "mercator": + + self.mercator = GlobalMercator(tile_size=self.tile_size) + + # Function which generates SWNE in LatLong for given tile + self.tileswne = self.mercator.TileLatLonBounds + + # Generate table with min max tile coordinates for all zoomlevels + self.tminmax = list(range(0, MAXZOOMLEVEL)) + for tz in range(0, MAXZOOMLEVEL): + tminx, tminy = self.mercator.MetersToTile( + self.ominx, self.ominy, tz) + tmaxx, tmaxy = self.mercator.MetersToTile( + self.omaxx, self.omaxy, tz) + # crop tiles extending world limits (+-180,+-90) + tminx, tminy = max(0, tminx), max(0, tminy) + tmaxx, tmaxy = min(2**tz - 1, tmaxx), min(2**tz - 1, tmaxy) + self.tminmax[tz] = (tminx, tminy, tmaxx, tmaxy) + + # TODO: Maps crossing 180E (Alaska?) 
+ + # Get the minimal zoom level (map covers area equivalent to one tile) + if self.tminz is None: + self.tminz = self.mercator.ZoomForPixelSize( + self.out_gt[1] + * max( + self.warped_input_dataset.RasterXSize, + self.warped_input_dataset.RasterYSize, + ) + / float(self.tile_size) + ) + + # Get the maximal zoom level + # (closest possible zoom level up on the resolution of raster) + if self.tmaxz is None: + self.tmaxz = self.mercator.ZoomForPixelSize(self.out_gt[1]) + self.tmaxz = max(self.tminz, self.tmaxz) + + self.tminz = min(self.tminz, self.tmaxz) + + if self.options.verbose: + print( + "Bounds (latlong):", + self.mercator.MetersToLatLon(self.ominx, self.ominy), + self.mercator.MetersToLatLon(self.omaxx, self.omaxy), + ) + print("MinZoomLevel:", self.tminz) + print( + "MaxZoomLevel:", + self.tmaxz, + "(", + self.mercator.Resolution(self.tmaxz), + ")", + ) + + elif self.options.profile == "geodetic": + + self.geodetic = GlobalGeodetic( + self.options.tmscompatible, tile_size=self.tile_size + ) + + # Function which generates SWNE in LatLong for given tile + self.tileswne = self.geodetic.TileLatLonBounds + + # Generate table with min max tile coordinates for all zoomlevels + self.tminmax = list(range(0, MAXZOOMLEVEL)) + for tz in range(0, MAXZOOMLEVEL): + tminx, tminy = self.geodetic.LonLatToTile( + self.ominx, self.ominy, tz) + tmaxx, tmaxy = self.geodetic.LonLatToTile( + self.omaxx, self.omaxy, tz) + # crop tiles extending world limits (+-180,+-90) + tminx, tminy = max(0, tminx), max(0, tminy) + tmaxx, tmaxy = min(2 ** (tz + 1) - 1, + tmaxx), min(2**tz - 1, tmaxy) + self.tminmax[tz] = (tminx, tminy, tmaxx, tmaxy) + + # TODO: Maps crossing 180E (Alaska?) 
+ + # Get the maximal zoom level + # (closest possible zoom level up on the resolution of raster) + if self.tminz is None: + self.tminz = self.geodetic.ZoomForPixelSize( + self.out_gt[1] + * max( + self.warped_input_dataset.RasterXSize, + self.warped_input_dataset.RasterYSize, + ) + / float(self.tile_size) + ) + + # Get the maximal zoom level + # (closest possible zoom level up on the resolution of raster) + if self.tmaxz is None: + self.tmaxz = self.geodetic.ZoomForPixelSize(self.out_gt[1]) + self.tmaxz = max(self.tminz, self.tmaxz) + + self.tminz = min(self.tminz, self.tmaxz) + + if self.options.verbose: + print( + "Bounds (latlong):", self.ominx, self.ominy, self.omaxx, self.omaxy + ) + + # MMGIS + elif self.options.profile == 'raster' and self.isRasterBounded: + + def log2(x): + return math.log10(x) / math.log10(2) # MMGIS - self.out_ds.fWorldXSize = int( - float(self.out_ds.fRasterXSize) * (2**(self.nativezoom - self.basenativezoom))) - self.out_ds.fWorldYSize = int( - float(self.out_ds.fRasterYSize) * (2**(self.nativezoom - self.basenativezoom))) - self.out_ds.fRasterXOriginWorld = int(float( - self.out_ds.fWorldXSize) * (float(self.out_ds.fRasterXOrigin) / self.out_ds.fRasterXSize)) - self.out_ds.fRasterYOriginWorld = int(float( - self.out_ds.fWorldYSize) * (float(self.out_ds.fRasterYOrigin) / self.out_ds.fRasterYSize)) - self.out_ds.fRasterXSizeWorld = int(float( - self.out_ds.fWorldXSize) * (float(self.out_ds.fRasterXWidth) / self.out_ds.fRasterXSize)) - self.out_ds.fRasterYSizeWorld = int(float( - self.out_ds.RasterYSize) * (float(self.out_ds.fRasterXSizeWorld) / self.out_ds.RasterXSize)) - # print("World Size", self.out_ds.fWorldXSize, self.out_ds.fWorldYSize) - # print("Raster Origin World", self.out_ds.fRasterXOriginWorld, self.out_ds.fRasterYOriginWorld) - # print("Raster Size World", self.out_ds.fRasterXSizeWorld, self.out_ds.fRasterYSizeWorld) + self.nativezoom = int( + 
max(math.ceil(log2(self.warped_input_dataset.fRasterXSizeRaw/float(self.tile_size))), + math.ceil(log2(self.warped_input_dataset.fRasterYSizeRaw/float(self.tile_size))))) + + self.basenativezoom = int( + max(math.ceil(log2(self.warped_input_dataset.fRasterXSize/float(self.tile_size))), + math.ceil(log2(self.warped_input_dataset.fRasterYSize/float(self.tile_size))))) + + # MMGIS + self.warped_input_dataset.fWorldXSize = float(self.warped_input_dataset.fRasterXSize) * (2**(self.nativezoom - self.basenativezoom)) + self.warped_input_dataset.fWorldYSize = float(self.warped_input_dataset.fRasterYSize) * (2**(self.nativezoom - self.basenativezoom)) + self.warped_input_dataset.fRasterXOriginWorld = float(self.warped_input_dataset.fWorldXSize) * (float(self.warped_input_dataset.fRasterXOrigin) / self.warped_input_dataset.fRasterXSize) + self.warped_input_dataset.fRasterYOriginWorld = float(self.warped_input_dataset.fWorldYSize) * (float(self.warped_input_dataset.fRasterYOrigin) / self.warped_input_dataset.fRasterYSize) + self.warped_input_dataset.fRasterXSizeWorld = float(self.warped_input_dataset.fWorldXSize) * (float(self.warped_input_dataset.fRasterXWidth) / self.warped_input_dataset.fRasterXSize) + self.warped_input_dataset.fRasterYSizeWorld = float(self.warped_input_dataset.RasterYSize) * (float(self.warped_input_dataset.fRasterXSizeWorld) / self.warped_input_dataset.RasterXSize) + + #self.warped_input_dataset.fRasterXSizeWorld = self.warped_input_dataset.RasterXSize + #self.warped_input_dataset.fRasterYSizeWorld = self.warped_input_dataset.RasterYSize + if self.options.verbose: + print("Raster Size Raw", self.warped_input_dataset.fRasterXSizeRaw, self.warped_input_dataset.fRasterYSizeRaw) + print("Raster Size", self.warped_input_dataset.RasterXSize, self.warped_input_dataset.RasterYSize) + print("Full Raster Size", self.warped_input_dataset.fRasterXSize, self.warped_input_dataset.fRasterYSize) + print("World Size", self.warped_input_dataset.fWorldXSize, 
self.warped_input_dataset.fWorldYSize) + print("Raster Origin", self.warped_input_dataset.fRasterXOrigin, self.warped_input_dataset.fRasterYOrigin) + print("Raster Origin World", self.warped_input_dataset.fRasterXOriginWorld, self.warped_input_dataset.fRasterYOriginWorld) + print("Raster Size World", self.warped_input_dataset.fRasterXSizeWorld, self.warped_input_dataset.fRasterYSizeWorld) if self.options.verbose: print("Native zoom of the raster:", self.nativezoom) + print("Base native zoom of the raster:", self.basenativezoom) + print("tile_size:", self.tile_size) # Get the minimal zoom level (whole raster in one tile) if self.tminz is None: @@ -1303,7 +2743,7 @@ def log2(x): if self.tmaxz is None: self.tmaxz = self.nativezoom - # MMGIS added 'f'* + # MMGIS # Generate table with min max tile coordinates for all zoomlevels self.tminmax = list(range(0, self.tmaxz+1)) self.tsize = list(range(0, self.tmaxz+1)) @@ -1311,37 +2751,46 @@ def log2(x): # print("Pixel Size Ratio:", (self.out_ds.fPixelSize / self.out_ds.PixelSize)) # print("nativezoom", self.nativezoom, "basenativezoom", self.basenativezoom, "tminz", self.tminz, "tmaxz", self.tmaxz) for tz in range(0, self.tmaxz+1): - tsize = 2.0**(self.tmaxz-tz)*self.tilesize - toffsetx = int(math.floor( - 2.0**(tz) * self.out_ds.fRasterXOriginRaw / self.out_ds.fRasterXSizeRaw)) - toffsety = int(math.floor( - 2.0**(tz) * (self.out_ds.fRasterYOriginRaw) / self.out_ds.fRasterYSizeRaw)) - # print("tsize", tsize, "toffsetx", toffsetx, "toffsety", toffsety) - toffsetx = int(math.floor( - self.out_ds.fRasterXOriginWorld / tsize)) - toffsety = int(math.floor( - self.out_ds.fRasterYOriginWorld / tsize)) - # print("tsize", tsize, "toffsetx", toffsetx, "toffsety", toffsety) - tmaxx = int(math.floor( - self.out_ds.fRasterXSizeWorld / tsize)) + toffsetx + 1 - - tmaxy = int(math.floor( - self.out_ds.fRasterYSizeWorld / tsize)) + toffsety + 1 - self.tsize[tz] = math.ceil(tsize) - #tminx = toffsetx - tminx = int(tmaxx - ((tmaxx - toffsetx) 
/ (0.75))) - 1 - tminy = int(tmaxy - ((tmaxy - toffsety) / (0.75))) - 1 + + xRatio = self.warped_input_dataset.fRasterXSize / (2.0**(self.basenativezoom) * self.tile_size) + yRatio = self.warped_input_dataset.fRasterYSize / (2.0**(self.basenativezoom) * self.tile_size) + tilesWide = 2.0**(tz) + tminx = math.floor(linearScale([0, self.warped_input_dataset.fRasterXSize], [0, tilesWide], self.warped_input_dataset.fRasterXOrigin) * xRatio) + tminy = math.floor(linearScale([self.warped_input_dataset.fRasterYSize, 0], [0, tilesWide], self.warped_input_dataset.fRasterYOrigin + self.warped_input_dataset.fRasterYHeight) * yRatio) + tmaxx = math.ceil(linearScale([0, self.warped_input_dataset.fRasterXSize], [0, tilesWide], self.warped_input_dataset.fRasterXOrigin + self.warped_input_dataset.fRasterXWidth) * xRatio) + tmaxy = math.ceil(linearScale([self.warped_input_dataset.fRasterYSize, 0], [0, tilesWide], self.warped_input_dataset.fRasterYOrigin) * yRatio) + + self.tsize[tz] = math.ceil(2.0**(self.tmaxz-tz)*self.tile_size) self.tminmax[tz] = (tminx, tminy, tmaxx, tmaxy) - # print("tminx", tminx, "tminy", tminy, "tmaxx", tmaxx, "tmaxy", tmaxy, "tz", tz) - elif self.options.profile == 'raster': + if self.options.verbose: + print("tminx", tminx, "tminy", tminy, "tmaxx", tmaxx, "tmaxy", tmaxy, "tz", tz, "xRatio", xRatio, "yRatio", yRatio) + + elif self.options.profile == "raster": def log2(x): return math.log10(x) / math.log10(2) - self.nativezoom = int( - max(math.ceil(log2(self.out_ds.RasterXSize/float(self.tilesize))), - math.ceil(log2(self.out_ds.RasterYSize/float(self.tilesize))))) + + self.nativezoom = max( + 0, + int( + max( + math.ceil( + log2( + self.warped_input_dataset.RasterXSize + / float(self.tile_size) + ) + ), + math.ceil( + log2( + self.warped_input_dataset.RasterYSize + / float(self.tile_size) + ) + ), + ) + ), + ) if self.options.verbose: print("Native zoom of the raster:", self.nativezoom) @@ -1353,30 +2802,69 @@ def log2(x): # Get the maximal zoom level 
(native resolution of the raster) if self.tmaxz is None: self.tmaxz = self.nativezoom + self.tmaxz = max(self.tminz, self.tmaxz) + + elif self.tmaxz > self.nativezoom: + # If the user requests at a higher precision than the native + # one, generate an oversample temporary VRT file, and tile from + # it + oversample_factor = 1 << (self.tmaxz - self.nativezoom) + if self.options.resampling in ("average", "antialias", "near-composite"): + resampleAlg = "average" + elif self.options.resampling in ( + "near", + "bilinear", + "cubic", + "cubicspline", + "lanczos", + "mode", + ): + resampleAlg = self.options.resampling + else: + resampleAlg = "bilinear" # fallback + gdal.Translate( + self.tmp_vrt_filename, + input_dataset, + width=self.warped_input_dataset.RasterXSize * oversample_factor, + height=self.warped_input_dataset.RasterYSize * oversample_factor, + resampleAlg=resampleAlg, + ) + self.warped_input_dataset = gdal.Open(self.tmp_vrt_filename) + self.out_gt = self.warped_input_dataset.GetGeoTransform() + self.nativezoom = self.tmaxz # Generate table with min max tile coordinates for all zoomlevels - self.tminmax = list(range(0, self.tmaxz+1)) - self.tsize = list(range(0, self.tmaxz+1)) - for tz in range(0, self.tmaxz+1): - tsize = 2.0**(self.tmaxz-tz)*self.tilesize + self.tminmax = list(range(0, self.tmaxz + 1)) + self.tsize = list(range(0, self.tmaxz + 1)) + for tz in range(0, self.tmaxz + 1): + tsize = 2.0 ** (self.nativezoom - tz) * self.tile_size tminx, tminy = 0, 0 - tmaxx = int(math.ceil(self.out_ds.RasterXSize / tsize)) - 1 - tmaxy = int(math.ceil(self.out_ds.RasterYSize / tsize)) - 1 + tmaxx = ( + int(math.ceil(self.warped_input_dataset.RasterXSize / tsize)) - 1 + ) + tmaxy = ( + int(math.ceil(self.warped_input_dataset.RasterYSize / tsize)) - 1 + ) self.tsize[tz] = math.ceil(tsize) self.tminmax[tz] = (tminx, tminy, tmaxx, tmaxy) # Function which generates SWNE in LatLong for given tile - if self.kml and in_srs_wkt: - ct = osr.CoordinateTransformation(in_srs, 
srs4326) + if self.kml and self.in_srs_wkt: + ct = osr.CoordinateTransformation(self.in_srs, srs4326) def rastertileswne(x, y, z): - # X-pixel size in level - pixelsizex = (2**(self.tmaxz-z) * self.out_gt[1]) - west = self.out_gt[0] + x*self.tilesize*pixelsizex - east = west + self.tilesize*pixelsizex - south = self.ominy + y*self.tilesize*pixelsizex - north = south + self.tilesize*pixelsizex - if not isepsg4326: + pixelsizex = ( + 2 ** (self.tmaxz - z) * self.out_gt[1] + ) # X-pixel size in level + west = self.out_gt[0] + x * self.tile_size * pixelsizex + east = west + self.tile_size * pixelsizex + if self.options.xyz: + north = self.omaxy - y * self.tile_size * pixelsizex + south = north - self.tile_size * pixelsizex + else: + south = self.ominy + y * self.tile_size * pixelsizex + north = south + self.tile_size * pixelsizex + if not self.isepsg4326: # Transformation to EPSG:4326 (WGS84 datum) west, south = ct.TransformPoint(west, south)[:2] east, north = ct.TransformPoint(east, north)[:2] @@ -1384,18 +2872,67 @@ def rastertileswne(x, y, z): self.tileswne = rastertileswne else: - self.tileswne = lambda x, y, z: (0, 0, 0, 0) # noqa + self.tileswne = lambda x, y, z: (0, 0, 0, 0) # noqa - def generate_metadata(self): + else: + + tms = tmsMap[self.options.profile] + + # Function which generates SWNE in LatLong for given tile + self.tileswne = None # not implemented + + # Generate table with min max tile coordinates for all zoomlevels + self.tminmax = list(range(0, tms.level_count + 1)) + for tz in range(0, tms.level_count + 1): + tminx, tminy = tms.GeorefCoordToTileCoord( + self.ominx, self.ominy, tz, self.tile_size + ) + tmaxx, tmaxy = tms.GeorefCoordToTileCoord( + self.omaxx, self.omaxy, tz, self.tile_size + ) + tminx, tminy = max(0, tminx), max(0, tminy) + tmaxx, tmaxy = min(tms.matrix_width * 2**tz - 1, tmaxx), min( + tms.matrix_height * 2**tz - 1, tmaxy + ) + self.tminmax[tz] = (tminx, tminy, tmaxx, tmaxy) + + # Get the minimal zoom level (map covers area 
equivalent to one tile) + if self.tminz is None: + self.tminz = tms.ZoomForPixelSize( + self.out_gt[1] + * max( + self.warped_input_dataset.RasterXSize, + self.warped_input_dataset.RasterYSize, + ) + / float(self.tile_size), + self.tile_size, + ) + + # Get the maximal zoom level + # (closest possible zoom level up on the resolution of raster) + if self.tmaxz is None: + self.tmaxz = tms.ZoomForPixelSize( + self.out_gt[1], self.tile_size) + self.tmaxz = max(self.tminz, self.tmaxz) + + self.tminz = min(self.tminz, self.tmaxz) + + if self.options.verbose: + print( + "Bounds (georef):", self.ominx, self.ominy, self.omaxx, self.omaxy + ) + print("MinZoomLevel:", self.tminz) + print("MaxZoomLevel:", self.tmaxz) + + def generate_metadata(self) -> None: """ Generation of main metadata files and HTML viewers (metadata related to particular tiles are generated during the tile processing). """ - if not os.path.exists(self.output): - os.makedirs(self.output) + makedirs(self.output_folder) - if self.options.profile == 'mercator': + if self.options.profile == "mercator": south, west = self.mercator.MetersToLatLon(self.ominx, self.ominy) north, east = self.mercator.MetersToLatLon(self.omaxx, self.omaxy) @@ -1404,30 +2941,30 @@ def generate_metadata(self): self.swne = (south, west, north, east) # Generate googlemaps.html - if self.options.webviewer in ('all', 'google') and self.options.profile == 'mercator': - if (not self.options.resume or not - os.path.exists(os.path.join(self.output, 'googlemaps.html'))): - f = open(os.path.join(self.output, 'googlemaps.html'), 'wb') - f.write(self.generate_googlemaps().encode('utf-8')) - f.close() - - # Generate openlayers.html - if self.options.webviewer in ('all', 'openlayers'): - if (not self.options.resume or not - os.path.exists(os.path.join(self.output, 'openlayers.html'))): - f = open(os.path.join(self.output, 'openlayers.html'), 'wb') - f.write(self.generate_openlayers().encode('utf-8')) - f.close() + if ( + self.options.webviewer in 
("all", "google") + and self.options.profile == "mercator" + ): + if not self.options.resume or not isfile( + os.path.join(self.output_folder, "googlemaps.html") + ): + with my_open( + os.path.join(self.output_folder, + "googlemaps.html"), "wb" + ) as f: + f.write(self.generate_googlemaps().encode("utf-8")) # Generate leaflet.html - if self.options.webviewer in ('all', 'leaflet'): - if (not self.options.resume or not - os.path.exists(os.path.join(self.output, 'leaflet.html'))): - f = open(os.path.join(self.output, 'leaflet.html'), 'wb') - f.write(self.generate_leaflet().encode('utf-8')) - f.close() + if self.options.webviewer in ("all", "leaflet"): + if not self.options.resume or not isfile( + os.path.join(self.output_folder, "leaflet.html") + ): + with my_open( + os.path.join(self.output_folder, "leaflet.html"), "wb" + ) as f: + f.write(self.generate_leaflet().encode("utf-8")) - elif self.options.profile == 'geodetic': + elif self.options.profile == "geodetic": west, south = self.ominx, self.ominy east, north = self.omaxx, self.omaxy @@ -1435,15 +2972,7 @@ def generate_metadata(self): north, east = min(90.0, north), min(180.0, east) self.swne = (south, west, north, east) - # Generate openlayers.html - if self.options.webviewer in ('all', 'openlayers'): - if (not self.options.resume or not - os.path.exists(os.path.join(self.output, 'openlayers.html'))): - f = open(os.path.join(self.output, 'openlayers.html'), 'wb') - f.write(self.generate_openlayers().encode('utf-8')) - f.close() - - elif self.options.profile == 'raster': + elif self.options.profile == "raster": west, south = self.ominx, self.ominy east, north = self.omaxx, self.omaxy @@ -1457,114 +2986,149 @@ def generate_metadata(self): self.swne = (south, west, north, east) - # Generate openlayers.html - if self.options.webviewer in ('all', 'openlayers'): - if (not self.options.resume or not - os.path.exists(os.path.join(self.output, 'openlayers.html'))): - f = open(os.path.join(self.output, 'openlayers.html'), 
'wb') - f.write(self.generate_openlayers().encode('utf-8')) - f.close() + else: + self.swne = None + + # Generate openlayers.html + if self.options.webviewer in ("all", "openlayers"): + if not self.options.resume or not isfile( + os.path.join(self.output_folder, "openlayers.html") + ): + with my_open( + os.path.join(self.output_folder, "openlayers.html"), "wb" + ) as f: + f.write(self.generate_openlayers().encode("utf-8")) # Generate tilemapresource.xml. - if not self.options.resume or not os.path.exists(os.path.join(self.output, 'tilemapresource.xml')): - f = open(os.path.join(self.output, 'tilemapresource.xml'), 'wb') - f.write(self.generate_tilemapresource().encode('utf-8')) - f.close() + if ( + not self.options.xyz + and self.swne is not None + and ( + not self.options.resume + or not isfile(os.path.join(self.output_folder, "tilemapresource.xml")) + ) + ): + with my_open( + os.path.join(self.output_folder, "tilemapresource.xml"), "wb" + ) as f: + f.write(self.generate_tilemapresource().encode("utf-8")) + + # Generate mapml file + if ( + self.options.webviewer in ("all", "mapml") + and self.options.xyz + and self.options.profile != "raster" + and (self.options.profile != "geodetic" or self.options.tmscompatible) + and ( + not self.options.resume + or not isfile(os.path.join(self.output_folder, "mapml.mapml")) + ) + ): + with my_open(os.path.join(self.output_folder, "mapml.mapml"), "wb") as f: + f.write(self.generate_mapml().encode("utf-8")) - if self.kml: + if self.kml and self.tileswne is not None: # TODO: Maybe problem for not automatically generated tminz # The root KML should contain links to all tiles in the tminz level children = [] xmin, ymin, xmax, ymax = self.tminmax[self.tminz] - for x in range(xmin, xmax+1): - for y in range(ymin, ymax+1): + for x in range(xmin, xmax + 1): + for y in range(ymin, ymax + 1): children.append([x, y, self.tminz]) # Generate Root KML if self.kml: - if (not self.options.resume or not - 
os.path.exists(os.path.join(self.output, 'doc.kml'))): - f = open(os.path.join(self.output, 'doc.kml'), 'wb') - f.write(self.generate_kml( - None, None, None, children).encode('utf-8')) - f.close() - - def generate_base_tiles(self, tz): + if not self.options.resume or not isfile( + os.path.join(self.output_folder, "doc.kml") + ): + with my_open( + os.path.join(self.output_folder, "doc.kml"), "wb" + ) as f: + f.write( + generate_kml( + None, + None, + None, + self.tileext, + self.tile_size, + self.tileswne, + self.options, + children, + ).encode("utf-8") + ) + + def generate_base_tiles(self) -> Tuple[TileJobInfo, List[TileDetail]]: """ Generation of the base tiles (the lowest in the pyramid) directly from the input raster """ - if self.isDEMtile: - print("Generating Tiles at Zoom " + str(tz) + ": ") - if not self.options.quiet: print("Generating Base Tiles:") if self.options.verbose: - print('') + print("") print("Tiles generated from the max zoom level:") print("----------------------------------------") - print('') + print("") - ds = self.out_ds + # Set the bounds + tminx, tminy, tmaxx, tmaxy = self.tminmax[self.tmaxz] + ds = self.warped_input_dataset + tilebands = self.dataBandsCount + 1 querysize = self.querysize - - # 1bto4b - if self.isDEMtile: - tilebands = 4 - querysize = self.tilesize - else: - tilebands = self.dataBandsCount + 1 - tz = self.tmaxz - - try: - self.tminmax[tz] - except IndexError: - print(" Won't make zoom level " + str(tz)) - return - - # Set the bounds - tminx, tminy, tmaxx, tmaxy = self.tminmax[tz] + isDEMtile = self.isDEMtile if self.options.verbose: print("dataBandsCount: ", self.dataBandsCount) print("tilebands: ", tilebands) - tcount = (1+abs(tmaxx-tminx)) * (1+abs(tmaxy-tminy)) + tcount = (1 + abs(tmaxx - tminx)) * (1 + abs(tmaxy - tminy)) ti = 0 - for ty in range(tmaxy, tminy-1, -1): - for tx in range(tminx, tmaxx+1): + tile_details = [] + + tz = self.tmaxz + + #print('xyz', tmaxx, tminx, tmaxy, tminy, tz) + + # Create directories 
for the tiles + for tx in range(tminx, tmaxx + 1): + tiledirname = os.path.join(self.output_folder, str(tz), str(tx)) + makedirs(tiledirname) + + for ty in range(tmaxy, tminy - 1, -1): + for tx in range(tminx, tmaxx + 1): - if self.stopped: - break ti += 1 + ytile = GDAL2Tiles.getYTile(ty, tz, self.options) tilefilename = os.path.join( - self.output, str(tz), str(tx), "%s.%s" % (ty, self.tileext)) + self.output_folder, + str(tz), + str(tx), + "%s.%s" % (ytile, self.tileext), + ) if self.options.verbose: - print(ti, '/', tcount, tilefilename) + print(ti, "/", tcount, tilefilename) - if self.options.resume and os.path.exists(tilefilename): + if self.options.resume and isfile(tilefilename): if self.options.verbose: print("Tile generation skipped because of --resume") - else: - self.progressbar(ti / float(tcount)) continue - # Create directories for the tile - if not os.path.exists(os.path.dirname(tilefilename)): - os.makedirs(os.path.dirname(tilefilename)) - - if self.options.profile == 'mercator': + if self.options.profile == "mercator": # Tile bounds in EPSG:3857 b = self.mercator.TileBounds(tx, ty, tz) - elif self.options.profile == 'geodetic': + elif self.options.profile == "geodetic": b = self.geodetic.TileBounds(tx, ty, tz) + elif self.options.profile != "raster": + b = tmsMap[self.options.profile].TileBounds( + tx, ty, tz, self.tile_size + ) # Don't scale up by nearest neighbour, better change the querysize # to the native resolution (and return smaller query tile) for scaling - if self.options.profile in ('mercator', 'geodetic'): + if self.options.profile != "raster": rb, wb = self.geo_query(ds, b[0], b[3], b[2], b[1]) # Pixel size in the raster covering query geo extent @@ -1575,25 +3139,23 @@ def generate_base_tiles(self, tz): # Tile bounds in raster coordinates for ReadRaster query rb, wb = self.geo_query( - ds, b[0], b[3], b[2], b[1], querysize=querysize) + ds, b[0], b[3], b[2], b[1], querysize=querysize + ) rx, ry, rxsize, rysize = rb wx, wy, wxsize, 
wysize = wb - wxsize -= 1 # 1bto4b - wysize -= 1 # 1bto4b # MMGIS - elif self.isRasterBounded: # 'raster' profile: - + elif self.isRasterBounded: # 'raster' profile: # tilesize in raster coordinates for actual zoom tsize = int(self.tsize[tz]) - xsize = self.out_ds.fWorldXSize - ysize = self.out_ds.fWorldYSize + xsize = self.warped_input_dataset.fWorldXSize + ysize = self.warped_input_dataset.fWorldYSize if tz >= self.tmaxz: - querysize = self.tilesize + querysize = self.tile_size + + rx = (tx) * tsize - self.warped_input_dataset.fRasterXOriginWorld - rx = (tx) * tsize - self.out_ds.fRasterXOriginWorld - #print("rx", rx) rxsize = 0 rxsize = tsize @@ -1601,608 +3163,128 @@ def generate_base_tiles(self, tz): rysize = tsize ry = ysize - (ty * tsize) - rysize - \ - self.out_ds.fRasterYOriginWorld + self.warped_input_dataset.fRasterYOriginWorld wx, wy = 0, 0 - wxsize = int(rxsize/float(tsize) * self.tilesize) - wysize = int(rysize/float(tsize) * self.tilesize) - if wysize != self.tilesize: - wy = self.tilesize - wysize + wxsize = rxsize/float(tsize) * self.tile_size + wysize = rysize/float(tsize) * self.tile_size + if wysize != self.tile_size: + wy = self.tile_size - wysize if rx < 0: rxsize = tsize + rx wx = -rx - wxsize = int(rxsize/float(tsize) * self.tilesize) + wxsize = rxsize/float(tsize) * self.tile_size rx = 0 if ry < 0: rysize = tsize + ry wy = -ry - wysize = int(rysize/float(tsize) * self.tilesize) + wysize = rysize/float(tsize) * self.tile_size ry = 0 - if rx + rxsize > self.out_ds.fRasterXSizeWorld: - rxsize = self.out_ds.fRasterXSizeWorld - rx - wxsize = int(rxsize/float(tsize) * self.tilesize) - if ry + rysize > self.out_ds.fRasterYSizeWorld: - rysize = self.out_ds.fRasterYSizeWorld - ry - wysize = int(rysize/float(tsize) * self.tilesize) + if rx + rxsize > self.warped_input_dataset.fRasterXSizeWorld: + rxsize = self.warped_input_dataset.fRasterXSizeWorld - rx + wxsize = rxsize/float(tsize) * self.tile_size + if ry + rysize > 
self.warped_input_dataset.fRasterYSizeWorld: + rysize = self.warped_input_dataset.fRasterYSizeWorld - ry + wysize = rysize/float(tsize) * self.tile_size # Convert rx, ry back to non-world coordinates - rx = int(float(self.out_ds.RasterXSize) * - (float(rx) / self.out_ds.fRasterXSizeWorld)) - ry = int(float(self.out_ds.RasterYSize) * - (float(ry) / self.out_ds.fRasterYSizeWorld)) - rxsize = int(float(self.out_ds.RasterXSize) * - (float(rxsize) / self.out_ds.fRasterXSizeWorld)) - rysize = int(float(self.out_ds.RasterYSize) * - (float(rysize) / self.out_ds.fRasterYSizeWorld)) - - wxsize -= 1 # 1bto4b - wysize -= 1 # 1bto4b - - #print("Extent: ", (tx, ty, tz, tsize), (rx, ry, rxsize, rysize), (wx, wy, wxsize, wysize), (self.out_ds.fRasterXOrigin, self.out_ds.fRasterYOrigin)) - else: # 'raster' profile: - # tilesize in raster coordinates for actual zoom - tsize = int(self.tsize[tz]) - xsize = self.out_ds.RasterXSize # size of the raster in pixels - ysize = self.out_ds.RasterYSize - if tz >= self.tmaxz: - querysize = self.tilesize - - rx = (tx) * tsize + rx = float(self.warped_input_dataset.RasterXSize) * (float(rx) / self.warped_input_dataset.fRasterXSizeWorld) + ry = float(self.warped_input_dataset.RasterYSize) * (float(ry) / self.warped_input_dataset.fRasterYSizeWorld) + rxsize = float(self.warped_input_dataset.RasterXSize) * (float(rxsize) / self.warped_input_dataset.fRasterXSizeWorld) + rysize = float(self.warped_input_dataset.RasterYSize) * (float(rysize) / self.warped_input_dataset.fRasterYSizeWorld) + + if self.isDEMtile: + wxsize -= 1 # 1bto4b + wysize -= 1 # 1bto4b + + rx = round(rx) + ry = round(ry) + rxsize = math.floor(rxsize) + rysize = math.floor(rysize) + wx = round(wx) + wy = round(wy) + wxsize = round(wxsize) + wysize = round(wysize) + + + #print("Tile: ", (tz, tx, ty, tsize)) + #print("Read: ", (rx, ry, rxsize, rysize)) + #print("Write: ",(wx, wy, wxsize, wysize)) + else: # 'raster' profile: + + tsize = int( + self.tsize[tz] + ) # tile_size in raster 
coordinates for actual zoom + xsize = ( + self.warped_input_dataset.RasterXSize + ) # size of the raster in pixels + ysize = self.warped_input_dataset.RasterYSize + querysize = self.tile_size + + rx = tx * tsize rxsize = 0 if tx == tmaxx: rxsize = xsize % tsize if rxsize == 0: rxsize = tsize + ry = ty * tsize rysize = 0 if ty == tmaxy: rysize = ysize % tsize if rysize == 0: rysize = tsize - ry = ysize - (ty * tsize) - rysize wx, wy = 0, 0 - wxsize = int(rxsize/float(tsize) * self.tilesize) - wysize = int(rysize/float(tsize) * self.tilesize) - if wysize != self.tilesize: - wy = self.tilesize - wysize - - if self.options.verbose: - print("\tReadRaster Extent: ", - (rx, ry, rxsize, rysize), (wx, wy, wxsize, wysize)) - - # Query is in 'nearest neighbour' but can be bigger in then the tilesize - # We scale down the query to the tilesize by supplied algorithm. + wxsize = int(rxsize / float(tsize) * self.tile_size) + wysize = int(rysize / float(tsize) * self.tile_size) - # Tile dataset in memory + if not self.options.xyz: + ry = ysize - (ty * tsize) - rysize + if wysize != self.tile_size: + wy = self.tile_size - wysize - # 1bto4b - if self.isDEMtile: - dstile = self.mem_drv.Create( - '', self.tilesize, self.tilesize, tilebands, gdal.GDT_Byte) - else: - dstile = self.mem_drv.Create( - '', self.tilesize, self.tilesize, tilebands) - - data = alpha = None # Read the source raster if anything is going inside the tile as per the computed # geo_query - if rxsize != 0 and rysize != 0 and wxsize != 0 and wysize != 0: - # 1bto4b - if self.isDEMtile: - data = ds.GetRasterBand(1).ReadRaster( - rx, ry, rxsize, rysize, wxsize, wysize, buf_type=gdal.GDT_Float32) - else: - data = ds.ReadRaster(rx, ry, rxsize, rysize, wxsize, wysize, - band_list=list(range(1, self.dataBandsCount+1))) - alpha = self.alphaband.ReadRaster( - rx, ry, rxsize, rysize, wxsize, wysize) - - # The tile in memory is a transparent file by default. 
Write pixel values into it if - # any - if data: - # 1bto4b - both this full if and else - if self.isDEMtile: - if (wxsize * wysize) > 0: - data = struct.unpack('f' * wxsize * wysize, data) - else: - return - - if self.tilesize == querysize: - # Interpolate the values from four surrounding - - # This takes our 1d list of WxH data and pads it with a rect of none values - dataPad = list(data) - for i in reversed(range(1, wysize)): - dataPad.insert(wxsize * i, 0) - dataPad.insert(wxsize * i, 0) - for i in range(wxsize + 3): - dataPad.insert(0, 0) - for i in range(wxsize + 3): - dataPad.append(0) - - dataIn = [] - # Resample based on average of four - # averaging over: i, i + 1, i + wxsize, i + wxsize + 1 - for y in range(wysize+2 - 1): - for x in range(wxsize+2 - 1): - i = x+(y*(wxsize+2)) - nW = dataPad[i] - nE = dataPad[i+1] - sW = dataPad[i+(wxsize+2)] - sE = dataPad[i+(wxsize+2)+1] - dataIn.append((nW + nE + sW + sE)/float(4)) - - # Get the surrounding eight tiles - # Get NW - if tx - 1 >= tminx and ty + 1 <= tmaxy: - rxNW, ryNW, rxsizeNW, rysizeNW, wxsizeNW, wysizeNW = getTilePxBounds(self, - tx - 1, ty + 1, tz, ds) - wxsizeNW -= 1 - wysizeNW -= 1 - if wxsizeNW != 0 and wysizeNW != 0: - dataNW = ds.GetRasterBand(1).ReadRaster( - rxNW, ryNW, rxsizeNW, rysizeNW, wxsizeNW, wysizeNW, buf_type=gdal.GDT_Float32) - if dataNW is not None and (wxsizeNW * wysizeNW) > 0: - dataNW = struct.unpack( - 'f' * wxsizeNW * wysizeNW, dataNW) - else: - dataNW = None - else: - dataNW = None - - # Get N - if ty + 1 <= tmaxy: - rxN, ryN, rxsizeN, rysizeN, wxsizeN, wysizeN = getTilePxBounds( - self, tx, ty + 1, tz, ds) - wxsizeN -= 1 - wysizeN -= 1 - if wxsizeN != 0 and wysizeN != 0: - dataN = ds.GetRasterBand(1).ReadRaster( - rxN, ryN, rxsizeN, rysizeN, wxsizeN, wysizeN, buf_type=gdal.GDT_Float32) - if dataN is not None and (wxsizeN * wysizeN) > 0: - dataN = struct.unpack( - 'f' * wxsizeN * wysizeN, dataN) - else: - dataN = None - else: - dataN = None - # Get NE - if tx + 1 <= tmaxx and 
ty + 1 <= tmaxy: - rxNE, ryNE, rxsizeNE, rysizeNE, wxsizeNE, wysizeNE = getTilePxBounds( - self, tx + 1, ty + 1, tz, ds) - wxsizeNE -= 1 - wysizeNE -= 1 - if wxsizeNE != 0 and wysizeNE != 0: - dataNE = ds.GetRasterBand(1).ReadRaster( - rxNE, ryNE, rxsizeNE, rysizeNE, wxsizeNE, wysizeNE, buf_type=gdal.GDT_Float32) - if dataNE is not None and (wxsizeNE * wysizeNE) > 0: - dataNE = struct.unpack( - 'f' * wxsizeNE * wysizeNE, dataNE) - else: - dataNE = None - else: - dataNE = None - # Get E - if tx + 1 <= tmaxx: - rxE, ryE, rxsizeE, rysizeE, wxsizeE, wysizeE = getTilePxBounds( - self, tx + 1, ty, tz, ds) - wxsizeE -= 1 - wysizeE -= 1 - if wxsizeE != 0 and wysizeE != 0: - dataE = ds.GetRasterBand(1).ReadRaster( - rxE, ryE, rxsizeE, rysizeE, wxsizeE, wysizeE, buf_type=gdal.GDT_Float32) - if dataE is not None and (wxsizeE * wysizeE) > 0: - dataE = struct.unpack( - 'f' * wxsizeE * wysizeE, dataE) - else: - dataE = None - else: - dataE = None - # Get SE - if tx + 1 <= tmaxx and ty - 1 >= tminy: - rxSE, rySE, rxsizeSE, rysizeSE, wxsizeSE, wysizeSE = getTilePxBounds( - self, tx + 1, ty - 1, tz, ds) - wxsizeSE -= 1 - wysizeSE -= 1 - if wxsizeSE != 0 and wysizeSE != 0: - dataSE = ds.GetRasterBand(1).ReadRaster( - rxSE, rySE, rxsizeSE, rysizeSE, wxsizeSE, wysizeSE, buf_type=gdal.GDT_Float32) - if dataSE is not None and (wxsizeSE * wysizeSE) > 0: - dataSE = struct.unpack( - 'f' * wxsizeSE * wysizeSE, dataSE) - else: - dataSE = None - else: - dataSE = None - # Get S - if ty - 1 >= tminy: - rxS, ryS, rxsizeS, rysizeS, wxsizeS, wysizeS = getTilePxBounds( - self, tx, ty - 1, tz, ds) - wxsizeS -= 1 - wysizeS -= 1 - if wxsizeS != 0 and wysizeS != 0: - dataS = ds.GetRasterBand(1).ReadRaster( - rxS, ryS, rxsizeS, rysizeS, wxsizeS, wysizeS, buf_type=gdal.GDT_Float32) - if dataS is not None and (wxsizeS * wysizeS) > 0: - dataS = struct.unpack( - 'f' * wxsizeS * wysizeS, dataS) - else: - dataS = None - else: - dataS = None - # Get SW - if tx - 1 >= tminx and ty - 1 >= tminy: - rxSW, rySW, 
rxsizeSW, rysizeSW, wxsizeSW, wysizeSW = getTilePxBounds( - self, tx - 1, ty - 1, tz, ds) - wxsizeSW -= 1 - wysizeSW -= 1 - if wxsizeSW != 0 and wysizeSW != 0: - dataSW = ds.GetRasterBand(1).ReadRaster( - rxSW, rySW, rxsizeSW, rysizeSW, wxsizeSW, wysizeSW, buf_type=gdal.GDT_Float32) - if dataSW is not None and (wxsizeSW * wysizeSW) > 0: - dataSW = struct.unpack( - 'f' * wxsizeSW * wysizeSW, dataSW) - else: - dataSW = None - else: - dataSW = None - # Get W - if tx - 1 >= tminx: - rxW, ryW, rxsizeW, rysizeW, wxsizeW, wysizeW = getTilePxBounds( - self, tx - 1, ty, tz, ds) - wxsizeW -= 1 - wysizeW -= 1 - if wxsizeW != 0 and wysizeW != 0: - dataW = ds.GetRasterBand(1).ReadRaster( - rxW, ryW, rxsizeW, rysizeW, wxsizeW, wysizeW, buf_type=gdal.GDT_Float32) - if dataW is not None and (wxsizeW * wysizeW) > 0: - dataW = struct.unpack( - 'f' * wxsizeW * wysizeW, dataW) - else: - dataW = None - else: - dataW = None - - # NW (uses N, NW, W) - fN = fNE = fE = fSE = fS = fSW = fW = fNW = 0 - values = 1 - if dataN is not None: - fN = dataN[len(dataN)-wxsizeN] - values = values + 1 - if dataNW is not None: - fNW = dataNW[len(dataNW)-1] - values = values + 1 - if dataW is not None: - fW = dataW[wxsizeW-1] - values = values + 1 - dataIn[0] = ((dataIn[0]*4) + fN + - fNW + fW)/float(values) - - # NE (uses N, NE, E) - fN = fNE = fE = fSE = fS = fSW = fW = fNW = 0 - values = 1 - if dataN is not None: - fN = dataN[len(dataN)-1] - values = values + 1 - if dataNE is not None: - fNE = dataNE[len(dataNE)-wxsizeNE] - values = values + 1 - if dataE is not None: - fE = dataE[0] - values = values + 1 - dataIn[wxsize] = ( - (dataIn[wxsize]*4) + fN + fNE + fE)/float(values) - - # SE (uses S, SE, E) - fN = fNE = fE = fSE = fS = fSW = fW = fNW = 0 - values = 1 - if dataS is not None: - fS = dataS[wxsizeS-1] - values = values + 1 - if dataSE is not None: - fSE = dataSE[0] - values = values + 1 - if dataE is not None: - fE = dataE[len(dataE)-wxsizeE] - values = values + 1 - dataIn[len(dataIn)-1] = 
((dataIn[len(dataIn)-1] - * 4) + fS + fSE + fE)/float(values) - - # SW (uses S, SW, W) - fN = fNE = fE = fSE = fS = fSW = fW = fNW = 0 - values = 1 - if dataS is not None: - fS = dataS[0] - values = values + 1 - if dataSW is not None: - fSW = dataSW[wxsizeSW-1] - values = values + 1 - if dataW is not None: - fW = dataW[len(dataW)-1] - values = values + 1 - dataIn[len( - dataIn)-wxsize-1] = ((dataIn[len(dataIn)-wxsize-1]*4) + fS + fSW + fW)/float(values) - - # Then the edges minus corners - # N - if dataN is not None: - for i in range(1, wxsize): - dataIn[i] = ( - (dataIn[i]*4) + dataN[len(dataN)-wxsizeN-1+i] + dataN[len(dataN)-wxsizeN-1+i+1])/float(4) - else: - for i in range(1, wxsize): - dataIn[i] = (dataIn[i]*4)/float(2) - - # E - if dataE is not None: - for i in range(1, wysize): - dataIn[((i+1)*(wxsize+1)-1)] = ((dataIn[((i+1)*(wxsize+1)-1)] - * 4) + dataE[(i-1)*wxsizeE] + dataE[i*wxsizeE])/float(4) - else: - for i in range(1, wysize): - dataIn[( - (i+1)*(wxsize+1)-1)] = (dataIn[((i+1)*(wxsize+1)-1)]*4)/float(2) - - # S - if dataS is not None: - for i in range(1, wxsize): - dataIn[len(dataIn)-wxsize-1+i] = ( - (dataIn[len(dataIn)-wxsize-1+i]*4) + dataS[i-1] + dataS[i])/float(4) - else: - for i in range(1, wxsize): - dataIn[len( - dataIn)-wxsize-1+i] = (dataIn[len(dataIn)-wxsize-1+i]*4)/float(2) - - # W - if dataW is not None: - for i in range(1, wysize): - dataIn[(i)*(wxsize+1)] = ((dataIn[(i)*(wxsize+1)]*4) + - dataW[i*wxsizeW-1] + dataW[(i+1)*wxsizeW-1])/float(4) - else: - for i in range(1, wysize): - dataIn[(i)*(wxsize+1)] = (dataIn[(i) - * (wxsize+1)]*4)/float(2) - - data1 = [] - data2 = [] - data3 = [] - data4 = [] - for f in dataIn: - f = str(binary(f)) - data1.append(int(f[:8], 2)) - data2.append(int(f[8:16], 2)) - data3.append(int(f[16:24], 2)) - data4.append(int(f[24:], 2)) - - data1s = b'' - data2s = b'' - data3s = b'' - data4s = b'' - indx = 0 - for v in data1: - data1s += struct.pack('B', data1[indx]) - data2s += struct.pack('B', data2[indx]) - 
data3s += struct.pack('B', data3[indx]) - data4s += struct.pack('B', data4[indx]) - indx += 1 - dstile.GetRasterBand(1).WriteRaster( - wx, wy, wxsize + 1, wysize + 1, data1s, buf_type=gdal.GDT_Byte) - dstile.GetRasterBand(2).WriteRaster( - wx, wy, wxsize + 1, wysize + 1, data2s, buf_type=gdal.GDT_Byte) - dstile.GetRasterBand(3).WriteRaster( - wx, wy, wxsize + 1, wysize + 1, data3s, buf_type=gdal.GDT_Byte) - dstile.GetRasterBand(4).WriteRaster( - wx, wy, wxsize + 1, wysize + 1, data4s, buf_type=gdal.GDT_Byte) - elif wxsize != 0 and wysize != 0: - # Big ReadRaster query in memory scaled to the tilesize - all but 'near' algo - dsquery = self.mem_drv.Create( - '', querysize, querysize, tilebands, gdal.GDT_Byte) # 1bto4b - # TODO: fill the null value in case a tile without alpha is produced (now only png tiles are supported) - # for i in range(1, tilebands+1): - # dsquery.GetRasterBand(1).Fill(tilenodata) - # dsquery.WriteRaster(wx, wy, wxsize, wysize, data, band_list=list(range(1,self.dataBandsCount+1)))###############1bto4b - # dsquery.WriteRaster(wx, wy, wxsize, wysize, alpha, band_list=[tilebands])###############################1bto4b - - # 1bto4b - data = ds.GetRasterBand(1).ReadRaster( - rx, ry, rxsize, rysize, wxsize, wysize, buf_type=gdal.GDT_Float32) - - data = struct.unpack('f' * wxsize * wysize, data) - data1 = [] - data2 = [] - data3 = [] - data4 = [] - for f in data: - f = str(binary(f)) - data1.append(int(f[:8], 2)) - data2.append(int(f[8:16], 2)) - data3.append(int(f[16:24], 2)) - data4.append(int(f[24:], 2)) - - data1s = b'' - data2s = b'' - data3s = b'' - data4s = b'' - indx = 0 - for v in data1: - data1s += struct.pack('B', data1[indx]) - data2s += struct.pack('B', data2[indx]) - data3s += struct.pack('B', data3[indx]) - data4s += struct.pack('B', data4[indx]) - indx += 1 - - dsquery.GetRasterBand(1).WriteRaster( - wx, wy, wxsize, wysize, data1s, buf_type=gdal.GDT_Byte) - dsquery.GetRasterBand(2).WriteRaster( - wx, wy, wxsize, wysize, data2s, 
buf_type=gdal.GDT_Byte) - dsquery.GetRasterBand(3).WriteRaster( - wx, wy, wxsize, wysize, data3s, buf_type=gdal.GDT_Byte) - dsquery.GetRasterBand(4).WriteRaster( - wx, wy, wxsize, wysize, data4s, buf_type=gdal.GDT_Byte) - # sys.exit('done') - # 1bto4b - - self.scale_query_to_tile( - dsquery, dstile, tilefilename) - del dsquery - - else: - if self.tilesize == querysize: - # Use the ReadRaster result directly in tiles ('nearest neighbour' query) - dstile.WriteRaster(wx, wy, wxsize, wysize, data, - band_list=list(range(1, self.dataBandsCount+1))) - dstile.WriteRaster( - wx, wy, wxsize, wysize, alpha, band_list=[tilebands]) - - # Note: For source drivers based on WaveLet compression (JPEG2000, ECW, - # MrSID) the ReadRaster function returns high-quality raster (not ugly - # nearest neighbour) - # TODO: Use directly 'near' for WaveLet files - else: - # Big ReadRaster query in memory scaled to the tilesize - all but 'near' - # algo - dsquery = self.mem_drv.Create( - '', querysize, querysize, tilebands) - # TODO: fill the null value in case a tile without alpha is produced (now - # only png tiles are supported) - dsquery.WriteRaster(wx, wy, wxsize, wysize, data, - band_list=list(range(1, self.dataBandsCount+1))) - dsquery.WriteRaster( - wx, wy, wxsize, wysize, alpha, band_list=[tilebands]) - - self.scale_query_to_tile( - dsquery, dstile, tilefilename) - del dsquery - - del data - - if self.options.resampling != 'antialias': - # Write a copy of tile to png/jpg - self.out_drv.CreateCopy(tilefilename, dstile, strict=0) - - del dstile - - # Create a KML file for this tile. 
- if self.kml: - kmlfilename = os.path.join( - self.output, str(tz), str(tx), '%d.kml' % ty) - if not self.options.resume or not os.path.exists(kmlfilename): - f = open(kmlfilename, 'wb') - f.write(self.generate_kml(tx, ty, tz).encode('utf-8')) - f.close() - - if not self.options.verbose and not self.options.quiet: - self.progressbar(ti / float(tcount)) - - def generate_overview_tiles(self): - """Generation of the overview tiles (higher in the pyramid) based on existing tiles""" - - if not self.options.quiet: - print("Generating Overview Tiles:") - - # 1bto4b - if self.isDEMtile: - tilebands = 4 - else: - tilebands = self.dataBandsCount + 1 - - # Usage of existing tiles: from 4 underlying tiles generate one as overview. - - tcount = 0 - for tz in range(self.tmaxz-1, self.tminz-1, -1): - tminx, tminy, tmaxx, tmaxy = self.tminmax[tz] - tcount += (1+abs(tmaxx-tminx)) * (1+abs(tmaxy-tminy)) - - ti = 0 - - for tz in range(self.tmaxz-1, self.tminz-1, -1): - tminx, tminy, tmaxx, tmaxy = self.tminmax[tz] - for ty in range(tmaxy, tminy-1, -1): - for tx in range(tminx, tmaxx+1): - - if self.stopped: - break - - ti += 1 - tilefilename = os.path.join(self.output, - str(tz), - str(tx), - "%s.%s" % (ty, self.tileext)) - - if self.options.verbose: - print(ti, '/', tcount, tilefilename) - - if self.options.resume and os.path.exists(tilefilename): - if self.options.verbose: - print("Tile generation skipped because of --resume") - else: - self.progressbar(ti / float(tcount)) - continue - - # Create directories for the tile - if not os.path.exists(os.path.dirname(tilefilename)): - os.makedirs(os.path.dirname(tilefilename)) - - dsquery = self.mem_drv.Create( - '', 2*self.tilesize, 2*self.tilesize, tilebands) - # TODO: fill the null value - dstile = self.mem_drv.Create( - '', self.tilesize, self.tilesize, tilebands) - - # TODO: Implement more clever walking on the tiles with cache functionality - # probably walk should start with reading of four tiles from top left corner - # Hilbert 
curve - - children = [] - # Read the tiles and write them to query window - for y in range(2*ty, 2*ty+2): - for x in range(2*tx, 2*tx+2): - minx, miny, maxx, maxy = self.tminmax[tz+1] - if x >= minx and x <= maxx and y >= miny and y <= maxy: - dsquerytile = gdal.Open( - os.path.join(self.output, str(tz+1), str(x), - "%s.%s" % (y, self.tileext)), - gdal.GA_ReadOnly) - if (ty == 0 and y == 1) or (ty != 0 and (y % (2*ty)) != 0): - tileposy = 0 - else: - tileposy = self.tilesize - if tx: - tileposx = x % (2*tx) * self.tilesize - elif tx == 0 and x == 1: - tileposx = self.tilesize - else: - tileposx = 0 - dsquery.WriteRaster( - tileposx, tileposy, self.tilesize, self.tilesize, - dsquerytile.ReadRaster( - 0, 0, self.tilesize, self.tilesize), - band_list=list(range(1, tilebands+1))) - children.append([x, y, tz+1]) - - self.scale_query_to_tile(dsquery, dstile, tilefilename) - # Write a copy of tile to png/jpg - if self.options.resampling != 'antialias': - # Write a copy of tile to png/jpg - self.out_drv.CreateCopy(tilefilename, dstile, strict=0) - - if self.options.verbose: - print("\tbuild from zoom", tz+1, - " tiles:", (2*tx, 2*ty), (2*tx+1, 2*ty), - (2*tx, 2*ty+1), (2*tx+1, 2*ty+1)) - - # Create a KML file for this tile. 
- if self.kml: - f = open(os.path.join( - self.output, '%d/%d/%d.kml' % (tz, tx, ty)), 'wb') - f.write(self.generate_kml( - tx, ty, tz, children).encode('utf-8')) - f.close() + tile_details.append( + TileDetail( + tx=tx, + ty=ytile, + tz=tz, + rx=rx, + ry=ry, + rxsize=rxsize, + rysize=rysize, + wx=wx, + wy=wy, + wxsize=wxsize, + wysize=wysize, + querysize=querysize, + isDEMtile=isDEMtile + ) + ) + + conf = TileJobInfo( + src_file=self.tmp_vrt_filename, + nb_data_bands=self.dataBandsCount, + output_file_path=self.output_folder, + tile_extension=self.tileext, + tile_driver=self.tiledriver, + tile_size=self.tile_size, + kml=self.kml, + tminmax=self.tminmax, + tminz=self.tminz, + tmaxz=self.tmaxz, + in_srs_wkt=self.in_srs_wkt, + out_geo_trans=self.out_gt, + ominy=self.ominy, + is_epsg_4326=self.isepsg4326, + options=self.options, + exclude_transparent=self.options.exclude_transparent, + ) - if not self.options.verbose and not self.options.quiet: - self.progressbar(ti / float(tcount)) + return conf, tile_details def geo_query(self, ds, ulx, uly, lrx, lry, querysize=0): """ @@ -2215,8 +3297,8 @@ def geo_query(self, ds, ulx, uly, lrx, lry, querysize=0): geotran = ds.GetGeoTransform() rx = int((ulx - geotran[0]) / geotran[1] + 0.001) ry = int((uly - geotran[3]) / geotran[5] + 0.001) - rxsize = int((lrx - ulx) / geotran[1] + 0.5) - rysize = int((lry - uly) / geotran[5] + 0.5) + rxsize = max(1, int((lrx - ulx) / geotran[1] + 0.5)) + rysize = max(1, int((lry - uly) / geotran[5] + 0.5)) if not querysize: wxsize, wysize = rxsize, rysize @@ -2231,7 +3313,7 @@ def geo_query(self, ds, ulx, uly, lrx, lry, querysize=0): wxsize = wxsize - wx rxsize = rxsize - int(rxsize * (float(rxshift) / rxsize)) rx = 0 - if rx+rxsize > ds.RasterXSize: + if rx + rxsize > ds.RasterXSize: wxsize = int(wxsize * (float(ds.RasterXSize - rx) / rxsize)) rxsize = ds.RasterXSize - rx @@ -2242,368 +3324,652 @@ def geo_query(self, ds, ulx, uly, lrx, lry, querysize=0): wysize = wysize - wy rysize = rysize - 
int(rysize * (float(ryshift) / rysize)) ry = 0 - if ry+rysize > ds.RasterYSize: + if ry + rysize > ds.RasterYSize: wysize = int(wysize * (float(ds.RasterYSize - ry) / rysize)) rysize = ds.RasterYSize - ry return (rx, ry, rxsize, rysize), (wx, wy, wxsize, wysize) - def scale_query_to_tile(self, dsquery, dstile, tilefilename=''): - """Scales down query dataset to the tile dataset""" - - querysize = dsquery.RasterXSize - tilesize = dstile.RasterXSize - tilebands = dstile.RasterCount - - if self.options.resampling == 'average': - - # Function: gdal.RegenerateOverview() - for i in range(1, tilebands+1): - # Black border around NODATA - res = gdal.RegenerateOverview(dsquery.GetRasterBand(i), dstile.GetRasterBand(i), - 'average') - if res != 0: - self.error("RegenerateOverview() failed on %s, error %d" % ( - tilefilename, res)) - - elif self.options.resampling == 'antialias': - - # Scaling by PIL (Python Imaging Library) - improved Lanczos - array = numpy.zeros((querysize, querysize, tilebands), numpy.uint8) - for i in range(tilebands): - array[:, :, i] = gdalarray.BandReadAsArray(dsquery.GetRasterBand(i+1), - 0, 0, querysize, querysize) - im = Image.fromarray(array, 'RGBA') # Always four bands - im1 = im.resize((tilesize, tilesize), Image.ANTIALIAS) - if os.path.exists(tilefilename): - im0 = Image.open(tilefilename) - im1 = Image.composite(im1, im0, im1) - im1.save(tilefilename, self.tiledriver) - - else: - - # Other algorithms are implemented by gdal.ReprojectImage(). - dsquery.SetGeoTransform((0.0, tilesize / float(querysize), 0.0, 0.0, 0.0, - tilesize / float(querysize))) - dstile.SetGeoTransform((0.0, 1.0, 0.0, 0.0, 0.0, 1.0)) - - res = gdal.ReprojectImage( - dsquery, dstile, None, None, self.resampling) - if res != 0: - self.error("ReprojectImage() failed on %s, error %d" % - (tilefilename, res)) - - def generate_tilemapresource(self): + def generate_tilemapresource(self) -> str: """ Template for tilemapresource.xml. Returns filled string. 
Expected variables: title, north, south, east, west, isepsg4326, projection, publishurl, - zoompixels, tilesize, tileformat, profile + zoompixels, tile_size, tileformat, profile """ args = {} - args['title'] = self.options.title - args['south'], args['west'], args['north'], args['east'] = self.swne - args['tilesize'] = self.tilesize - args['tileformat'] = self.tileext - args['publishurl'] = self.options.url - args['profile'] = self.options.profile - - if self.options.profile == 'mercator': - args['srs'] = "EPSG:3857" - elif self.options.profile == 'geodetic': - args['srs'] = "EPSG:4326" + args["xml_escaped_title"] = gdal.EscapeString( + self.options.title, gdal.CPLES_XML + ) + args["south"], args["west"], args["north"], args["east"] = self.swne + args["tile_size"] = self.tile_size + args["tileformat"] = self.tileext + args["publishurl"] = self.options.url + args["profile"] = self.options.profile + + if self.options.profile == "mercator": + args["srs"] = "EPSG:3857" + elif self.options.profile == "geodetic": + args["srs"] = "EPSG:4326" elif self.options.s_srs: - args['srs'] = self.options.s_srs + args["srs"] = self.options.s_srs elif self.out_srs: - args['srs'] = self.out_srs.ExportToWkt() + args["srs"] = self.out_srs.ExportToWkt() else: - args['srs'] = "" + args["srs"] = "" - s = """ + s = ( + """ - %(title)s + %(xml_escaped_title)s %(srs)s - + -""" % args # noqa - for z in range(self.tminz, self.tmaxz+1): - if self.options.profile == 'raster': - s += """ \n""" % ( - args['publishurl'], z, (2**(self.nativezoom-z) * self.out_gt[1]), z) - elif self.options.profile == 'mercator': - s += """ \n""" % ( - args['publishurl'], z, 156543.0339/2**z, z) - elif self.options.profile == 'geodetic': - s += """ \n""" % ( - args['publishurl'], z, 0.703125/2**z, z) +""" + % args + ) # noqa + for z in range(self.tminz, self.tmaxz + 1): + if self.options.profile == "raster": + s += ( + """ \n""" + % ( + args["publishurl"], + z, + (2 ** (self.nativezoom - z) * self.out_gt[1]), + z, + ) 
+ ) + elif self.options.profile == "mercator": + s += ( + """ \n""" + % (args["publishurl"], z, 156543.0339 / 2**z, z) + ) + elif self.options.profile == "geodetic": + s += ( + """ \n""" + % (args["publishurl"], z, 0.703125 / 2**z, z) + ) s += """ """ return s - def generate_kml(self, tx, ty, tz, children=None, **args): - """ - Template for the KML. Returns filled string. - """ - if not children: - children = [] - - args['tx'], args['ty'], args['tz'] = tx, ty, tz - args['tileformat'] = self.tileext - if 'tilesize' not in args: - args['tilesize'] = self.tilesize - - if 'minlodpixels' not in args: - args['minlodpixels'] = int(args['tilesize'] / 2) - if 'maxlodpixels' not in args: - args['maxlodpixels'] = int(args['tilesize'] * 8) - if children == []: - args['maxlodpixels'] = -1 - - if tx is None: - tilekml = False - args['title'] = self.options.title - else: - tilekml = True - args['title'] = "%d/%d/%d.kml" % (tz, tx, ty) - args['south'], args['west'], args['north'], args['east'] = self.tileswne( - tx, ty, tz) - - if tx == 0: - args['drawOrder'] = 2 * tz + 1 - elif tx is not None: - args['drawOrder'] = 2 * tz - else: - args['drawOrder'] = 0 - - url = self.options.url - if not url: - if tilekml: - url = "../../" - else: - url = "" - - s = """ - - - %(title)s - - """ % args - if tilekml: - s += """ - - - %(north).14f - %(south).14f - %(east).14f - %(west).14f - - - %(minlodpixels)d - %(maxlodpixels)d - - - - %(drawOrder)d - - %(ty)d.%(tileformat)s - - - %(north).14f - %(south).14f - %(east).14f - %(west).14f - - - """ % args - - for cx, cy, cz in children: - csouth, cwest, cnorth, ceast = self.tileswne(cx, cy, cz) - s += """ - - %d/%d/%d.%s - - - %.14f - %.14f - %.14f - %.14f - - - %d - -1 - - - - %s%d/%d/%d.kml - onRegion - - - - """ % (cz, cx, cy, args['tileformat'], cnorth, csouth, ceast, cwest, - args['minlodpixels'], url, cz, cx, cy) - - s += """ - - """ - return s - - def generate_googlemaps(self): + def generate_googlemaps(self) -> str: """ Template for 
googlemaps.html implementing Overlay of tiles for 'mercator' profile. It returns filled string. Expected variables: - title, googlemapskey, north, south, east, west, minzoom, maxzoom, tilesize, tileformat, + title, googlemapskey, north, south, east, west, minzoom, maxzoom, tile_size, tileformat, publishurl """ args = {} - args['title'] = self.options.title - args['googlemapskey'] = self.options.googlekey - args['south'], args['west'], args['north'], args['east'] = self.swne - args['minzoom'] = self.tminz - args['maxzoom'] = self.tmaxz - args['tilesize'] = self.tilesize - args['tileformat'] = self.tileext - args['publishurl'] = self.options.url - args['copyright'] = self.options.copyright - - s = r""" + args["xml_escaped_title"] = gdal.EscapeString( + self.options.title, gdal.CPLES_XML + ) + args["googlemapsurl"] = "https://maps.googleapis.com/maps/api/js" + if self.options.googlekey != "INSERT_YOUR_KEY_HERE": + args["googlemapsurl"] += "?key=" + self.options.googlekey + args["googlemapsurl_hint"] = "" + else: + args[ + "googlemapsurl_hint" + ] = "" + args["south"], args["west"], args["north"], args["east"] = self.swne + args["minzoom"] = self.tminz + args["maxzoom"] = self.tmaxz + args["tile_size"] = self.tile_size + args["tileformat"] = self.tileext + args["publishurl"] = self.options.url + args["copyright"] = self.options.copyright + + # Logic below inspired from https://www.gavinharriss.com/code/opacity-control + # which borrowed on gdal2tiles itself to migrate from Google Maps V2 to V3 + + args[ + "custom_tile_overlay_js" + ] = """ +// Beginning of https://github.com/gavinharriss/google-maps-v3-opacity-control/blob/master/CustomTileOverlay.js +// with CustomTileOverlay.prototype.getTileUrl() method customized for gdal2tiles needs. + +/******************************************************************************* +Copyright (c) 2010-2012. 
Gavin Harriss +Site: http://www.gavinharriss.com/ +Originally developed for: http://www.topomap.co.nz/ +Licences: Creative Commons Attribution 3.0 New Zealand License +http://creativecommons.org/licenses/by/3.0/nz/ +******************************************************************************/ + +CustomTileOverlay = function (map, opacity) { + this.tileSize = new google.maps.Size(256, 256); // Change to tile size being used + + this.map = map; + this.opacity = opacity; + this.tiles = []; + + this.visible = false; + this.initialized = false; + + this.self = this; +} + +CustomTileOverlay.prototype = new google.maps.OverlayView(); + +CustomTileOverlay.prototype.getTile = function (p, z, ownerDocument) { + // If tile already exists then use it + for (var n = 0; n < this.tiles.length; n++) { + if (this.tiles[n].id == 't_' + p.x + '_' + p.y + '_' + z) { + return this.tiles[n]; + } + } + + // If tile doesn't exist then create it + var tile = ownerDocument.createElement('div'); + var tp = this.getTileUrlCoord(p, z); + tile.id = 't_' + tp.x + '_' + tp.y + '_' + z + tile.style.width = this.tileSize.width + 'px'; + tile.style.height = this.tileSize.height + 'px'; + tile.style.backgroundImage = 'url(' + this.getTileUrl(tp, z) + ')'; + tile.style.backgroundRepeat = 'no-repeat'; + + if (!this.visible) { + tile.style.display = 'none'; + } + + this.tiles.push(tile) + + this.setObjectOpacity(tile); + + return tile; +} + +// Save memory / speed up the display by deleting tiles out of view +// Essential for use on iOS devices such as iPhone and iPod! 
+CustomTileOverlay.prototype.deleteHiddenTiles = function (zoom) { + var bounds = this.map.getBounds(); + var tileNE = this.getTileUrlCoordFromLatLng(bounds.getNorthEast(), zoom); + var tileSW = this.getTileUrlCoordFromLatLng(bounds.getSouthWest(), zoom); + + var minX = tileSW.x - 1; + var maxX = tileNE.x + 1; + var minY = tileSW.y - 1; + var maxY = tileNE.y + 1; + + var tilesToKeep = []; + var tilesLength = this.tiles.length; + for (var i = 0; i < tilesLength; i++) { + var idParts = this.tiles[i].id.split("_"); + var tileX = Number(idParts[1]); + var tileY = Number(idParts[2]); + var tileZ = Number(idParts[3]); + if (( + (minX < maxX && (tileX >= minX && tileX <= maxX)) + || (minX > maxX && ((tileX >= minX && tileX <= (Math.pow(2, zoom) - 1)) || (tileX >= 0 && tileX <= maxX))) // Lapped the earth! + ) + && (tileY >= minY && tileY <= maxY) + && tileZ == zoom) { + tilesToKeep.push(this.tiles[i]); + } + else { + delete this.tiles[i]; + } + } + + this.tiles = tilesToKeep; +}; + +CustomTileOverlay.prototype.pointToTile = function (point, z) { + var projection = this.map.getProjection(); + var worldCoordinate = projection.fromLatLngToPoint(point); + var pixelCoordinate = new google.maps.Point(worldCoordinate.x * Math.pow(2, z), worldCoordinate.y * Math.pow(2, z)); + var tileCoordinate = new google.maps.Point(Math.floor(pixelCoordinate.x / this.tileSize.width), Math.floor(pixelCoordinate.y / this.tileSize.height)); + return tileCoordinate; +} + +CustomTileOverlay.prototype.getTileUrlCoordFromLatLng = function (latlng, zoom) { + return this.getTileUrlCoord(this.pointToTile(latlng, zoom), zoom) +} + +CustomTileOverlay.prototype.getTileUrlCoord = function (coord, zoom) { + var tileRange = 1 << zoom; + var y = tileRange - coord.y - 1; + var x = coord.x; + if (x < 0 || x >= tileRange) { + x = (x % tileRange + tileRange) % tileRange; + } + return new google.maps.Point(x, y); +} + +// Modified for gdal2tiles needs +CustomTileOverlay.prototype.getTileUrl = function (tile, zoom) 
{ + + if ((zoom < mapMinZoom) || (zoom > mapMaxZoom)) { + return "https://gdal.org/resources/gdal2tiles/none.png"; + } + var ymax = 1 << zoom; + var y = ymax - tile.y -1; + var tileBounds = new google.maps.LatLngBounds( + fromMercatorPixelToLatLng( new google.maps.Point( (tile.x)*256, (y+1)*256 ) , zoom ), + fromMercatorPixelToLatLng( new google.maps.Point( (tile.x+1)*256, (y)*256 ) , zoom ) + ); + if (mapBounds.intersects(tileBounds)) { + return zoom+"/"+tile.x+"/"+tile.y+".png"; + } else { + return "https://gdal.org/resources/gdal2tiles/none.png"; + } + +} + +CustomTileOverlay.prototype.initialize = function () { + if (this.initialized) { + return; + } + var self = this.self; + this.map.overlayMapTypes.insertAt(0, self); + this.initialized = true; +} + +CustomTileOverlay.prototype.hide = function () { + this.visible = false; + + var tileCount = this.tiles.length; + for (var n = 0; n < tileCount; n++) { + this.tiles[n].style.display = 'none'; + } +} + +CustomTileOverlay.prototype.show = function () { + this.initialize(); + this.visible = true; + var tileCount = this.tiles.length; + for (var n = 0; n < tileCount; n++) { + this.tiles[n].style.display = ''; + } +} + +CustomTileOverlay.prototype.releaseTile = function (tile) { + tile = null; +} + +CustomTileOverlay.prototype.setOpacity = function (op) { + this.opacity = op; + + var tileCount = this.tiles.length; + for (var n = 0; n < tileCount; n++) { + this.setObjectOpacity(this.tiles[n]); + } +} + +CustomTileOverlay.prototype.setObjectOpacity = function (obj) { + if (this.opacity > 0) { + if (typeof (obj.style.filter) == 'string') { obj.style.filter = 'alpha(opacity:' + this.opacity + ')'; } + if (typeof (obj.style.KHTMLOpacity) == 'string') { obj.style.KHTMLOpacity = this.opacity / 100; } + if (typeof (obj.style.MozOpacity) == 'string') { obj.style.MozOpacity = this.opacity / 100; } + if (typeof (obj.style.opacity) == 'string') { obj.style.opacity = this.opacity / 100; } + } +} + +// End of 
https://github.com/gavinharriss/google-maps-v3-opacity-control/blob/master/CustomTileOverlay.js +""" + + args[ + "ext_draggable_object_js" + ] = """ +// Beginning of https://github.com/gavinharriss/google-maps-v3-opacity-control/blob/master/ExtDraggableObject.js + +/** + * @name ExtDraggableObject + * @version 1.0 + * @author Gabriel Schneider + * @copyright (c) 2009 Gabriel Schneider + * @fileoverview This sets up a given DOM element to be draggable + * around the page. + */ + +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/** + * Sets up a DOM element to be draggable. The options available + * within {@link ExtDraggableObjectOptions} are: top, left, container, + * draggingCursor, draggableCursor, intervalX, intervalY, + * toleranceX, toleranceY, restrictX, and restrictY. 
+ * @param {HTMLElement} src The element to make draggable + * @param {ExtDraggableObjectOptions} [opts] options + * @constructor + */ +function ExtDraggableObject(src, opt_drag) { + var me = this; + var event_ = (window["GEvent"]||google.maps.Event||google.maps.event); + var opt_drag_=opt_drag||{}; + var draggingCursor_ = opt_drag_.draggingCursor||"default"; + var draggableCursor_ = opt_drag_.draggableCursor||"default"; + var moving_ = false, preventDefault_; + var currentX_, currentY_, formerY_, formerX_, formerMouseX_, formerMouseY_; + var top_, left_; + var mouseDownEvent_, mouseUpEvent_, mouseMoveEvent_; + var originalX_, originalY_; + var halfIntervalX_ = Math.round(opt_drag_.intervalX/2); + var halfIntervalY_ = Math.round(opt_drag_.intervalY/2); + var target_ = src.setCapture?src:document; + + if (typeof opt_drag_.intervalX !== "number") { + opt_drag_.intervalX = 1; + } + if (typeof opt_drag_.intervalY !== "number") { + opt_drag_.intervalY = 1; + } + if (typeof opt_drag_.toleranceX !== "number") { + opt_drag_.toleranceX = Infinity; + } + if (typeof opt_drag_.toleranceY !== "number") { + opt_drag_.toleranceY = Infinity; + } + + mouseDownEvent_ = event_.addDomListener(src, "mousedown", mouseDown_); + mouseUpEvent_ = event_.addDomListener(target_, "mouseup", mouseUp_); + + setCursor_(false); + if (opt_drag_.container) { + + } + src.style.position = "absolute"; + opt_drag_.left = opt_drag_.left||src.offsetLeft; + opt_drag_.top = opt_drag_.top||src.offsetTop; + opt_drag_.interval = opt_drag_.interval||1; + moveTo_(opt_drag_.left, opt_drag_.top, false); + + /** + * Set the cursor for {@link src} based on whether or not + * the element is currently being dragged. + * @param {Boolean} a Is the element being dragged? + * @private + */ + function setCursor_(a) { + if(a) { + src.style.cursor = draggingCursor_; + } else { + src.style.cursor = draggableCursor_; + } + } + + /** + * Moves the element {@link src} to the given + * location. 
+ * @param {Number} x The left position to move to. + * @param {Number} y The top position to move to. + * @param {Boolean} prevent Prevent moving? + * @private + */ + function moveTo_(x, y, prevent) { + var roundedIntervalX_, roundedIntervalY_; + left_ = Math.round(x); + top_ = Math.round(y); + if (opt_drag_.intervalX>1) { + roundedIntervalX_ = Math.round(left_%opt_drag_.intervalX); + left_ = (roundedIntervalX_1) { + roundedIntervalY_ = Math.round(top_%opt_drag_.intervalY); + top_ = (roundedIntervalY_opt_drag_.toleranceX||(currentX_-(left_+src.offsetWidth))>opt_drag_.toleranceX)||((top_-currentY_)>opt_drag_.toleranceY||(currentY_-(top_+src.offsetHeight))>opt_drag_.toleranceY)) { + left_ = originalX_; + top_ = originalY_; + } + } + if(!opt_drag_.restrictX&&!prevent) { + src.style.left = left_ + "px"; + } + if(!opt_drag_.restrictY&&!prevent) { + src.style.top = top_ + "px"; + } + } + + /** + * Handles the mousemove event. + * @param {event} ev The event data sent by the browser. + * @private + */ + function mouseMove_(ev) { + var e=ev||event; + currentX_ = formerX_+((e.pageX||(e.clientX+document.body.scrollLeft+document.documentElement.scrollLeft))-formerMouseX_); + currentY_ = formerY_+((e.pageY||(e.clientY+document.body.scrollTop+document.documentElement.scrollTop))-formerMouseY_); + formerX_ = currentX_; + formerY_ = currentY_; + formerMouseX_ = e.pageX||(e.clientX+document.body.scrollLeft+document.documentElement.scrollLeft); + formerMouseY_ = e.pageY||(e.clientY+document.body.scrollTop+document.documentElement.scrollTop); + if (moving_) { + moveTo_(currentX_,currentY_, preventDefault_); + event_.trigger(me, "drag", {mouseX: formerMouseX_, mouseY: formerMouseY_, startLeft: originalX_, startTop: originalY_, event:e}); + } + } + + /** + * Handles the mousedown event. + * @param {event} ev The event data sent by the browser. 
+ * @private + */ + function mouseDown_(ev) { + var e=ev||event; + setCursor_(true); + event_.trigger(me, "mousedown", e); + if (src.style.position !== "absolute") { + src.style.position = "absolute"; + return; + } + formerMouseX_ = e.pageX||(e.clientX+document.body.scrollLeft+document.documentElement.scrollLeft); + formerMouseY_ = e.pageY||(e.clientY+document.body.scrollTop+document.documentElement.scrollTop); + originalX_ = src.offsetLeft; + originalY_ = src.offsetTop; + formerX_ = originalX_; + formerY_ = originalY_; + mouseMoveEvent_ = event_.addDomListener(target_, "mousemove", mouseMove_); + if (src.setCapture) { + src.setCapture(); + } + if (e.preventDefault) { + e.preventDefault(); + e.stopPropagation(); + } else { + e.cancelBubble=true; + e.returnValue=false; + } + moving_ = true; + event_.trigger(me, "dragstart", {mouseX: formerMouseX_, mouseY: formerMouseY_, startLeft: originalX_, startTop: originalY_, event:e}); + } + + /** + * Handles the mouseup event. + * @param {event} ev The event data sent by the browser. + * @private + */ + function mouseUp_(ev) { + var e=ev||event; + if (moving_) { + setCursor_(false); + event_.removeListener(mouseMoveEvent_); + if (src.releaseCapture) { + src.releaseCapture(); + } + moving_ = false; + event_.trigger(me, "dragend", {mouseX: formerMouseX_, mouseY: formerMouseY_, startLeft: originalX_, startTop: originalY_, event:e}); + } + currentX_ = currentY_ = null; + event_.trigger(me, "mouseup", e); + } + + /** + * Move the element {@link src} to the given location. + * @param {Point} point An object with an x and y property + * that represents the location to move to. + */ + me.moveTo = function(point) { + moveTo_(point.x, point.y, false); + }; + + /** + * Move the element {@link src} by the given amount. + * @param {Size} size An object with an x and y property + * that represents distance to move the element. 
+ */ + me.moveBy = function(size) { + moveTo_(src.offsetLeft + size.width, src.offsetHeight + size.height, false); + } + + /** + * Sets the cursor for the dragging state. + * @param {String} cursor The name of the cursor to use. + */ + me.setDraggingCursor = function(cursor) { + draggingCursor_ = cursor; + setCursor_(moving_); + }; + + /** + * Sets the cursor for the draggable state. + * @param {String} cursor The name of the cursor to use. + */ + me.setDraggableCursor = function(cursor) { + draggableCursor_ = cursor; + setCursor_(moving_); + }; + + /** + * Returns the current left location. + * @return {Number} + */ + me.left = function() { + return left_; + }; + + /** + * Returns the current top location. + * @return {Number} + */ + me.top = function() { + return top_; + }; + + /** + * Returns the number of intervals the element has moved + * along the X axis. Useful for scrollbar type + * applications. + * @return {Number} + */ + me.valueX = function() { + var i = opt_drag_.intervalX||1; + return Math.round(left_ / i); + }; + + /** + * Returns the number of intervals the element has moved + * along the Y axis. Useful for scrollbar type + * applications. + * @return {Number} + */ + me.valueY = function() { + var i = opt_drag_.intervalY||1; + return Math.round(top_ / i); + }; + + /** + * Sets the left position of the draggable object based on + * intervalX. + * @param {Number} value The location to move to. + */ + me.setValueX = function(value) { + moveTo_(value * opt_drag_.intervalX, top_, false); + }; + + /** + * Sets the top position of the draggable object based on + * intervalY. + * @param {Number} value The location to move to. + */ + me.setValueY = function(value) { + moveTo_(left_, value * opt_drag_.intervalY, false); + }; + + /** + * Prevents the default movement behavior of the object. + * The object can still be moved by other methods. 
+ */ + me.preventDefaultMovement = function(prevent) { + preventDefault_ = prevent; + }; +} + /** + * @name ExtDraggableObjectOptions + * @class This class represents the optional parameter passed into constructor of + * ExtDraggableObject. + * @property {Number} [top] Top pixel + * @property {Number} [left] Left pixel + * @property {HTMLElement} [container] HTMLElement as container. + * @property {String} [draggingCursor] Dragging Cursor + * @property {String} [draggableCursor] Draggable Cursor + * @property {Number} [intervalX] Interval in X direction + * @property {Number} [intervalY] Interval in Y direction + * @property {Number} [toleranceX] Tolerance X in pixel + * @property {Number} [toleranceY] Tolerance Y in pixel + * @property {Boolean} [restrictX] Whether to restrict move in X direction + * @property {Boolean} [restrictY] Whether to restrict move in Y direction + */ + + // End of https://github.com/gavinharriss/google-maps-v3-opacity-control/blob/master/ExtDraggableObject.js +""" + + s = ( + r""" - %(title)s + %(xml_escaped_title)s - + %(googlemapsurl_hint)s + - -
Generated by GDAL2Tiles, Copyright © 2008 Klokan Petr Pridal, GDAL & OSGeo GSoC + +
Generated by GDAL2Tiles, Copyright © 2008 Klokan Petr Pridal, GDAL & OSGeo GSoC
- """ % args # noqa + """ + % args + ) # noqa + + # TODO? when there is self.kml, before the transition to GoogleMapsV3 API, + # we used to offer a way to display the KML file in Google Earth + # cf https://github.com/OSGeo/gdal/blob/32f32a69bbf5c408c6c8ac2cc6f1d915a7a1c576/swig/python/gdal-utils/osgeo_utils/gdal2tiles.py#L3203 to #L3243 return s - def generate_leaflet(self): + def generate_leaflet(self) -> str: """ Template for leaflet.html implementing overlay of tiles for 'mercator' profile. It returns filled string. Expected variables: - title, north, south, east, west, minzoom, maxzoom, tilesize, tileformat, publishurl + title, north, south, east, west, minzoom, maxzoom, tile_size, tileformat, publishurl """ args = {} - args['title'] = self.options.title.replace('"', '\\"') - args['htmltitle'] = self.options.title - args['south'], args['west'], args['north'], args['east'] = self.swne - args['centerlon'] = (args['north'] + args['south']) / 2. - args['centerlat'] = (args['west'] + args['east']) / 2. 
- args['minzoom'] = self.tminz - args['maxzoom'] = self.tmaxz - args['beginzoom'] = self.tmaxz - args['tilesize'] = self.tilesize # not used - args['tileformat'] = self.tileext - args['publishurl'] = self.options.url # not used - args['copyright'] = self.options.copyright.replace('"', '\\"') - - s = """ + args["double_quote_escaped_title"] = self.options.title.replace( + '"', '\\"') + args["xml_escaped_title"] = gdal.EscapeString( + self.options.title, gdal.CPLES_XML + ) + args["south"], args["west"], args["north"], args["east"] = self.swne + args["centerlon"] = (args["north"] + args["south"]) / 2.0 + args["centerlat"] = (args["west"] + args["east"]) / 2.0 + args["minzoom"] = self.tminz + args["maxzoom"] = self.tmaxz + args["beginzoom"] = self.tmaxz + args["tile_size"] = self.tile_size # not used + args["tileformat"] = self.tileext + args["publishurl"] = self.options.url # not used + args["copyright"] = self.options.copyright.replace('"', '\\"') + + if self.options.xyz: + args["tms"] = 0 + else: + args["tms"] = 1 + + s = ( + """ - %(htmltitle)s + %(xml_escaped_title)s - - + + + + + + + + + +
Generated by GDAL2Tiles    
+
+
+ - """ % args + projection: 'EPSG:4326',""" + elif self.options.profile != "mercator": + if ( + self.in_srs + and self.in_srs.IsProjected() + and self.in_srs.GetAuthorityName(None) == "EPSG" + ): + s += """ + projection: new ol.proj.Projection({code: 'EPSG:%s', units:'m'}),""" % self.in_srs.GetAuthorityCode( + None + ) s += """ - - + +""" - map.addLayers([gmap, gsat, ghyb, gter, - broad, baer, bhyb, - osm, tmsoverlay]); + return s - var switcherControl = new OpenLayers.Control.LayerSwitcher(); - map.addControl(switcherControl); - switcherControl.maximizeControl(); + def generate_mapml(self) -> str: - map.zoomToExtent(mapBounds.transform(map.displayProjection, map.projection)); - """ % args # noqa + if self.options.mapml_template: + template = self.options.mapml_template + else: + template = gdal.FindFile("gdal", "template_tiles.mapml") + s = open(template, "rb").read().decode("utf-8") - elif self.options.profile == 'geodetic': - s += """ - var options = { - div: "map", - controls: [], - projection: "EPSG:4326" - }; - map = new OpenLayers.Map(options); - - var wms = new OpenLayers.Layer.WMS("VMap0", - "http://tilecache.osgeo.org/wms-c/Basic.py?", - { - layers: 'basic', - format: 'image/png' - } - ); - var tmsoverlay = new OpenLayers.Layer.TMS("TMS Overlay", "", - { - serviceVersion: '.', - layername: '.', - alpha: true, - type: '%(tileformat)s', - isBaseLayer: false, - getURL: getURL - }); - if (OpenLayers.Util.alphaHack() == false) { - tmsoverlay.setOpacity(0.7); - } + if self.options.profile == "mercator": + tiling_scheme = "OSMTILE" + elif self.options.profile == "geodetic": + tiling_scheme = "WGS84" + else: + tiling_scheme = self.options.profile + + s = s.replace("${TILING_SCHEME}", tiling_scheme) + s = s.replace("${URL}", self.options.url if self.options.url else "./") + tminx, tminy, tmaxx, tmaxy = self.tminmax[self.tmaxz] + s = s.replace("${MINTILEX}", str(tminx)) + s = s.replace( + "${MINTILEY}", str(GDAL2Tiles.getYTile( + tmaxy, self.tmaxz, self.options)) 
+ ) + s = s.replace("${MAXTILEX}", str(tmaxx)) + s = s.replace( + "${MAXTILEY}", str(GDAL2Tiles.getYTile( + tminy, self.tmaxz, self.options)) + ) + s = s.replace("${CURZOOM}", str(self.tmaxz)) + s = s.replace("${MINZOOM}", str(self.tminz)) + s = s.replace("${MAXZOOM}", str(self.tmaxz)) + s = s.replace("${TILEEXT}", str(self.tileext)) - map.addLayers([wms,tmsoverlay]); + return s - var switcherControl = new OpenLayers.Control.LayerSwitcher(); - map.addControl(switcherControl); - switcherControl.maximizeControl(); + @staticmethod + def getYTile(ty, tz, options): + """ + Calculates the y-tile number based on whether XYZ or TMS (default) system is used + :param ty: The y-tile number + :param tz: The z-tile number + :return: The transformed y-tile number + """ + if options.xyz and options.profile != "raster": + if options.profile in ("mercator", "geodetic"): + # Convert from TMS to XYZ numbering system + return (2**tz - 1) - ty - map.zoomToExtent(mapBounds); - """ % args # noqa + tms = tmsMap[options.profile] + return ( + tms.matrix_height * 2**tz - 1 + ) - ty # Convert from TMS to XYZ numbering system + + return ty + + +def worker_tile_details( + input_file: str, output_folder: str, options: Options +) -> Tuple[TileJobInfo, List[TileDetail]]: + gdal2tiles = GDAL2Tiles(input_file, output_folder, options) + gdal2tiles.open_input() + gdal2tiles.generate_metadata() + tile_job_info, tile_details = gdal2tiles.generate_base_tiles() + return tile_job_info, tile_details + + +class ProgressBar(object): + def __init__(self, total_items: int) -> None: + self.total_items = total_items + self.nb_items_done = 0 + self.current_progress = 0 + self.STEP = 2.5 + + def start(self) -> None: + sys.stdout.write("0") + + def log_progress(self, nb_items: int = 1) -> None: + self.nb_items_done += nb_items + progress = float(self.nb_items_done) / self.total_items * 100 + if progress >= self.current_progress + self.STEP: + done = False + while not done: + if self.current_progress + self.STEP <= 
progress: + self.current_progress += self.STEP + if self.current_progress % 10 == 0: + sys.stdout.write(str(int(self.current_progress))) + if self.current_progress == 100: + sys.stdout.write("\n") + else: + sys.stdout.write(".") + else: + done = True + sys.stdout.flush() + + def done(self) -> None: + sys.stdout.write("\r0...10...20...30...40...50...60...70...80...90...100\nDone!") + sys.stdout.flush() + + +def get_tile_swne(tile_job_info, options): + if options.profile == "mercator": + mercator = GlobalMercator() + tile_swne = mercator.TileLatLonBounds + elif options.profile == "geodetic": + geodetic = GlobalGeodetic(options.tmscompatible) + tile_swne = geodetic.TileLatLonBounds + elif options.profile == "raster": + srs4326 = osr.SpatialReference() + srs4326.ImportFromEPSG(4326) + srs4326.SetAxisMappingStrategy(osr.OAMS_TRADITIONAL_GIS_ORDER) + if tile_job_info.kml and tile_job_info.in_srs_wkt: + in_srs = osr.SpatialReference() + in_srs.SetAxisMappingStrategy(osr.OAMS_TRADITIONAL_GIS_ORDER) + in_srs.ImportFromWkt(tile_job_info.in_srs_wkt) + ct = osr.CoordinateTransformation(in_srs, srs4326) + + def rastertileswne(x, y, z): + pixelsizex = ( + 2 ** (tile_job_info.tmaxz - z) * + tile_job_info.out_geo_trans[1] + ) + west = ( + tile_job_info.out_geo_trans[0] + + x * tile_job_info.tile_size * pixelsizex + ) + east = west + tile_job_info.tile_size * pixelsizex + if options.xyz: + north = ( + tile_job_info.out_geo_trans[3] + - y * tile_job_info.tile_size * pixelsizex + ) + south = north - tile_job_info.tile_size * pixelsizex + else: + south = ( + tile_job_info.ominy + y * tile_job_info.tile_size * pixelsizex + ) + north = south + tile_job_info.tile_size * pixelsizex + if not tile_job_info.is_epsg_4326: + # Transformation to EPSG:4326 (WGS84 datum) + west, south = ct.TransformPoint(west, south)[:2] + east, north = ct.TransformPoint(east, north)[:2] + return south, west, north, east + + tile_swne = rastertileswne + else: + def tile_swne(x, y, z): return (0, 0, 0, 0) # noqa + 
else: + tile_swne = None - elif self.options.profile == 'raster': - s += """ - var options = { - div: "map", - controls: [], - maxExtent: new OpenLayers.Bounds(%(west)s, %(south)s, %(east)s, %(north)s), - maxResolution: %(rastermaxresolution)f, - numZoomLevels: %(rasterzoomlevels)d - }; - map = new OpenLayers.Map(options); - - var layer = new OpenLayers.Layer.TMS("TMS Layer", "", - { - serviceVersion: '.', - layername: '.', - alpha: true, - type: '%(tileformat)s', - getURL: getURL - }); - - map.addLayer(layer); - map.zoomToExtent(mapBounds); - """ % args # noqa + return tile_swne - s += """ - map.addControls([new OpenLayers.Control.PanZoomBar(), - new OpenLayers.Control.Navigation(), - new OpenLayers.Control.MousePosition(), - new OpenLayers.Control.ArgParser(), - new OpenLayers.Control.Attribution()]); - } - """ % args - - if self.options.profile == 'mercator': - s += """ - function getURL(bounds) { - bounds = this.adjustBounds(bounds); - var res = this.getServerResolution(); - var x = Math.round((bounds.left - this.tileOrigin.lon) / (res * this.tileSize.w)); - var y = Math.round((bounds.bottom - this.tileOrigin.lat) / (res * this.tileSize.h)); - var z = this.getServerZoom(); - if (this.map.baseLayer.CLASS_NAME === 'OpenLayers.Layer.Bing') { - z+=1; - } - var path = this.serviceVersion + "/" + this.layername + "/" + z + "/" + x + "/" + y + "." 
+ this.type; - var url = this.url; - if (OpenLayers.Util.isArray(url)) { - url = this.selectUrl(path, url); - } - if (mapBounds.intersectsBounds(bounds) && (z >= mapMinZoom) && (z <= mapMaxZoom)) { - return url + path; - } else { - return emptyTileURL; - } - } - """ % args # noqa - elif self.options.profile == 'geodetic': - s += """ - function getURL(bounds) { - bounds = this.adjustBounds(bounds); - var res = this.getServerResolution(); - var x = Math.round((bounds.left - this.tileOrigin.lon) / (res * this.tileSize.w)); - var y = Math.round((bounds.bottom - this.tileOrigin.lat) / (res * this.tileSize.h)); - var z = this.getServerZoom()%(tmsoffset)s; - var path = this.serviceVersion + "/" + this.layername + "/" + z + "/" + x + "/" + y + "." + this.type; - var url = this.url; - if (OpenLayers.Util.isArray(url)) { - url = this.selectUrl(path, url); - } - if (mapBounds.intersectsBounds(bounds) && (z >= mapMinZoom) && (z <= mapMaxZoom)) { - return url + path; - } else { - return emptyTileURL; - } - } - """ % args # noqa +def single_threaded_tiling( + input_file: str, output_folder: str, options: Options +) -> None: + """ + Keep a single threaded version that stays clear of multiprocessing, for platforms that would not + support it + """ + if options.verbose: + print("Begin tiles details calc") + conf, tile_details = worker_tile_details( + input_file, output_folder, options) - elif self.options.profile == 'raster': - s += """ - function getURL(bounds) { - bounds = this.adjustBounds(bounds); - var res = this.getServerResolution(); - var x = Math.round((bounds.left - this.tileOrigin.lon) / (res * this.tileSize.w)); - var y = Math.round((bounds.bottom - this.tileOrigin.lat) / (res * this.tileSize.h)); - var z = this.getServerZoom(); - var path = this.serviceVersion + "/" + this.layername + "/" + z + "/" + x + "/" + y + "." 
+ this.type; - var url = this.url; - if (OpenLayers.Util.isArray(url)) { - url = this.selectUrl(path, url); - } - if (mapBounds.intersectsBounds(bounds) && (z >= mapMinZoom) && (z <= mapMaxZoom)) { - return url + path; - } else { - return emptyTileURL; - } - } - """ % args # noqa + if options.verbose: + print("Tiles details calc complete.") - s += """ - function getWindowHeight() { - if (self.innerHeight) return self.innerHeight; - if (document.documentElement && document.documentElement.clientHeight) - return document.documentElement.clientHeight; - if (document.body) return document.body.clientHeight; - return 0; - } + if not options.verbose and not options.quiet: + base_progress_bar = ProgressBar(len(tile_details)) + base_progress_bar.start() - function getWindowWidth() { - if (self.innerWidth) return self.innerWidth; - if (document.documentElement && document.documentElement.clientWidth) - return document.documentElement.clientWidth; - if (document.body) return document.body.clientWidth; - return 0; - } + for tile_detail in tile_details: + create_base_tile(conf, tile_detail) - function resize() { - var map = document.getElementById("map"); - var header = document.getElementById("header"); - var subheader = document.getElementById("subheader"); - map.style.height = (getWindowHeight()-80) + "px"; - map.style.width = (getWindowWidth()-20) + "px"; - header.style.width = (getWindowWidth()-20) + "px"; - subheader.style.width = (getWindowWidth()-20) + "px"; - if (map.updateSize) { map.updateSize(); }; - } + if not options.verbose and not options.quiet: + base_progress_bar.log_progress() - onresize=function(){ resize(); }; + if getattr(threadLocal, "cached_ds", None): + del threadLocal.cached_ds - - - - -
Generated by GDAL2Tiles, Copyright © 2008 Klokan Petr Pridal, GDAL & OSGeo GSoC - -
-
- - - """ % args # noqa + if not options.quiet: + count = count_overview_tiles(conf) + if count: + print("Generating Overview Tiles:") - return s + if not options.verbose: + overview_progress_bar = ProgressBar(count) + overview_progress_bar.start() + for base_tz in range(conf.tmaxz, conf.tminz, -1): + base_tile_groups = group_overview_base_tiles( + base_tz, output_folder, conf) + for base_tiles in base_tile_groups: + create_overview_tile(base_tz, base_tiles, + output_folder, conf, options) + if not options.verbose and not options.quiet: + overview_progress_bar.log_progress() + + shutil.rmtree(os.path.dirname(conf.src_file)) + + if not options.verbose and not options.quiet: + overview_progress_bar.done() + + +def multi_threaded_tiling( + input_file: str, output_folder: str, options: Options, pool +) -> None: + nb_processes = options.nb_processes or 1 + + if options.verbose: + print("Begin tiles details calc") + + conf, tile_details = worker_tile_details( + input_file, output_folder, options) + + if options.verbose: + print("Tiles details calc complete.") + + if not options.verbose and not options.quiet: + base_progress_bar = ProgressBar(len(tile_details)) + base_progress_bar.start() + + # TODO: gbataille - check the confs for which each element is an array... one useless level? + # TODO: gbataille - assign an ID to each job for print in verbose mode "ReadRaster Extent ..." 
+ chunksize = max(1, min(128, len(tile_details) // nb_processes)) + for _ in pool.imap_unordered( + partial(create_base_tile, conf), tile_details, chunksize=chunksize + ): + if not options.verbose and not options.quiet: + base_progress_bar.log_progress() + + if not options.quiet: + count = count_overview_tiles(conf) + if count: + print("Generating Overview Tiles:") + + if not options.verbose: + overview_progress_bar = ProgressBar(count) + overview_progress_bar.start() + + for base_tz in range(conf.tmaxz, conf.tminz, -1): + base_tile_groups = group_overview_base_tiles( + base_tz, output_folder, conf) + chunksize = max(1, min(128, len(base_tile_groups) // nb_processes)) + for _ in pool.imap_unordered( + partial( + create_overview_tile, + base_tz, + output_folder=output_folder, + tile_job_info=conf, + options=options, + ), + base_tile_groups, + chunksize=chunksize, + ): + if not options.verbose and not options.quiet: + overview_progress_bar.log_progress() + + shutil.rmtree(os.path.dirname(conf.src_file)) + + +class UseExceptions(object): + def __enter__(self): + self.old_used_exceptions = gdal.GetUseExceptions() + if not self.old_used_exceptions: + gdal.UseExceptions() + + def __exit__(self, type, value, tb): + if not self.old_used_exceptions: + gdal.DontUseExceptions() + + +class DividedCache(object): + def __init__(self, nb_processes): + self.nb_processes = nb_processes + + def __enter__(self): + self.gdal_cache_max = gdal.GetCacheMax() + # Make sure that all processes do not consume more than `gdal.GetCacheMax()` + gdal_cache_max_per_process = max( + 1024 * 1024, math.floor(self.gdal_cache_max / self.nb_processes) + ) + set_cache_max(gdal_cache_max_per_process) + + def __exit__(self, type, value, tb): + # Set the maximum cache back to the original value + set_cache_max(self.gdal_cache_max) + + +def main(argv: List[str] = sys.argv) -> int: + # TODO: gbataille - use mkdtemp to work in a temp directory + # TODO: gbataille - debug intermediate tiles.vrt not produced 
anymore? + # TODO: gbataille - Refactor generate overview tiles to not depend on self variables + + # For multiprocessing, we need to propagate the configuration options to + # the environment, so that forked processes can inherit them. + for i in range(len(argv)): + if argv[i] == "--config" and i + 2 < len(argv): + os.environ[argv[i + 1]] = argv[i + 2] + + if "--mpi" in argv: + from mpi4py import MPI + from mpi4py.futures import MPICommExecutor + + with UseExceptions(), MPICommExecutor(MPI.COMM_WORLD, root=0) as pool: + if pool is None: + return 0 + # add interface of multiprocessing.Pool to MPICommExecutor + pool.imap_unordered = partial(pool.map, unordered=True) + return submain(argv, pool, MPI.COMM_WORLD.Get_size()) + else: + return submain(argv) + + +def submain(argv: List[str], pool=None, pool_size=0) -> int: + argv = gdal.GeneralCmdLineProcessor(argv) + if argv is None: + return 0 + input_file, output_folder, options = process_args(argv[1:]) + if pool_size: + options.nb_processes = pool_size + nb_processes = options.nb_processes or 1 -def main(): - argv = gdal.GeneralCmdLineProcessor(sys.argv) - if argv: - gdal2tiles = GDAL2Tiles(argv[1:]) - gdal2tiles.process() + with UseExceptions(): + if pool is not None: # MPI + multi_threaded_tiling(input_file, output_folder, options, pool) + elif nb_processes == 1: + single_threaded_tiling(input_file, output_folder, options) + else: + # Trick inspired from https://stackoverflow.com/questions/45720153/python-multiprocessing-error-attributeerror-module-main-has-no-attribute + # and https://bugs.python.org/issue42949 + import __main__ + + if not hasattr(__main__, "__spec__"): + __main__.__spec__ = None + from multiprocessing import Pool + with DividedCache(nb_processes), Pool(processes=nb_processes) as pool: + multi_threaded_tiling(input_file, output_folder, options, pool) + + return 0 -if __name__ == '__main__': - main() # vim: set tabstop=4 shiftwidth=4 expandtab: + +# Running main() must be protected that way due to 
use of multiprocessing on Windows: +# https://docs.python.org/3/library/multiprocessing.html#the-spawn-and-forkserver-start-methods +if __name__ == "__main__": + sys.exit(main(sys.argv)) diff --git a/auxiliary/gdal2customtiles/legacy/gdal2customtiles.py b/auxiliary/gdal2customtiles/legacy/gdal2customtiles.py new file mode 100644 index 000000000..f698bebb7 --- /dev/null +++ b/auxiliary/gdal2customtiles/legacy/gdal2customtiles.py @@ -0,0 +1,3218 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# ****************************************************************************** +# $Id$ +# +# Project: Google Summer of Code 2007, 2008 (http://code.google.com/soc/) +# Support: BRGM (http://www.brgm.fr) +# Purpose: Convert a raster into TMS (Tile Map Service) tiles in a directory. +# - generate Google Earth metadata (KML SuperOverlay) +# - generate simple HTML viewer based on Google Maps and OpenLayers +# - support of global tiles (Spherical Mercator) for compatibility +# with interactive web maps a la Google Maps +# Author: Klokan Petr Pridal, klokan at klokan dot cz +# Web: http://www.klokan.cz/projects/gdal2tiles/ +# GUI: http://www.maptiler.org/ +# +############################################################################### +# Copyright (c) 2008, Klokan Petr Pridal +# Copyright (c) 2010-2013, Even Rouault +# +# Permission is hereby granted, free of charge, to any person obtaining a +# copy of this software and associated documentation files (the "Software"), +# to deal in the Software without restriction, including without limitation +# the rights to use, copy, modify, merge, publish, distribute, sublicense, +# and/or sell copies of the Software, and to permit persons to whom the +# Software is furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. 
+# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL +# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +# DEALINGS IN THE SOFTWARE. +# ****************************************************************************** + +import math +import os +import sys + +from osgeo import gdal +from osgeo import osr + +import struct # 1bto4b + + +def binary(num): # 1bto4b + # 1bto4b + return ''.join(bin(c).replace('0b', '').rjust(8, '0') for c in struct.pack('!f', num)) + +# 1bto4b + +def getTilePxBounds(self, tx, ty, tz, ds): + + querysize = self.tilesize + + if self.isRasterBounded: # 'raster' profile: + # tilesize in raster coordinates for actual zoom + tsize = int(self.tsize[tz]) + xsize = self.out_ds.fWorldXSize + ysize = self.out_ds.fWorldYSize + if tz >= self.tmaxz: + querysize = self.tilesize + + rx = (tx) * tsize - self.out_ds.fRasterXOriginWorld + #print("rx", rx) + rxsize = 0 + rxsize = tsize + + rysize = 0 + rysize = tsize + + ry = ysize - (ty * tsize) - rysize - \ + self.out_ds.fRasterYOriginWorld + + wx, wy = 0, 0 + wxsize = int(rxsize/float(tsize) * self.tilesize) + wysize = int(rysize/float(tsize) * self.tilesize) + if wysize != self.tilesize: + wy = self.tilesize - wysize + + if rx < 0: + rxsize = tsize + rx + wx = -rx + wxsize = int(rxsize/float(tsize) * self.tilesize) + rx = 0 + if ry < 0: + rysize = tsize + ry + wy = -ry + wysize = int(rysize/float(tsize) * self.tilesize) + ry = 0 + if rx + rxsize > self.out_ds.fRasterXSizeWorld: + rxsize = self.out_ds.fRasterXSizeWorld - rx + wxsize = int(rxsize/float(tsize) * self.tilesize) + if ry + rysize > self.out_ds.fRasterYSizeWorld: + rysize = self.out_ds.fRasterYSizeWorld 
- ry + wysize = int(rysize/float(tsize) * self.tilesize) + + # Convert rx, ry back to non-world coordinates + rx = int(float(self.out_ds.RasterXSize) * + (float(rx) / self.out_ds.fRasterXSizeWorld)) + ry = int(float(self.out_ds.RasterYSize) * + (float(ry) / self.out_ds.fRasterYSizeWorld)) + rxsize = int(float(self.out_ds.RasterXSize) * + (float(rxsize) / self.out_ds.fRasterXSizeWorld)) + rysize = int(float(self.out_ds.RasterYSize) * + (float(rysize) / self.out_ds.fRasterYSizeWorld)) + else: + b = self.mercator.TileBounds(tx, ty, tz) + rb, wb = self.geo_query( + ds, b[0], b[3], b[2], b[1], querysize=querysize) + rx, ry, rxsize, rysize = rb + wx, wy, wxsize, wysize = wb + + return [rx, ry, rxsize, rysize, wxsize, wysize] + + +try: + from PIL import Image + import numpy + import osgeo.gdal_array as gdalarray +except Exception: + # 'antialias' resampling is not available + pass + +__version__ = "$Id$" + +resampling_list = ('average', 'near', 'bilinear', 'cubic', + 'cubicspline', 'lanczos', 'antialias') +profile_list = ('mercator', 'geodetic', 'raster') +webviewer_list = ('all', 'google', 'openlayers', 'leaflet', 'none') + +# ============================================================================= +# ============================================================================= +# ============================================================================= + +__doc__globalmaptiles = """ +globalmaptiles.py + +Global Map Tiles as defined in Tile Map Service (TMS) Profiles +============================================================== + +Functions necessary for generation of global tiles used on the web. 
+It contains classes implementing coordinate conversions for: + + - GlobalMercator (based on EPSG:3857) + for Google Maps, Yahoo Maps, Bing Maps compatible tiles + - GlobalGeodetic (based on EPSG:4326) + for OpenLayers Base Map and Google Earth compatible tiles + +More info at: + +http://wiki.osgeo.org/wiki/Tile_Map_Service_Specification +http://wiki.osgeo.org/wiki/WMS_Tiling_Client_Recommendation +http://msdn.microsoft.com/en-us/library/bb259689.aspx +http://code.google.com/apis/maps/documentation/overlays.html#Google_Maps_Coordinates + +Created by Klokan Petr Pridal on 2008-07-03. +Google Summer of Code 2008, project GDAL2Tiles for OSGEO. + +In case you use this class in your product, translate it to another language +or find it useful for your project please let me know. +My email: klokan at klokan dot cz. +I would like to know where it was used. + +Class is available under the open-source GDAL license (www.gdal.org). +""" + +MAXZOOMLEVEL = 32 + + +class GlobalMercator(object): + r""" + TMS Global Mercator Profile + --------------------------- + + Functions necessary for generation of tiles in Spherical Mercator projection, + EPSG:3857. + + Such tiles are compatible with Google Maps, Bing Maps, Yahoo Maps, + UK Ordnance Survey OpenSpace API, ... + and you can overlay them on top of base maps of those web mapping applications. + + Pixel and tile coordinates are in TMS notation (origin [0,0] in bottom-left). + + What coordinate conversions do we need for TMS Global Mercator tiles:: + + LatLon <-> Meters <-> Pixels <-> Tile + + WGS84 coordinates Spherical Mercator Pixels in pyramid Tiles in pyramid + lat/lon XY in meters XY pixels Z zoom XYZ from TMS + EPSG:4326 EPSG:387 + .----. --------- -- TMS + / \ <-> | | <-> /----/ <-> Google + \ / | | /--------/ QuadTree + ----- --------- /------------/ + KML, public WebMapService Web Clients TileMapService + + What is the coordinate extent of Earth in EPSG:3857? 
+ + [-20037508.342789244, -20037508.342789244, + 20037508.342789244, 20037508.342789244] + Constant 20037508.342789244 comes from the circumference of the Earth in meters, + which is 40 thousand kilometers, the coordinate origin is in the middle of extent. + In fact you can calculate the constant as: 2 * math.pi * 6378137 / 2.0 + $ echo 180 85 | gdaltransform -s_srs EPSG:4326 -t_srs EPSG:3857 + Polar areas with abs(latitude) bigger then 85.05112878 are clipped off. + + What are zoom level constants (pixels/meter) for pyramid with EPSG:3857? + + whole region is on top of pyramid (zoom=0) covered by 256x256 pixels tile, + every lower zoom level resolution is always divided by two + initialResolution = 20037508.342789244 * 2 / 256 = 156543.03392804062 + + What is the difference between TMS and Google Maps/QuadTree tile name convention? + + The tile raster itself is the same (equal extent, projection, pixel size), + there is just different identification of the same raster tile. + Tiles in TMS are counted from [0,0] in the bottom-left corner, id is XYZ. + Google placed the origin [0,0] to the top-left corner, reference is XYZ. + Microsoft is referencing tiles by a QuadTree name, defined on the website: + http://msdn2.microsoft.com/en-us/library/bb259689.aspx + + The lat/lon coordinates are using WGS84 datum, yes? + + Yes, all lat/lon we are mentioning should use WGS84 Geodetic Datum. + Well, the web clients like Google Maps are projecting those coordinates by + Spherical Mercator, so in fact lat/lon coordinates on sphere are treated as if + the were on the WGS84 ellipsoid. + + From MSDN documentation: + To simplify the calculations, we use the spherical form of projection, not + the ellipsoidal form. Since the projection is used only for map display, + and not for displaying numeric coordinates, we don't need the extra precision + of an ellipsoidal projection. 
The spherical projection causes approximately + 0.33 percent scale distortion in the Y direction, which is not visually + noticeable. + + How do I create a raster in EPSG:3857 and convert coordinates with PROJ.4? + + You can use standard GIS tools like gdalwarp, cs2cs or gdaltransform. + All of the tools supports -t_srs 'epsg:3857'. + + For other GIS programs check the exact definition of the projection: + More info at http://spatialreference.org/ref/user/google-projection/ + The same projection is designated as EPSG:3857. WKT definition is in the + official EPSG database. + + Proj4 Text: + +proj=merc +a=6378137 +b=6378137 +lat_ts=0.0 +lon_0=0.0 +x_0=0.0 +y_0=0 + +k=1.0 +units=m +nadgrids=@null +no_defs + + Human readable WKT format of EPSG:3857: + PROJCS["Google Maps Global Mercator", + GEOGCS["WGS 84", + DATUM["WGS_1984", + SPHEROID["WGS 84",6378137,298.257223563, + AUTHORITY["EPSG","7030"]], + AUTHORITY["EPSG","6326"]], + PRIMEM["Greenwich",0], + UNIT["degree",0.0174532925199433], + AUTHORITY["EPSG","4326"]], + PROJECTION["Mercator_1SP"], + PARAMETER["central_meridian",0], + PARAMETER["scale_factor",1], + PARAMETER["false_easting",0], + PARAMETER["false_northing",0], + UNIT["metre",1, + AUTHORITY["EPSG","9001"]]] + """ + + def __init__(self, tileSize=256): + "Initialize the TMS Global Mercator pyramid" + self.tileSize = tileSize + self.initialResolution = 2 * math.pi * 6378137 / self.tileSize + # 156543.03392804062 for tileSize 256 pixels + self.originShift = 2 * math.pi * 6378137 / 2.0 + # 20037508.342789244 + + def LatLonToMeters(self, lat, lon): + "Converts given lat/lon in WGS84 Datum to XY in Spherical Mercator EPSG:3857" + + mx = lon * self.originShift / 180.0 + my = math.log(math.tan((90 + lat) * math.pi / 360.0)) / \ + (math.pi / 180.0) + + my = my * self.originShift / 180.0 + return mx, my + + def MetersToLatLon(self, mx, my): + "Converts XY point from Spherical Mercator EPSG:3857 to lat/lon in WGS84 Datum" + + lon = (mx / self.originShift) * 180.0 + 
lat = (my / self.originShift) * 180.0 + + lat = 180 / math.pi * \ + (2 * math.atan(math.exp(lat * math.pi / 180.0)) - math.pi / 2.0) + return lat, lon + + def PixelsToMeters(self, px, py, zoom): + "Converts pixel coordinates in given zoom level of pyramid to EPSG:3857" + + res = self.Resolution(zoom) + mx = px * res - self.originShift + my = py * res - self.originShift + return mx, my + + def MetersToPixels(self, mx, my, zoom): + "Converts EPSG:3857 to pyramid pixel coordinates in given zoom level" + + res = self.Resolution(zoom) + px = (mx + self.originShift) / res + py = (my + self.originShift) / res + return px, py + + def PixelsToTile(self, px, py): + "Returns a tile covering region in given pixel coordinates" + + tx = int(math.ceil(px / float(self.tileSize)) - 1) + ty = int(math.ceil(py / float(self.tileSize)) - 1) + return tx, ty + + def PixelsToRaster(self, px, py, zoom): + "Move the origin of pixel coordinates to top-left corner" + + mapSize = self.tileSize << zoom + return px, mapSize - py + + def MetersToTile(self, mx, my, zoom): + "Returns tile for given mercator coordinates" + + px, py = self.MetersToPixels(mx, my, zoom) + return self.PixelsToTile(px, py) + + def TileBounds(self, tx, ty, zoom): + "Returns bounds of the given tile in EPSG:3857 coordinates" + + minx, miny = self.PixelsToMeters( + tx*self.tileSize, ty*self.tileSize, zoom) + maxx, maxy = self.PixelsToMeters( + (tx+1)*self.tileSize, (ty+1)*self.tileSize, zoom) + return (minx, miny, maxx, maxy) + + def TileLatLonBounds(self, tx, ty, zoom): + "Returns bounds of the given tile in latitude/longitude using WGS84 datum" + + bounds = self.TileBounds(tx, ty, zoom) + minLat, minLon = self.MetersToLatLon(bounds[0], bounds[1]) + maxLat, maxLon = self.MetersToLatLon(bounds[2], bounds[3]) + + return (minLat, minLon, maxLat, maxLon) + + def Resolution(self, zoom): + "Resolution (meters/pixel) for given zoom level (measured at Equator)" + + # return (2 * math.pi * 6378137) / (self.tileSize * 2**zoom) + 
return self.initialResolution / (2**zoom) + + def ZoomForPixelSize(self, pixelSize): + "Maximal scaledown zoom of the pyramid closest to the pixelSize." + + for i in range(MAXZOOMLEVEL): + if pixelSize > self.Resolution(i): + if i != -1: + return i-1 + else: + return 0 # We don't want to scale up + + def GoogleTile(self, tx, ty, zoom): + "Converts TMS tile coordinates to Google Tile coordinates" + + # coordinate origin is moved from bottom-left to top-left corner of the extent + return tx, (2**zoom - 1) - ty + + def QuadTree(self, tx, ty, zoom): + "Converts TMS tile coordinates to Microsoft QuadTree" + + quadKey = "" + ty = (2**zoom - 1) - ty + for i in range(zoom, 0, -1): + digit = 0 + mask = 1 << (i-1) + if (tx & mask) != 0: + digit += 1 + if (ty & mask) != 0: + digit += 2 + quadKey += str(digit) + + return quadKey + + +class GlobalGeodetic(object): + r""" + TMS Global Geodetic Profile + --------------------------- + + Functions necessary for generation of global tiles in Plate Carre projection, + EPSG:4326, "unprojected profile". + + Such tiles are compatible with Google Earth (as any other EPSG:4326 rasters) + and you can overlay the tiles on top of OpenLayers base map. + + Pixel and tile coordinates are in TMS notation (origin [0,0] in bottom-left). + + What coordinate conversions do we need for TMS Global Geodetic tiles? + + Global Geodetic tiles are using geodetic coordinates (latitude,longitude) + directly as planar coordinates XY (it is also called Unprojected or Plate + Carre). We need only scaling to pixel pyramid and cutting to tiles. + Pyramid has on top level two tiles, so it is not square but rectangle. + Area [-180,-90,180,90] is scaled to 512x256 pixels. + TMS has coordinate origin (for pixels and tiles) in bottom-left corner. + Rasters are in EPSG:4326 and therefore are compatible with Google Earth. + + LatLon <-> Pixels <-> Tiles + + WGS84 coordinates Pixels in pyramid Tiles in pyramid + lat/lon XY pixels Z zoom XYZ from TMS + EPSG:4326 + .----. 
---- + / \ <-> /--------/ <-> TMS + \ / /--------------/ + ----- /--------------------/ + WMS, KML Web Clients, Google Earth TileMapService + """ + + def __init__(self, tmscompatible, tileSize=256): + self.tileSize = tileSize + if tmscompatible is not None: + # Defaults the resolution factor to 0.703125 (2 tiles @ level 0) + # Adhers to OSGeo TMS spec + # http://wiki.osgeo.org/wiki/Tile_Map_Service_Specification#global-geodetic + self.resFact = 180.0 / self.tileSize + else: + # Defaults the resolution factor to 1.40625 (1 tile @ level 0) + # Adheres OpenLayers, MapProxy, etc default resolution for WMTS + self.resFact = 360.0 / self.tileSize + + def LonLatToPixels(self, lon, lat, zoom): + "Converts lon/lat to pixel coordinates in given zoom of the EPSG:4326 pyramid" + + res = self.resFact / 2**zoom + px = (180 + lon) / res + py = (90 + lat) / res + return px, py + + def PixelsToTile(self, px, py): + "Returns coordinates of the tile covering region in pixel coordinates" + + tx = int(math.ceil(px / float(self.tileSize)) - 1) + ty = int(math.ceil(py / float(self.tileSize)) - 1) + return tx, ty + + def LonLatToTile(self, lon, lat, zoom): + "Returns the tile for zoom which covers given lon/lat coordinates" + + px, py = self.LonLatToPixels(lon, lat, zoom) + return self.PixelsToTile(px, py) + + def Resolution(self, zoom): + "Resolution (arc/pixel) for given zoom level (measured at Equator)" + + return self.resFact / 2**zoom + + def ZoomForPixelSize(self, pixelSize): + "Maximal scaledown zoom of the pyramid closest to the pixelSize." 
+ + for i in range(MAXZOOMLEVEL): + if pixelSize > self.Resolution(i): + if i != 0: + return i-1 + else: + return 0 # We don't want to scale up + + def TileBounds(self, tx, ty, zoom): + "Returns bounds of the given tile" + res = self.resFact / 2**zoom + return ( + tx*self.tileSize*res - 180, + ty*self.tileSize*res - 90, + (tx+1)*self.tileSize*res - 180, + (ty+1)*self.tileSize*res - 90 + ) + + def TileLatLonBounds(self, tx, ty, zoom): + "Returns bounds of the given tile in the SWNE form" + b = self.TileBounds(tx, ty, zoom) + return (b[1], b[0], b[3], b[2]) + + +class Zoomify(object): + """ + Tiles compatible with the Zoomify viewer + ---------------------------------------- + """ + + def __init__(self, width, height, tilesize=256, tileformat='jpg'): + """Initialization of the Zoomify tile tree""" + + self.tilesize = tilesize + self.tileformat = tileformat + imagesize = (width, height) + tiles = (math.ceil(width / tilesize), math.ceil(height / tilesize)) + + # Size (in tiles) for each tier of pyramid. + self.tierSizeInTiles = [] + self.tierSizeInTiles.append(tiles) + + # Image size in pixels for each pyramid tierself + self.tierImageSize = [] + self.tierImageSize.append(imagesize) + + while (imagesize[0] > tilesize or imagesize[1] > tilesize): + imagesize = (math.floor( + imagesize[0] / 2), math.floor(imagesize[1] / 2)) + tiles = (math.ceil(imagesize[0] / tilesize), + math.ceil(imagesize[1] / tilesize)) + self.tierSizeInTiles.append(tiles) + self.tierImageSize.append(imagesize) + + self.tierSizeInTiles.reverse() + self.tierImageSize.reverse() + + # Depth of the Zoomify pyramid, number of tiers (zoom levels) + self.numberOfTiers = len(self.tierSizeInTiles) + + # Number of tiles up to the given tier of pyramid. 
+ self.tileCountUpToTier = [] + self.tileCountUpToTier[0] = 0 + for i in range(1, self.numberOfTiers+1): + self.tileCountUpToTier.append( + self.tierSizeInTiles[i-1][0] * self.tierSizeInTiles[i-1][1] + + self.tileCountUpToTier[i-1] + ) + + def tilefilename(self, x, y, z): + """Returns filename for tile with given coordinates""" + + tileIndex = x + y * \ + self.tierSizeInTiles[z][0] + self.tileCountUpToTier[z] + return os.path.join("TileGroup%.0f" % math.floor(tileIndex / 256), + "%s-%s-%s.%s" % (z, x, y, self.tileformat)) + + +class Gdal2TilesError(Exception): + pass + + +class GDAL2Tiles(object): + + def process(self): + """The main processing function, runs all the main steps of processing""" + + # Opening and preprocessing of the input file + self.open_input() + + # Generation of main metadata files and HTML viewers + self.generate_metadata() + + # 1bto4b + if self.isDEMtile: + for z in range(self.tminz, self.tmaxz + int(abs(math.log(self.tilesize, 2) - 8))): # 1bto4b + self.generate_base_tiles(z) + print(' Zoom ' + str(z) + ' tiles done!') + else: + # Generation of the lowest tiles + self.generate_base_tiles(self.tmaxz) + + # Generation of the overview tiles (higher in the pyramid) + self.generate_overview_tiles() + + def error(self, msg, details=""): + """Print an error message and stop the processing""" + if details: + self.parser.error(msg + "\n\n" + details) + else: + self.parser.error(msg) + + def progressbar(self, complete=0.0): + """Print progressbar for float value 0..1""" + gdal.TermProgress_nocb(complete) + + def gettempfilename(self, suffix): + """Returns a temporary filename""" + if '_' in os.environ: + # tempfile.mktemp() crashes on some Wine versions (the one of Ubuntu 12.04 particularly) + if os.environ['_'].find('wine') >= 0: + tmpdir = '.' 
+ if 'TMP' in os.environ: + tmpdir = os.environ['TMP'] + import time + import random + random.seed(time.time()) + random_part = 'file%d' % random.randint(0, 1000000000) + return os.path.join(tmpdir, random_part + suffix) + + import tempfile + return tempfile.mktemp(suffix) + + def stop(self): + """Stop the rendering immediately""" + self.stopped = True + + def __init__(self, arguments): + """Constructor function - initialization""" + self.out_drv = None + self.mem_drv = None + self.in_ds = None + self.out_ds = None + self.out_srs = None + self.nativezoom = None + self.tminmax = None + self.tsize = None + self.mercator = None + self.geodetic = None + self.alphaband = None + self.dataBandsCount = None + self.out_gt = None + self.tileswne = None + self.swne = None + self.ominx = None + self.omaxx = None + self.omaxy = None + self.ominy = None + + # MMGIS + self.isRasterBounded = False + + # 1bto4b + self.isDEMtile = False + + # MMGIS + self.fminx = None + self.fmaxx = None + self.fminy = None + self.fmaxy = None + self.fPixelSize = None + + self.stopped = False + self.input = None + self.output = None + + # Tile format + self.tilesize = 256 + self.tiledriver = 'PNG' + self.tileext = 'png' + + # Should we read bigger window of the input raster and scale it down? + # Note: Modified later by open_input() + # Not for 'near' resampling + # Not for Wavelet based drivers (JPEG2000, ECW, MrSID) + # Not for 'raster' profile + self.scaledquery = True + # How big should be query window be for scaling down + # Later on reset according the chosen resampling algorightm + self.querysize = 4 * self.tilesize + + # Should we use Read on the input file for generating overview tiles? 
+ # Note: Modified later by open_input() + # Otherwise the overview tiles are generated from existing underlying tiles + self.overviewquery = False + + # RUN THE ARGUMENT PARSER: + + self.optparse_init() + self.options, self.args = self.parser.parse_args(args=arguments) + if not self.args: + self.error("No input file specified") + + # POSTPROCESSING OF PARSED ARGUMENTS: + + # Workaround for old versions of GDAL + try: + if ((self.options.verbose and self.options.resampling == 'near') or + gdal.TermProgress_nocb): + pass + except Exception: + self.error( + "This version of GDAL is not supported. Please upgrade to 1.6+.") + + # Is output directory the last argument? + + # Test output directory, if it doesn't exist + if (os.path.isdir(self.args[-1]) or + (len(self.args) > 1 and not os.path.exists(self.args[-1]))): + self.output = self.args[-1] + self.args = self.args[:-1] + + # More files on the input not directly supported yet + + if (len(self.args) > 1): + self.error("Processing of several input files is not supported.", + "Please first use a tool like gdal_vrtmerge.py or gdal_merge.py on the " + "files: gdal_vrtmerge.py -o merged.vrt %s" % " ".join(self.args)) + + self.input = self.args[0] + + # MMGIS + if self.options.extentworld: + extentworld = self.options.extentworld.split(",") + self.isRasterBounded = True + self.fminx = float(extentworld[0]) + self.fmaxx = float(extentworld[2]) + self.fminy = float(extentworld[3]) + self.fmaxy = float(extentworld[1]) + self.fPixelSize = float(extentworld[4]) + + # 1bto4b + if self.options.isDEMtile: + self.isDEMtile = True + self.tilesize = 32 + self.querysize = 4 * self.tilesize + + # Default values for not given options + + if not self.output: + # Directory with input filename without extension in actual directory + self.output = os.path.splitext(os.path.basename(self.input))[0] + + if not self.options.title: + self.options.title = os.path.basename(self.input) + + if self.options.url and not self.options.url.endswith('/'): 
+ self.options.url += '/' + if self.options.url: + self.options.url += os.path.basename(self.output) + '/' + + # Supported options + + self.resampling = None + + if self.options.resampling == 'average': + try: + if gdal.RegenerateOverview: + pass + except Exception: + self.error("'average' resampling algorithm is not available.", + "Please use -r 'near' argument or upgrade to newer version of GDAL.") + + elif self.options.resampling == 'antialias': + try: + if numpy: # pylint:disable=W0125 + pass + except Exception: + self.error("'antialias' resampling algorithm is not available.", + "Install PIL (Python Imaging Library) and numpy.") + + elif self.options.resampling == 'near': + self.resampling = gdal.GRA_NearestNeighbour + self.querysize = self.tilesize + + elif self.options.resampling == 'bilinear': + self.resampling = gdal.GRA_Bilinear + self.querysize = self.tilesize * 2 + + elif self.options.resampling == 'cubic': + self.resampling = gdal.GRA_Cubic + + elif self.options.resampling == 'cubicspline': + self.resampling = gdal.GRA_CubicSpline + + elif self.options.resampling == 'lanczos': + self.resampling = gdal.GRA_Lanczos + + # User specified zoom levels + self.tminz = None + self.tmaxz = None + if self.options.zoom: + minmax = self.options.zoom.split('-', 1) + minmax.extend(['']) + zoom_min, zoom_max = minmax[:2] + self.tminz = int(zoom_min) + if zoom_max: + self.tmaxz = int(zoom_max) + else: + self.tmaxz = int(zoom_min) + + # KML generation + self.kml = self.options.kml + + # Check if the input filename is full ascii or not + try: + os.path.basename(self.input).encode('ascii') + except UnicodeEncodeError: + full_ascii = False + else: + full_ascii = True + + # LC_CTYPE check + if not full_ascii and 'UTF-8' not in os.environ.get("LC_CTYPE", ""): + if not self.options.quiet: + print("\nWARNING: " + "You are running gdal2tiles.py with a LC_CTYPE environment variable that is " + "not UTF-8 compatible, and your input file contains non-ascii characters. 
" + "The generated sample googlemaps, openlayers or " + "leaflet files might contain some invalid characters as a result\n") + + # Output the results + if self.options.verbose: + print("Options:", self.options) + print("Input:", self.input) + print("Output:", self.output) + print("Cache: %s MB" % (gdal.GetCacheMax() / 1024 / 1024)) + print('') + + def optparse_init(self): + """Prepare the option parser for input (argv)""" + + from optparse import OptionParser, OptionGroup + usage = "Usage: %prog [options] input_file(s) [output]" + p = OptionParser(usage, version="%prog " + __version__) + p.add_option("-p", "--profile", dest='profile', + type='choice', choices=profile_list, + help=("Tile cutting profile (%s) - default 'mercator' " + "(Google Maps compatible)" % ",".join(profile_list))) + p.add_option("-r", "--resampling", dest="resampling", + type='choice', choices=resampling_list, + help="Resampling method (%s) - default 'average'" % ",".join(resampling_list)) + p.add_option('-s', '--s_srs', dest="s_srs", metavar="SRS", + help="The spatial reference system used for the source input data") + p.add_option('-z', '--zoom', dest="zoom", + help="Zoom levels to render (format:'2-5' or '10').") + p.add_option('-e', '--resume', dest="resume", action="store_true", + help="Resume mode. 
Generate only missing files.") + p.add_option('-a', '--srcnodata', dest="srcnodata", metavar="NODATA", + help="NODATA transparency value to assign to the input data") + p.add_option('-d', '--tmscompatible', dest="tmscompatible", action="store_true", + help=("When using the geodetic profile, specifies the base resolution " + "as 0.703125 or 2 tiles at zoom level 0.")) + p.add_option("-v", "--verbose", + action="store_true", dest="verbose", + help="Print status messages to stdout") + p.add_option("-q", "--quiet", + action="store_true", dest="quiet", + help="Disable messages and status to stdout") + # MMGIS + p.add_option("-x", "--extentworld", dest="extentworld", + help="The full world meter extent (comma-separated as minx,maxx,miny,maxy,pixelsize) of an inner raster profile.") + # 1bto4b + p.add_option("-m", "--dem", action="store_true", dest="isDEMtile", + help="Indicate if the input is a Digital Elevation Model") + # KML options + g = OptionGroup(p, "KML (Google Earth) options", + "Options for generated Google Earth SuperOverlay metadata") + g.add_option("-k", "--force-kml", dest='kml', action="store_true", + help=("Generate KML for Google Earth - default for 'geodetic' profile and " + "'raster' in EPSG:4326. 
For a dataset with different projection use " + "with caution!")) + g.add_option("-n", "--no-kml", dest='kml', action="store_false", + help="Avoid automatic generation of KML files for EPSG:4326") + g.add_option("-u", "--url", dest='url', + help="URL address where the generated tiles are going to be published") + p.add_option_group(g) + + # HTML options + g = OptionGroup(p, "Web viewer options", + "Options for generated HTML viewers a la Google Maps") + g.add_option("-w", "--webviewer", dest='webviewer', type='choice', choices=webviewer_list, + help="Web viewer to generate (%s) - default 'all'" % ",".join(webviewer_list)) + g.add_option("-t", "--title", dest='title', + help="Title of the map") + g.add_option("-c", "--copyright", dest='copyright', + help="Copyright for the map") + g.add_option("-g", "--googlekey", dest='googlekey', + help="Google Maps API key from http://code.google.com/apis/maps/signup.html") + g.add_option("-b", "--bingkey", dest='bingkey', + help="Bing Maps API key from https://www.bingmapsportal.com/") + p.add_option_group(g) + + p.set_defaults(verbose=False, profile="mercator", kml=False, url='', + webviewer='all', copyright='', resampling='average', resume=False, + googlekey='INSERT_YOUR_KEY_HERE', bingkey='INSERT_YOUR_KEY_HERE') + + self.parser = p + + # ------------------------------------------------------------------------- + def open_input(self): + """Initialization of the input raster, reprojection if necessary""" + gdal.AllRegister() + + self.out_drv = gdal.GetDriverByName(self.tiledriver) + self.mem_drv = gdal.GetDriverByName('MEM') + + if not self.out_drv: + raise Exception("The '%s' driver was not found, is it available in this GDAL build?", + self.tiledriver) + if not self.mem_drv: + raise Exception( + "The 'MEM' driver was not found, is it available in this GDAL build?") + + # Open the input file + + if self.input: + self.in_ds = gdal.Open(self.input, gdal.GA_ReadOnly) + else: + raise Exception("No input file was specified") + + if 
self.options.verbose: + print("Input file:", + "( %sP x %sL - %s bands)" % (self.in_ds.RasterXSize, self.in_ds.RasterYSize, + self.in_ds.RasterCount)) + + if not self.in_ds: + # Note: GDAL prints the ERROR message too + self.error( + "It is not possible to open the input file '%s'." % self.input) + + # Read metadata from the input file + if self.in_ds.RasterCount == 0: + self.error("Input file '%s' has no raster band" % self.input) + + if self.in_ds.GetRasterBand(1).GetRasterColorTable(): + self.error("Please convert this file to RGB/RGBA and run gdal2tiles on the result.", + "From paletted file you can create RGBA file (temp.vrt) by:\n" + "gdal_translate -of vrt -expand rgba %s temp.vrt\n" + "then run:\n" + "gdal2tiles temp.vrt" % self.input) + + # Get NODATA value + in_nodata = [] + for i in range(1, self.in_ds.RasterCount+1): + if self.in_ds.GetRasterBand(i).GetNoDataValue() is not None: + in_nodata.append(self.in_ds.GetRasterBand(i).GetNoDataValue()) + if self.options.srcnodata: + nds = list(map(float, self.options.srcnodata.split(','))) + if len(nds) < self.in_ds.RasterCount: + in_nodata = ( + nds * self.in_ds.RasterCount)[:self.in_ds.RasterCount] + else: + in_nodata = nds + + if self.options.verbose: + print("NODATA: %s" % in_nodata) + + if self.options.verbose: + print("Preprocessed file:", + "( %sP x %sL - %s bands)" % (self.in_ds.RasterXSize, self.in_ds.RasterYSize, + self.in_ds.RasterCount)) + + in_srs = None + + if self.options.s_srs: + in_srs = osr.SpatialReference() + in_srs.SetFromUserInput(self.options.s_srs) + in_srs_wkt = in_srs.ExportToWkt() + else: + in_srs_wkt = self.in_ds.GetProjection() + if not in_srs_wkt and self.in_ds.GetGCPCount() != 0: + in_srs_wkt = self.in_ds.GetGCPProjection() + if in_srs_wkt: + in_srs = osr.SpatialReference() + in_srs.ImportFromWkt(in_srs_wkt) + + self.out_srs = osr.SpatialReference() + + if self.options.profile == 'mercator': + self.out_srs.ImportFromEPSG(3857) + elif self.options.profile == 'geodetic': + 
self.out_srs.ImportFromEPSG(4326) + else: + self.out_srs = in_srs + + # Are the reference systems the same? Reproject if necessary. + + self.out_ds = None + + if self.options.profile in ('mercator', 'geodetic'): + + if ((self.in_ds.GetGeoTransform() == (0.0, 1.0, 0.0, 0.0, 0.0, 1.0)) and + (self.in_ds.GetGCPCount() == 0)): + self.error("There is no georeference - neither affine transformation (worldfile) " + "nor GCPs. You can generate only 'raster' profile tiles.", + "Either gdal2tiles with parameter -p 'raster' or use another GIS " + "software for georeference e.g. gdal_transform -gcp / -a_ullr / -a_srs") + + if in_srs: + if ((in_srs.ExportToProj4() != self.out_srs.ExportToProj4()) or + (self.in_ds.GetGCPCount() != 0)): + # Generation of VRT dataset in tile projection, + # default 'nearest neighbour' warping + self.out_ds = gdal.AutoCreateWarpedVRT( + self.in_ds, in_srs_wkt, self.out_srs.ExportToWkt()) + + if self.options.verbose: + print("Warping of the raster by AutoCreateWarpedVRT " + "(result saved into 'tiles.vrt')") + self.out_ds.GetDriver().CreateCopy("tiles.vrt", self.out_ds) + + # Correction of AutoCreateWarpedVRT for NODATA values + if in_nodata != []: + tempfilename = self.gettempfilename('-gdal2tiles.vrt') + self.out_ds.GetDriver().CreateCopy(tempfilename, self.out_ds) + # open as a text file + s = open(tempfilename).read() + # Add the warping options + s = s.replace( + "", + """ + + + + """) + # replace BandMapping tag for NODATA bands.... 
+ for i in range(len(in_nodata)): + s = s.replace( + '' % ( + (i+1), (i+1)), + """ + + %i + 0 + %i + 0 + + """ % ((i+1), (i+1), in_nodata[i], in_nodata[i])) + # save the corrected VRT + open(tempfilename, "w").write(s) + # open by GDAL as self.out_ds + self.out_ds = gdal.Open(tempfilename) + # delete the temporary file + os.unlink(tempfilename) + + # set NODATA_VALUE metadata + self.out_ds.SetMetadataItem( + 'NODATA_VALUES', ' '.join([str(i) for i in in_nodata])) + + if self.options.verbose: + print("Modified warping result saved into 'tiles1.vrt'") + open("tiles1.vrt", "w").write(s) + + # Correction of AutoCreateWarpedVRT for Mono (1 band) and RGB (3 bands) files + # without NODATA: + # equivalent of gdalwarp -dstalpha + if in_nodata == [] and self.out_ds.RasterCount in [1, 3]: + tempfilename = self.gettempfilename('-gdal2tiles.vrt') + self.out_ds.GetDriver().CreateCopy(tempfilename, self.out_ds) + # open as a text file + s = open(tempfilename).read() + # Add the warping options + s = s.replace( + "", + """ + + Alpha + + + """ % (self.out_ds.RasterCount + 1)) + s = s.replace( + "", + """ + %i + + """ % (self.out_ds.RasterCount + 1)) + s = s.replace( + "", + """ + + + """) + # save the corrected VRT + open(tempfilename, "w").write(s) + # open by GDAL as self.out_ds + self.out_ds = gdal.Open(tempfilename) + # delete the temporary file + os.unlink(tempfilename) + + if self.options.verbose: + print( + "Modified -dstalpha warping result saved into 'tiles1.vrt'") + open("tiles1.vrt", "w").write(s) + s = ''' + ''' + + else: + self.error("Input file has unknown SRS.", + "Use --s_srs ESPG:xyz (or similar) to provide source reference system.") + + if self.out_ds and self.options.verbose: + print("Projected file:", "tiles.vrt", "( %sP x %sL - %s bands)" % ( + self.out_ds.RasterXSize, self.out_ds.RasterYSize, self.out_ds.RasterCount)) + + if not self.out_ds: + self.out_ds = self.in_ds + + # + # Here we should have a raster (out_ds) in the correct Spatial Reference system + # 
+ + # Get alpha band (either directly or from NODATA value) + self.alphaband = self.out_ds.GetRasterBand(1).GetMaskBand() + if ((self.alphaband.GetMaskFlags() & gdal.GMF_ALPHA) or + self.out_ds.RasterCount == 4 or + self.out_ds.RasterCount == 2): + self.dataBandsCount = self.out_ds.RasterCount - 1 + else: + self.dataBandsCount = self.out_ds.RasterCount + + # KML test + isepsg4326 = False + srs4326 = osr.SpatialReference() + srs4326.ImportFromEPSG(4326) + if self.out_srs and srs4326.ExportToProj4() == self.out_srs.ExportToProj4(): + self.kml = True + isepsg4326 = True + if self.options.verbose: + print("KML autotest OK!") + + # Read the georeference + self.out_gt = self.out_ds.GetGeoTransform() + + # Test the size of the pixel + + # Report error in case rotation/skew is in geotransform (possible only in 'raster' profile) + if (self.out_gt[2], self.out_gt[4]) != (0, 0): + self.error("Georeference of the raster contains rotation or skew. " + "Such raster is not supported. Please use gdalwarp first.") + + # Here we expect: pixel is square, no rotation on the raster + + # Output Bounds - coordinates in the output SRS + self.ominx = self.out_gt[0] + self.omaxx = self.out_gt[0] + self.out_ds.RasterXSize * self.out_gt[1] + self.omaxy = self.out_gt[3] + self.ominy = self.out_gt[3] - self.out_ds.RasterYSize * self.out_gt[1] + + # Note: maybe round(x, 14) to avoid the gdal_translate behaviour, when 0 becomes -1e-15 + + # MMGIS + def linearScale(domain, rang, value): + return ( + ((rang[1] - rang[0]) * (value - domain[0])) / + (domain[1] - domain[0]) + + rang[0] + ) + # MMGIS + self.out_ds.fRasterXSize = self.out_ds.RasterXSize + self.out_ds.fRasterYSize = self.out_ds.RasterYSize + self.out_ds.fRasterXOrigin = 0 + self.out_ds.fRasterYOrigin = 0 + self.out_ds.PixelSize = self.out_gt[1] + self.out_ds.fPixelSize = self.fPixelSize + # print("ominx", self.ominx, "omaxx", self.omaxx, "ominy", self.ominy, "omaxy", self.omaxy) + # print("fminx", self.fminx, "fmaxx", self.fmaxx, 
"fminy", self.fminy, "fmaxy", self.fmaxy) + if self.isRasterBounded: + self.out_ds.fRasterXSize = int(math.floor(self.out_ds.RasterXSize * (self.fmaxx - self.fminx) / ( + self.omaxx - self.ominx) * (self.out_ds.PixelSize / self.out_ds.fPixelSize))) + self.out_ds.fRasterYSize = int(math.ceil(self.out_ds.RasterYSize * (self.fmaxy - self.fminy) / ( + self.omaxy - self.ominy) * (self.out_ds.PixelSize / self.out_ds.fPixelSize))) + self.out_ds.fRasterXSizeRaw = int(math.floor( + self.out_ds.RasterXSize * (self.fmaxx - self.fminx) / (self.omaxx - self.ominx))) + self.out_ds.fRasterYSizeRaw = int(math.ceil( + self.out_ds.RasterYSize * (self.fmaxy - self.fminy) / (self.omaxy - self.ominy))) + # print("Full Raster Size: ", self.out_ds.fRasterXSize, self.out_ds.fRasterYSize ) + self.out_ds.fRasterXOrigin = int(math.floor(linearScale( + [self.fminx, self.fmaxx], [0, self.out_ds.fRasterXSize], self.out_gt[0]))) + self.out_ds.fRasterYOrigin = int(math.ceil(linearScale( + [self.fminy, self.fmaxy], [self.out_ds.fRasterYSize, 0], self.out_gt[3]))) + self.out_ds.fRasterXOriginRaw = int(math.floor(linearScale([self.fminx, self.fmaxx], [ + 0, self.out_ds.fRasterXSize], self.out_gt[0]) * (self.out_ds.fPixelSize / self.out_ds.PixelSize))) + self.out_ds.fRasterYOriginRaw = int(math.ceil(linearScale([self.fminy, self.fmaxy], [ + self.out_ds.fRasterYSize, 0], self.out_gt[3]) * (self.out_ds.fPixelSize / self.out_ds.PixelSize))) + self.out_ds.fRasterXWidth = int(math.floor(linearScale( + [self.fminx, self.fmaxx], [0, self.out_ds.fRasterXSize], self.omaxx))) - self.out_ds.fRasterXOrigin + self.out_ds.fRasterYHeight = int(math.ceil(linearScale( + [self.fminy, self.fmaxy], [0, self.out_ds.fRasterYSize], self.omaxy))) - self.out_ds.fRasterYOrigin + + if self.options.verbose: + print("Bounds (output srs):", round(self.ominx, 13), + self.ominy, self.omaxx, self.omaxy) + + # print("Input Raster Size: ", self.out_ds.RasterXSize, self.out_ds.RasterYSize) + # print("fmaxx-fminx", self.fmaxx - 
self.fminx, "omaxx-ominx", self.omaxx - self.ominx, "fmaxy-fminy", self.fmaxy - self.fminy, "omaxy-ominy", self.omaxy - self.ominy) + # print("Full Raster Size: ", self.out_ds.fRasterXSize, self.out_ds.fRasterYSize) + # print("Full Raster Size Raw: ", self.out_ds.fRasterXSizeRaw, self.out_ds.fRasterYSizeRaw) + # print("Raster Origin: ", self.out_ds.fRasterXOrigin, self.out_ds.fRasterYOrigin) + # print("Raster Origin Raw: ", self.out_ds.fRasterXOriginRaw, self.out_ds.fRasterYOriginRaw) + # print("Raster Width Height: ", self.out_ds.fRasterXWidth, self.out_ds.fRasterYHeight) + + # Calculating ranges for tiles in different zoom levels + if self.options.profile == 'mercator': + + self.mercator = GlobalMercator() + + # Function which generates SWNE in LatLong for given tile + self.tileswne = self.mercator.TileLatLonBounds + + # Generate table with min max tile coordinates for all zoomlevels + self.tminmax = list(range(0, 32)) + for tz in range(0, 32): + tminx, tminy = self.mercator.MetersToTile( + self.ominx, self.ominy, tz) + tmaxx, tmaxy = self.mercator.MetersToTile( + self.omaxx, self.omaxy, tz) + # crop tiles extending world limits (+-180,+-90) + tminx, tminy = max(0, tminx), max(0, tminy) + tmaxx, tmaxy = min(2**tz-1, tmaxx), min(2**tz-1, tmaxy) + self.tminmax[tz] = (tminx, tminy, tmaxx, tmaxy) + + # TODO: Maps crossing 180E (Alaska?) 
+ + # Get the minimal zoom level (map covers area equivalent to one tile) + if self.tminz is None: + self.tminz = self.mercator.ZoomForPixelSize( + self.out_gt[1] * max(self.out_ds.RasterXSize, + self.out_ds.RasterYSize) / float(self.tilesize)) + + # Get the maximal zoom level + # (closest possible zoom level up on the resolution of raster) + if self.tmaxz is None: + self.tmaxz = self.mercator.ZoomForPixelSize(self.out_gt[1]) + + if self.options.verbose: + print("Bounds (latlong):", + self.mercator.MetersToLatLon(self.ominx, self.ominy), + self.mercator.MetersToLatLon(self.omaxx, self.omaxy)) + print('MinZoomLevel:', self.tminz) + print("MaxZoomLevel:", + self.tmaxz, + "(", + self.mercator.Resolution(self.tmaxz), + ")") + + if self.options.profile == 'geodetic': + + self.geodetic = GlobalGeodetic(self.options.tmscompatible) + + # Function which generates SWNE in LatLong for given tile + self.tileswne = self.geodetic.TileLatLonBounds + + # Generate table with min max tile coordinates for all zoomlevels + self.tminmax = list(range(0, 32)) + for tz in range(0, 32): + tminx, tminy = self.geodetic.LonLatToTile( + self.ominx, self.ominy, tz) + tmaxx, tmaxy = self.geodetic.LonLatToTile( + self.omaxx, self.omaxy, tz) + # crop tiles extending world limits (+-180,+-90) + tminx, tminy = max(0, tminx), max(0, tminy) + tmaxx, tmaxy = min(2**(tz+1)-1, tmaxx), min(2**tz-1, tmaxy) + self.tminmax[tz] = (tminx, tminy, tmaxx, tmaxy) + + # TODO: Maps crossing 180E (Alaska?) 
+ + # Get the maximal zoom level + # (closest possible zoom level up on the resolution of raster) + if self.tminz is None: + self.tminz = self.geodetic.ZoomForPixelSize( + self.out_gt[1] * max(self.out_ds.RasterXSize, + self.out_ds.RasterYSize) / float(self.tilesize)) + + # Get the maximal zoom level + # (closest possible zoom level up on the resolution of raster) + if self.tmaxz is None: + self.tmaxz = self.geodetic.ZoomForPixelSize(self.out_gt[1]) + + if self.options.verbose: + print("Bounds (latlong):", self.ominx, + self.ominy, self.omaxx, self.omaxy) + + # MMGIS + if self.options.profile == 'raster' and self.isRasterBounded: + + def log2(x): + return math.log10(x) / math.log10(2) + + # MMGIS added 'f'* + self.nativezoom = int( + max(math.ceil(log2(self.out_ds.fRasterXSizeRaw/float(self.tilesize))), + math.ceil(log2(self.out_ds.fRasterYSizeRaw/float(self.tilesize))))) + + self.basenativezoom = int( + max(math.ceil(log2(self.out_ds.fRasterXSize/float(self.tilesize))), + math.ceil(log2(self.out_ds.fRasterYSize/float(self.tilesize))))) + + # MMGIS + self.out_ds.fWorldXSize = int( + float(self.out_ds.fRasterXSize) * (2**(self.nativezoom - self.basenativezoom))) + self.out_ds.fWorldYSize = int( + float(self.out_ds.fRasterYSize) * (2**(self.nativezoom - self.basenativezoom))) + self.out_ds.fRasterXOriginWorld = int(float( + self.out_ds.fWorldXSize) * (float(self.out_ds.fRasterXOrigin) / self.out_ds.fRasterXSize)) + self.out_ds.fRasterYOriginWorld = int(float( + self.out_ds.fWorldYSize) * (float(self.out_ds.fRasterYOrigin) / self.out_ds.fRasterYSize)) + self.out_ds.fRasterXSizeWorld = int(float( + self.out_ds.fWorldXSize) * (float(self.out_ds.fRasterXWidth) / self.out_ds.fRasterXSize)) + self.out_ds.fRasterYSizeWorld = int(float( + self.out_ds.RasterYSize) * (float(self.out_ds.fRasterXSizeWorld) / self.out_ds.RasterXSize)) + # print("World Size", self.out_ds.fWorldXSize, self.out_ds.fWorldYSize) + # print("Raster Origin World", self.out_ds.fRasterXOriginWorld, 
self.out_ds.fRasterYOriginWorld) + # print("Raster Size World", self.out_ds.fRasterXSizeWorld, self.out_ds.fRasterYSizeWorld) + + if self.options.verbose: + print("Native zoom of the raster:", self.nativezoom) + + # Get the minimal zoom level (whole raster in one tile) + if self.tminz is None: + self.tminz = 0 + + # Get the maximal zoom level (native resolution of the raster) + if self.tmaxz is None: + self.tmaxz = self.nativezoom + + # MMGIS added 'f'* + # Generate table with min max tile coordinates for all zoomlevels + self.tminmax = list(range(0, self.tmaxz+1)) + self.tsize = list(range(0, self.tmaxz+1)) + # print("Raster Size:", self.out_ds.RasterXSize,self.out_ds.RasterYSize) + # print("Pixel Size Ratio:", (self.out_ds.fPixelSize / self.out_ds.PixelSize)) + # print("nativezoom", self.nativezoom, "basenativezoom", self.basenativezoom, "tminz", self.tminz, "tmaxz", self.tmaxz) + for tz in range(0, self.tmaxz+1): + tsize = 2.0**(self.tmaxz-tz)*self.tilesize + toffsetx = int(math.floor( + 2.0**(tz) * self.out_ds.fRasterXOriginRaw / self.out_ds.fRasterXSizeRaw)) + toffsety = int(math.floor( + 2.0**(tz) * (self.out_ds.fRasterYOriginRaw) / self.out_ds.fRasterYSizeRaw)) + # print("tsize", tsize, "toffsetx", toffsetx, "toffsety", toffsety) + toffsetx = int(math.floor( + self.out_ds.fRasterXOriginWorld / tsize)) + toffsety = int(math.floor( + self.out_ds.fRasterYOriginWorld / tsize)) + # print("tsize", tsize, "toffsetx", toffsetx, "toffsety", toffsety) + tmaxx = int(math.floor( + self.out_ds.fRasterXSizeWorld / tsize)) + toffsetx + 1 + + tmaxy = int(math.floor( + self.out_ds.fRasterYSizeWorld / tsize)) + toffsety + 1 + self.tsize[tz] = math.ceil(tsize) + #tminx = toffsetx + tminx = int(tmaxx - ((tmaxx - toffsetx) / (0.75))) - 1 + tminy = int(tmaxy - ((tmaxy - toffsety) / (0.75))) - 1 + + self.tminmax[tz] = (tminx, tminy, tmaxx, tmaxy) + # print("tminx", tminx, "tminy", tminy, "tmaxx", tmaxx, "tmaxy", tmaxy, "tz", tz) + + elif self.options.profile == 'raster': + + def 
log2(x): + return math.log10(x) / math.log10(2) + self.nativezoom = int( + max(math.ceil(log2(self.out_ds.RasterXSize/float(self.tilesize))), + math.ceil(log2(self.out_ds.RasterYSize/float(self.tilesize))))) + + if self.options.verbose: + print("Native zoom of the raster:", self.nativezoom) + + # Get the minimal zoom level (whole raster in one tile) + if self.tminz is None: + self.tminz = 0 + + # Get the maximal zoom level (native resolution of the raster) + if self.tmaxz is None: + self.tmaxz = self.nativezoom + + # Generate table with min max tile coordinates for all zoomlevels + self.tminmax = list(range(0, self.tmaxz+1)) + self.tsize = list(range(0, self.tmaxz+1)) + for tz in range(0, self.tmaxz+1): + tsize = 2.0**(self.tmaxz-tz)*self.tilesize + tminx, tminy = 0, 0 + tmaxx = int(math.ceil(self.out_ds.RasterXSize / tsize)) - 1 + tmaxy = int(math.ceil(self.out_ds.RasterYSize / tsize)) - 1 + self.tsize[tz] = math.ceil(tsize) + self.tminmax[tz] = (tminx, tminy, tmaxx, tmaxy) + + # Function which generates SWNE in LatLong for given tile + if self.kml and in_srs_wkt: + ct = osr.CoordinateTransformation(in_srs, srs4326) + + def rastertileswne(x, y, z): + # X-pixel size in level + pixelsizex = (2**(self.tmaxz-z) * self.out_gt[1]) + west = self.out_gt[0] + x*self.tilesize*pixelsizex + east = west + self.tilesize*pixelsizex + south = self.ominy + y*self.tilesize*pixelsizex + north = south + self.tilesize*pixelsizex + if not isepsg4326: + # Transformation to EPSG:4326 (WGS84 datum) + west, south = ct.TransformPoint(west, south)[:2] + east, north = ct.TransformPoint(east, north)[:2] + return south, west, north, east + + self.tileswne = rastertileswne + else: + self.tileswne = lambda x, y, z: (0, 0, 0, 0) # noqa + + def generate_metadata(self): + """ + Generation of main metadata files and HTML viewers (metadata related to particular + tiles are generated during the tile processing). 
+ """ + + if not os.path.exists(self.output): + os.makedirs(self.output) + + if self.options.profile == 'mercator': + + south, west = self.mercator.MetersToLatLon(self.ominx, self.ominy) + north, east = self.mercator.MetersToLatLon(self.omaxx, self.omaxy) + south, west = max(-85.05112878, south), max(-180.0, west) + north, east = min(85.05112878, north), min(180.0, east) + self.swne = (south, west, north, east) + + # Generate googlemaps.html + if self.options.webviewer in ('all', 'google') and self.options.profile == 'mercator': + if (not self.options.resume or not + os.path.exists(os.path.join(self.output, 'googlemaps.html'))): + f = open(os.path.join(self.output, 'googlemaps.html'), 'wb') + f.write(self.generate_googlemaps().encode('utf-8')) + f.close() + + # Generate openlayers.html + if self.options.webviewer in ('all', 'openlayers'): + if (not self.options.resume or not + os.path.exists(os.path.join(self.output, 'openlayers.html'))): + f = open(os.path.join(self.output, 'openlayers.html'), 'wb') + f.write(self.generate_openlayers().encode('utf-8')) + f.close() + + # Generate leaflet.html + if self.options.webviewer in ('all', 'leaflet'): + if (not self.options.resume or not + os.path.exists(os.path.join(self.output, 'leaflet.html'))): + f = open(os.path.join(self.output, 'leaflet.html'), 'wb') + f.write(self.generate_leaflet().encode('utf-8')) + f.close() + + elif self.options.profile == 'geodetic': + + west, south = self.ominx, self.ominy + east, north = self.omaxx, self.omaxy + south, west = max(-90.0, south), max(-180.0, west) + north, east = min(90.0, north), min(180.0, east) + self.swne = (south, west, north, east) + + # Generate openlayers.html + if self.options.webviewer in ('all', 'openlayers'): + if (not self.options.resume or not + os.path.exists(os.path.join(self.output, 'openlayers.html'))): + f = open(os.path.join(self.output, 'openlayers.html'), 'wb') + f.write(self.generate_openlayers().encode('utf-8')) + f.close() + + elif self.options.profile 
== 'raster': + + west, south = self.ominx, self.ominy + east, north = self.omaxx, self.omaxy + + # MMGIS + if self.isRasterBounded: + west = self.fminx + east = self.fmaxx + south = self.fminy + north = self.fmaxy + + self.swne = (south, west, north, east) + + # Generate openlayers.html + if self.options.webviewer in ('all', 'openlayers'): + if (not self.options.resume or not + os.path.exists(os.path.join(self.output, 'openlayers.html'))): + f = open(os.path.join(self.output, 'openlayers.html'), 'wb') + f.write(self.generate_openlayers().encode('utf-8')) + f.close() + + # Generate tilemapresource.xml. + if not self.options.resume or not os.path.exists(os.path.join(self.output, 'tilemapresource.xml')): + f = open(os.path.join(self.output, 'tilemapresource.xml'), 'wb') + f.write(self.generate_tilemapresource().encode('utf-8')) + f.close() + + if self.kml: + # TODO: Maybe problem for not automatically generated tminz + # The root KML should contain links to all tiles in the tminz level + children = [] + xmin, ymin, xmax, ymax = self.tminmax[self.tminz] + for x in range(xmin, xmax+1): + for y in range(ymin, ymax+1): + children.append([x, y, self.tminz]) + # Generate Root KML + if self.kml: + if (not self.options.resume or not + os.path.exists(os.path.join(self.output, 'doc.kml'))): + f = open(os.path.join(self.output, 'doc.kml'), 'wb') + f.write(self.generate_kml( + None, None, None, children).encode('utf-8')) + f.close() + + def generate_base_tiles(self, tz): + """ + Generation of the base tiles (the lowest in the pyramid) directly from the input raster + """ + + if self.isDEMtile: + print("Generating Tiles at Zoom " + str(tz) + ": ") + + if not self.options.quiet: + print("Generating Base Tiles:") + + if self.options.verbose: + print('') + print("Tiles generated from the max zoom level:") + print("----------------------------------------") + print('') + + ds = self.out_ds + + querysize = self.querysize + + # 1bto4b + if self.isDEMtile: + tilebands = 4 + querysize = 
self.tilesize + else: + tilebands = self.dataBandsCount + 1 + tz = self.tmaxz + + try: + self.tminmax[tz] + except IndexError: + print(" Won't make zoom level " + str(tz)) + return + + # Set the bounds + tminx, tminy, tmaxx, tmaxy = self.tminmax[tz] + + if self.options.verbose: + print("dataBandsCount: ", self.dataBandsCount) + print("tilebands: ", tilebands) + + tcount = (1+abs(tmaxx-tminx)) * (1+abs(tmaxy-tminy)) + ti = 0 + + for ty in range(tmaxy, tminy-1, -1): + for tx in range(tminx, tmaxx+1): + + if self.stopped: + break + ti += 1 + tilefilename = os.path.join( + self.output, str(tz), str(tx), "%s.%s" % (ty, self.tileext)) + if self.options.verbose: + print(ti, '/', tcount, tilefilename) + + if self.options.resume and os.path.exists(tilefilename): + if self.options.verbose: + print("Tile generation skipped because of --resume") + else: + self.progressbar(ti / float(tcount)) + continue + + # Create directories for the tile + if not os.path.exists(os.path.dirname(tilefilename)): + os.makedirs(os.path.dirname(tilefilename)) + + if self.options.profile == 'mercator': + # Tile bounds in EPSG:3857 + b = self.mercator.TileBounds(tx, ty, tz) + elif self.options.profile == 'geodetic': + b = self.geodetic.TileBounds(tx, ty, tz) + + # Don't scale up by nearest neighbour, better change the querysize + # to the native resolution (and return smaller query tile) for scaling + + if self.options.profile in ('mercator', 'geodetic'): + rb, wb = self.geo_query(ds, b[0], b[3], b[2], b[1]) + + # Pixel size in the raster covering query geo extent + nativesize = wb[0] + wb[2] + if self.options.verbose: + print("\tNative Extent (querysize", + nativesize, "): ", rb, wb) + + # Tile bounds in raster coordinates for ReadRaster query + rb, wb = self.geo_query( + ds, b[0], b[3], b[2], b[1], querysize=querysize) + + rx, ry, rxsize, rysize = rb + wx, wy, wxsize, wysize = wb + wxsize -= 1 # 1bto4b + wysize -= 1 # 1bto4b + + # MMGIS + elif self.isRasterBounded: # 'raster' profile: + + # 
tilesize in raster coordinates for actual zoom + tsize = int(self.tsize[tz]) + xsize = self.out_ds.fWorldXSize + ysize = self.out_ds.fWorldYSize + if tz >= self.tmaxz: + querysize = self.tilesize + + rx = (tx) * tsize - self.out_ds.fRasterXOriginWorld + #print("rx", rx) + rxsize = 0 + rxsize = tsize + + rysize = 0 + rysize = tsize + + ry = ysize - (ty * tsize) - rysize - \ + self.out_ds.fRasterYOriginWorld + + wx, wy = 0, 0 + wxsize = int(rxsize/float(tsize) * self.tilesize) + wysize = int(rysize/float(tsize) * self.tilesize) + if wysize != self.tilesize: + wy = self.tilesize - wysize + + if rx < 0: + rxsize = tsize + rx + wx = -rx + wxsize = int(rxsize/float(tsize) * self.tilesize) + rx = 0 + if ry < 0: + rysize = tsize + ry + wy = -ry + wysize = int(rysize/float(tsize) * self.tilesize) + ry = 0 + if rx + rxsize > self.out_ds.fRasterXSizeWorld: + rxsize = self.out_ds.fRasterXSizeWorld - rx + wxsize = int(rxsize/float(tsize) * self.tilesize) + if ry + rysize > self.out_ds.fRasterYSizeWorld: + rysize = self.out_ds.fRasterYSizeWorld - ry + wysize = int(rysize/float(tsize) * self.tilesize) + + # Convert rx, ry back to non-world coordinates + rx = int(float(self.out_ds.RasterXSize) * + (float(rx) / self.out_ds.fRasterXSizeWorld)) + ry = int(float(self.out_ds.RasterYSize) * + (float(ry) / self.out_ds.fRasterYSizeWorld)) + rxsize = int(float(self.out_ds.RasterXSize) * + (float(rxsize) / self.out_ds.fRasterXSizeWorld)) + rysize = int(float(self.out_ds.RasterYSize) * + (float(rysize) / self.out_ds.fRasterYSizeWorld)) + + wxsize -= 1 # 1bto4b + wysize -= 1 # 1bto4b + + #print("Extent: ", (tx, ty, tz, tsize), (rx, ry, rxsize, rysize), (wx, wy, wxsize, wysize), (self.out_ds.fRasterXOrigin, self.out_ds.fRasterYOrigin)) + else: # 'raster' profile: + # tilesize in raster coordinates for actual zoom + tsize = int(self.tsize[tz]) + xsize = self.out_ds.RasterXSize # size of the raster in pixels + ysize = self.out_ds.RasterYSize + if tz >= self.tmaxz: + querysize = self.tilesize + + 
rx = (tx) * tsize + rxsize = 0 + if tx == tmaxx: + rxsize = xsize % tsize + if rxsize == 0: + rxsize = tsize + + rysize = 0 + if ty == tmaxy: + rysize = ysize % tsize + if rysize == 0: + rysize = tsize + ry = ysize - (ty * tsize) - rysize + + wx, wy = 0, 0 + wxsize = int(rxsize/float(tsize) * self.tilesize) + wysize = int(rysize/float(tsize) * self.tilesize) + if wysize != self.tilesize: + wy = self.tilesize - wysize + + if self.options.verbose: + print("\tReadRaster Extent: ", + (rx, ry, rxsize, rysize), (wx, wy, wxsize, wysize)) + + # Query is in 'nearest neighbour' but can be bigger in then the tilesize + # We scale down the query to the tilesize by supplied algorithm. + + # Tile dataset in memory + + # 1bto4b + if self.isDEMtile: + dstile = self.mem_drv.Create( + '', self.tilesize, self.tilesize, tilebands, gdal.GDT_Byte) + else: + dstile = self.mem_drv.Create( + '', self.tilesize, self.tilesize, tilebands) + + data = alpha = None + # Read the source raster if anything is going inside the tile as per the computed + # geo_query + if rxsize != 0 and rysize != 0 and wxsize != 0 and wysize != 0: + # 1bto4b + if self.isDEMtile: + data = ds.GetRasterBand(1).ReadRaster( + rx, ry, rxsize, rysize, wxsize, wysize, buf_type=gdal.GDT_Float32) + else: + data = ds.ReadRaster(rx, ry, rxsize, rysize, wxsize, wysize, + band_list=list(range(1, self.dataBandsCount+1))) + alpha = self.alphaband.ReadRaster( + rx, ry, rxsize, rysize, wxsize, wysize) + + # The tile in memory is a transparent file by default. 
Write pixel values into it if + # any + if data: + # 1bto4b - both this full if and else + if self.isDEMtile: + if (wxsize * wysize) > 0: + data = struct.unpack('f' * wxsize * wysize, data) + else: + return + + if self.tilesize == querysize: + # Interpolate the values from four surrounding + + # This takes our 1d list of WxH data and pads it with a rect of none values + dataPad = list(data) + for i in reversed(range(1, wysize)): + dataPad.insert(wxsize * i, 0) + dataPad.insert(wxsize * i, 0) + for i in range(wxsize + 3): + dataPad.insert(0, 0) + for i in range(wxsize + 3): + dataPad.append(0) + + dataIn = [] + # Resample based on average of four + # averaging over: i, i + 1, i + wxsize, i + wxsize + 1 + for y in range(wysize+2 - 1): + for x in range(wxsize+2 - 1): + i = x+(y*(wxsize+2)) + nW = dataPad[i] + nE = dataPad[i+1] + sW = dataPad[i+(wxsize+2)] + sE = dataPad[i+(wxsize+2)+1] + dataIn.append((nW + nE + sW + sE)/float(4)) + + # Get the surrounding eight tiles + # Get NW + if tx - 1 >= tminx and ty + 1 <= tmaxy: + rxNW, ryNW, rxsizeNW, rysizeNW, wxsizeNW, wysizeNW = getTilePxBounds(self, + tx - 1, ty + 1, tz, ds) + wxsizeNW -= 1 + wysizeNW -= 1 + if wxsizeNW != 0 and wysizeNW != 0: + dataNW = ds.GetRasterBand(1).ReadRaster( + rxNW, ryNW, rxsizeNW, rysizeNW, wxsizeNW, wysizeNW, buf_type=gdal.GDT_Float32) + if dataNW is not None and (wxsizeNW * wysizeNW) > 0: + dataNW = struct.unpack( + 'f' * wxsizeNW * wysizeNW, dataNW) + else: + dataNW = None + else: + dataNW = None + + # Get N + if ty + 1 <= tmaxy: + rxN, ryN, rxsizeN, rysizeN, wxsizeN, wysizeN = getTilePxBounds( + self, tx, ty + 1, tz, ds) + wxsizeN -= 1 + wysizeN -= 1 + if wxsizeN != 0 and wysizeN != 0: + dataN = ds.GetRasterBand(1).ReadRaster( + rxN, ryN, rxsizeN, rysizeN, wxsizeN, wysizeN, buf_type=gdal.GDT_Float32) + if dataN is not None and (wxsizeN * wysizeN) > 0: + dataN = struct.unpack( + 'f' * wxsizeN * wysizeN, dataN) + else: + dataN = None + else: + dataN = None + # Get NE + if tx + 1 <= tmaxx and 
ty + 1 <= tmaxy: + rxNE, ryNE, rxsizeNE, rysizeNE, wxsizeNE, wysizeNE = getTilePxBounds( + self, tx + 1, ty + 1, tz, ds) + wxsizeNE -= 1 + wysizeNE -= 1 + if wxsizeNE != 0 and wysizeNE != 0: + dataNE = ds.GetRasterBand(1).ReadRaster( + rxNE, ryNE, rxsizeNE, rysizeNE, wxsizeNE, wysizeNE, buf_type=gdal.GDT_Float32) + if dataNE is not None and (wxsizeNE * wysizeNE) > 0: + dataNE = struct.unpack( + 'f' * wxsizeNE * wysizeNE, dataNE) + else: + dataNE = None + else: + dataNE = None + # Get E + if tx + 1 <= tmaxx: + rxE, ryE, rxsizeE, rysizeE, wxsizeE, wysizeE = getTilePxBounds( + self, tx + 1, ty, tz, ds) + wxsizeE -= 1 + wysizeE -= 1 + if wxsizeE != 0 and wysizeE != 0: + dataE = ds.GetRasterBand(1).ReadRaster( + rxE, ryE, rxsizeE, rysizeE, wxsizeE, wysizeE, buf_type=gdal.GDT_Float32) + if dataE is not None and (wxsizeE * wysizeE) > 0: + dataE = struct.unpack( + 'f' * wxsizeE * wysizeE, dataE) + else: + dataE = None + else: + dataE = None + # Get SE + if tx + 1 <= tmaxx and ty - 1 >= tminy: + rxSE, rySE, rxsizeSE, rysizeSE, wxsizeSE, wysizeSE = getTilePxBounds( + self, tx + 1, ty - 1, tz, ds) + wxsizeSE -= 1 + wysizeSE -= 1 + if wxsizeSE != 0 and wysizeSE != 0: + dataSE = ds.GetRasterBand(1).ReadRaster( + rxSE, rySE, rxsizeSE, rysizeSE, wxsizeSE, wysizeSE, buf_type=gdal.GDT_Float32) + if dataSE is not None and (wxsizeSE * wysizeSE) > 0: + dataSE = struct.unpack( + 'f' * wxsizeSE * wysizeSE, dataSE) + else: + dataSE = None + else: + dataSE = None + # Get S + if ty - 1 >= tminy: + rxS, ryS, rxsizeS, rysizeS, wxsizeS, wysizeS = getTilePxBounds( + self, tx, ty - 1, tz, ds) + wxsizeS -= 1 + wysizeS -= 1 + if wxsizeS != 0 and wysizeS != 0: + dataS = ds.GetRasterBand(1).ReadRaster( + rxS, ryS, rxsizeS, rysizeS, wxsizeS, wysizeS, buf_type=gdal.GDT_Float32) + if dataS is not None and (wxsizeS * wysizeS) > 0: + dataS = struct.unpack( + 'f' * wxsizeS * wysizeS, dataS) + else: + dataS = None + else: + dataS = None + # Get SW + if tx - 1 >= tminx and ty - 1 >= tminy: + rxSW, rySW, 
rxsizeSW, rysizeSW, wxsizeSW, wysizeSW = getTilePxBounds( + self, tx - 1, ty - 1, tz, ds) + wxsizeSW -= 1 + wysizeSW -= 1 + if wxsizeSW != 0 and wysizeSW != 0: + dataSW = ds.GetRasterBand(1).ReadRaster( + rxSW, rySW, rxsizeSW, rysizeSW, wxsizeSW, wysizeSW, buf_type=gdal.GDT_Float32) + if dataSW is not None and (wxsizeSW * wysizeSW) > 0: + dataSW = struct.unpack( + 'f' * wxsizeSW * wysizeSW, dataSW) + else: + dataSW = None + else: + dataSW = None + # Get W + if tx - 1 >= tminx: + rxW, ryW, rxsizeW, rysizeW, wxsizeW, wysizeW = getTilePxBounds( + self, tx - 1, ty, tz, ds) + wxsizeW -= 1 + wysizeW -= 1 + if wxsizeW != 0 and wysizeW != 0: + dataW = ds.GetRasterBand(1).ReadRaster( + rxW, ryW, rxsizeW, rysizeW, wxsizeW, wysizeW, buf_type=gdal.GDT_Float32) + if dataW is not None and (wxsizeW * wysizeW) > 0: + dataW = struct.unpack( + 'f' * wxsizeW * wysizeW, dataW) + else: + dataW = None + else: + dataW = None + + # NW (uses N, NW, W) + fN = fNE = fE = fSE = fS = fSW = fW = fNW = 0 + values = 1 + if dataN is not None: + fN = dataN[len(dataN)-wxsizeN] + values = values + 1 + if dataNW is not None: + fNW = dataNW[len(dataNW)-1] + values = values + 1 + if dataW is not None: + fW = dataW[wxsizeW-1] + values = values + 1 + dataIn[0] = ((dataIn[0]*4) + fN + + fNW + fW)/float(values) + + # NE (uses N, NE, E) + fN = fNE = fE = fSE = fS = fSW = fW = fNW = 0 + values = 1 + if dataN is not None: + fN = dataN[len(dataN)-1] + values = values + 1 + if dataNE is not None: + fNE = dataNE[len(dataNE)-wxsizeNE] + values = values + 1 + if dataE is not None: + fE = dataE[0] + values = values + 1 + dataIn[wxsize] = ( + (dataIn[wxsize]*4) + fN + fNE + fE)/float(values) + + # SE (uses S, SE, E) + fN = fNE = fE = fSE = fS = fSW = fW = fNW = 0 + values = 1 + if dataS is not None: + fS = dataS[wxsizeS-1] + values = values + 1 + if dataSE is not None: + fSE = dataSE[0] + values = values + 1 + if dataE is not None: + fE = dataE[len(dataE)-wxsizeE] + values = values + 1 + dataIn[len(dataIn)-1] = 
((dataIn[len(dataIn)-1] + * 4) + fS + fSE + fE)/float(values) + + # SW (uses S, SW, W) + fN = fNE = fE = fSE = fS = fSW = fW = fNW = 0 + values = 1 + if dataS is not None: + fS = dataS[0] + values = values + 1 + if dataSW is not None: + fSW = dataSW[wxsizeSW-1] + values = values + 1 + if dataW is not None: + fW = dataW[len(dataW)-1] + values = values + 1 + dataIn[len( + dataIn)-wxsize-1] = ((dataIn[len(dataIn)-wxsize-1]*4) + fS + fSW + fW)/float(values) + + # Then the edges minus corners + # N + if dataN is not None: + for i in range(1, wxsize): + dataIn[i] = ( + (dataIn[i]*4) + dataN[len(dataN)-wxsizeN-1+i] + dataN[len(dataN)-wxsizeN-1+i+1])/float(4) + else: + for i in range(1, wxsize): + dataIn[i] = (dataIn[i]*4)/float(2) + + # E + if dataE is not None: + for i in range(1, wysize): + dataIn[((i+1)*(wxsize+1)-1)] = ((dataIn[((i+1)*(wxsize+1)-1)] + * 4) + dataE[(i-1)*wxsizeE] + dataE[i*wxsizeE])/float(4) + else: + for i in range(1, wysize): + dataIn[( + (i+1)*(wxsize+1)-1)] = (dataIn[((i+1)*(wxsize+1)-1)]*4)/float(2) + + # S + if dataS is not None: + for i in range(1, wxsize): + dataIn[len(dataIn)-wxsize-1+i] = ( + (dataIn[len(dataIn)-wxsize-1+i]*4) + dataS[i-1] + dataS[i])/float(4) + else: + for i in range(1, wxsize): + dataIn[len( + dataIn)-wxsize-1+i] = (dataIn[len(dataIn)-wxsize-1+i]*4)/float(2) + + # W + if dataW is not None: + for i in range(1, wysize): + dataIn[(i)*(wxsize+1)] = ((dataIn[(i)*(wxsize+1)]*4) + + dataW[i*wxsizeW-1] + dataW[(i+1)*wxsizeW-1])/float(4) + else: + for i in range(1, wysize): + dataIn[(i)*(wxsize+1)] = (dataIn[(i) + * (wxsize+1)]*4)/float(2) + + data1 = [] + data2 = [] + data3 = [] + data4 = [] + for f in dataIn: + f = str(binary(f)) + data1.append(int(f[:8], 2)) + data2.append(int(f[8:16], 2)) + data3.append(int(f[16:24], 2)) + data4.append(int(f[24:], 2)) + + data1s = b'' + data2s = b'' + data3s = b'' + data4s = b'' + indx = 0 + for v in data1: + data1s += struct.pack('B', data1[indx]) + data2s += struct.pack('B', data2[indx]) + 
data3s += struct.pack('B', data3[indx]) + data4s += struct.pack('B', data4[indx]) + indx += 1 + dstile.GetRasterBand(1).WriteRaster( + wx, wy, wxsize + 1, wysize + 1, data1s, buf_type=gdal.GDT_Byte) + dstile.GetRasterBand(2).WriteRaster( + wx, wy, wxsize + 1, wysize + 1, data2s, buf_type=gdal.GDT_Byte) + dstile.GetRasterBand(3).WriteRaster( + wx, wy, wxsize + 1, wysize + 1, data3s, buf_type=gdal.GDT_Byte) + dstile.GetRasterBand(4).WriteRaster( + wx, wy, wxsize + 1, wysize + 1, data4s, buf_type=gdal.GDT_Byte) + elif wxsize != 0 and wysize != 0: + # Big ReadRaster query in memory scaled to the tilesize - all but 'near' algo + dsquery = self.mem_drv.Create( + '', querysize, querysize, tilebands, gdal.GDT_Byte) # 1bto4b + # TODO: fill the null value in case a tile without alpha is produced (now only png tiles are supported) + # for i in range(1, tilebands+1): + # dsquery.GetRasterBand(1).Fill(tilenodata) + # dsquery.WriteRaster(wx, wy, wxsize, wysize, data, band_list=list(range(1,self.dataBandsCount+1)))###############1bto4b + # dsquery.WriteRaster(wx, wy, wxsize, wysize, alpha, band_list=[tilebands])###############################1bto4b + + # 1bto4b + data = ds.GetRasterBand(1).ReadRaster( + rx, ry, rxsize, rysize, wxsize, wysize, buf_type=gdal.GDT_Float32) + + data = struct.unpack('f' * wxsize * wysize, data) + data1 = [] + data2 = [] + data3 = [] + data4 = [] + for f in data: + f = str(binary(f)) + data1.append(int(f[:8], 2)) + data2.append(int(f[8:16], 2)) + data3.append(int(f[16:24], 2)) + data4.append(int(f[24:], 2)) + + data1s = b'' + data2s = b'' + data3s = b'' + data4s = b'' + indx = 0 + for v in data1: + data1s += struct.pack('B', data1[indx]) + data2s += struct.pack('B', data2[indx]) + data3s += struct.pack('B', data3[indx]) + data4s += struct.pack('B', data4[indx]) + indx += 1 + + dsquery.GetRasterBand(1).WriteRaster( + wx, wy, wxsize, wysize, data1s, buf_type=gdal.GDT_Byte) + dsquery.GetRasterBand(2).WriteRaster( + wx, wy, wxsize, wysize, data2s, 
buf_type=gdal.GDT_Byte) + dsquery.GetRasterBand(3).WriteRaster( + wx, wy, wxsize, wysize, data3s, buf_type=gdal.GDT_Byte) + dsquery.GetRasterBand(4).WriteRaster( + wx, wy, wxsize, wysize, data4s, buf_type=gdal.GDT_Byte) + # sys.exit('done') + # 1bto4b + + self.scale_query_to_tile( + dsquery, dstile, tilefilename) + del dsquery + + else: + if self.tilesize == querysize: + # Use the ReadRaster result directly in tiles ('nearest neighbour' query) + dstile.WriteRaster(wx, wy, wxsize, wysize, data, + band_list=list(range(1, self.dataBandsCount+1))) + dstile.WriteRaster( + wx, wy, wxsize, wysize, alpha, band_list=[tilebands]) + + # Note: For source drivers based on WaveLet compression (JPEG2000, ECW, + # MrSID) the ReadRaster function returns high-quality raster (not ugly + # nearest neighbour) + # TODO: Use directly 'near' for WaveLet files + else: + # Big ReadRaster query in memory scaled to the tilesize - all but 'near' + # algo + dsquery = self.mem_drv.Create( + '', querysize, querysize, tilebands) + # TODO: fill the null value in case a tile without alpha is produced (now + # only png tiles are supported) + dsquery.WriteRaster(wx, wy, wxsize, wysize, data, + band_list=list(range(1, self.dataBandsCount+1))) + dsquery.WriteRaster( + wx, wy, wxsize, wysize, alpha, band_list=[tilebands]) + + self.scale_query_to_tile( + dsquery, dstile, tilefilename) + del dsquery + + del data + + if self.options.resampling != 'antialias': + # Write a copy of tile to png/jpg + self.out_drv.CreateCopy(tilefilename, dstile, strict=0) + + del dstile + + # Create a KML file for this tile. 
+ if self.kml: + kmlfilename = os.path.join( + self.output, str(tz), str(tx), '%d.kml' % ty) + if not self.options.resume or not os.path.exists(kmlfilename): + f = open(kmlfilename, 'wb') + f.write(self.generate_kml(tx, ty, tz).encode('utf-8')) + f.close() + + if not self.options.verbose and not self.options.quiet: + self.progressbar(ti / float(tcount)) + + def generate_overview_tiles(self): + """Generation of the overview tiles (higher in the pyramid) based on existing tiles""" + + if not self.options.quiet: + print("Generating Overview Tiles:") + + # 1bto4b + if self.isDEMtile: + tilebands = 4 + else: + tilebands = self.dataBandsCount + 1 + + # Usage of existing tiles: from 4 underlying tiles generate one as overview. + + tcount = 0 + for tz in range(self.tmaxz-1, self.tminz-1, -1): + tminx, tminy, tmaxx, tmaxy = self.tminmax[tz] + tcount += (1+abs(tmaxx-tminx)) * (1+abs(tmaxy-tminy)) + + ti = 0 + + for tz in range(self.tmaxz-1, self.tminz-1, -1): + tminx, tminy, tmaxx, tmaxy = self.tminmax[tz] + for ty in range(tmaxy, tminy-1, -1): + for tx in range(tminx, tmaxx+1): + + if self.stopped: + break + + ti += 1 + tilefilename = os.path.join(self.output, + str(tz), + str(tx), + "%s.%s" % (ty, self.tileext)) + + if self.options.verbose: + print(ti, '/', tcount, tilefilename) + + if self.options.resume and os.path.exists(tilefilename): + if self.options.verbose: + print("Tile generation skipped because of --resume") + else: + self.progressbar(ti / float(tcount)) + continue + + # Create directories for the tile + if not os.path.exists(os.path.dirname(tilefilename)): + os.makedirs(os.path.dirname(tilefilename)) + + dsquery = self.mem_drv.Create( + '', 2*self.tilesize, 2*self.tilesize, tilebands) + # TODO: fill the null value + dstile = self.mem_drv.Create( + '', self.tilesize, self.tilesize, tilebands) + + # TODO: Implement more clever walking on the tiles with cache functionality + # probably walk should start with reading of four tiles from top left corner + # Hilbert 
curve + + children = [] + # Read the tiles and write them to query window + for y in range(2*ty, 2*ty+2): + for x in range(2*tx, 2*tx+2): + minx, miny, maxx, maxy = self.tminmax[tz+1] + if x >= minx and x <= maxx and y >= miny and y <= maxy: + dsquerytile = gdal.Open( + os.path.join(self.output, str(tz+1), str(x), + "%s.%s" % (y, self.tileext)), + gdal.GA_ReadOnly) + if (ty == 0 and y == 1) or (ty != 0 and (y % (2*ty)) != 0): + tileposy = 0 + else: + tileposy = self.tilesize + if tx: + tileposx = x % (2*tx) * self.tilesize + elif tx == 0 and x == 1: + tileposx = self.tilesize + else: + tileposx = 0 + dsquery.WriteRaster( + tileposx, tileposy, self.tilesize, self.tilesize, + dsquerytile.ReadRaster( + 0, 0, self.tilesize, self.tilesize), + band_list=list(range(1, tilebands+1))) + children.append([x, y, tz+1]) + + self.scale_query_to_tile(dsquery, dstile, tilefilename) + # Write a copy of tile to png/jpg + if self.options.resampling != 'antialias': + # Write a copy of tile to png/jpg + self.out_drv.CreateCopy(tilefilename, dstile, strict=0) + + if self.options.verbose: + print("\tbuild from zoom", tz+1, + " tiles:", (2*tx, 2*ty), (2*tx+1, 2*ty), + (2*tx, 2*ty+1), (2*tx+1, 2*ty+1)) + + # Create a KML file for this tile. + if self.kml: + f = open(os.path.join( + self.output, '%d/%d/%d.kml' % (tz, tx, ty)), 'wb') + f.write(self.generate_kml( + tx, ty, tz, children).encode('utf-8')) + f.close() + + if not self.options.verbose and not self.options.quiet: + self.progressbar(ti / float(tcount)) + + def geo_query(self, ds, ulx, uly, lrx, lry, querysize=0): + """ + For given dataset and query in cartographic coordinates returns parameters for ReadRaster() + in raster coordinates and x/y shifts (for border tiles). If the querysize is not given, the + extent is returned in the native resolution of dataset ds. 
+ + raises Gdal2TilesError if the dataset does not contain anything inside this geo_query + """ + geotran = ds.GetGeoTransform() + rx = int((ulx - geotran[0]) / geotran[1] + 0.001) + ry = int((uly - geotran[3]) / geotran[5] + 0.001) + rxsize = int((lrx - ulx) / geotran[1] + 0.5) + rysize = int((lry - uly) / geotran[5] + 0.5) + + if not querysize: + wxsize, wysize = rxsize, rysize + else: + wxsize, wysize = querysize, querysize + + # Coordinates should not go out of the bounds of the raster + wx = 0 + if rx < 0: + rxshift = abs(rx) + wx = int(wxsize * (float(rxshift) / rxsize)) + wxsize = wxsize - wx + rxsize = rxsize - int(rxsize * (float(rxshift) / rxsize)) + rx = 0 + if rx+rxsize > ds.RasterXSize: + wxsize = int(wxsize * (float(ds.RasterXSize - rx) / rxsize)) + rxsize = ds.RasterXSize - rx + + wy = 0 + if ry < 0: + ryshift = abs(ry) + wy = int(wysize * (float(ryshift) / rysize)) + wysize = wysize - wy + rysize = rysize - int(rysize * (float(ryshift) / rysize)) + ry = 0 + if ry+rysize > ds.RasterYSize: + wysize = int(wysize * (float(ds.RasterYSize - ry) / rysize)) + rysize = ds.RasterYSize - ry + + return (rx, ry, rxsize, rysize), (wx, wy, wxsize, wysize) + + def scale_query_to_tile(self, dsquery, dstile, tilefilename=''): + """Scales down query dataset to the tile dataset""" + + querysize = dsquery.RasterXSize + tilesize = dstile.RasterXSize + tilebands = dstile.RasterCount + + if self.options.resampling == 'average': + + # Function: gdal.RegenerateOverview() + for i in range(1, tilebands+1): + # Black border around NODATA + res = gdal.RegenerateOverview(dsquery.GetRasterBand(i), dstile.GetRasterBand(i), + 'average') + if res != 0: + self.error("RegenerateOverview() failed on %s, error %d" % ( + tilefilename, res)) + + elif self.options.resampling == 'antialias': + + # Scaling by PIL (Python Imaging Library) - improved Lanczos + array = numpy.zeros((querysize, querysize, tilebands), numpy.uint8) + for i in range(tilebands): + array[:, :, i] = 
gdalarray.BandReadAsArray(dsquery.GetRasterBand(i+1), + 0, 0, querysize, querysize) + im = Image.fromarray(array, 'RGBA') # Always four bands + im1 = im.resize((tilesize, tilesize), Image.ANTIALIAS) + if os.path.exists(tilefilename): + im0 = Image.open(tilefilename) + im1 = Image.composite(im1, im0, im1) + im1.save(tilefilename, self.tiledriver) + + else: + + # Other algorithms are implemented by gdal.ReprojectImage(). + dsquery.SetGeoTransform((0.0, tilesize / float(querysize), 0.0, 0.0, 0.0, + tilesize / float(querysize))) + dstile.SetGeoTransform((0.0, 1.0, 0.0, 0.0, 0.0, 1.0)) + + res = gdal.ReprojectImage( + dsquery, dstile, None, None, self.resampling) + if res != 0: + self.error("ReprojectImage() failed on %s, error %d" % + (tilefilename, res)) + + def generate_tilemapresource(self): + """ + Template for tilemapresource.xml. Returns filled string. Expected variables: + title, north, south, east, west, isepsg4326, projection, publishurl, + zoompixels, tilesize, tileformat, profile + """ + + args = {} + args['title'] = self.options.title + args['south'], args['west'], args['north'], args['east'] = self.swne + args['tilesize'] = self.tilesize + args['tileformat'] = self.tileext + args['publishurl'] = self.options.url + args['profile'] = self.options.profile + + if self.options.profile == 'mercator': + args['srs'] = "EPSG:3857" + elif self.options.profile == 'geodetic': + args['srs'] = "EPSG:4326" + elif self.options.s_srs: + args['srs'] = self.options.s_srs + elif self.out_srs: + args['srs'] = self.out_srs.ExportToWkt() + else: + args['srs'] = "" + + s = """ + + %(title)s + + %(srs)s + + + + +""" % args # noqa + for z in range(self.tminz, self.tmaxz+1): + if self.options.profile == 'raster': + s += """ \n""" % ( + args['publishurl'], z, (2**(self.nativezoom-z) * self.out_gt[1]), z) + elif self.options.profile == 'mercator': + s += """ \n""" % ( + args['publishurl'], z, 156543.0339/2**z, z) + elif self.options.profile == 'geodetic': + s += """ \n""" % ( + 
args['publishurl'], z, 0.703125/2**z, z) + s += """ + + """ + return s + + def generate_kml(self, tx, ty, tz, children=None, **args): + """ + Template for the KML. Returns filled string. + """ + if not children: + children = [] + + args['tx'], args['ty'], args['tz'] = tx, ty, tz + args['tileformat'] = self.tileext + if 'tilesize' not in args: + args['tilesize'] = self.tilesize + + if 'minlodpixels' not in args: + args['minlodpixels'] = int(args['tilesize'] / 2) + if 'maxlodpixels' not in args: + args['maxlodpixels'] = int(args['tilesize'] * 8) + if children == []: + args['maxlodpixels'] = -1 + + if tx is None: + tilekml = False + args['title'] = self.options.title + else: + tilekml = True + args['title'] = "%d/%d/%d.kml" % (tz, tx, ty) + args['south'], args['west'], args['north'], args['east'] = self.tileswne( + tx, ty, tz) + + if tx == 0: + args['drawOrder'] = 2 * tz + 1 + elif tx is not None: + args['drawOrder'] = 2 * tz + else: + args['drawOrder'] = 0 + + url = self.options.url + if not url: + if tilekml: + url = "../../" + else: + url = "" + + s = """ + + + %(title)s + + """ % args + if tilekml: + s += """ + + + %(north).14f + %(south).14f + %(east).14f + %(west).14f + + + %(minlodpixels)d + %(maxlodpixels)d + + + + %(drawOrder)d + + %(ty)d.%(tileformat)s + + + %(north).14f + %(south).14f + %(east).14f + %(west).14f + + + """ % args + + for cx, cy, cz in children: + csouth, cwest, cnorth, ceast = self.tileswne(cx, cy, cz) + s += """ + + %d/%d/%d.%s + + + %.14f + %.14f + %.14f + %.14f + + + %d + -1 + + + + %s%d/%d/%d.kml + onRegion + + + + """ % (cz, cx, cy, args['tileformat'], cnorth, csouth, ceast, cwest, + args['minlodpixels'], url, cz, cx, cy) + + s += """ + + """ + return s + + def generate_googlemaps(self): + """ + Template for googlemaps.html implementing Overlay of tiles for 'mercator' profile. + It returns filled string. 
Expected variables: + title, googlemapskey, north, south, east, west, minzoom, maxzoom, tilesize, tileformat, + publishurl + """ + args = {} + args['title'] = self.options.title + args['googlemapskey'] = self.options.googlekey + args['south'], args['west'], args['north'], args['east'] = self.swne + args['minzoom'] = self.tminz + args['maxzoom'] = self.tmaxz + args['tilesize'] = self.tilesize + args['tileformat'] = self.tileext + args['publishurl'] = self.options.url + args['copyright'] = self.options.copyright + + s = r""" + + + %(title)s + + + + + + + + +
Generated by GDAL2Tiles, Copyright © 2008 Klokan Petr Pridal, GDAL & OSGeo GSoC + +
+
+ + + """ % args # noqa + + return s + + def generate_leaflet(self): + """ + Template for leaflet.html implementing overlay of tiles for 'mercator' profile. + It returns filled string. Expected variables: + title, north, south, east, west, minzoom, maxzoom, tilesize, tileformat, publishurl + """ + + args = {} + args['title'] = self.options.title.replace('"', '\\"') + args['htmltitle'] = self.options.title + args['south'], args['west'], args['north'], args['east'] = self.swne + args['centerlon'] = (args['north'] + args['south']) / 2. + args['centerlat'] = (args['west'] + args['east']) / 2. + args['minzoom'] = self.tminz + args['maxzoom'] = self.tmaxz + args['beginzoom'] = self.tmaxz + args['tilesize'] = self.tilesize # not used + args['tileformat'] = self.tileext + args['publishurl'] = self.options.url # not used + args['copyright'] = self.options.copyright.replace('"', '\\"') + + s = """ + + + + + %(htmltitle)s + + + + + + + + + + +
+ + + + + + + """ % args # noqa + + return s + + def generate_openlayers(self): + """ + Template for openlayers.html implementing overlay of available Spherical Mercator layers. + + It returns filled string. Expected variables: + title, bingkey, north, south, east, west, minzoom, maxzoom, tilesize, tileformat, publishurl + """ + + args = {} + args['title'] = self.options.title + args['bingkey'] = self.options.bingkey + args['south'], args['west'], args['north'], args['east'] = self.swne + args['minzoom'] = self.tminz + args['maxzoom'] = self.tmaxz + args['tilesize'] = self.tilesize + args['tileformat'] = self.tileext + args['publishurl'] = self.options.url + args['copyright'] = self.options.copyright + if self.options.tmscompatible: + args['tmsoffset'] = "-1" + else: + args['tmsoffset'] = "" + if self.options.profile == 'raster': + args['rasterzoomlevels'] = self.tmaxz+1 + args['rastermaxresolution'] = 2**(self.nativezoom) * self.out_gt[1] + + s = r""" + + %(title)s + + """ % args # noqa + + if self.options.profile == 'mercator': + s += """ + + """ % args + + s += """ + + + + + +
Generated by GDAL2Tiles, Copyright © 2008 Klokan Petr Pridal, GDAL & OSGeo GSoC + +
+
+ + + """ % args # noqa + + return s + + +def main(): + argv = gdal.GeneralCmdLineProcessor(sys.argv) + if argv: + gdal2tiles = GDAL2Tiles(argv[1:]) + gdal2tiles.process() + + +if __name__ == '__main__': + main() + +# vim: set tabstop=4 shiftwidth=4 expandtab: diff --git a/auxiliary/gdal2customtiles/gdal2customtiles_py27.py b/auxiliary/gdal2customtiles/legacy/gdal2customtiles_py27.py similarity index 97% rename from auxiliary/gdal2customtiles/gdal2customtiles_py27.py rename to auxiliary/gdal2customtiles/legacy/gdal2customtiles_py27.py index 8b6421d64..13082c6fc 100644 --- a/auxiliary/gdal2customtiles/gdal2customtiles_py27.py +++ b/auxiliary/gdal2customtiles/legacy/gdal2customtiles_py27.py @@ -1,3218 +1,3218 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -# ****************************************************************************** -# $Id$ -# -# Project: Google Summer of Code 2007, 2008 (http://code.google.com/soc/) -# Support: BRGM (http://www.brgm.fr) -# Purpose: Convert a raster into TMS (Tile Map Service) tiles in a directory. 
-# - generate Google Earth metadata (KML SuperOverlay) -# - generate simple HTML viewer based on Google Maps and OpenLayers -# - support of global tiles (Spherical Mercator) for compatibility -# with interactive web maps a la Google Maps -# Author: Klokan Petr Pridal, klokan at klokan dot cz -# Web: http://www.klokan.cz/projects/gdal2tiles/ -# GUI: http://www.maptiler.org/ -# -############################################################################### -# Copyright (c) 2008, Klokan Petr Pridal -# Copyright (c) 2010-2013, Even Rouault -# -# Permission is hereby granted, free of charge, to any person obtaining a -# copy of this software and associated documentation files (the "Software"), -# to deal in the Software without restriction, including without limitation -# the rights to use, copy, modify, merge, publish, distribute, sublicense, -# and/or sell copies of the Software, and to permit persons to whom the -# Software is furnished to do so, subject to the following conditions: -# -# The above copyright notice and this permission notice shall be included -# in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL -# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING -# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER -# DEALINGS IN THE SOFTWARE. 
-# ****************************************************************************** - -import math -import os -import sys - -from osgeo import gdal -from osgeo import osr - -import struct # 1bto4b - - -def binary(num): # 1bto4b - # 1bto4b - return ''.join(bin(ord(c)).replace('0b', '').rjust(8, '0') for c in struct.pack('!f', num)) - -# 1bto4b - -def getTilePxBounds(self, tx, ty, tz, ds): - - querysize = self.tilesize - - if self.isRasterBounded: # 'raster' profile: - # tilesize in raster coordinates for actual zoom - tsize = int(self.tsize[tz]) - xsize = self.out_ds.fWorldXSize - ysize = self.out_ds.fWorldYSize - if tz >= self.tmaxz: - querysize = self.tilesize - - rx = (tx) * tsize - self.out_ds.fRasterXOriginWorld - #print("rx", rx) - rxsize = 0 - rxsize = tsize - - rysize = 0 - rysize = tsize - - ry = ysize - (ty * tsize) - rysize - \ - self.out_ds.fRasterYOriginWorld - - wx, wy = 0, 0 - wxsize = int(rxsize/float(tsize) * self.tilesize) - wysize = int(rysize/float(tsize) * self.tilesize) - if wysize != self.tilesize: - wy = self.tilesize - wysize - - if rx < 0: - rxsize = tsize + rx - wx = -rx - wxsize = int(rxsize/float(tsize) * self.tilesize) - rx = 0 - if ry < 0: - rysize = tsize + ry - wy = -ry - wysize = int(rysize/float(tsize) * self.tilesize) - ry = 0 - if rx + rxsize > self.out_ds.fRasterXSizeWorld: - rxsize = self.out_ds.fRasterXSizeWorld - rx - wxsize = int(rxsize/float(tsize) * self.tilesize) - if ry + rysize > self.out_ds.fRasterYSizeWorld: - rysize = self.out_ds.fRasterYSizeWorld - ry - wysize = int(rysize/float(tsize) * self.tilesize) - - # Convert rx, ry back to non-world coordinates - rx = int(float(self.out_ds.RasterXSize) * - (float(rx) / self.out_ds.fRasterXSizeWorld)) - ry = int(float(self.out_ds.RasterYSize) * - (float(ry) / self.out_ds.fRasterYSizeWorld)) - rxsize = int(float(self.out_ds.RasterXSize) * - (float(rxsize) / self.out_ds.fRasterXSizeWorld)) - rysize = int(float(self.out_ds.RasterYSize) * - (float(rysize) / 
self.out_ds.fRasterYSizeWorld)) - else: - b = self.mercator.TileBounds(tx, ty, tz) - rb, wb = self.geo_query( - ds, b[0], b[3], b[2], b[1], querysize=querysize) - rx, ry, rxsize, rysize = rb - wx, wy, wxsize, wysize = wb - - return [rx, ry, rxsize, rysize, wxsize, wysize] - - -try: - from PIL import Image - import numpy - import osgeo.gdal_array as gdalarray -except Exception: - # 'antialias' resampling is not available - pass - -__version__ = "$Id$" - -resampling_list = ('average', 'near', 'bilinear', 'cubic', - 'cubicspline', 'lanczos', 'antialias') -profile_list = ('mercator', 'geodetic', 'raster') -webviewer_list = ('all', 'google', 'openlayers', 'leaflet', 'none') - -# ============================================================================= -# ============================================================================= -# ============================================================================= - -__doc__globalmaptiles = """ -globalmaptiles.py - -Global Map Tiles as defined in Tile Map Service (TMS) Profiles -============================================================== - -Functions necessary for generation of global tiles used on the web. -It contains classes implementing coordinate conversions for: - - - GlobalMercator (based on EPSG:3857) - for Google Maps, Yahoo Maps, Bing Maps compatible tiles - - GlobalGeodetic (based on EPSG:4326) - for OpenLayers Base Map and Google Earth compatible tiles - -More info at: - -http://wiki.osgeo.org/wiki/Tile_Map_Service_Specification -http://wiki.osgeo.org/wiki/WMS_Tiling_Client_Recommendation -http://msdn.microsoft.com/en-us/library/bb259689.aspx -http://code.google.com/apis/maps/documentation/overlays.html#Google_Maps_Coordinates - -Created by Klokan Petr Pridal on 2008-07-03. -Google Summer of Code 2008, project GDAL2Tiles for OSGEO. - -In case you use this class in your product, translate it to another language -or find it useful for your project please let me know. -My email: klokan at klokan dot cz. 
-I would like to know where it was used. - -Class is available under the open-source GDAL license (www.gdal.org). -""" - -MAXZOOMLEVEL = 32 - - -class GlobalMercator(object): - r""" - TMS Global Mercator Profile - --------------------------- - - Functions necessary for generation of tiles in Spherical Mercator projection, - EPSG:3857. - - Such tiles are compatible with Google Maps, Bing Maps, Yahoo Maps, - UK Ordnance Survey OpenSpace API, ... - and you can overlay them on top of base maps of those web mapping applications. - - Pixel and tile coordinates are in TMS notation (origin [0,0] in bottom-left). - - What coordinate conversions do we need for TMS Global Mercator tiles:: - - LatLon <-> Meters <-> Pixels <-> Tile - - WGS84 coordinates Spherical Mercator Pixels in pyramid Tiles in pyramid - lat/lon XY in meters XY pixels Z zoom XYZ from TMS - EPSG:4326 EPSG:387 - .----. --------- -- TMS - / \ <-> | | <-> /----/ <-> Google - \ / | | /--------/ QuadTree - ----- --------- /------------/ - KML, public WebMapService Web Clients TileMapService - - What is the coordinate extent of Earth in EPSG:3857? - - [-20037508.342789244, -20037508.342789244, - 20037508.342789244, 20037508.342789244] - Constant 20037508.342789244 comes from the circumference of the Earth in meters, - which is 40 thousand kilometers, the coordinate origin is in the middle of extent. - In fact you can calculate the constant as: 2 * math.pi * 6378137 / 2.0 - $ echo 180 85 | gdaltransform -s_srs EPSG:4326 -t_srs EPSG:3857 - Polar areas with abs(latitude) bigger then 85.05112878 are clipped off. - - What are zoom level constants (pixels/meter) for pyramid with EPSG:3857? - - whole region is on top of pyramid (zoom=0) covered by 256x256 pixels tile, - every lower zoom level resolution is always divided by two - initialResolution = 20037508.342789244 * 2 / 256 = 156543.03392804062 - - What is the difference between TMS and Google Maps/QuadTree tile name convention? 
- - The tile raster itself is the same (equal extent, projection, pixel size), - there is just different identification of the same raster tile. - Tiles in TMS are counted from [0,0] in the bottom-left corner, id is XYZ. - Google placed the origin [0,0] to the top-left corner, reference is XYZ. - Microsoft is referencing tiles by a QuadTree name, defined on the website: - http://msdn2.microsoft.com/en-us/library/bb259689.aspx - - The lat/lon coordinates are using WGS84 datum, yes? - - Yes, all lat/lon we are mentioning should use WGS84 Geodetic Datum. - Well, the web clients like Google Maps are projecting those coordinates by - Spherical Mercator, so in fact lat/lon coordinates on sphere are treated as if - the were on the WGS84 ellipsoid. - - From MSDN documentation: - To simplify the calculations, we use the spherical form of projection, not - the ellipsoidal form. Since the projection is used only for map display, - and not for displaying numeric coordinates, we don't need the extra precision - of an ellipsoidal projection. The spherical projection causes approximately - 0.33 percent scale distortion in the Y direction, which is not visually - noticeable. - - How do I create a raster in EPSG:3857 and convert coordinates with PROJ.4? - - You can use standard GIS tools like gdalwarp, cs2cs or gdaltransform. - All of the tools supports -t_srs 'epsg:3857'. - - For other GIS programs check the exact definition of the projection: - More info at http://spatialreference.org/ref/user/google-projection/ - The same projection is designated as EPSG:3857. WKT definition is in the - official EPSG database. 
- - Proj4 Text: - +proj=merc +a=6378137 +b=6378137 +lat_ts=0.0 +lon_0=0.0 +x_0=0.0 +y_0=0 - +k=1.0 +units=m +nadgrids=@null +no_defs - - Human readable WKT format of EPSG:3857: - PROJCS["Google Maps Global Mercator", - GEOGCS["WGS 84", - DATUM["WGS_1984", - SPHEROID["WGS 84",6378137,298.257223563, - AUTHORITY["EPSG","7030"]], - AUTHORITY["EPSG","6326"]], - PRIMEM["Greenwich",0], - UNIT["degree",0.0174532925199433], - AUTHORITY["EPSG","4326"]], - PROJECTION["Mercator_1SP"], - PARAMETER["central_meridian",0], - PARAMETER["scale_factor",1], - PARAMETER["false_easting",0], - PARAMETER["false_northing",0], - UNIT["metre",1, - AUTHORITY["EPSG","9001"]]] - """ - - def __init__(self, tileSize=256): - "Initialize the TMS Global Mercator pyramid" - self.tileSize = tileSize - self.initialResolution = 2 * math.pi * 6378137 / self.tileSize - # 156543.03392804062 for tileSize 256 pixels - self.originShift = 2 * math.pi * 6378137 / 2.0 - # 20037508.342789244 - - def LatLonToMeters(self, lat, lon): - "Converts given lat/lon in WGS84 Datum to XY in Spherical Mercator EPSG:3857" - - mx = lon * self.originShift / 180.0 - my = math.log(math.tan((90 + lat) * math.pi / 360.0)) / \ - (math.pi / 180.0) - - my = my * self.originShift / 180.0 - return mx, my - - def MetersToLatLon(self, mx, my): - "Converts XY point from Spherical Mercator EPSG:3857 to lat/lon in WGS84 Datum" - - lon = (mx / self.originShift) * 180.0 - lat = (my / self.originShift) * 180.0 - - lat = 180 / math.pi * \ - (2 * math.atan(math.exp(lat * math.pi / 180.0)) - math.pi / 2.0) - return lat, lon - - def PixelsToMeters(self, px, py, zoom): - "Converts pixel coordinates in given zoom level of pyramid to EPSG:3857" - - res = self.Resolution(zoom) - mx = px * res - self.originShift - my = py * res - self.originShift - return mx, my - - def MetersToPixels(self, mx, my, zoom): - "Converts EPSG:3857 to pyramid pixel coordinates in given zoom level" - - res = self.Resolution(zoom) - px = (mx + self.originShift) / res - py = 
(my + self.originShift) / res - return px, py - - def PixelsToTile(self, px, py): - "Returns a tile covering region in given pixel coordinates" - - tx = int(math.ceil(px / float(self.tileSize)) - 1) - ty = int(math.ceil(py / float(self.tileSize)) - 1) - return tx, ty - - def PixelsToRaster(self, px, py, zoom): - "Move the origin of pixel coordinates to top-left corner" - - mapSize = self.tileSize << zoom - return px, mapSize - py - - def MetersToTile(self, mx, my, zoom): - "Returns tile for given mercator coordinates" - - px, py = self.MetersToPixels(mx, my, zoom) - return self.PixelsToTile(px, py) - - def TileBounds(self, tx, ty, zoom): - "Returns bounds of the given tile in EPSG:3857 coordinates" - - minx, miny = self.PixelsToMeters( - tx*self.tileSize, ty*self.tileSize, zoom) - maxx, maxy = self.PixelsToMeters( - (tx+1)*self.tileSize, (ty+1)*self.tileSize, zoom) - return (minx, miny, maxx, maxy) - - def TileLatLonBounds(self, tx, ty, zoom): - "Returns bounds of the given tile in latitude/longitude using WGS84 datum" - - bounds = self.TileBounds(tx, ty, zoom) - minLat, minLon = self.MetersToLatLon(bounds[0], bounds[1]) - maxLat, maxLon = self.MetersToLatLon(bounds[2], bounds[3]) - - return (minLat, minLon, maxLat, maxLon) - - def Resolution(self, zoom): - "Resolution (meters/pixel) for given zoom level (measured at Equator)" - - # return (2 * math.pi * 6378137) / (self.tileSize * 2**zoom) - return self.initialResolution / (2**zoom) - - def ZoomForPixelSize(self, pixelSize): - "Maximal scaledown zoom of the pyramid closest to the pixelSize." 
- - for i in range(MAXZOOMLEVEL): - if pixelSize > self.Resolution(i): - if i != -1: - return i-1 - else: - return 0 # We don't want to scale up - - def GoogleTile(self, tx, ty, zoom): - "Converts TMS tile coordinates to Google Tile coordinates" - - # coordinate origin is moved from bottom-left to top-left corner of the extent - return tx, (2**zoom - 1) - ty - - def QuadTree(self, tx, ty, zoom): - "Converts TMS tile coordinates to Microsoft QuadTree" - - quadKey = "" - ty = (2**zoom - 1) - ty - for i in range(zoom, 0, -1): - digit = 0 - mask = 1 << (i-1) - if (tx & mask) != 0: - digit += 1 - if (ty & mask) != 0: - digit += 2 - quadKey += str(digit) - - return quadKey - - -class GlobalGeodetic(object): - r""" - TMS Global Geodetic Profile - --------------------------- - - Functions necessary for generation of global tiles in Plate Carre projection, - EPSG:4326, "unprojected profile". - - Such tiles are compatible with Google Earth (as any other EPSG:4326 rasters) - and you can overlay the tiles on top of OpenLayers base map. - - Pixel and tile coordinates are in TMS notation (origin [0,0] in bottom-left). - - What coordinate conversions do we need for TMS Global Geodetic tiles? - - Global Geodetic tiles are using geodetic coordinates (latitude,longitude) - directly as planar coordinates XY (it is also called Unprojected or Plate - Carre). We need only scaling to pixel pyramid and cutting to tiles. - Pyramid has on top level two tiles, so it is not square but rectangle. - Area [-180,-90,180,90] is scaled to 512x256 pixels. - TMS has coordinate origin (for pixels and tiles) in bottom-left corner. - Rasters are in EPSG:4326 and therefore are compatible with Google Earth. - - LatLon <-> Pixels <-> Tiles - - WGS84 coordinates Pixels in pyramid Tiles in pyramid - lat/lon XY pixels Z zoom XYZ from TMS - EPSG:4326 - .----. 
---- - / \ <-> /--------/ <-> TMS - \ / /--------------/ - ----- /--------------------/ - WMS, KML Web Clients, Google Earth TileMapService - """ - - def __init__(self, tmscompatible, tileSize=256): - self.tileSize = tileSize - if tmscompatible is not None: - # Defaults the resolution factor to 0.703125 (2 tiles @ level 0) - # Adhers to OSGeo TMS spec - # http://wiki.osgeo.org/wiki/Tile_Map_Service_Specification#global-geodetic - self.resFact = 180.0 / self.tileSize - else: - # Defaults the resolution factor to 1.40625 (1 tile @ level 0) - # Adheres OpenLayers, MapProxy, etc default resolution for WMTS - self.resFact = 360.0 / self.tileSize - - def LonLatToPixels(self, lon, lat, zoom): - "Converts lon/lat to pixel coordinates in given zoom of the EPSG:4326 pyramid" - - res = self.resFact / 2**zoom - px = (180 + lon) / res - py = (90 + lat) / res - return px, py - - def PixelsToTile(self, px, py): - "Returns coordinates of the tile covering region in pixel coordinates" - - tx = int(math.ceil(px / float(self.tileSize)) - 1) - ty = int(math.ceil(py / float(self.tileSize)) - 1) - return tx, ty - - def LonLatToTile(self, lon, lat, zoom): - "Returns the tile for zoom which covers given lon/lat coordinates" - - px, py = self.LonLatToPixels(lon, lat, zoom) - return self.PixelsToTile(px, py) - - def Resolution(self, zoom): - "Resolution (arc/pixel) for given zoom level (measured at Equator)" - - return self.resFact / 2**zoom - - def ZoomForPixelSize(self, pixelSize): - "Maximal scaledown zoom of the pyramid closest to the pixelSize." 
- - for i in range(MAXZOOMLEVEL): - if pixelSize > self.Resolution(i): - if i != 0: - return i-1 - else: - return 0 # We don't want to scale up - - def TileBounds(self, tx, ty, zoom): - "Returns bounds of the given tile" - res = self.resFact / 2**zoom - return ( - tx*self.tileSize*res - 180, - ty*self.tileSize*res - 90, - (tx+1)*self.tileSize*res - 180, - (ty+1)*self.tileSize*res - 90 - ) - - def TileLatLonBounds(self, tx, ty, zoom): - "Returns bounds of the given tile in the SWNE form" - b = self.TileBounds(tx, ty, zoom) - return (b[1], b[0], b[3], b[2]) - - -class Zoomify(object): - """ - Tiles compatible with the Zoomify viewer - ---------------------------------------- - """ - - def __init__(self, width, height, tilesize=256, tileformat='jpg'): - """Initialization of the Zoomify tile tree""" - - self.tilesize = tilesize - self.tileformat = tileformat - imagesize = (width, height) - tiles = (math.ceil(width / tilesize), math.ceil(height / tilesize)) - - # Size (in tiles) for each tier of pyramid. - self.tierSizeInTiles = [] - self.tierSizeInTiles.append(tiles) - - # Image size in pixels for each pyramid tierself - self.tierImageSize = [] - self.tierImageSize.append(imagesize) - - while (imagesize[0] > tilesize or imagesize[1] > tilesize): - imagesize = (math.floor( - imagesize[0] / 2), math.floor(imagesize[1] / 2)) - tiles = (math.ceil(imagesize[0] / tilesize), - math.ceil(imagesize[1] / tilesize)) - self.tierSizeInTiles.append(tiles) - self.tierImageSize.append(imagesize) - - self.tierSizeInTiles.reverse() - self.tierImageSize.reverse() - - # Depth of the Zoomify pyramid, number of tiers (zoom levels) - self.numberOfTiers = len(self.tierSizeInTiles) - - # Number of tiles up to the given tier of pyramid. 
- self.tileCountUpToTier = [] - self.tileCountUpToTier[0] = 0 - for i in range(1, self.numberOfTiers+1): - self.tileCountUpToTier.append( - self.tierSizeInTiles[i-1][0] * self.tierSizeInTiles[i-1][1] + - self.tileCountUpToTier[i-1] - ) - - def tilefilename(self, x, y, z): - """Returns filename for tile with given coordinates""" - - tileIndex = x + y * \ - self.tierSizeInTiles[z][0] + self.tileCountUpToTier[z] - return os.path.join("TileGroup%.0f" % math.floor(tileIndex / 256), - "%s-%s-%s.%s" % (z, x, y, self.tileformat)) - - -class Gdal2TilesError(Exception): - pass - - -class GDAL2Tiles(object): - - def process(self): - """The main processing function, runs all the main steps of processing""" - - # Opening and preprocessing of the input file - self.open_input() - - # Generation of main metadata files and HTML viewers - self.generate_metadata() - - # 1bto4b - if self.isDEMtile: - for z in range(self.tminz, self.tmaxz + int(abs(math.log(self.tilesize, 2) - 8))): # 1bto4b - self.generate_base_tiles(z) - print(' Zoom ' + str(z) + ' tiles done!') - else: - # Generation of the lowest tiles - self.generate_base_tiles(self.tmaxz) - - # Generation of the overview tiles (higher in the pyramid) - self.generate_overview_tiles() - - def error(self, msg, details=""): - """Print an error message and stop the processing""" - if details: - self.parser.error(msg + "\n\n" + details) - else: - self.parser.error(msg) - - def progressbar(self, complete=0.0): - """Print progressbar for float value 0..1""" - gdal.TermProgress_nocb(complete) - - def gettempfilename(self, suffix): - """Returns a temporary filename""" - if '_' in os.environ: - # tempfile.mktemp() crashes on some Wine versions (the one of Ubuntu 12.04 particularly) - if os.environ['_'].find('wine') >= 0: - tmpdir = '.' 
- if 'TMP' in os.environ: - tmpdir = os.environ['TMP'] - import time - import random - random.seed(time.time()) - random_part = 'file%d' % random.randint(0, 1000000000) - return os.path.join(tmpdir, random_part + suffix) - - import tempfile - return tempfile.mktemp(suffix) - - def stop(self): - """Stop the rendering immediately""" - self.stopped = True - - def __init__(self, arguments): - """Constructor function - initialization""" - self.out_drv = None - self.mem_drv = None - self.in_ds = None - self.out_ds = None - self.out_srs = None - self.nativezoom = None - self.tminmax = None - self.tsize = None - self.mercator = None - self.geodetic = None - self.alphaband = None - self.dataBandsCount = None - self.out_gt = None - self.tileswne = None - self.swne = None - self.ominx = None - self.omaxx = None - self.omaxy = None - self.ominy = None - - # MMGIS - self.isRasterBounded = False - - # 1bto4b - self.isDEMtile = False - - # MMGIS - self.fminx = None - self.fmaxx = None - self.fminy = None - self.fmaxy = None - self.fPixelSize = None - - self.stopped = False - self.input = None - self.output = None - - # Tile format - self.tilesize = 256 - self.tiledriver = 'PNG' - self.tileext = 'png' - - # Should we read bigger window of the input raster and scale it down? - # Note: Modified later by open_input() - # Not for 'near' resampling - # Not for Wavelet based drivers (JPEG2000, ECW, MrSID) - # Not for 'raster' profile - self.scaledquery = True - # How big should be query window be for scaling down - # Later on reset according the chosen resampling algorightm - self.querysize = 4 * self.tilesize - - # Should we use Read on the input file for generating overview tiles? 
- # Note: Modified later by open_input() - # Otherwise the overview tiles are generated from existing underlying tiles - self.overviewquery = False - - # RUN THE ARGUMENT PARSER: - - self.optparse_init() - self.options, self.args = self.parser.parse_args(args=arguments) - if not self.args: - self.error("No input file specified") - - # POSTPROCESSING OF PARSED ARGUMENTS: - - # Workaround for old versions of GDAL - try: - if ((self.options.verbose and self.options.resampling == 'near') or - gdal.TermProgress_nocb): - pass - except Exception: - self.error( - "This version of GDAL is not supported. Please upgrade to 1.6+.") - - # Is output directory the last argument? - - # Test output directory, if it doesn't exist - if (os.path.isdir(self.args[-1]) or - (len(self.args) > 1 and not os.path.exists(self.args[-1]))): - self.output = self.args[-1] - self.args = self.args[:-1] - - # More files on the input not directly supported yet - - if (len(self.args) > 1): - self.error("Processing of several input files is not supported.", - "Please first use a tool like gdal_vrtmerge.py or gdal_merge.py on the " - "files: gdal_vrtmerge.py -o merged.vrt %s" % " ".join(self.args)) - - self.input = self.args[0] - - # MMGIS - if self.options.extentworld: - extentworld = self.options.extentworld.split(",") - self.isRasterBounded = True - self.fminx = float(extentworld[0]) - self.fmaxx = float(extentworld[2]) - self.fminy = float(extentworld[3]) - self.fmaxy = float(extentworld[1]) - self.fPixelSize = float(extentworld[4]) - - # 1bto4b - if self.options.isDEMtile: - self.isDEMtile = True - self.tilesize = 32 - self.querysize = 4 * self.tilesize - - # Default values for not given options - - if not self.output: - # Directory with input filename without extension in actual directory - self.output = os.path.splitext(os.path.basename(self.input))[0] - - if not self.options.title: - self.options.title = os.path.basename(self.input) - - if self.options.url and not self.options.url.endswith('/'): 
- self.options.url += '/' - if self.options.url: - self.options.url += os.path.basename(self.output) + '/' - - # Supported options - - self.resampling = None - - if self.options.resampling == 'average': - try: - if gdal.RegenerateOverview: - pass - except Exception: - self.error("'average' resampling algorithm is not available.", - "Please use -r 'near' argument or upgrade to newer version of GDAL.") - - elif self.options.resampling == 'antialias': - try: - if numpy: # pylint:disable=W0125 - pass - except Exception: - self.error("'antialias' resampling algorithm is not available.", - "Install PIL (Python Imaging Library) and numpy.") - - elif self.options.resampling == 'near': - self.resampling = gdal.GRA_NearestNeighbour - self.querysize = self.tilesize - - elif self.options.resampling == 'bilinear': - self.resampling = gdal.GRA_Bilinear - self.querysize = self.tilesize * 2 - - elif self.options.resampling == 'cubic': - self.resampling = gdal.GRA_Cubic - - elif self.options.resampling == 'cubicspline': - self.resampling = gdal.GRA_CubicSpline - - elif self.options.resampling == 'lanczos': - self.resampling = gdal.GRA_Lanczos - - # User specified zoom levels - self.tminz = None - self.tmaxz = None - if self.options.zoom: - minmax = self.options.zoom.split('-', 1) - minmax.extend(['']) - zoom_min, zoom_max = minmax[:2] - self.tminz = int(zoom_min) - if zoom_max: - self.tmaxz = int(zoom_max) - else: - self.tmaxz = int(zoom_min) - - # KML generation - self.kml = self.options.kml - - # Check if the input filename is full ascii or not - try: - os.path.basename(self.input).encode('ascii') - except UnicodeEncodeError: - full_ascii = False - else: - full_ascii = True - - # LC_CTYPE check - if not full_ascii and 'UTF-8' not in os.environ.get("LC_CTYPE", ""): - if not self.options.quiet: - print("\nWARNING: " - "You are running gdal2tiles.py with a LC_CTYPE environment variable that is " - "not UTF-8 compatible, and your input file contains non-ascii characters. 
" - "The generated sample googlemaps, openlayers or " - "leaflet files might contain some invalid characters as a result\n") - - # Output the results - if self.options.verbose: - print("Options:", self.options) - print("Input:", self.input) - print("Output:", self.output) - print("Cache: %s MB" % (gdal.GetCacheMax() / 1024 / 1024)) - print('') - - def optparse_init(self): - """Prepare the option parser for input (argv)""" - - from optparse import OptionParser, OptionGroup - usage = "Usage: %prog [options] input_file(s) [output]" - p = OptionParser(usage, version="%prog " + __version__) - p.add_option("-p", "--profile", dest='profile', - type='choice', choices=profile_list, - help=("Tile cutting profile (%s) - default 'mercator' " - "(Google Maps compatible)" % ",".join(profile_list))) - p.add_option("-r", "--resampling", dest="resampling", - type='choice', choices=resampling_list, - help="Resampling method (%s) - default 'average'" % ",".join(resampling_list)) - p.add_option('-s', '--s_srs', dest="s_srs", metavar="SRS", - help="The spatial reference system used for the source input data") - p.add_option('-z', '--zoom', dest="zoom", - help="Zoom levels to render (format:'2-5' or '10').") - p.add_option('-e', '--resume', dest="resume", action="store_true", - help="Resume mode. 
Generate only missing files.") - p.add_option('-a', '--srcnodata', dest="srcnodata", metavar="NODATA", - help="NODATA transparency value to assign to the input data") - p.add_option('-d', '--tmscompatible', dest="tmscompatible", action="store_true", - help=("When using the geodetic profile, specifies the base resolution " - "as 0.703125 or 2 tiles at zoom level 0.")) - p.add_option("-v", "--verbose", - action="store_true", dest="verbose", - help="Print status messages to stdout") - p.add_option("-q", "--quiet", - action="store_true", dest="quiet", - help="Disable messages and status to stdout") - # MMGIS - p.add_option("-x", "--extentworld", dest="extentworld", - help="The full world meter extent (comma-separated as minx,maxx,miny,maxy,pixelsize) of an inner raster profile.") - # 1bto4b - p.add_option("-m", "--dem", action="store_true", dest="isDEMtile", - help="Indicate if the input is a Digital Elevation Model") - # KML options - g = OptionGroup(p, "KML (Google Earth) options", - "Options for generated Google Earth SuperOverlay metadata") - g.add_option("-k", "--force-kml", dest='kml', action="store_true", - help=("Generate KML for Google Earth - default for 'geodetic' profile and " - "'raster' in EPSG:4326. 
For a dataset with different projection use " - "with caution!")) - g.add_option("-n", "--no-kml", dest='kml', action="store_false", - help="Avoid automatic generation of KML files for EPSG:4326") - g.add_option("-u", "--url", dest='url', - help="URL address where the generated tiles are going to be published") - p.add_option_group(g) - - # HTML options - g = OptionGroup(p, "Web viewer options", - "Options for generated HTML viewers a la Google Maps") - g.add_option("-w", "--webviewer", dest='webviewer', type='choice', choices=webviewer_list, - help="Web viewer to generate (%s) - default 'all'" % ",".join(webviewer_list)) - g.add_option("-t", "--title", dest='title', - help="Title of the map") - g.add_option("-c", "--copyright", dest='copyright', - help="Copyright for the map") - g.add_option("-g", "--googlekey", dest='googlekey', - help="Google Maps API key from http://code.google.com/apis/maps/signup.html") - g.add_option("-b", "--bingkey", dest='bingkey', - help="Bing Maps API key from https://www.bingmapsportal.com/") - p.add_option_group(g) - - p.set_defaults(verbose=False, profile="mercator", kml=False, url='', - webviewer='all', copyright='', resampling='average', resume=False, - googlekey='INSERT_YOUR_KEY_HERE', bingkey='INSERT_YOUR_KEY_HERE') - - self.parser = p - - # ------------------------------------------------------------------------- - def open_input(self): - """Initialization of the input raster, reprojection if necessary""" - gdal.AllRegister() - - self.out_drv = gdal.GetDriverByName(self.tiledriver) - self.mem_drv = gdal.GetDriverByName('MEM') - - if not self.out_drv: - raise Exception("The '%s' driver was not found, is it available in this GDAL build?", - self.tiledriver) - if not self.mem_drv: - raise Exception( - "The 'MEM' driver was not found, is it available in this GDAL build?") - - # Open the input file - - if self.input: - self.in_ds = gdal.Open(self.input, gdal.GA_ReadOnly) - else: - raise Exception("No input file was specified") - - if 
self.options.verbose: - print("Input file:", - "( %sP x %sL - %s bands)" % (self.in_ds.RasterXSize, self.in_ds.RasterYSize, - self.in_ds.RasterCount)) - - if not self.in_ds: - # Note: GDAL prints the ERROR message too - self.error( - "It is not possible to open the input file '%s'." % self.input) - - # Read metadata from the input file - if self.in_ds.RasterCount == 0: - self.error("Input file '%s' has no raster band" % self.input) - - if self.in_ds.GetRasterBand(1).GetRasterColorTable(): - self.error("Please convert this file to RGB/RGBA and run gdal2tiles on the result.", - "From paletted file you can create RGBA file (temp.vrt) by:\n" - "gdal_translate -of vrt -expand rgba %s temp.vrt\n" - "then run:\n" - "gdal2tiles temp.vrt" % self.input) - - # Get NODATA value - in_nodata = [] - for i in range(1, self.in_ds.RasterCount+1): - if self.in_ds.GetRasterBand(i).GetNoDataValue() is not None: - in_nodata.append(self.in_ds.GetRasterBand(i).GetNoDataValue()) - if self.options.srcnodata: - nds = list(map(float, self.options.srcnodata.split(','))) - if len(nds) < self.in_ds.RasterCount: - in_nodata = ( - nds * self.in_ds.RasterCount)[:self.in_ds.RasterCount] - else: - in_nodata = nds - - if self.options.verbose: - print("NODATA: %s" % in_nodata) - - if self.options.verbose: - print("Preprocessed file:", - "( %sP x %sL - %s bands)" % (self.in_ds.RasterXSize, self.in_ds.RasterYSize, - self.in_ds.RasterCount)) - - in_srs = None - - if self.options.s_srs: - in_srs = osr.SpatialReference() - in_srs.SetFromUserInput(self.options.s_srs) - in_srs_wkt = in_srs.ExportToWkt() - else: - in_srs_wkt = self.in_ds.GetProjection() - if not in_srs_wkt and self.in_ds.GetGCPCount() != 0: - in_srs_wkt = self.in_ds.GetGCPProjection() - if in_srs_wkt: - in_srs = osr.SpatialReference() - in_srs.ImportFromWkt(in_srs_wkt) - - self.out_srs = osr.SpatialReference() - - if self.options.profile == 'mercator': - self.out_srs.ImportFromEPSG(3857) - elif self.options.profile == 'geodetic': - 
self.out_srs.ImportFromEPSG(4326) - else: - self.out_srs = in_srs - - # Are the reference systems the same? Reproject if necessary. - - self.out_ds = None - - if self.options.profile in ('mercator', 'geodetic'): - - if ((self.in_ds.GetGeoTransform() == (0.0, 1.0, 0.0, 0.0, 0.0, 1.0)) and - (self.in_ds.GetGCPCount() == 0)): - self.error("There is no georeference - neither affine transformation (worldfile) " - "nor GCPs. You can generate only 'raster' profile tiles.", - "Either gdal2tiles with parameter -p 'raster' or use another GIS " - "software for georeference e.g. gdal_transform -gcp / -a_ullr / -a_srs") - - if in_srs: - if ((in_srs.ExportToProj4() != self.out_srs.ExportToProj4()) or - (self.in_ds.GetGCPCount() != 0)): - # Generation of VRT dataset in tile projection, - # default 'nearest neighbour' warping - self.out_ds = gdal.AutoCreateWarpedVRT( - self.in_ds, in_srs_wkt, self.out_srs.ExportToWkt()) - - if self.options.verbose: - print("Warping of the raster by AutoCreateWarpedVRT " - "(result saved into 'tiles.vrt')") - self.out_ds.GetDriver().CreateCopy("tiles.vrt", self.out_ds) - - # Correction of AutoCreateWarpedVRT for NODATA values - if in_nodata != []: - tempfilename = self.gettempfilename('-gdal2tiles.vrt') - self.out_ds.GetDriver().CreateCopy(tempfilename, self.out_ds) - # open as a text file - s = open(tempfilename).read() - # Add the warping options - s = s.replace( - "", - """ - - - - """) - # replace BandMapping tag for NODATA bands.... 
- for i in range(len(in_nodata)): - s = s.replace( - '' % ( - (i+1), (i+1)), - """ - - %i - 0 - %i - 0 - - """ % ((i+1), (i+1), in_nodata[i], in_nodata[i])) - # save the corrected VRT - open(tempfilename, "w").write(s) - # open by GDAL as self.out_ds - self.out_ds = gdal.Open(tempfilename) - # delete the temporary file - os.unlink(tempfilename) - - # set NODATA_VALUE metadata - self.out_ds.SetMetadataItem( - 'NODATA_VALUES', ' '.join([str(i) for i in in_nodata])) - - if self.options.verbose: - print("Modified warping result saved into 'tiles1.vrt'") - open("tiles1.vrt", "w").write(s) - - # Correction of AutoCreateWarpedVRT for Mono (1 band) and RGB (3 bands) files - # without NODATA: - # equivalent of gdalwarp -dstalpha - if in_nodata == [] and self.out_ds.RasterCount in [1, 3]: - tempfilename = self.gettempfilename('-gdal2tiles.vrt') - self.out_ds.GetDriver().CreateCopy(tempfilename, self.out_ds) - # open as a text file - s = open(tempfilename).read() - # Add the warping options - s = s.replace( - "", - """ - - Alpha - - - """ % (self.out_ds.RasterCount + 1)) - s = s.replace( - "", - """ - %i - - """ % (self.out_ds.RasterCount + 1)) - s = s.replace( - "", - """ - - - """) - # save the corrected VRT - open(tempfilename, "w").write(s) - # open by GDAL as self.out_ds - self.out_ds = gdal.Open(tempfilename) - # delete the temporary file - os.unlink(tempfilename) - - if self.options.verbose: - print( - "Modified -dstalpha warping result saved into 'tiles1.vrt'") - open("tiles1.vrt", "w").write(s) - s = ''' - ''' - - else: - self.error("Input file has unknown SRS.", - "Use --s_srs ESPG:xyz (or similar) to provide source reference system.") - - if self.out_ds and self.options.verbose: - print("Projected file:", "tiles.vrt", "( %sP x %sL - %s bands)" % ( - self.out_ds.RasterXSize, self.out_ds.RasterYSize, self.out_ds.RasterCount)) - - if not self.out_ds: - self.out_ds = self.in_ds - - # - # Here we should have a raster (out_ds) in the correct Spatial Reference system - # 
- - # Get alpha band (either directly or from NODATA value) - self.alphaband = self.out_ds.GetRasterBand(1).GetMaskBand() - if ((self.alphaband.GetMaskFlags() & gdal.GMF_ALPHA) or - self.out_ds.RasterCount == 4 or - self.out_ds.RasterCount == 2): - self.dataBandsCount = self.out_ds.RasterCount - 1 - else: - self.dataBandsCount = self.out_ds.RasterCount - - # KML test - isepsg4326 = False - srs4326 = osr.SpatialReference() - srs4326.ImportFromEPSG(4326) - if self.out_srs and srs4326.ExportToProj4() == self.out_srs.ExportToProj4(): - self.kml = True - isepsg4326 = True - if self.options.verbose: - print("KML autotest OK!") - - # Read the georeference - self.out_gt = self.out_ds.GetGeoTransform() - - # Test the size of the pixel - - # Report error in case rotation/skew is in geotransform (possible only in 'raster' profile) - if (self.out_gt[2], self.out_gt[4]) != (0, 0): - self.error("Georeference of the raster contains rotation or skew. " - "Such raster is not supported. Please use gdalwarp first.") - - # Here we expect: pixel is square, no rotation on the raster - - # Output Bounds - coordinates in the output SRS - self.ominx = self.out_gt[0] - self.omaxx = self.out_gt[0] + self.out_ds.RasterXSize * self.out_gt[1] - self.omaxy = self.out_gt[3] - self.ominy = self.out_gt[3] - self.out_ds.RasterYSize * self.out_gt[1] - - # Note: maybe round(x, 14) to avoid the gdal_translate behaviour, when 0 becomes -1e-15 - - # MMGIS - def linearScale(domain, rang, value): - return ( - ((rang[1] - rang[0]) * (value - domain[0])) / - (domain[1] - domain[0]) + - rang[0] - ) - # MMGIS - self.out_ds.fRasterXSize = self.out_ds.RasterXSize - self.out_ds.fRasterYSize = self.out_ds.RasterYSize - self.out_ds.fRasterXOrigin = 0 - self.out_ds.fRasterYOrigin = 0 - self.out_ds.PixelSize = self.out_gt[1] - self.out_ds.fPixelSize = self.fPixelSize - # print("ominx", self.ominx, "omaxx", self.omaxx, "ominy", self.ominy, "omaxy", self.omaxy) - # print("fminx", self.fminx, "fmaxx", self.fmaxx, 
"fminy", self.fminy, "fmaxy", self.fmaxy) - if self.isRasterBounded: - self.out_ds.fRasterXSize = int(math.floor(self.out_ds.RasterXSize * (self.fmaxx - self.fminx) / ( - self.omaxx - self.ominx) * (self.out_ds.PixelSize / self.out_ds.fPixelSize))) - self.out_ds.fRasterYSize = int(math.ceil(self.out_ds.RasterYSize * (self.fmaxy - self.fminy) / ( - self.omaxy - self.ominy) * (self.out_ds.PixelSize / self.out_ds.fPixelSize))) - self.out_ds.fRasterXSizeRaw = int(math.floor( - self.out_ds.RasterXSize * (self.fmaxx - self.fminx) / (self.omaxx - self.ominx))) - self.out_ds.fRasterYSizeRaw = int(math.ceil( - self.out_ds.RasterYSize * (self.fmaxy - self.fminy) / (self.omaxy - self.ominy))) - # print("Full Raster Size: ", self.out_ds.fRasterXSize, self.out_ds.fRasterYSize ) - self.out_ds.fRasterXOrigin = int(math.floor(linearScale( - [self.fminx, self.fmaxx], [0, self.out_ds.fRasterXSize], self.out_gt[0]))) - self.out_ds.fRasterYOrigin = int(math.ceil(linearScale( - [self.fminy, self.fmaxy], [self.out_ds.fRasterYSize, 0], self.out_gt[3]))) - self.out_ds.fRasterXOriginRaw = int(math.floor(linearScale([self.fminx, self.fmaxx], [ - 0, self.out_ds.fRasterXSize], self.out_gt[0]) * (self.out_ds.fPixelSize / self.out_ds.PixelSize))) - self.out_ds.fRasterYOriginRaw = int(math.ceil(linearScale([self.fminy, self.fmaxy], [ - self.out_ds.fRasterYSize, 0], self.out_gt[3]) * (self.out_ds.fPixelSize / self.out_ds.PixelSize))) - self.out_ds.fRasterXWidth = int(math.floor(linearScale( - [self.fminx, self.fmaxx], [0, self.out_ds.fRasterXSize], self.omaxx))) - self.out_ds.fRasterXOrigin - self.out_ds.fRasterYHeight = int(math.ceil(linearScale( - [self.fminy, self.fmaxy], [0, self.out_ds.fRasterYSize], self.omaxy))) - self.out_ds.fRasterYOrigin - - if self.options.verbose: - print("Bounds (output srs):", round(self.ominx, 13), - self.ominy, self.omaxx, self.omaxy) - - # print("Input Raster Size: ", self.out_ds.RasterXSize, self.out_ds.RasterYSize) - # print("fmaxx-fminx", self.fmaxx - 
self.fminx, "omaxx-ominx", self.omaxx - self.ominx, "fmaxy-fminy", self.fmaxy - self.fminy, "omaxy-ominy", self.omaxy - self.ominy) - # print("Full Raster Size: ", self.out_ds.fRasterXSize, self.out_ds.fRasterYSize) - # print("Full Raster Size Raw: ", self.out_ds.fRasterXSizeRaw, self.out_ds.fRasterYSizeRaw) - # print("Raster Origin: ", self.out_ds.fRasterXOrigin, self.out_ds.fRasterYOrigin) - # print("Raster Origin Raw: ", self.out_ds.fRasterXOriginRaw, self.out_ds.fRasterYOriginRaw) - # print("Raster Width Height: ", self.out_ds.fRasterXWidth, self.out_ds.fRasterYHeight) - - # Calculating ranges for tiles in different zoom levels - if self.options.profile == 'mercator': - - self.mercator = GlobalMercator() - - # Function which generates SWNE in LatLong for given tile - self.tileswne = self.mercator.TileLatLonBounds - - # Generate table with min max tile coordinates for all zoomlevels - self.tminmax = list(range(0, 32)) - for tz in range(0, 32): - tminx, tminy = self.mercator.MetersToTile( - self.ominx, self.ominy, tz) - tmaxx, tmaxy = self.mercator.MetersToTile( - self.omaxx, self.omaxy, tz) - # crop tiles extending world limits (+-180,+-90) - tminx, tminy = max(0, tminx), max(0, tminy) - tmaxx, tmaxy = min(2**tz-1, tmaxx), min(2**tz-1, tmaxy) - self.tminmax[tz] = (tminx, tminy, tmaxx, tmaxy) - - # TODO: Maps crossing 180E (Alaska?) 
- - # Get the minimal zoom level (map covers area equivalent to one tile) - if self.tminz is None: - self.tminz = self.mercator.ZoomForPixelSize( - self.out_gt[1] * max(self.out_ds.RasterXSize, - self.out_ds.RasterYSize) / float(self.tilesize)) - - # Get the maximal zoom level - # (closest possible zoom level up on the resolution of raster) - if self.tmaxz is None: - self.tmaxz = self.mercator.ZoomForPixelSize(self.out_gt[1]) - - if self.options.verbose: - print("Bounds (latlong):", - self.mercator.MetersToLatLon(self.ominx, self.ominy), - self.mercator.MetersToLatLon(self.omaxx, self.omaxy)) - print('MinZoomLevel:', self.tminz) - print("MaxZoomLevel:", - self.tmaxz, - "(", - self.mercator.Resolution(self.tmaxz), - ")") - - if self.options.profile == 'geodetic': - - self.geodetic = GlobalGeodetic(self.options.tmscompatible) - - # Function which generates SWNE in LatLong for given tile - self.tileswne = self.geodetic.TileLatLonBounds - - # Generate table with min max tile coordinates for all zoomlevels - self.tminmax = list(range(0, 32)) - for tz in range(0, 32): - tminx, tminy = self.geodetic.LonLatToTile( - self.ominx, self.ominy, tz) - tmaxx, tmaxy = self.geodetic.LonLatToTile( - self.omaxx, self.omaxy, tz) - # crop tiles extending world limits (+-180,+-90) - tminx, tminy = max(0, tminx), max(0, tminy) - tmaxx, tmaxy = min(2**(tz+1)-1, tmaxx), min(2**tz-1, tmaxy) - self.tminmax[tz] = (tminx, tminy, tmaxx, tmaxy) - - # TODO: Maps crossing 180E (Alaska?) 
- - # Get the maximal zoom level - # (closest possible zoom level up on the resolution of raster) - if self.tminz is None: - self.tminz = self.geodetic.ZoomForPixelSize( - self.out_gt[1] * max(self.out_ds.RasterXSize, - self.out_ds.RasterYSize) / float(self.tilesize)) - - # Get the maximal zoom level - # (closest possible zoom level up on the resolution of raster) - if self.tmaxz is None: - self.tmaxz = self.geodetic.ZoomForPixelSize(self.out_gt[1]) - - if self.options.verbose: - print("Bounds (latlong):", self.ominx, - self.ominy, self.omaxx, self.omaxy) - - # MMGIS - if self.options.profile == 'raster' and self.isRasterBounded: - - def log2(x): - return math.log10(x) / math.log10(2) - - # MMGIS added 'f'* - self.nativezoom = int( - max(math.ceil(log2(self.out_ds.fRasterXSizeRaw/float(self.tilesize))), - math.ceil(log2(self.out_ds.fRasterYSizeRaw/float(self.tilesize))))) - - self.basenativezoom = int( - max(math.ceil(log2(self.out_ds.fRasterXSize/float(self.tilesize))), - math.ceil(log2(self.out_ds.fRasterYSize/float(self.tilesize))))) - - # MMGIS - self.out_ds.fWorldXSize = int( - float(self.out_ds.fRasterXSize) * (2**(self.nativezoom - self.basenativezoom))) - self.out_ds.fWorldYSize = int( - float(self.out_ds.fRasterYSize) * (2**(self.nativezoom - self.basenativezoom))) - self.out_ds.fRasterXOriginWorld = int(float( - self.out_ds.fWorldXSize) * (float(self.out_ds.fRasterXOrigin) / self.out_ds.fRasterXSize)) - self.out_ds.fRasterYOriginWorld = int(float( - self.out_ds.fWorldYSize) * (float(self.out_ds.fRasterYOrigin) / self.out_ds.fRasterYSize)) - self.out_ds.fRasterXSizeWorld = int(float( - self.out_ds.fWorldXSize) * (float(self.out_ds.fRasterXWidth) / self.out_ds.fRasterXSize)) - self.out_ds.fRasterYSizeWorld = int(float( - self.out_ds.RasterYSize) * (float(self.out_ds.fRasterXSizeWorld) / self.out_ds.RasterXSize)) - # print("World Size", self.out_ds.fWorldXSize, self.out_ds.fWorldYSize) - # print("Raster Origin World", self.out_ds.fRasterXOriginWorld, 
self.out_ds.fRasterYOriginWorld) - # print("Raster Size World", self.out_ds.fRasterXSizeWorld, self.out_ds.fRasterYSizeWorld) - - if self.options.verbose: - print("Native zoom of the raster:", self.nativezoom) - - # Get the minimal zoom level (whole raster in one tile) - if self.tminz is None: - self.tminz = 0 - - # Get the maximal zoom level (native resolution of the raster) - if self.tmaxz is None: - self.tmaxz = self.nativezoom - - # MMGIS added 'f'* - # Generate table with min max tile coordinates for all zoomlevels - self.tminmax = list(range(0, self.tmaxz+1)) - self.tsize = list(range(0, self.tmaxz+1)) - # print("Raster Size:", self.out_ds.RasterXSize,self.out_ds.RasterYSize) - # print("Pixel Size Ratio:", (self.out_ds.fPixelSize / self.out_ds.PixelSize)) - # print("nativezoom", self.nativezoom, "basenativezoom", self.basenativezoom, "tminz", self.tminz, "tmaxz", self.tmaxz) - for tz in range(0, self.tmaxz+1): - tsize = 2.0**(self.tmaxz-tz)*self.tilesize - toffsetx = int(math.floor( - 2.0**(tz) * self.out_ds.fRasterXOriginRaw / self.out_ds.fRasterXSizeRaw)) - toffsety = int(math.floor( - 2.0**(tz) * (self.out_ds.fRasterYOriginRaw) / self.out_ds.fRasterYSizeRaw)) - # print("tsize", tsize, "toffsetx", toffsetx, "toffsety", toffsety) - toffsetx = int(math.floor( - self.out_ds.fRasterXOriginWorld / tsize)) - toffsety = int(math.floor( - self.out_ds.fRasterYOriginWorld / tsize)) - # print("tsize", tsize, "toffsetx", toffsetx, "toffsety", toffsety) - tmaxx = int(math.floor( - self.out_ds.fRasterXSizeWorld / tsize)) + toffsetx + 1 - - tmaxy = int(math.floor( - self.out_ds.fRasterYSizeWorld / tsize)) + toffsety + 1 - self.tsize[tz] = math.ceil(tsize) - #tminx = toffsetx - tminx = int(tmaxx - ((tmaxx - toffsetx) / (0.75))) - 1 - tminy = int(tmaxy - ((tmaxy - toffsety) / (0.75))) - 1 - - self.tminmax[tz] = (tminx, tminy, tmaxx, tmaxy) - # print("tminx", tminx, "tminy", tminy, "tmaxx", tmaxx, "tmaxy", tmaxy, "tz", tz) - - elif self.options.profile == 'raster': - - def 
log2(x): - return math.log10(x) / math.log10(2) - self.nativezoom = int( - max(math.ceil(log2(self.out_ds.RasterXSize/float(self.tilesize))), - math.ceil(log2(self.out_ds.RasterYSize/float(self.tilesize))))) - - if self.options.verbose: - print("Native zoom of the raster:", self.nativezoom) - - # Get the minimal zoom level (whole raster in one tile) - if self.tminz is None: - self.tminz = 0 - - # Get the maximal zoom level (native resolution of the raster) - if self.tmaxz is None: - self.tmaxz = self.nativezoom - - # Generate table with min max tile coordinates for all zoomlevels - self.tminmax = list(range(0, self.tmaxz+1)) - self.tsize = list(range(0, self.tmaxz+1)) - for tz in range(0, self.tmaxz+1): - tsize = 2.0**(self.tmaxz-tz)*self.tilesize - tminx, tminy = 0, 0 - tmaxx = int(math.ceil(self.out_ds.RasterXSize / tsize)) - 1 - tmaxy = int(math.ceil(self.out_ds.RasterYSize / tsize)) - 1 - self.tsize[tz] = math.ceil(tsize) - self.tminmax[tz] = (tminx, tminy, tmaxx, tmaxy) - - # Function which generates SWNE in LatLong for given tile - if self.kml and in_srs_wkt: - ct = osr.CoordinateTransformation(in_srs, srs4326) - - def rastertileswne(x, y, z): - # X-pixel size in level - pixelsizex = (2**(self.tmaxz-z) * self.out_gt[1]) - west = self.out_gt[0] + x*self.tilesize*pixelsizex - east = west + self.tilesize*pixelsizex - south = self.ominy + y*self.tilesize*pixelsizex - north = south + self.tilesize*pixelsizex - if not isepsg4326: - # Transformation to EPSG:4326 (WGS84 datum) - west, south = ct.TransformPoint(west, south)[:2] - east, north = ct.TransformPoint(east, north)[:2] - return south, west, north, east - - self.tileswne = rastertileswne - else: - self.tileswne = lambda x, y, z: (0, 0, 0, 0) # noqa - - def generate_metadata(self): - """ - Generation of main metadata files and HTML viewers (metadata related to particular - tiles are generated during the tile processing). 
- """ - - if not os.path.exists(self.output): - os.makedirs(self.output) - - if self.options.profile == 'mercator': - - south, west = self.mercator.MetersToLatLon(self.ominx, self.ominy) - north, east = self.mercator.MetersToLatLon(self.omaxx, self.omaxy) - south, west = max(-85.05112878, south), max(-180.0, west) - north, east = min(85.05112878, north), min(180.0, east) - self.swne = (south, west, north, east) - - # Generate googlemaps.html - if self.options.webviewer in ('all', 'google') and self.options.profile == 'mercator': - if (not self.options.resume or not - os.path.exists(os.path.join(self.output, 'googlemaps.html'))): - f = open(os.path.join(self.output, 'googlemaps.html'), 'wb') - f.write(self.generate_googlemaps().encode('utf-8')) - f.close() - - # Generate openlayers.html - if self.options.webviewer in ('all', 'openlayers'): - if (not self.options.resume or not - os.path.exists(os.path.join(self.output, 'openlayers.html'))): - f = open(os.path.join(self.output, 'openlayers.html'), 'wb') - f.write(self.generate_openlayers().encode('utf-8')) - f.close() - - # Generate leaflet.html - if self.options.webviewer in ('all', 'leaflet'): - if (not self.options.resume or not - os.path.exists(os.path.join(self.output, 'leaflet.html'))): - f = open(os.path.join(self.output, 'leaflet.html'), 'wb') - f.write(self.generate_leaflet().encode('utf-8')) - f.close() - - elif self.options.profile == 'geodetic': - - west, south = self.ominx, self.ominy - east, north = self.omaxx, self.omaxy - south, west = max(-90.0, south), max(-180.0, west) - north, east = min(90.0, north), min(180.0, east) - self.swne = (south, west, north, east) - - # Generate openlayers.html - if self.options.webviewer in ('all', 'openlayers'): - if (not self.options.resume or not - os.path.exists(os.path.join(self.output, 'openlayers.html'))): - f = open(os.path.join(self.output, 'openlayers.html'), 'wb') - f.write(self.generate_openlayers().encode('utf-8')) - f.close() - - elif self.options.profile 
== 'raster': - - west, south = self.ominx, self.ominy - east, north = self.omaxx, self.omaxy - - # MMGIS - if self.isRasterBounded: - west = self.fminx - east = self.fmaxx - south = self.fminy - north = self.fmaxy - - self.swne = (south, west, north, east) - - # Generate openlayers.html - if self.options.webviewer in ('all', 'openlayers'): - if (not self.options.resume or not - os.path.exists(os.path.join(self.output, 'openlayers.html'))): - f = open(os.path.join(self.output, 'openlayers.html'), 'wb') - f.write(self.generate_openlayers().encode('utf-8')) - f.close() - - # Generate tilemapresource.xml. - if not self.options.resume or not os.path.exists(os.path.join(self.output, 'tilemapresource.xml')): - f = open(os.path.join(self.output, 'tilemapresource.xml'), 'wb') - f.write(self.generate_tilemapresource().encode('utf-8')) - f.close() - - if self.kml: - # TODO: Maybe problem for not automatically generated tminz - # The root KML should contain links to all tiles in the tminz level - children = [] - xmin, ymin, xmax, ymax = self.tminmax[self.tminz] - for x in range(xmin, xmax+1): - for y in range(ymin, ymax+1): - children.append([x, y, self.tminz]) - # Generate Root KML - if self.kml: - if (not self.options.resume or not - os.path.exists(os.path.join(self.output, 'doc.kml'))): - f = open(os.path.join(self.output, 'doc.kml'), 'wb') - f.write(self.generate_kml( - None, None, None, children).encode('utf-8')) - f.close() - - def generate_base_tiles(self, tz): - """ - Generation of the base tiles (the lowest in the pyramid) directly from the input raster - """ - - if self.isDEMtile: - print("Generating Tiles at Zoom " + str(tz) + ": ") - - if not self.options.quiet: - print("Generating Base Tiles:") - - if self.options.verbose: - print('') - print("Tiles generated from the max zoom level:") - print("----------------------------------------") - print('') - - ds = self.out_ds - - querysize = self.querysize - - # 1bto4b - if self.isDEMtile: - tilebands = 4 - querysize = 
self.tilesize - else: - tilebands = self.dataBandsCount + 1 - tz = self.tmaxz - - try: - self.tminmax[tz] - except IndexError: - print(" Won't make zoom level " + str(tz)) - return - - # Set the bounds - tminx, tminy, tmaxx, tmaxy = self.tminmax[tz] - - if self.options.verbose: - print("dataBandsCount: ", self.dataBandsCount) - print("tilebands: ", tilebands) - - tcount = (1+abs(tmaxx-tminx)) * (1+abs(tmaxy-tminy)) - ti = 0 - - for ty in range(tmaxy, tminy-1, -1): - for tx in range(tminx, tmaxx+1): - - if self.stopped: - break - ti += 1 - tilefilename = os.path.join( - self.output, str(tz), str(tx), "%s.%s" % (ty, self.tileext)) - if self.options.verbose: - print(ti, '/', tcount, tilefilename) - - if self.options.resume and os.path.exists(tilefilename): - if self.options.verbose: - print("Tile generation skipped because of --resume") - else: - self.progressbar(ti / float(tcount)) - continue - - # Create directories for the tile - if not os.path.exists(os.path.dirname(tilefilename)): - os.makedirs(os.path.dirname(tilefilename)) - - if self.options.profile == 'mercator': - # Tile bounds in EPSG:3857 - b = self.mercator.TileBounds(tx, ty, tz) - elif self.options.profile == 'geodetic': - b = self.geodetic.TileBounds(tx, ty, tz) - - # Don't scale up by nearest neighbour, better change the querysize - # to the native resolution (and return smaller query tile) for scaling - - if self.options.profile in ('mercator', 'geodetic'): - rb, wb = self.geo_query(ds, b[0], b[3], b[2], b[1]) - - # Pixel size in the raster covering query geo extent - nativesize = wb[0] + wb[2] - if self.options.verbose: - print("\tNative Extent (querysize", - nativesize, "): ", rb, wb) - - # Tile bounds in raster coordinates for ReadRaster query - rb, wb = self.geo_query( - ds, b[0], b[3], b[2], b[1], querysize=querysize) - - rx, ry, rxsize, rysize = rb - wx, wy, wxsize, wysize = wb - wxsize -= 1 # 1bto4b - wysize -= 1 # 1bto4b - - # MMGIS - elif self.isRasterBounded: # 'raster' profile: - - # 
tilesize in raster coordinates for actual zoom - tsize = int(self.tsize[tz]) - xsize = self.out_ds.fWorldXSize - ysize = self.out_ds.fWorldYSize - if tz >= self.tmaxz: - querysize = self.tilesize - - rx = (tx) * tsize - self.out_ds.fRasterXOriginWorld - #print("rx", rx) - rxsize = 0 - rxsize = tsize - - rysize = 0 - rysize = tsize - - ry = ysize - (ty * tsize) - rysize - \ - self.out_ds.fRasterYOriginWorld - - wx, wy = 0, 0 - wxsize = int(rxsize/float(tsize) * self.tilesize) - wysize = int(rysize/float(tsize) * self.tilesize) - if wysize != self.tilesize: - wy = self.tilesize - wysize - - if rx < 0: - rxsize = tsize + rx - wx = -rx - wxsize = int(rxsize/float(tsize) * self.tilesize) - rx = 0 - if ry < 0: - rysize = tsize + ry - wy = -ry - wysize = int(rysize/float(tsize) * self.tilesize) - ry = 0 - if rx + rxsize > self.out_ds.fRasterXSizeWorld: - rxsize = self.out_ds.fRasterXSizeWorld - rx - wxsize = int(rxsize/float(tsize) * self.tilesize) - if ry + rysize > self.out_ds.fRasterYSizeWorld: - rysize = self.out_ds.fRasterYSizeWorld - ry - wysize = int(rysize/float(tsize) * self.tilesize) - - # Convert rx, ry back to non-world coordinates - rx = int(float(self.out_ds.RasterXSize) * - (float(rx) / self.out_ds.fRasterXSizeWorld)) - ry = int(float(self.out_ds.RasterYSize) * - (float(ry) / self.out_ds.fRasterYSizeWorld)) - rxsize = int(float(self.out_ds.RasterXSize) * - (float(rxsize) / self.out_ds.fRasterXSizeWorld)) - rysize = int(float(self.out_ds.RasterYSize) * - (float(rysize) / self.out_ds.fRasterYSizeWorld)) - - wxsize -= 1 # 1bto4b - wysize -= 1 # 1bto4b - - #print("Extent: ", (tx, ty, tz, tsize), (rx, ry, rxsize, rysize), (wx, wy, wxsize, wysize), (self.out_ds.fRasterXOrigin, self.out_ds.fRasterYOrigin)) - else: # 'raster' profile: - # tilesize in raster coordinates for actual zoom - tsize = int(self.tsize[tz]) - xsize = self.out_ds.RasterXSize # size of the raster in pixels - ysize = self.out_ds.RasterYSize - if tz >= self.tmaxz: - querysize = self.tilesize - - 
rx = (tx) * tsize - rxsize = 0 - if tx == tmaxx: - rxsize = xsize % tsize - if rxsize == 0: - rxsize = tsize - - rysize = 0 - if ty == tmaxy: - rysize = ysize % tsize - if rysize == 0: - rysize = tsize - ry = ysize - (ty * tsize) - rysize - - wx, wy = 0, 0 - wxsize = int(rxsize/float(tsize) * self.tilesize) - wysize = int(rysize/float(tsize) * self.tilesize) - if wysize != self.tilesize: - wy = self.tilesize - wysize - - if self.options.verbose: - print("\tReadRaster Extent: ", - (rx, ry, rxsize, rysize), (wx, wy, wxsize, wysize)) - - # Query is in 'nearest neighbour' but can be bigger in then the tilesize - # We scale down the query to the tilesize by supplied algorithm. - - # Tile dataset in memory - - # 1bto4b - if self.isDEMtile: - dstile = self.mem_drv.Create( - '', self.tilesize, self.tilesize, tilebands, gdal.GDT_Byte) - else: - dstile = self.mem_drv.Create( - '', self.tilesize, self.tilesize, tilebands) - - data = alpha = None - # Read the source raster if anything is going inside the tile as per the computed - # geo_query - if rxsize != 0 and rysize != 0 and wxsize != 0 and wysize != 0: - # 1bto4b - if self.isDEMtile: - data = ds.GetRasterBand(1).ReadRaster( - rx, ry, rxsize, rysize, wxsize, wysize, buf_type=gdal.GDT_Float32) - else: - data = ds.ReadRaster(rx, ry, rxsize, rysize, wxsize, wysize, - band_list=list(range(1, self.dataBandsCount+1))) - alpha = self.alphaband.ReadRaster( - rx, ry, rxsize, rysize, wxsize, wysize) - - # The tile in memory is a transparent file by default. 
Write pixel values into it if - # any - if data: - # 1bto4b - both this full if and else - if self.isDEMtile: - if (wxsize * wysize) > 0: - data = struct.unpack('f' * wxsize * wysize, data) - else: - return - - if self.tilesize == querysize: - # Interpolate the values from four surrounding - - # This takes our 1d list of WxH data and pads it with a rect of none values - dataPad = list(data) - for i in reversed(range(1, wysize)): - dataPad.insert(wxsize * i, 0) - dataPad.insert(wxsize * i, 0) - for i in range(wxsize + 3): - dataPad.insert(0, 0) - for i in range(wxsize + 3): - dataPad.append(0) - - dataIn = [] - # Resample based on average of four - # averaging over: i, i + 1, i + wxsize, i + wxsize + 1 - for y in range(wysize+2 - 1): - for x in range(wxsize+2 - 1): - i = x+(y*(wxsize+2)) - nW = dataPad[i] - nE = dataPad[i+1] - sW = dataPad[i+(wxsize+2)] - sE = dataPad[i+(wxsize+2)+1] - dataIn.append((nW + nE + sW + sE)/float(4)) - - # Get the surrounding eight tiles - # Get NW - if tx - 1 >= tminx and ty + 1 <= tmaxy: - rxNW, ryNW, rxsizeNW, rysizeNW, wxsizeNW, wysizeNW = getTilePxBounds(self, - tx - 1, ty + 1, tz, ds) - wxsizeNW -= 1 - wysizeNW -= 1 - if wxsizeNW != 0 and wysizeNW != 0: - dataNW = ds.GetRasterBand(1).ReadRaster( - rxNW, ryNW, rxsizeNW, rysizeNW, wxsizeNW, wysizeNW, buf_type=gdal.GDT_Float32) - if dataNW is not None and (wxsizeNW * wysizeNW) > 0: - dataNW = struct.unpack( - 'f' * wxsizeNW * wysizeNW, dataNW) - else: - dataNW = None - else: - dataNW = None - - # Get N - if ty + 1 <= tmaxy: - rxN, ryN, rxsizeN, rysizeN, wxsizeN, wysizeN = getTilePxBounds( - self, tx, ty + 1, tz, ds) - wxsizeN -= 1 - wysizeN -= 1 - if wxsizeN != 0 and wysizeN != 0: - dataN = ds.GetRasterBand(1).ReadRaster( - rxN, ryN, rxsizeN, rysizeN, wxsizeN, wysizeN, buf_type=gdal.GDT_Float32) - if dataN is not None and (wxsizeN * wysizeN) > 0: - dataN = struct.unpack( - 'f' * wxsizeN * wysizeN, dataN) - else: - dataN = None - else: - dataN = None - # Get NE - if tx + 1 <= tmaxx and 
ty + 1 <= tmaxy: - rxNE, ryNE, rxsizeNE, rysizeNE, wxsizeNE, wysizeNE = getTilePxBounds( - self, tx + 1, ty + 1, tz, ds) - wxsizeNE -= 1 - wysizeNE -= 1 - if wxsizeNE != 0 and wysizeNE != 0: - dataNE = ds.GetRasterBand(1).ReadRaster( - rxNE, ryNE, rxsizeNE, rysizeNE, wxsizeNE, wysizeNE, buf_type=gdal.GDT_Float32) - if dataNE is not None and (wxsizeNE * wysizeNE) > 0: - dataNE = struct.unpack( - 'f' * wxsizeNE * wysizeNE, dataNE) - else: - dataNE = None - else: - dataNE = None - # Get E - if tx + 1 <= tmaxx: - rxE, ryE, rxsizeE, rysizeE, wxsizeE, wysizeE = getTilePxBounds( - self, tx + 1, ty, tz, ds) - wxsizeE -= 1 - wysizeE -= 1 - if wxsizeE != 0 and wysizeE != 0: - dataE = ds.GetRasterBand(1).ReadRaster( - rxE, ryE, rxsizeE, rysizeE, wxsizeE, wysizeE, buf_type=gdal.GDT_Float32) - if dataE is not None and (wxsizeE * wysizeE) > 0: - dataE = struct.unpack( - 'f' * wxsizeE * wysizeE, dataE) - else: - dataE = None - else: - dataE = None - # Get SE - if tx + 1 <= tmaxx and ty - 1 >= tminy: - rxSE, rySE, rxsizeSE, rysizeSE, wxsizeSE, wysizeSE = getTilePxBounds( - self, tx + 1, ty - 1, tz, ds) - wxsizeSE -= 1 - wysizeSE -= 1 - if wxsizeSE != 0 and wysizeSE != 0: - dataSE = ds.GetRasterBand(1).ReadRaster( - rxSE, rySE, rxsizeSE, rysizeSE, wxsizeSE, wysizeSE, buf_type=gdal.GDT_Float32) - if dataSE is not None and (wxsizeSE * wysizeSE) > 0: - dataSE = struct.unpack( - 'f' * wxsizeSE * wysizeSE, dataSE) - else: - dataSE = None - else: - dataSE = None - # Get S - if ty - 1 >= tminy: - rxS, ryS, rxsizeS, rysizeS, wxsizeS, wysizeS = getTilePxBounds( - self, tx, ty - 1, tz, ds) - wxsizeS -= 1 - wysizeS -= 1 - if wxsizeS != 0 and wysizeS != 0: - dataS = ds.GetRasterBand(1).ReadRaster( - rxS, ryS, rxsizeS, rysizeS, wxsizeS, wysizeS, buf_type=gdal.GDT_Float32) - if dataS is not None and (wxsizeS * wysizeS) > 0: - dataS = struct.unpack( - 'f' * wxsizeS * wysizeS, dataS) - else: - dataS = None - else: - dataS = None - # Get SW - if tx - 1 >= tminx and ty - 1 >= tminy: - rxSW, rySW, 
rxsizeSW, rysizeSW, wxsizeSW, wysizeSW = getTilePxBounds( - self, tx - 1, ty - 1, tz, ds) - wxsizeSW -= 1 - wysizeSW -= 1 - if wxsizeSW != 0 and wysizeSW != 0: - dataSW = ds.GetRasterBand(1).ReadRaster( - rxSW, rySW, rxsizeSW, rysizeSW, wxsizeSW, wysizeSW, buf_type=gdal.GDT_Float32) - if dataSW is not None and (wxsizeSW * wysizeSW) > 0: - dataSW = struct.unpack( - 'f' * wxsizeSW * wysizeSW, dataSW) - else: - dataSW = None - else: - dataSW = None - # Get W - if tx - 1 >= tminx: - rxW, ryW, rxsizeW, rysizeW, wxsizeW, wysizeW = getTilePxBounds( - self, tx - 1, ty, tz, ds) - wxsizeW -= 1 - wysizeW -= 1 - if wxsizeW != 0 and wysizeW != 0: - dataW = ds.GetRasterBand(1).ReadRaster( - rxW, ryW, rxsizeW, rysizeW, wxsizeW, wysizeW, buf_type=gdal.GDT_Float32) - if dataW is not None and (wxsizeW * wysizeW) > 0: - dataW = struct.unpack( - 'f' * wxsizeW * wysizeW, dataW) - else: - dataW = None - else: - dataW = None - - # NW (uses N, NW, W) - fN = fNE = fE = fSE = fS = fSW = fW = fNW = 0 - values = 1 - if dataN is not None: - fN = dataN[len(dataN)-wxsizeN] - values = values + 1 - if dataNW is not None: - fNW = dataNW[len(dataNW)-1] - values = values + 1 - if dataW is not None: - fW = dataW[wxsizeW-1] - values = values + 1 - dataIn[0] = ((dataIn[0]*4) + fN + - fNW + fW)/float(values) - - # NE (uses N, NE, E) - fN = fNE = fE = fSE = fS = fSW = fW = fNW = 0 - values = 1 - if dataN is not None: - fN = dataN[len(dataN)-1] - values = values + 1 - if dataNE is not None: - fNE = dataNE[len(dataNE)-wxsizeNE] - values = values + 1 - if dataE is not None: - fE = dataE[0] - values = values + 1 - dataIn[wxsize] = ( - (dataIn[wxsize]*4) + fN + fNE + fE)/float(values) - - # SE (uses S, SE, E) - fN = fNE = fE = fSE = fS = fSW = fW = fNW = 0 - values = 1 - if dataS is not None: - fS = dataS[wxsizeS-1] - values = values + 1 - if dataSE is not None: - fSE = dataSE[0] - values = values + 1 - if dataE is not None: - fE = dataE[len(dataE)-wxsizeE] - values = values + 1 - dataIn[len(dataIn)-1] = 
((dataIn[len(dataIn)-1] - * 4) + fS + fSE + fE)/float(values) - - # SW (uses S, SW, W) - fN = fNE = fE = fSE = fS = fSW = fW = fNW = 0 - values = 1 - if dataS is not None: - fS = dataS[0] - values = values + 1 - if dataSW is not None: - fSW = dataSW[wxsizeSW-1] - values = values + 1 - if dataW is not None: - fW = dataW[len(dataW)-1] - values = values + 1 - dataIn[len( - dataIn)-wxsize-1] = ((dataIn[len(dataIn)-wxsize-1]*4) + fS + fSW + fW)/float(values) - - # Then the edges minus corners - # N - if dataN is not None: - for i in range(1, wxsize): - dataIn[i] = ( - (dataIn[i]*4) + dataN[len(dataN)-wxsizeN-1+i] + dataN[len(dataN)-wxsizeN-1+i+1])/float(4) - else: - for i in range(1, wxsize): - dataIn[i] = (dataIn[i]*4)/float(2) - - # E - if dataE is not None: - for i in range(1, wysize): - dataIn[((i+1)*(wxsize+1)-1)] = ((dataIn[((i+1)*(wxsize+1)-1)] - * 4) + dataE[(i-1)*wxsizeE] + dataE[i*wxsizeE])/float(4) - else: - for i in range(1, wysize): - dataIn[( - (i+1)*(wxsize+1)-1)] = (dataIn[((i+1)*(wxsize+1)-1)]*4)/float(2) - - # S - if dataS is not None: - for i in range(1, wxsize): - dataIn[len(dataIn)-wxsize-1+i] = ( - (dataIn[len(dataIn)-wxsize-1+i]*4) + dataS[i-1] + dataS[i])/float(4) - else: - for i in range(1, wxsize): - dataIn[len( - dataIn)-wxsize-1+i] = (dataIn[len(dataIn)-wxsize-1+i]*4)/float(2) - - # W - if dataW is not None: - for i in range(1, wysize): - dataIn[(i)*(wxsize+1)] = ((dataIn[(i)*(wxsize+1)]*4) + - dataW[i*wxsizeW-1] + dataW[(i+1)*wxsizeW-1])/float(4) - else: - for i in range(1, wysize): - dataIn[(i)*(wxsize+1)] = (dataIn[(i) - * (wxsize+1)]*4)/float(2) - - data1 = [] - data2 = [] - data3 = [] - data4 = [] - for f in dataIn: - f = str(binary(f)) - data1.append(int(f[:8], 2)) - data2.append(int(f[8:16], 2)) - data3.append(int(f[16:24], 2)) - data4.append(int(f[24:], 2)) - - data1s = '' - data2s = '' - data3s = '' - data4s = '' - indx = 0 - for v in data1: - data1s += struct.pack('B', data1[indx]) - data2s += struct.pack('B', data2[indx]) - data3s 
+= struct.pack('B', data3[indx]) - data4s += struct.pack('B', data4[indx]) - indx += 1 - dstile.GetRasterBand(1).WriteRaster( - wx, wy, wxsize + 1, wysize + 1, data1s, buf_type=gdal.GDT_Byte) - dstile.GetRasterBand(2).WriteRaster( - wx, wy, wxsize + 1, wysize + 1, data2s, buf_type=gdal.GDT_Byte) - dstile.GetRasterBand(3).WriteRaster( - wx, wy, wxsize + 1, wysize + 1, data3s, buf_type=gdal.GDT_Byte) - dstile.GetRasterBand(4).WriteRaster( - wx, wy, wxsize + 1, wysize + 1, data4s, buf_type=gdal.GDT_Byte) - elif wxsize != 0 and wysize != 0: - # Big ReadRaster query in memory scaled to the tilesize - all but 'near' algo - dsquery = self.mem_drv.Create( - '', querysize, querysize, tilebands, gdal.GDT_Byte) # 1bto4b - # TODO: fill the null value in case a tile without alpha is produced (now only png tiles are supported) - # for i in range(1, tilebands+1): - # dsquery.GetRasterBand(1).Fill(tilenodata) - # dsquery.WriteRaster(wx, wy, wxsize, wysize, data, band_list=list(range(1,self.dataBandsCount+1)))###############1bto4b - # dsquery.WriteRaster(wx, wy, wxsize, wysize, alpha, band_list=[tilebands])###############################1bto4b - - # 1bto4b - data = ds.GetRasterBand(1).ReadRaster( - rx, ry, rxsize, rysize, wxsize, wysize, buf_type=gdal.GDT_Float32) - - data = struct.unpack('f' * wxsize * wysize, data) - data1 = [] - data2 = [] - data3 = [] - data4 = [] - for f in data: - f = str(binary(f)) - data1.append(int(f[:8], 2)) - data2.append(int(f[8:16], 2)) - data3.append(int(f[16:24], 2)) - data4.append(int(f[24:], 2)) - - data1s = '' - data2s = '' - data3s = '' - data4s = '' - indx = 0 - for v in data1: - data1s += struct.pack('B', data1[indx]) - data2s += struct.pack('B', data2[indx]) - data3s += struct.pack('B', data3[indx]) - data4s += struct.pack('B', data4[indx]) - indx += 1 - - dsquery.GetRasterBand(1).WriteRaster( - wx, wy, wxsize, wysize, data1s, buf_type=gdal.GDT_Byte) - dsquery.GetRasterBand(2).WriteRaster( - wx, wy, wxsize, wysize, data2s, 
buf_type=gdal.GDT_Byte) - dsquery.GetRasterBand(3).WriteRaster( - wx, wy, wxsize, wysize, data3s, buf_type=gdal.GDT_Byte) - dsquery.GetRasterBand(4).WriteRaster( - wx, wy, wxsize, wysize, data4s, buf_type=gdal.GDT_Byte) - # sys.exit('done') - # 1bto4b - - self.scale_query_to_tile( - dsquery, dstile, tilefilename) - del dsquery - - else: - if self.tilesize == querysize: - # Use the ReadRaster result directly in tiles ('nearest neighbour' query) - dstile.WriteRaster(wx, wy, wxsize, wysize, data, - band_list=list(range(1, self.dataBandsCount+1))) - dstile.WriteRaster( - wx, wy, wxsize, wysize, alpha, band_list=[tilebands]) - - # Note: For source drivers based on WaveLet compression (JPEG2000, ECW, - # MrSID) the ReadRaster function returns high-quality raster (not ugly - # nearest neighbour) - # TODO: Use directly 'near' for WaveLet files - else: - # Big ReadRaster query in memory scaled to the tilesize - all but 'near' - # algo - dsquery = self.mem_drv.Create( - '', querysize, querysize, tilebands) - # TODO: fill the null value in case a tile without alpha is produced (now - # only png tiles are supported) - dsquery.WriteRaster(wx, wy, wxsize, wysize, data, - band_list=list(range(1, self.dataBandsCount+1))) - dsquery.WriteRaster( - wx, wy, wxsize, wysize, alpha, band_list=[tilebands]) - - self.scale_query_to_tile( - dsquery, dstile, tilefilename) - del dsquery - - del data - - if self.options.resampling != 'antialias': - # Write a copy of tile to png/jpg - self.out_drv.CreateCopy(tilefilename, dstile, strict=0) - - del dstile - - # Create a KML file for this tile. 
- if self.kml: - kmlfilename = os.path.join( - self.output, str(tz), str(tx), '%d.kml' % ty) - if not self.options.resume or not os.path.exists(kmlfilename): - f = open(kmlfilename, 'wb') - f.write(self.generate_kml(tx, ty, tz).encode('utf-8')) - f.close() - - if not self.options.verbose and not self.options.quiet: - self.progressbar(ti / float(tcount)) - - def generate_overview_tiles(self): - """Generation of the overview tiles (higher in the pyramid) based on existing tiles""" - - if not self.options.quiet: - print("Generating Overview Tiles:") - - # 1bto4b - if self.isDEMtile: - tilebands = 4 - else: - tilebands = self.dataBandsCount + 1 - - # Usage of existing tiles: from 4 underlying tiles generate one as overview. - - tcount = 0 - for tz in range(self.tmaxz-1, self.tminz-1, -1): - tminx, tminy, tmaxx, tmaxy = self.tminmax[tz] - tcount += (1+abs(tmaxx-tminx)) * (1+abs(tmaxy-tminy)) - - ti = 0 - - for tz in range(self.tmaxz-1, self.tminz-1, -1): - tminx, tminy, tmaxx, tmaxy = self.tminmax[tz] - for ty in range(tmaxy, tminy-1, -1): - for tx in range(tminx, tmaxx+1): - - if self.stopped: - break - - ti += 1 - tilefilename = os.path.join(self.output, - str(tz), - str(tx), - "%s.%s" % (ty, self.tileext)) - - if self.options.verbose: - print(ti, '/', tcount, tilefilename) - - if self.options.resume and os.path.exists(tilefilename): - if self.options.verbose: - print("Tile generation skipped because of --resume") - else: - self.progressbar(ti / float(tcount)) - continue - - # Create directories for the tile - if not os.path.exists(os.path.dirname(tilefilename)): - os.makedirs(os.path.dirname(tilefilename)) - - dsquery = self.mem_drv.Create( - '', 2*self.tilesize, 2*self.tilesize, tilebands) - # TODO: fill the null value - dstile = self.mem_drv.Create( - '', self.tilesize, self.tilesize, tilebands) - - # TODO: Implement more clever walking on the tiles with cache functionality - # probably walk should start with reading of four tiles from top left corner - # Hilbert 
curve - - children = [] - # Read the tiles and write them to query window - for y in range(2*ty, 2*ty+2): - for x in range(2*tx, 2*tx+2): - minx, miny, maxx, maxy = self.tminmax[tz+1] - if x >= minx and x <= maxx and y >= miny and y <= maxy: - dsquerytile = gdal.Open( - os.path.join(self.output, str(tz+1), str(x), - "%s.%s" % (y, self.tileext)), - gdal.GA_ReadOnly) - if (ty == 0 and y == 1) or (ty != 0 and (y % (2*ty)) != 0): - tileposy = 0 - else: - tileposy = self.tilesize - if tx: - tileposx = x % (2*tx) * self.tilesize - elif tx == 0 and x == 1: - tileposx = self.tilesize - else: - tileposx = 0 - dsquery.WriteRaster( - tileposx, tileposy, self.tilesize, self.tilesize, - dsquerytile.ReadRaster( - 0, 0, self.tilesize, self.tilesize), - band_list=list(range(1, tilebands+1))) - children.append([x, y, tz+1]) - - self.scale_query_to_tile(dsquery, dstile, tilefilename) - # Write a copy of tile to png/jpg - if self.options.resampling != 'antialias': - # Write a copy of tile to png/jpg - self.out_drv.CreateCopy(tilefilename, dstile, strict=0) - - if self.options.verbose: - print("\tbuild from zoom", tz+1, - " tiles:", (2*tx, 2*ty), (2*tx+1, 2*ty), - (2*tx, 2*ty+1), (2*tx+1, 2*ty+1)) - - # Create a KML file for this tile. - if self.kml: - f = open(os.path.join( - self.output, '%d/%d/%d.kml' % (tz, tx, ty)), 'wb') - f.write(self.generate_kml( - tx, ty, tz, children).encode('utf-8')) - f.close() - - if not self.options.verbose and not self.options.quiet: - self.progressbar(ti / float(tcount)) - - def geo_query(self, ds, ulx, uly, lrx, lry, querysize=0): - """ - For given dataset and query in cartographic coordinates returns parameters for ReadRaster() - in raster coordinates and x/y shifts (for border tiles). If the querysize is not given, the - extent is returned in the native resolution of dataset ds. 
- - raises Gdal2TilesError if the dataset does not contain anything inside this geo_query - """ - geotran = ds.GetGeoTransform() - rx = int((ulx - geotran[0]) / geotran[1] + 0.001) - ry = int((uly - geotran[3]) / geotran[5] + 0.001) - rxsize = int((lrx - ulx) / geotran[1] + 0.5) - rysize = int((lry - uly) / geotran[5] + 0.5) - - if not querysize: - wxsize, wysize = rxsize, rysize - else: - wxsize, wysize = querysize, querysize - - # Coordinates should not go out of the bounds of the raster - wx = 0 - if rx < 0: - rxshift = abs(rx) - wx = int(wxsize * (float(rxshift) / rxsize)) - wxsize = wxsize - wx - rxsize = rxsize - int(rxsize * (float(rxshift) / rxsize)) - rx = 0 - if rx+rxsize > ds.RasterXSize: - wxsize = int(wxsize * (float(ds.RasterXSize - rx) / rxsize)) - rxsize = ds.RasterXSize - rx - - wy = 0 - if ry < 0: - ryshift = abs(ry) - wy = int(wysize * (float(ryshift) / rysize)) - wysize = wysize - wy - rysize = rysize - int(rysize * (float(ryshift) / rysize)) - ry = 0 - if ry+rysize > ds.RasterYSize: - wysize = int(wysize * (float(ds.RasterYSize - ry) / rysize)) - rysize = ds.RasterYSize - ry - - return (rx, ry, rxsize, rysize), (wx, wy, wxsize, wysize) - - def scale_query_to_tile(self, dsquery, dstile, tilefilename=''): - """Scales down query dataset to the tile dataset""" - - querysize = dsquery.RasterXSize - tilesize = dstile.RasterXSize - tilebands = dstile.RasterCount - - if self.options.resampling == 'average': - - # Function: gdal.RegenerateOverview() - for i in range(1, tilebands+1): - # Black border around NODATA - res = gdal.RegenerateOverview(dsquery.GetRasterBand(i), dstile.GetRasterBand(i), - 'average') - if res != 0: - self.error("RegenerateOverview() failed on %s, error %d" % ( - tilefilename, res)) - - elif self.options.resampling == 'antialias': - - # Scaling by PIL (Python Imaging Library) - improved Lanczos - array = numpy.zeros((querysize, querysize, tilebands), numpy.uint8) - for i in range(tilebands): - array[:, :, i] = 
gdalarray.BandReadAsArray(dsquery.GetRasterBand(i+1), - 0, 0, querysize, querysize) - im = Image.fromarray(array, 'RGBA') # Always four bands - im1 = im.resize((tilesize, tilesize), Image.ANTIALIAS) - if os.path.exists(tilefilename): - im0 = Image.open(tilefilename) - im1 = Image.composite(im1, im0, im1) - im1.save(tilefilename, self.tiledriver) - - else: - - # Other algorithms are implemented by gdal.ReprojectImage(). - dsquery.SetGeoTransform((0.0, tilesize / float(querysize), 0.0, 0.0, 0.0, - tilesize / float(querysize))) - dstile.SetGeoTransform((0.0, 1.0, 0.0, 0.0, 0.0, 1.0)) - - res = gdal.ReprojectImage( - dsquery, dstile, None, None, self.resampling) - if res != 0: - self.error("ReprojectImage() failed on %s, error %d" % - (tilefilename, res)) - - def generate_tilemapresource(self): - """ - Template for tilemapresource.xml. Returns filled string. Expected variables: - title, north, south, east, west, isepsg4326, projection, publishurl, - zoompixels, tilesize, tileformat, profile - """ - - args = {} - args['title'] = self.options.title - args['south'], args['west'], args['north'], args['east'] = self.swne - args['tilesize'] = self.tilesize - args['tileformat'] = self.tileext - args['publishurl'] = self.options.url - args['profile'] = self.options.profile - - if self.options.profile == 'mercator': - args['srs'] = "EPSG:3857" - elif self.options.profile == 'geodetic': - args['srs'] = "EPSG:4326" - elif self.options.s_srs: - args['srs'] = self.options.s_srs - elif self.out_srs: - args['srs'] = self.out_srs.ExportToWkt() - else: - args['srs'] = "" - - s = """ - - %(title)s - - %(srs)s - - - - -""" % args # noqa - for z in range(self.tminz, self.tmaxz+1): - if self.options.profile == 'raster': - s += """ \n""" % ( - args['publishurl'], z, (2**(self.nativezoom-z) * self.out_gt[1]), z) - elif self.options.profile == 'mercator': - s += """ \n""" % ( - args['publishurl'], z, 156543.0339/2**z, z) - elif self.options.profile == 'geodetic': - s += """ \n""" % ( - 
args['publishurl'], z, 0.703125/2**z, z) - s += """ - - """ - return s - - def generate_kml(self, tx, ty, tz, children=None, **args): - """ - Template for the KML. Returns filled string. - """ - if not children: - children = [] - - args['tx'], args['ty'], args['tz'] = tx, ty, tz - args['tileformat'] = self.tileext - if 'tilesize' not in args: - args['tilesize'] = self.tilesize - - if 'minlodpixels' not in args: - args['minlodpixels'] = int(args['tilesize'] / 2) - if 'maxlodpixels' not in args: - args['maxlodpixels'] = int(args['tilesize'] * 8) - if children == []: - args['maxlodpixels'] = -1 - - if tx is None: - tilekml = False - args['title'] = self.options.title - else: - tilekml = True - args['title'] = "%d/%d/%d.kml" % (tz, tx, ty) - args['south'], args['west'], args['north'], args['east'] = self.tileswne( - tx, ty, tz) - - if tx == 0: - args['drawOrder'] = 2 * tz + 1 - elif tx is not None: - args['drawOrder'] = 2 * tz - else: - args['drawOrder'] = 0 - - url = self.options.url - if not url: - if tilekml: - url = "../../" - else: - url = "" - - s = """ - - - %(title)s - - """ % args - if tilekml: - s += """ - - - %(north).14f - %(south).14f - %(east).14f - %(west).14f - - - %(minlodpixels)d - %(maxlodpixels)d - - - - %(drawOrder)d - - %(ty)d.%(tileformat)s - - - %(north).14f - %(south).14f - %(east).14f - %(west).14f - - - """ % args - - for cx, cy, cz in children: - csouth, cwest, cnorth, ceast = self.tileswne(cx, cy, cz) - s += """ - - %d/%d/%d.%s - - - %.14f - %.14f - %.14f - %.14f - - - %d - -1 - - - - %s%d/%d/%d.kml - onRegion - - - - """ % (cz, cx, cy, args['tileformat'], cnorth, csouth, ceast, cwest, - args['minlodpixels'], url, cz, cx, cy) - - s += """ - - """ - return s - - def generate_googlemaps(self): - """ - Template for googlemaps.html implementing Overlay of tiles for 'mercator' profile. - It returns filled string. 
Expected variables: - title, googlemapskey, north, south, east, west, minzoom, maxzoom, tilesize, tileformat, - publishurl - """ - args = {} - args['title'] = self.options.title - args['googlemapskey'] = self.options.googlekey - args['south'], args['west'], args['north'], args['east'] = self.swne - args['minzoom'] = self.tminz - args['maxzoom'] = self.tmaxz - args['tilesize'] = self.tilesize - args['tileformat'] = self.tileext - args['publishurl'] = self.options.url - args['copyright'] = self.options.copyright - - s = r""" - - - %(title)s - - - - - - - - -
Generated by GDAL2Tiles, Copyright © 2008 Klokan Petr Pridal, GDAL & OSGeo GSoC - -
-
- - - """ % args # noqa - - return s - - def generate_leaflet(self): - """ - Template for leaflet.html implementing overlay of tiles for 'mercator' profile. - It returns filled string. Expected variables: - title, north, south, east, west, minzoom, maxzoom, tilesize, tileformat, publishurl - """ - - args = {} - args['title'] = self.options.title.replace('"', '\\"') - args['htmltitle'] = self.options.title - args['south'], args['west'], args['north'], args['east'] = self.swne - args['centerlon'] = (args['north'] + args['south']) / 2. - args['centerlat'] = (args['west'] + args['east']) / 2. - args['minzoom'] = self.tminz - args['maxzoom'] = self.tmaxz - args['beginzoom'] = self.tmaxz - args['tilesize'] = self.tilesize # not used - args['tileformat'] = self.tileext - args['publishurl'] = self.options.url # not used - args['copyright'] = self.options.copyright.replace('"', '\\"') - - s = """ - - - - - %(htmltitle)s - - - - - - - - - - -
- - - - - - - """ % args # noqa - - return s - - def generate_openlayers(self): - """ - Template for openlayers.html implementing overlay of available Spherical Mercator layers. - - It returns filled string. Expected variables: - title, bingkey, north, south, east, west, minzoom, maxzoom, tilesize, tileformat, publishurl - """ - - args = {} - args['title'] = self.options.title - args['bingkey'] = self.options.bingkey - args['south'], args['west'], args['north'], args['east'] = self.swne - args['minzoom'] = self.tminz - args['maxzoom'] = self.tmaxz - args['tilesize'] = self.tilesize - args['tileformat'] = self.tileext - args['publishurl'] = self.options.url - args['copyright'] = self.options.copyright - if self.options.tmscompatible: - args['tmsoffset'] = "-1" - else: - args['tmsoffset'] = "" - if self.options.profile == 'raster': - args['rasterzoomlevels'] = self.tmaxz+1 - args['rastermaxresolution'] = 2**(self.nativezoom) * self.out_gt[1] - - s = r""" - - %(title)s - - """ % args # noqa - - if self.options.profile == 'mercator': - s += """ - - """ % args - - s += """ - - - - - -
Generated by GDAL2Tiles, Copyright © 2008 Klokan Petr Pridal, GDAL & OSGeo GSoC - -
-
- - - """ % args # noqa - - return s - - -def main(): - argv = gdal.GeneralCmdLineProcessor(sys.argv) - if argv: - gdal2tiles = GDAL2Tiles(argv[1:]) - gdal2tiles.process() - - -if __name__ == '__main__': - main() - -# vim: set tabstop=4 shiftwidth=4 expandtab: +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# ****************************************************************************** +# $Id$ +# +# Project: Google Summer of Code 2007, 2008 (http://code.google.com/soc/) +# Support: BRGM (http://www.brgm.fr) +# Purpose: Convert a raster into TMS (Tile Map Service) tiles in a directory. +# - generate Google Earth metadata (KML SuperOverlay) +# - generate simple HTML viewer based on Google Maps and OpenLayers +# - support of global tiles (Spherical Mercator) for compatibility +# with interactive web maps a la Google Maps +# Author: Klokan Petr Pridal, klokan at klokan dot cz +# Web: http://www.klokan.cz/projects/gdal2tiles/ +# GUI: http://www.maptiler.org/ +# +############################################################################### +# Copyright (c) 2008, Klokan Petr Pridal +# Copyright (c) 2010-2013, Even Rouault +# +# Permission is hereby granted, free of charge, to any person obtaining a +# copy of this software and associated documentation files (the "Software"), +# to deal in the Software without restriction, including without limitation +# the rights to use, copy, modify, merge, publish, distribute, sublicense, +# and/or sell copies of the Software, and to permit persons to whom the +# Software is furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL +# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +# DEALINGS IN THE SOFTWARE. +# ****************************************************************************** + +import math +import os +import sys + +from osgeo import gdal +from osgeo import osr + +import struct # 1bto4b + + +def binary(num): # 1bto4b + # 1bto4b + return ''.join(bin(ord(c)).replace('0b', '').rjust(8, '0') for c in struct.pack('!f', num)) + +# 1bto4b + +def getTilePxBounds(self, tx, ty, tz, ds): + + querysize = self.tilesize + + if self.isRasterBounded: # 'raster' profile: + # tilesize in raster coordinates for actual zoom + tsize = int(self.tsize[tz]) + xsize = self.out_ds.fWorldXSize + ysize = self.out_ds.fWorldYSize + if tz >= self.tmaxz: + querysize = self.tilesize + + rx = (tx) * tsize - self.out_ds.fRasterXOriginWorld + #print("rx", rx) + rxsize = 0 + rxsize = tsize + + rysize = 0 + rysize = tsize + + ry = ysize - (ty * tsize) - rysize - \ + self.out_ds.fRasterYOriginWorld + + wx, wy = 0, 0 + wxsize = int(rxsize/float(tsize) * self.tilesize) + wysize = int(rysize/float(tsize) * self.tilesize) + if wysize != self.tilesize: + wy = self.tilesize - wysize + + if rx < 0: + rxsize = tsize + rx + wx = -rx + wxsize = int(rxsize/float(tsize) * self.tilesize) + rx = 0 + if ry < 0: + rysize = tsize + ry + wy = -ry + wysize = int(rysize/float(tsize) * self.tilesize) + ry = 0 + if rx + rxsize > self.out_ds.fRasterXSizeWorld: + rxsize = self.out_ds.fRasterXSizeWorld - rx + wxsize = int(rxsize/float(tsize) * self.tilesize) + if ry + rysize > self.out_ds.fRasterYSizeWorld: + rysize = self.out_ds.fRasterYSizeWorld - ry + wysize = int(rysize/float(tsize) * self.tilesize) + + # Convert rx, ry back to non-world coordinates + rx = int(float(self.out_ds.RasterXSize) * + (float(rx) / self.out_ds.fRasterXSizeWorld)) + ry = 
int(float(self.out_ds.RasterYSize) * + (float(ry) / self.out_ds.fRasterYSizeWorld)) + rxsize = int(float(self.out_ds.RasterXSize) * + (float(rxsize) / self.out_ds.fRasterXSizeWorld)) + rysize = int(float(self.out_ds.RasterYSize) * + (float(rysize) / self.out_ds.fRasterYSizeWorld)) + else: + b = self.mercator.TileBounds(tx, ty, tz) + rb, wb = self.geo_query( + ds, b[0], b[3], b[2], b[1], querysize=querysize) + rx, ry, rxsize, rysize = rb + wx, wy, wxsize, wysize = wb + + return [rx, ry, rxsize, rysize, wxsize, wysize] + + +try: + from PIL import Image + import numpy + import osgeo.gdal_array as gdalarray +except Exception: + # 'antialias' resampling is not available + pass + +__version__ = "$Id$" + +resampling_list = ('average', 'near', 'bilinear', 'cubic', + 'cubicspline', 'lanczos', 'antialias') +profile_list = ('mercator', 'geodetic', 'raster') +webviewer_list = ('all', 'google', 'openlayers', 'leaflet', 'none') + +# ============================================================================= +# ============================================================================= +# ============================================================================= + +__doc__globalmaptiles = """ +globalmaptiles.py + +Global Map Tiles as defined in Tile Map Service (TMS) Profiles +============================================================== + +Functions necessary for generation of global tiles used on the web. 
+It contains classes implementing coordinate conversions for: + + - GlobalMercator (based on EPSG:3857) + for Google Maps, Yahoo Maps, Bing Maps compatible tiles + - GlobalGeodetic (based on EPSG:4326) + for OpenLayers Base Map and Google Earth compatible tiles + +More info at: + +http://wiki.osgeo.org/wiki/Tile_Map_Service_Specification +http://wiki.osgeo.org/wiki/WMS_Tiling_Client_Recommendation +http://msdn.microsoft.com/en-us/library/bb259689.aspx +http://code.google.com/apis/maps/documentation/overlays.html#Google_Maps_Coordinates + +Created by Klokan Petr Pridal on 2008-07-03. +Google Summer of Code 2008, project GDAL2Tiles for OSGEO. + +In case you use this class in your product, translate it to another language +or find it useful for your project please let me know. +My email: klokan at klokan dot cz. +I would like to know where it was used. + +Class is available under the open-source GDAL license (www.gdal.org). +""" + +MAXZOOMLEVEL = 32 + + +class GlobalMercator(object): + r""" + TMS Global Mercator Profile + --------------------------- + + Functions necessary for generation of tiles in Spherical Mercator projection, + EPSG:3857. + + Such tiles are compatible with Google Maps, Bing Maps, Yahoo Maps, + UK Ordnance Survey OpenSpace API, ... + and you can overlay them on top of base maps of those web mapping applications. + + Pixel and tile coordinates are in TMS notation (origin [0,0] in bottom-left). + + What coordinate conversions do we need for TMS Global Mercator tiles:: + + LatLon <-> Meters <-> Pixels <-> Tile + + WGS84 coordinates Spherical Mercator Pixels in pyramid Tiles in pyramid + lat/lon XY in meters XY pixels Z zoom XYZ from TMS + EPSG:4326 EPSG:387 + .----. --------- -- TMS + / \ <-> | | <-> /----/ <-> Google + \ / | | /--------/ QuadTree + ----- --------- /------------/ + KML, public WebMapService Web Clients TileMapService + + What is the coordinate extent of Earth in EPSG:3857? 
+ + [-20037508.342789244, -20037508.342789244, + 20037508.342789244, 20037508.342789244] + Constant 20037508.342789244 comes from the circumference of the Earth in meters, + which is 40 thousand kilometers, the coordinate origin is in the middle of extent. + In fact you can calculate the constant as: 2 * math.pi * 6378137 / 2.0 + $ echo 180 85 | gdaltransform -s_srs EPSG:4326 -t_srs EPSG:3857 + Polar areas with abs(latitude) bigger then 85.05112878 are clipped off. + + What are zoom level constants (pixels/meter) for pyramid with EPSG:3857? + + whole region is on top of pyramid (zoom=0) covered by 256x256 pixels tile, + every lower zoom level resolution is always divided by two + initialResolution = 20037508.342789244 * 2 / 256 = 156543.03392804062 + + What is the difference between TMS and Google Maps/QuadTree tile name convention? + + The tile raster itself is the same (equal extent, projection, pixel size), + there is just different identification of the same raster tile. + Tiles in TMS are counted from [0,0] in the bottom-left corner, id is XYZ. + Google placed the origin [0,0] to the top-left corner, reference is XYZ. + Microsoft is referencing tiles by a QuadTree name, defined on the website: + http://msdn2.microsoft.com/en-us/library/bb259689.aspx + + The lat/lon coordinates are using WGS84 datum, yes? + + Yes, all lat/lon we are mentioning should use WGS84 Geodetic Datum. + Well, the web clients like Google Maps are projecting those coordinates by + Spherical Mercator, so in fact lat/lon coordinates on sphere are treated as if + the were on the WGS84 ellipsoid. + + From MSDN documentation: + To simplify the calculations, we use the spherical form of projection, not + the ellipsoidal form. Since the projection is used only for map display, + and not for displaying numeric coordinates, we don't need the extra precision + of an ellipsoidal projection. 
The spherical projection causes approximately + 0.33 percent scale distortion in the Y direction, which is not visually + noticeable. + + How do I create a raster in EPSG:3857 and convert coordinates with PROJ.4? + + You can use standard GIS tools like gdalwarp, cs2cs or gdaltransform. + All of the tools supports -t_srs 'epsg:3857'. + + For other GIS programs check the exact definition of the projection: + More info at http://spatialreference.org/ref/user/google-projection/ + The same projection is designated as EPSG:3857. WKT definition is in the + official EPSG database. + + Proj4 Text: + +proj=merc +a=6378137 +b=6378137 +lat_ts=0.0 +lon_0=0.0 +x_0=0.0 +y_0=0 + +k=1.0 +units=m +nadgrids=@null +no_defs + + Human readable WKT format of EPSG:3857: + PROJCS["Google Maps Global Mercator", + GEOGCS["WGS 84", + DATUM["WGS_1984", + SPHEROID["WGS 84",6378137,298.257223563, + AUTHORITY["EPSG","7030"]], + AUTHORITY["EPSG","6326"]], + PRIMEM["Greenwich",0], + UNIT["degree",0.0174532925199433], + AUTHORITY["EPSG","4326"]], + PROJECTION["Mercator_1SP"], + PARAMETER["central_meridian",0], + PARAMETER["scale_factor",1], + PARAMETER["false_easting",0], + PARAMETER["false_northing",0], + UNIT["metre",1, + AUTHORITY["EPSG","9001"]]] + """ + + def __init__(self, tileSize=256): + "Initialize the TMS Global Mercator pyramid" + self.tileSize = tileSize + self.initialResolution = 2 * math.pi * 6378137 / self.tileSize + # 156543.03392804062 for tileSize 256 pixels + self.originShift = 2 * math.pi * 6378137 / 2.0 + # 20037508.342789244 + + def LatLonToMeters(self, lat, lon): + "Converts given lat/lon in WGS84 Datum to XY in Spherical Mercator EPSG:3857" + + mx = lon * self.originShift / 180.0 + my = math.log(math.tan((90 + lat) * math.pi / 360.0)) / \ + (math.pi / 180.0) + + my = my * self.originShift / 180.0 + return mx, my + + def MetersToLatLon(self, mx, my): + "Converts XY point from Spherical Mercator EPSG:3857 to lat/lon in WGS84 Datum" + + lon = (mx / self.originShift) * 180.0 + 
lat = (my / self.originShift) * 180.0 + + lat = 180 / math.pi * \ + (2 * math.atan(math.exp(lat * math.pi / 180.0)) - math.pi / 2.0) + return lat, lon + + def PixelsToMeters(self, px, py, zoom): + "Converts pixel coordinates in given zoom level of pyramid to EPSG:3857" + + res = self.Resolution(zoom) + mx = px * res - self.originShift + my = py * res - self.originShift + return mx, my + + def MetersToPixels(self, mx, my, zoom): + "Converts EPSG:3857 to pyramid pixel coordinates in given zoom level" + + res = self.Resolution(zoom) + px = (mx + self.originShift) / res + py = (my + self.originShift) / res + return px, py + + def PixelsToTile(self, px, py): + "Returns a tile covering region in given pixel coordinates" + + tx = int(math.ceil(px / float(self.tileSize)) - 1) + ty = int(math.ceil(py / float(self.tileSize)) - 1) + return tx, ty + + def PixelsToRaster(self, px, py, zoom): + "Move the origin of pixel coordinates to top-left corner" + + mapSize = self.tileSize << zoom + return px, mapSize - py + + def MetersToTile(self, mx, my, zoom): + "Returns tile for given mercator coordinates" + + px, py = self.MetersToPixels(mx, my, zoom) + return self.PixelsToTile(px, py) + + def TileBounds(self, tx, ty, zoom): + "Returns bounds of the given tile in EPSG:3857 coordinates" + + minx, miny = self.PixelsToMeters( + tx*self.tileSize, ty*self.tileSize, zoom) + maxx, maxy = self.PixelsToMeters( + (tx+1)*self.tileSize, (ty+1)*self.tileSize, zoom) + return (minx, miny, maxx, maxy) + + def TileLatLonBounds(self, tx, ty, zoom): + "Returns bounds of the given tile in latitude/longitude using WGS84 datum" + + bounds = self.TileBounds(tx, ty, zoom) + minLat, minLon = self.MetersToLatLon(bounds[0], bounds[1]) + maxLat, maxLon = self.MetersToLatLon(bounds[2], bounds[3]) + + return (minLat, minLon, maxLat, maxLon) + + def Resolution(self, zoom): + "Resolution (meters/pixel) for given zoom level (measured at Equator)" + + # return (2 * math.pi * 6378137) / (self.tileSize * 2**zoom) + 
return self.initialResolution / (2**zoom) + + def ZoomForPixelSize(self, pixelSize): + "Maximal scaledown zoom of the pyramid closest to the pixelSize." + + for i in range(MAXZOOMLEVEL): + if pixelSize > self.Resolution(i): + if i != -1: + return i-1 + else: + return 0 # We don't want to scale up + + def GoogleTile(self, tx, ty, zoom): + "Converts TMS tile coordinates to Google Tile coordinates" + + # coordinate origin is moved from bottom-left to top-left corner of the extent + return tx, (2**zoom - 1) - ty + + def QuadTree(self, tx, ty, zoom): + "Converts TMS tile coordinates to Microsoft QuadTree" + + quadKey = "" + ty = (2**zoom - 1) - ty + for i in range(zoom, 0, -1): + digit = 0 + mask = 1 << (i-1) + if (tx & mask) != 0: + digit += 1 + if (ty & mask) != 0: + digit += 2 + quadKey += str(digit) + + return quadKey + + +class GlobalGeodetic(object): + r""" + TMS Global Geodetic Profile + --------------------------- + + Functions necessary for generation of global tiles in Plate Carre projection, + EPSG:4326, "unprojected profile". + + Such tiles are compatible with Google Earth (as any other EPSG:4326 rasters) + and you can overlay the tiles on top of OpenLayers base map. + + Pixel and tile coordinates are in TMS notation (origin [0,0] in bottom-left). + + What coordinate conversions do we need for TMS Global Geodetic tiles? + + Global Geodetic tiles are using geodetic coordinates (latitude,longitude) + directly as planar coordinates XY (it is also called Unprojected or Plate + Carre). We need only scaling to pixel pyramid and cutting to tiles. + Pyramid has on top level two tiles, so it is not square but rectangle. + Area [-180,-90,180,90] is scaled to 512x256 pixels. + TMS has coordinate origin (for pixels and tiles) in bottom-left corner. + Rasters are in EPSG:4326 and therefore are compatible with Google Earth. + + LatLon <-> Pixels <-> Tiles + + WGS84 coordinates Pixels in pyramid Tiles in pyramid + lat/lon XY pixels Z zoom XYZ from TMS + EPSG:4326 + .----. 
---- + / \ <-> /--------/ <-> TMS + \ / /--------------/ + ----- /--------------------/ + WMS, KML Web Clients, Google Earth TileMapService + """ + + def __init__(self, tmscompatible, tileSize=256): + self.tileSize = tileSize + if tmscompatible is not None: + # Defaults the resolution factor to 0.703125 (2 tiles @ level 0) + # Adhers to OSGeo TMS spec + # http://wiki.osgeo.org/wiki/Tile_Map_Service_Specification#global-geodetic + self.resFact = 180.0 / self.tileSize + else: + # Defaults the resolution factor to 1.40625 (1 tile @ level 0) + # Adheres OpenLayers, MapProxy, etc default resolution for WMTS + self.resFact = 360.0 / self.tileSize + + def LonLatToPixels(self, lon, lat, zoom): + "Converts lon/lat to pixel coordinates in given zoom of the EPSG:4326 pyramid" + + res = self.resFact / 2**zoom + px = (180 + lon) / res + py = (90 + lat) / res + return px, py + + def PixelsToTile(self, px, py): + "Returns coordinates of the tile covering region in pixel coordinates" + + tx = int(math.ceil(px / float(self.tileSize)) - 1) + ty = int(math.ceil(py / float(self.tileSize)) - 1) + return tx, ty + + def LonLatToTile(self, lon, lat, zoom): + "Returns the tile for zoom which covers given lon/lat coordinates" + + px, py = self.LonLatToPixels(lon, lat, zoom) + return self.PixelsToTile(px, py) + + def Resolution(self, zoom): + "Resolution (arc/pixel) for given zoom level (measured at Equator)" + + return self.resFact / 2**zoom + + def ZoomForPixelSize(self, pixelSize): + "Maximal scaledown zoom of the pyramid closest to the pixelSize." 
+ + for i in range(MAXZOOMLEVEL): + if pixelSize > self.Resolution(i): + if i != 0: + return i-1 + else: + return 0 # We don't want to scale up + + def TileBounds(self, tx, ty, zoom): + "Returns bounds of the given tile" + res = self.resFact / 2**zoom + return ( + tx*self.tileSize*res - 180, + ty*self.tileSize*res - 90, + (tx+1)*self.tileSize*res - 180, + (ty+1)*self.tileSize*res - 90 + ) + + def TileLatLonBounds(self, tx, ty, zoom): + "Returns bounds of the given tile in the SWNE form" + b = self.TileBounds(tx, ty, zoom) + return (b[1], b[0], b[3], b[2]) + + +class Zoomify(object): + """ + Tiles compatible with the Zoomify viewer + ---------------------------------------- + """ + + def __init__(self, width, height, tilesize=256, tileformat='jpg'): + """Initialization of the Zoomify tile tree""" + + self.tilesize = tilesize + self.tileformat = tileformat + imagesize = (width, height) + tiles = (math.ceil(width / tilesize), math.ceil(height / tilesize)) + + # Size (in tiles) for each tier of pyramid. + self.tierSizeInTiles = [] + self.tierSizeInTiles.append(tiles) + + # Image size in pixels for each pyramid tierself + self.tierImageSize = [] + self.tierImageSize.append(imagesize) + + while (imagesize[0] > tilesize or imagesize[1] > tilesize): + imagesize = (math.floor( + imagesize[0] / 2), math.floor(imagesize[1] / 2)) + tiles = (math.ceil(imagesize[0] / tilesize), + math.ceil(imagesize[1] / tilesize)) + self.tierSizeInTiles.append(tiles) + self.tierImageSize.append(imagesize) + + self.tierSizeInTiles.reverse() + self.tierImageSize.reverse() + + # Depth of the Zoomify pyramid, number of tiers (zoom levels) + self.numberOfTiers = len(self.tierSizeInTiles) + + # Number of tiles up to the given tier of pyramid. 
+ self.tileCountUpToTier = [] + self.tileCountUpToTier[0] = 0 + for i in range(1, self.numberOfTiers+1): + self.tileCountUpToTier.append( + self.tierSizeInTiles[i-1][0] * self.tierSizeInTiles[i-1][1] + + self.tileCountUpToTier[i-1] + ) + + def tilefilename(self, x, y, z): + """Returns filename for tile with given coordinates""" + + tileIndex = x + y * \ + self.tierSizeInTiles[z][0] + self.tileCountUpToTier[z] + return os.path.join("TileGroup%.0f" % math.floor(tileIndex / 256), + "%s-%s-%s.%s" % (z, x, y, self.tileformat)) + + +class Gdal2TilesError(Exception): + pass + + +class GDAL2Tiles(object): + + def process(self): + """The main processing function, runs all the main steps of processing""" + + # Opening and preprocessing of the input file + self.open_input() + + # Generation of main metadata files and HTML viewers + self.generate_metadata() + + # 1bto4b + if self.isDEMtile: + for z in range(self.tminz, self.tmaxz + int(abs(math.log(self.tilesize, 2) - 8))): # 1bto4b + self.generate_base_tiles(z) + print(' Zoom ' + str(z) + ' tiles done!') + else: + # Generation of the lowest tiles + self.generate_base_tiles(self.tmaxz) + + # Generation of the overview tiles (higher in the pyramid) + self.generate_overview_tiles() + + def error(self, msg, details=""): + """Print an error message and stop the processing""" + if details: + self.parser.error(msg + "\n\n" + details) + else: + self.parser.error(msg) + + def progressbar(self, complete=0.0): + """Print progressbar for float value 0..1""" + gdal.TermProgress_nocb(complete) + + def gettempfilename(self, suffix): + """Returns a temporary filename""" + if '_' in os.environ: + # tempfile.mktemp() crashes on some Wine versions (the one of Ubuntu 12.04 particularly) + if os.environ['_'].find('wine') >= 0: + tmpdir = '.' 
+ if 'TMP' in os.environ: + tmpdir = os.environ['TMP'] + import time + import random + random.seed(time.time()) + random_part = 'file%d' % random.randint(0, 1000000000) + return os.path.join(tmpdir, random_part + suffix) + + import tempfile + return tempfile.mktemp(suffix) + + def stop(self): + """Stop the rendering immediately""" + self.stopped = True + + def __init__(self, arguments): + """Constructor function - initialization""" + self.out_drv = None + self.mem_drv = None + self.in_ds = None + self.out_ds = None + self.out_srs = None + self.nativezoom = None + self.tminmax = None + self.tsize = None + self.mercator = None + self.geodetic = None + self.alphaband = None + self.dataBandsCount = None + self.out_gt = None + self.tileswne = None + self.swne = None + self.ominx = None + self.omaxx = None + self.omaxy = None + self.ominy = None + + # MMGIS + self.isRasterBounded = False + + # 1bto4b + self.isDEMtile = False + + # MMGIS + self.fminx = None + self.fmaxx = None + self.fminy = None + self.fmaxy = None + self.fPixelSize = None + + self.stopped = False + self.input = None + self.output = None + + # Tile format + self.tilesize = 256 + self.tiledriver = 'PNG' + self.tileext = 'png' + + # Should we read bigger window of the input raster and scale it down? + # Note: Modified later by open_input() + # Not for 'near' resampling + # Not for Wavelet based drivers (JPEG2000, ECW, MrSID) + # Not for 'raster' profile + self.scaledquery = True + # How big should be query window be for scaling down + # Later on reset according the chosen resampling algorightm + self.querysize = 4 * self.tilesize + + # Should we use Read on the input file for generating overview tiles? 
+ # Note: Modified later by open_input() + # Otherwise the overview tiles are generated from existing underlying tiles + self.overviewquery = False + + # RUN THE ARGUMENT PARSER: + + self.optparse_init() + self.options, self.args = self.parser.parse_args(args=arguments) + if not self.args: + self.error("No input file specified") + + # POSTPROCESSING OF PARSED ARGUMENTS: + + # Workaround for old versions of GDAL + try: + if ((self.options.verbose and self.options.resampling == 'near') or + gdal.TermProgress_nocb): + pass + except Exception: + self.error( + "This version of GDAL is not supported. Please upgrade to 1.6+.") + + # Is output directory the last argument? + + # Test output directory, if it doesn't exist + if (os.path.isdir(self.args[-1]) or + (len(self.args) > 1 and not os.path.exists(self.args[-1]))): + self.output = self.args[-1] + self.args = self.args[:-1] + + # More files on the input not directly supported yet + + if (len(self.args) > 1): + self.error("Processing of several input files is not supported.", + "Please first use a tool like gdal_vrtmerge.py or gdal_merge.py on the " + "files: gdal_vrtmerge.py -o merged.vrt %s" % " ".join(self.args)) + + self.input = self.args[0] + + # MMGIS + if self.options.extentworld: + extentworld = self.options.extentworld.split(",") + self.isRasterBounded = True + self.fminx = float(extentworld[0]) + self.fmaxx = float(extentworld[2]) + self.fminy = float(extentworld[3]) + self.fmaxy = float(extentworld[1]) + self.fPixelSize = float(extentworld[4]) + + # 1bto4b + if self.options.isDEMtile: + self.isDEMtile = True + self.tilesize = 32 + self.querysize = 4 * self.tilesize + + # Default values for not given options + + if not self.output: + # Directory with input filename without extension in actual directory + self.output = os.path.splitext(os.path.basename(self.input))[0] + + if not self.options.title: + self.options.title = os.path.basename(self.input) + + if self.options.url and not self.options.url.endswith('/'): 
+ self.options.url += '/' + if self.options.url: + self.options.url += os.path.basename(self.output) + '/' + + # Supported options + + self.resampling = None + + if self.options.resampling == 'average': + try: + if gdal.RegenerateOverview: + pass + except Exception: + self.error("'average' resampling algorithm is not available.", + "Please use -r 'near' argument or upgrade to newer version of GDAL.") + + elif self.options.resampling == 'antialias': + try: + if numpy: # pylint:disable=W0125 + pass + except Exception: + self.error("'antialias' resampling algorithm is not available.", + "Install PIL (Python Imaging Library) and numpy.") + + elif self.options.resampling == 'near': + self.resampling = gdal.GRA_NearestNeighbour + self.querysize = self.tilesize + + elif self.options.resampling == 'bilinear': + self.resampling = gdal.GRA_Bilinear + self.querysize = self.tilesize * 2 + + elif self.options.resampling == 'cubic': + self.resampling = gdal.GRA_Cubic + + elif self.options.resampling == 'cubicspline': + self.resampling = gdal.GRA_CubicSpline + + elif self.options.resampling == 'lanczos': + self.resampling = gdal.GRA_Lanczos + + # User specified zoom levels + self.tminz = None + self.tmaxz = None + if self.options.zoom: + minmax = self.options.zoom.split('-', 1) + minmax.extend(['']) + zoom_min, zoom_max = minmax[:2] + self.tminz = int(zoom_min) + if zoom_max: + self.tmaxz = int(zoom_max) + else: + self.tmaxz = int(zoom_min) + + # KML generation + self.kml = self.options.kml + + # Check if the input filename is full ascii or not + try: + os.path.basename(self.input).encode('ascii') + except UnicodeEncodeError: + full_ascii = False + else: + full_ascii = True + + # LC_CTYPE check + if not full_ascii and 'UTF-8' not in os.environ.get("LC_CTYPE", ""): + if not self.options.quiet: + print("\nWARNING: " + "You are running gdal2tiles.py with a LC_CTYPE environment variable that is " + "not UTF-8 compatible, and your input file contains non-ascii characters. 
" + "The generated sample googlemaps, openlayers or " + "leaflet files might contain some invalid characters as a result\n") + + # Output the results + if self.options.verbose: + print("Options:", self.options) + print("Input:", self.input) + print("Output:", self.output) + print("Cache: %s MB" % (gdal.GetCacheMax() / 1024 / 1024)) + print('') + + def optparse_init(self): + """Prepare the option parser for input (argv)""" + + from optparse import OptionParser, OptionGroup + usage = "Usage: %prog [options] input_file(s) [output]" + p = OptionParser(usage, version="%prog " + __version__) + p.add_option("-p", "--profile", dest='profile', + type='choice', choices=profile_list, + help=("Tile cutting profile (%s) - default 'mercator' " + "(Google Maps compatible)" % ",".join(profile_list))) + p.add_option("-r", "--resampling", dest="resampling", + type='choice', choices=resampling_list, + help="Resampling method (%s) - default 'average'" % ",".join(resampling_list)) + p.add_option('-s', '--s_srs', dest="s_srs", metavar="SRS", + help="The spatial reference system used for the source input data") + p.add_option('-z', '--zoom', dest="zoom", + help="Zoom levels to render (format:'2-5' or '10').") + p.add_option('-e', '--resume', dest="resume", action="store_true", + help="Resume mode. 
Generate only missing files.") + p.add_option('-a', '--srcnodata', dest="srcnodata", metavar="NODATA", + help="NODATA transparency value to assign to the input data") + p.add_option('-d', '--tmscompatible', dest="tmscompatible", action="store_true", + help=("When using the geodetic profile, specifies the base resolution " + "as 0.703125 or 2 tiles at zoom level 0.")) + p.add_option("-v", "--verbose", + action="store_true", dest="verbose", + help="Print status messages to stdout") + p.add_option("-q", "--quiet", + action="store_true", dest="quiet", + help="Disable messages and status to stdout") + # MMGIS + p.add_option("-x", "--extentworld", dest="extentworld", + help="The full world meter extent (comma-separated as minx,maxx,miny,maxy,pixelsize) of an inner raster profile.") + # 1bto4b + p.add_option("-m", "--dem", action="store_true", dest="isDEMtile", + help="Indicate if the input is a Digital Elevation Model") + # KML options + g = OptionGroup(p, "KML (Google Earth) options", + "Options for generated Google Earth SuperOverlay metadata") + g.add_option("-k", "--force-kml", dest='kml', action="store_true", + help=("Generate KML for Google Earth - default for 'geodetic' profile and " + "'raster' in EPSG:4326. 
For a dataset with different projection use " + "with caution!")) + g.add_option("-n", "--no-kml", dest='kml', action="store_false", + help="Avoid automatic generation of KML files for EPSG:4326") + g.add_option("-u", "--url", dest='url', + help="URL address where the generated tiles are going to be published") + p.add_option_group(g) + + # HTML options + g = OptionGroup(p, "Web viewer options", + "Options for generated HTML viewers a la Google Maps") + g.add_option("-w", "--webviewer", dest='webviewer', type='choice', choices=webviewer_list, + help="Web viewer to generate (%s) - default 'all'" % ",".join(webviewer_list)) + g.add_option("-t", "--title", dest='title', + help="Title of the map") + g.add_option("-c", "--copyright", dest='copyright', + help="Copyright for the map") + g.add_option("-g", "--googlekey", dest='googlekey', + help="Google Maps API key from http://code.google.com/apis/maps/signup.html") + g.add_option("-b", "--bingkey", dest='bingkey', + help="Bing Maps API key from https://www.bingmapsportal.com/") + p.add_option_group(g) + + p.set_defaults(verbose=False, profile="mercator", kml=False, url='', + webviewer='all', copyright='', resampling='average', resume=False, + googlekey='INSERT_YOUR_KEY_HERE', bingkey='INSERT_YOUR_KEY_HERE') + + self.parser = p + + # ------------------------------------------------------------------------- + def open_input(self): + """Initialization of the input raster, reprojection if necessary""" + gdal.AllRegister() + + self.out_drv = gdal.GetDriverByName(self.tiledriver) + self.mem_drv = gdal.GetDriverByName('MEM') + + if not self.out_drv: + raise Exception("The '%s' driver was not found, is it available in this GDAL build?", + self.tiledriver) + if not self.mem_drv: + raise Exception( + "The 'MEM' driver was not found, is it available in this GDAL build?") + + # Open the input file + + if self.input: + self.in_ds = gdal.Open(self.input, gdal.GA_ReadOnly) + else: + raise Exception("No input file was specified") + + if 
self.options.verbose: + print("Input file:", + "( %sP x %sL - %s bands)" % (self.in_ds.RasterXSize, self.in_ds.RasterYSize, + self.in_ds.RasterCount)) + + if not self.in_ds: + # Note: GDAL prints the ERROR message too + self.error( + "It is not possible to open the input file '%s'." % self.input) + + # Read metadata from the input file + if self.in_ds.RasterCount == 0: + self.error("Input file '%s' has no raster band" % self.input) + + if self.in_ds.GetRasterBand(1).GetRasterColorTable(): + self.error("Please convert this file to RGB/RGBA and run gdal2tiles on the result.", + "From paletted file you can create RGBA file (temp.vrt) by:\n" + "gdal_translate -of vrt -expand rgba %s temp.vrt\n" + "then run:\n" + "gdal2tiles temp.vrt" % self.input) + + # Get NODATA value + in_nodata = [] + for i in range(1, self.in_ds.RasterCount+1): + if self.in_ds.GetRasterBand(i).GetNoDataValue() is not None: + in_nodata.append(self.in_ds.GetRasterBand(i).GetNoDataValue()) + if self.options.srcnodata: + nds = list(map(float, self.options.srcnodata.split(','))) + if len(nds) < self.in_ds.RasterCount: + in_nodata = ( + nds * self.in_ds.RasterCount)[:self.in_ds.RasterCount] + else: + in_nodata = nds + + if self.options.verbose: + print("NODATA: %s" % in_nodata) + + if self.options.verbose: + print("Preprocessed file:", + "( %sP x %sL - %s bands)" % (self.in_ds.RasterXSize, self.in_ds.RasterYSize, + self.in_ds.RasterCount)) + + in_srs = None + + if self.options.s_srs: + in_srs = osr.SpatialReference() + in_srs.SetFromUserInput(self.options.s_srs) + in_srs_wkt = in_srs.ExportToWkt() + else: + in_srs_wkt = self.in_ds.GetProjection() + if not in_srs_wkt and self.in_ds.GetGCPCount() != 0: + in_srs_wkt = self.in_ds.GetGCPProjection() + if in_srs_wkt: + in_srs = osr.SpatialReference() + in_srs.ImportFromWkt(in_srs_wkt) + + self.out_srs = osr.SpatialReference() + + if self.options.profile == 'mercator': + self.out_srs.ImportFromEPSG(3857) + elif self.options.profile == 'geodetic': + 
            self.out_srs.ImportFromEPSG(4326)
+        else:
+            self.out_srs = in_srs
+
+        # Are the reference systems the same? Reproject if necessary.
+
+        self.out_ds = None
+
+        if self.options.profile in ('mercator', 'geodetic'):
+
+            if ((self.in_ds.GetGeoTransform() == (0.0, 1.0, 0.0, 0.0, 0.0, 1.0)) and
+                    (self.in_ds.GetGCPCount() == 0)):
+                self.error("There is no georeference - neither affine transformation (worldfile) "
+                           "nor GCPs. You can generate only 'raster' profile tiles.",
+                           "Either gdal2tiles with parameter -p 'raster' or use another GIS "
+                           "software for georeference e.g. gdal_transform -gcp / -a_ullr / -a_srs")
+
+            if in_srs:
+                if ((in_srs.ExportToProj4() != self.out_srs.ExportToProj4()) or
+                        (self.in_ds.GetGCPCount() != 0)):
+                    # Generation of VRT dataset in tile projection,
+                    # default 'nearest neighbour' warping
+                    self.out_ds = gdal.AutoCreateWarpedVRT(
+                        self.in_ds, in_srs_wkt, self.out_srs.ExportToWkt())
+
+                    if self.options.verbose:
+                        print("Warping of the raster by AutoCreateWarpedVRT "
+                              "(result saved into 'tiles.vrt')")
+                        self.out_ds.GetDriver().CreateCopy("tiles.vrt", self.out_ds)
+
+                    # Correction of AutoCreateWarpedVRT for NODATA values
+                    if in_nodata != []:
+                        tempfilename = self.gettempfilename('-gdal2tiles.vrt')
+                        self.out_ds.GetDriver().CreateCopy(tempfilename, self.out_ds)
+                        # open as a text file
+                        s = open(tempfilename).read()
+                        # Add the warping options
+                        s = s.replace(
+                            "<GDALWarpOptions>",
+                            """<GDALWarpOptions>
+      <Option name="UNIFIED_SRC_NODATA">YES</Option>
+      <Option name="INIT_DEST">NO_DATA</Option>""")
+                        # replace BandMapping tag for NODATA bands....
+                        for i in range(len(in_nodata)):
+                            s = s.replace(
+                                '<BandMapping src="%i" dst="%i"/>' % (
+                                    (i+1), (i+1)),
+                                """<BandMapping src="%i" dst="%i">
+          <SrcNoDataReal>%i</SrcNoDataReal>
+          <SrcNoDataImag>0</SrcNoDataImag>
+          <DstNoDataReal>%i</DstNoDataReal>
+          <DstNoDataImag>0</DstNoDataImag>
+        </BandMapping>""" % ((i+1), (i+1), in_nodata[i], in_nodata[i]))
+                        # save the corrected VRT
+                        open(tempfilename, "w").write(s)
+                        # open by GDAL as self.out_ds
+                        self.out_ds = gdal.Open(tempfilename)
+                        # delete the temporary file
+                        os.unlink(tempfilename)
+
+                        # set NODATA_VALUE metadata
+                        self.out_ds.SetMetadataItem(
+                            'NODATA_VALUES', ' '.join([str(i) for i in in_nodata]))
+
+                        if self.options.verbose:
+                            print("Modified warping result saved into 'tiles1.vrt'")
+                            open("tiles1.vrt", "w").write(s)
+
+                    # Correction of AutoCreateWarpedVRT for Mono (1 band) and RGB (3 bands) files
+                    # without NODATA:
+                    # equivalent of gdalwarp -dstalpha
+                    if in_nodata == [] and self.out_ds.RasterCount in [1, 3]:
+                        tempfilename = self.gettempfilename('-gdal2tiles.vrt')
+                        self.out_ds.GetDriver().CreateCopy(tempfilename, self.out_ds)
+                        # open as a text file
+                        s = open(tempfilename).read()
+                        # Add the warping options
+                        s = s.replace(
+                            "<BlockXSize>",
+                            """<VRTRasterBand dataType="Byte" band="%i" subClass="VRTWarpedRasterBand">
+    <ColorInterp>Alpha</ColorInterp>
+  </VRTRasterBand>
+  <BlockXSize>""" % (self.out_ds.RasterCount + 1))
+                        s = s.replace(
+                            "</GDALWarpOptions>",
+                            """<DstAlphaBand>%i</DstAlphaBand>
+  </GDALWarpOptions>""" % (self.out_ds.RasterCount + 1))
+                        s = s.replace(
+                            "</WorkingDataType>",
+                            """</WorkingDataType>
+    <Option name="INIT_DEST">0</Option>""")
+                        # save the corrected VRT
+                        open(tempfilename, "w").write(s)
+                        # open by GDAL as self.out_ds
+                        self.out_ds = gdal.Open(tempfilename)
+                        # delete the temporary file
+                        os.unlink(tempfilename)
+
+                        if self.options.verbose:
+                            print(
+                                "Modified -dstalpha warping result saved into 'tiles1.vrt'")
+                            open("tiles1.vrt", "w").write(s)
+                            s = '''
+                            '''
+
+            else:
+                self.error("Input file has unknown SRS.",
+                           "Use --s_srs ESPG:xyz (or similar) to provide source reference system.")
+
+            if self.out_ds and self.options.verbose:
+                print("Projected file:", "tiles.vrt", "( %sP x %sL - %s bands)" % (
+                    self.out_ds.RasterXSize, self.out_ds.RasterYSize, self.out_ds.RasterCount))
+
+        if not self.out_ds:
+            self.out_ds = self.in_ds
+
+        #
+        # Here we should have a raster (out_ds) in the correct Spatial Reference system
+        #
+ + # Get alpha band (either directly or from NODATA value) + self.alphaband = self.out_ds.GetRasterBand(1).GetMaskBand() + if ((self.alphaband.GetMaskFlags() & gdal.GMF_ALPHA) or + self.out_ds.RasterCount == 4 or + self.out_ds.RasterCount == 2): + self.dataBandsCount = self.out_ds.RasterCount - 1 + else: + self.dataBandsCount = self.out_ds.RasterCount + + # KML test + isepsg4326 = False + srs4326 = osr.SpatialReference() + srs4326.ImportFromEPSG(4326) + if self.out_srs and srs4326.ExportToProj4() == self.out_srs.ExportToProj4(): + self.kml = True + isepsg4326 = True + if self.options.verbose: + print("KML autotest OK!") + + # Read the georeference + self.out_gt = self.out_ds.GetGeoTransform() + + # Test the size of the pixel + + # Report error in case rotation/skew is in geotransform (possible only in 'raster' profile) + if (self.out_gt[2], self.out_gt[4]) != (0, 0): + self.error("Georeference of the raster contains rotation or skew. " + "Such raster is not supported. Please use gdalwarp first.") + + # Here we expect: pixel is square, no rotation on the raster + + # Output Bounds - coordinates in the output SRS + self.ominx = self.out_gt[0] + self.omaxx = self.out_gt[0] + self.out_ds.RasterXSize * self.out_gt[1] + self.omaxy = self.out_gt[3] + self.ominy = self.out_gt[3] - self.out_ds.RasterYSize * self.out_gt[1] + + # Note: maybe round(x, 14) to avoid the gdal_translate behaviour, when 0 becomes -1e-15 + + # MMGIS + def linearScale(domain, rang, value): + return ( + ((rang[1] - rang[0]) * (value - domain[0])) / + (domain[1] - domain[0]) + + rang[0] + ) + # MMGIS + self.out_ds.fRasterXSize = self.out_ds.RasterXSize + self.out_ds.fRasterYSize = self.out_ds.RasterYSize + self.out_ds.fRasterXOrigin = 0 + self.out_ds.fRasterYOrigin = 0 + self.out_ds.PixelSize = self.out_gt[1] + self.out_ds.fPixelSize = self.fPixelSize + # print("ominx", self.ominx, "omaxx", self.omaxx, "ominy", self.ominy, "omaxy", self.omaxy) + # print("fminx", self.fminx, "fmaxx", self.fmaxx, 
"fminy", self.fminy, "fmaxy", self.fmaxy) + if self.isRasterBounded: + self.out_ds.fRasterXSize = int(math.floor(self.out_ds.RasterXSize * (self.fmaxx - self.fminx) / ( + self.omaxx - self.ominx) * (self.out_ds.PixelSize / self.out_ds.fPixelSize))) + self.out_ds.fRasterYSize = int(math.ceil(self.out_ds.RasterYSize * (self.fmaxy - self.fminy) / ( + self.omaxy - self.ominy) * (self.out_ds.PixelSize / self.out_ds.fPixelSize))) + self.out_ds.fRasterXSizeRaw = int(math.floor( + self.out_ds.RasterXSize * (self.fmaxx - self.fminx) / (self.omaxx - self.ominx))) + self.out_ds.fRasterYSizeRaw = int(math.ceil( + self.out_ds.RasterYSize * (self.fmaxy - self.fminy) / (self.omaxy - self.ominy))) + # print("Full Raster Size: ", self.out_ds.fRasterXSize, self.out_ds.fRasterYSize ) + self.out_ds.fRasterXOrigin = int(math.floor(linearScale( + [self.fminx, self.fmaxx], [0, self.out_ds.fRasterXSize], self.out_gt[0]))) + self.out_ds.fRasterYOrigin = int(math.ceil(linearScale( + [self.fminy, self.fmaxy], [self.out_ds.fRasterYSize, 0], self.out_gt[3]))) + self.out_ds.fRasterXOriginRaw = int(math.floor(linearScale([self.fminx, self.fmaxx], [ + 0, self.out_ds.fRasterXSize], self.out_gt[0]) * (self.out_ds.fPixelSize / self.out_ds.PixelSize))) + self.out_ds.fRasterYOriginRaw = int(math.ceil(linearScale([self.fminy, self.fmaxy], [ + self.out_ds.fRasterYSize, 0], self.out_gt[3]) * (self.out_ds.fPixelSize / self.out_ds.PixelSize))) + self.out_ds.fRasterXWidth = int(math.floor(linearScale( + [self.fminx, self.fmaxx], [0, self.out_ds.fRasterXSize], self.omaxx))) - self.out_ds.fRasterXOrigin + self.out_ds.fRasterYHeight = int(math.ceil(linearScale( + [self.fminy, self.fmaxy], [0, self.out_ds.fRasterYSize], self.omaxy))) - self.out_ds.fRasterYOrigin + + if self.options.verbose: + print("Bounds (output srs):", round(self.ominx, 13), + self.ominy, self.omaxx, self.omaxy) + + # print("Input Raster Size: ", self.out_ds.RasterXSize, self.out_ds.RasterYSize) + # print("fmaxx-fminx", self.fmaxx - 
self.fminx, "omaxx-ominx", self.omaxx - self.ominx, "fmaxy-fminy", self.fmaxy - self.fminy, "omaxy-ominy", self.omaxy - self.ominy) + # print("Full Raster Size: ", self.out_ds.fRasterXSize, self.out_ds.fRasterYSize) + # print("Full Raster Size Raw: ", self.out_ds.fRasterXSizeRaw, self.out_ds.fRasterYSizeRaw) + # print("Raster Origin: ", self.out_ds.fRasterXOrigin, self.out_ds.fRasterYOrigin) + # print("Raster Origin Raw: ", self.out_ds.fRasterXOriginRaw, self.out_ds.fRasterYOriginRaw) + # print("Raster Width Height: ", self.out_ds.fRasterXWidth, self.out_ds.fRasterYHeight) + + # Calculating ranges for tiles in different zoom levels + if self.options.profile == 'mercator': + + self.mercator = GlobalMercator() + + # Function which generates SWNE in LatLong for given tile + self.tileswne = self.mercator.TileLatLonBounds + + # Generate table with min max tile coordinates for all zoomlevels + self.tminmax = list(range(0, 32)) + for tz in range(0, 32): + tminx, tminy = self.mercator.MetersToTile( + self.ominx, self.ominy, tz) + tmaxx, tmaxy = self.mercator.MetersToTile( + self.omaxx, self.omaxy, tz) + # crop tiles extending world limits (+-180,+-90) + tminx, tminy = max(0, tminx), max(0, tminy) + tmaxx, tmaxy = min(2**tz-1, tmaxx), min(2**tz-1, tmaxy) + self.tminmax[tz] = (tminx, tminy, tmaxx, tmaxy) + + # TODO: Maps crossing 180E (Alaska?) 
+ + # Get the minimal zoom level (map covers area equivalent to one tile) + if self.tminz is None: + self.tminz = self.mercator.ZoomForPixelSize( + self.out_gt[1] * max(self.out_ds.RasterXSize, + self.out_ds.RasterYSize) / float(self.tilesize)) + + # Get the maximal zoom level + # (closest possible zoom level up on the resolution of raster) + if self.tmaxz is None: + self.tmaxz = self.mercator.ZoomForPixelSize(self.out_gt[1]) + + if self.options.verbose: + print("Bounds (latlong):", + self.mercator.MetersToLatLon(self.ominx, self.ominy), + self.mercator.MetersToLatLon(self.omaxx, self.omaxy)) + print('MinZoomLevel:', self.tminz) + print("MaxZoomLevel:", + self.tmaxz, + "(", + self.mercator.Resolution(self.tmaxz), + ")") + + if self.options.profile == 'geodetic': + + self.geodetic = GlobalGeodetic(self.options.tmscompatible) + + # Function which generates SWNE in LatLong for given tile + self.tileswne = self.geodetic.TileLatLonBounds + + # Generate table with min max tile coordinates for all zoomlevels + self.tminmax = list(range(0, 32)) + for tz in range(0, 32): + tminx, tminy = self.geodetic.LonLatToTile( + self.ominx, self.ominy, tz) + tmaxx, tmaxy = self.geodetic.LonLatToTile( + self.omaxx, self.omaxy, tz) + # crop tiles extending world limits (+-180,+-90) + tminx, tminy = max(0, tminx), max(0, tminy) + tmaxx, tmaxy = min(2**(tz+1)-1, tmaxx), min(2**tz-1, tmaxy) + self.tminmax[tz] = (tminx, tminy, tmaxx, tmaxy) + + # TODO: Maps crossing 180E (Alaska?) 
+ + # Get the maximal zoom level + # (closest possible zoom level up on the resolution of raster) + if self.tminz is None: + self.tminz = self.geodetic.ZoomForPixelSize( + self.out_gt[1] * max(self.out_ds.RasterXSize, + self.out_ds.RasterYSize) / float(self.tilesize)) + + # Get the maximal zoom level + # (closest possible zoom level up on the resolution of raster) + if self.tmaxz is None: + self.tmaxz = self.geodetic.ZoomForPixelSize(self.out_gt[1]) + + if self.options.verbose: + print("Bounds (latlong):", self.ominx, + self.ominy, self.omaxx, self.omaxy) + + # MMGIS + if self.options.profile == 'raster' and self.isRasterBounded: + + def log2(x): + return math.log10(x) / math.log10(2) + + # MMGIS added 'f'* + self.nativezoom = int( + max(math.ceil(log2(self.out_ds.fRasterXSizeRaw/float(self.tilesize))), + math.ceil(log2(self.out_ds.fRasterYSizeRaw/float(self.tilesize))))) + + self.basenativezoom = int( + max(math.ceil(log2(self.out_ds.fRasterXSize/float(self.tilesize))), + math.ceil(log2(self.out_ds.fRasterYSize/float(self.tilesize))))) + + # MMGIS + self.out_ds.fWorldXSize = int( + float(self.out_ds.fRasterXSize) * (2**(self.nativezoom - self.basenativezoom))) + self.out_ds.fWorldYSize = int( + float(self.out_ds.fRasterYSize) * (2**(self.nativezoom - self.basenativezoom))) + self.out_ds.fRasterXOriginWorld = int(float( + self.out_ds.fWorldXSize) * (float(self.out_ds.fRasterXOrigin) / self.out_ds.fRasterXSize)) + self.out_ds.fRasterYOriginWorld = int(float( + self.out_ds.fWorldYSize) * (float(self.out_ds.fRasterYOrigin) / self.out_ds.fRasterYSize)) + self.out_ds.fRasterXSizeWorld = int(float( + self.out_ds.fWorldXSize) * (float(self.out_ds.fRasterXWidth) / self.out_ds.fRasterXSize)) + self.out_ds.fRasterYSizeWorld = int(float( + self.out_ds.RasterYSize) * (float(self.out_ds.fRasterXSizeWorld) / self.out_ds.RasterXSize)) + # print("World Size", self.out_ds.fWorldXSize, self.out_ds.fWorldYSize) + # print("Raster Origin World", self.out_ds.fRasterXOriginWorld, 
self.out_ds.fRasterYOriginWorld) + # print("Raster Size World", self.out_ds.fRasterXSizeWorld, self.out_ds.fRasterYSizeWorld) + + if self.options.verbose: + print("Native zoom of the raster:", self.nativezoom) + + # Get the minimal zoom level (whole raster in one tile) + if self.tminz is None: + self.tminz = 0 + + # Get the maximal zoom level (native resolution of the raster) + if self.tmaxz is None: + self.tmaxz = self.nativezoom + + # MMGIS added 'f'* + # Generate table with min max tile coordinates for all zoomlevels + self.tminmax = list(range(0, self.tmaxz+1)) + self.tsize = list(range(0, self.tmaxz+1)) + # print("Raster Size:", self.out_ds.RasterXSize,self.out_ds.RasterYSize) + # print("Pixel Size Ratio:", (self.out_ds.fPixelSize / self.out_ds.PixelSize)) + # print("nativezoom", self.nativezoom, "basenativezoom", self.basenativezoom, "tminz", self.tminz, "tmaxz", self.tmaxz) + for tz in range(0, self.tmaxz+1): + tsize = 2.0**(self.tmaxz-tz)*self.tilesize + toffsetx = int(math.floor( + 2.0**(tz) * self.out_ds.fRasterXOriginRaw / self.out_ds.fRasterXSizeRaw)) + toffsety = int(math.floor( + 2.0**(tz) * (self.out_ds.fRasterYOriginRaw) / self.out_ds.fRasterYSizeRaw)) + # print("tsize", tsize, "toffsetx", toffsetx, "toffsety", toffsety) + toffsetx = int(math.floor( + self.out_ds.fRasterXOriginWorld / tsize)) + toffsety = int(math.floor( + self.out_ds.fRasterYOriginWorld / tsize)) + # print("tsize", tsize, "toffsetx", toffsetx, "toffsety", toffsety) + tmaxx = int(math.floor( + self.out_ds.fRasterXSizeWorld / tsize)) + toffsetx + 1 + + tmaxy = int(math.floor( + self.out_ds.fRasterYSizeWorld / tsize)) + toffsety + 1 + self.tsize[tz] = math.ceil(tsize) + #tminx = toffsetx + tminx = int(tmaxx - ((tmaxx - toffsetx) / (0.75))) - 1 + tminy = int(tmaxy - ((tmaxy - toffsety) / (0.75))) - 1 + + self.tminmax[tz] = (tminx, tminy, tmaxx, tmaxy) + # print("tminx", tminx, "tminy", tminy, "tmaxx", tmaxx, "tmaxy", tmaxy, "tz", tz) + + elif self.options.profile == 'raster': + + def 
log2(x): + return math.log10(x) / math.log10(2) + self.nativezoom = int( + max(math.ceil(log2(self.out_ds.RasterXSize/float(self.tilesize))), + math.ceil(log2(self.out_ds.RasterYSize/float(self.tilesize))))) + + if self.options.verbose: + print("Native zoom of the raster:", self.nativezoom) + + # Get the minimal zoom level (whole raster in one tile) + if self.tminz is None: + self.tminz = 0 + + # Get the maximal zoom level (native resolution of the raster) + if self.tmaxz is None: + self.tmaxz = self.nativezoom + + # Generate table with min max tile coordinates for all zoomlevels + self.tminmax = list(range(0, self.tmaxz+1)) + self.tsize = list(range(0, self.tmaxz+1)) + for tz in range(0, self.tmaxz+1): + tsize = 2.0**(self.tmaxz-tz)*self.tilesize + tminx, tminy = 0, 0 + tmaxx = int(math.ceil(self.out_ds.RasterXSize / tsize)) - 1 + tmaxy = int(math.ceil(self.out_ds.RasterYSize / tsize)) - 1 + self.tsize[tz] = math.ceil(tsize) + self.tminmax[tz] = (tminx, tminy, tmaxx, tmaxy) + + # Function which generates SWNE in LatLong for given tile + if self.kml and in_srs_wkt: + ct = osr.CoordinateTransformation(in_srs, srs4326) + + def rastertileswne(x, y, z): + # X-pixel size in level + pixelsizex = (2**(self.tmaxz-z) * self.out_gt[1]) + west = self.out_gt[0] + x*self.tilesize*pixelsizex + east = west + self.tilesize*pixelsizex + south = self.ominy + y*self.tilesize*pixelsizex + north = south + self.tilesize*pixelsizex + if not isepsg4326: + # Transformation to EPSG:4326 (WGS84 datum) + west, south = ct.TransformPoint(west, south)[:2] + east, north = ct.TransformPoint(east, north)[:2] + return south, west, north, east + + self.tileswne = rastertileswne + else: + self.tileswne = lambda x, y, z: (0, 0, 0, 0) # noqa + + def generate_metadata(self): + """ + Generation of main metadata files and HTML viewers (metadata related to particular + tiles are generated during the tile processing). 
+ """ + + if not os.path.exists(self.output): + os.makedirs(self.output) + + if self.options.profile == 'mercator': + + south, west = self.mercator.MetersToLatLon(self.ominx, self.ominy) + north, east = self.mercator.MetersToLatLon(self.omaxx, self.omaxy) + south, west = max(-85.05112878, south), max(-180.0, west) + north, east = min(85.05112878, north), min(180.0, east) + self.swne = (south, west, north, east) + + # Generate googlemaps.html + if self.options.webviewer in ('all', 'google') and self.options.profile == 'mercator': + if (not self.options.resume or not + os.path.exists(os.path.join(self.output, 'googlemaps.html'))): + f = open(os.path.join(self.output, 'googlemaps.html'), 'wb') + f.write(self.generate_googlemaps().encode('utf-8')) + f.close() + + # Generate openlayers.html + if self.options.webviewer in ('all', 'openlayers'): + if (not self.options.resume or not + os.path.exists(os.path.join(self.output, 'openlayers.html'))): + f = open(os.path.join(self.output, 'openlayers.html'), 'wb') + f.write(self.generate_openlayers().encode('utf-8')) + f.close() + + # Generate leaflet.html + if self.options.webviewer in ('all', 'leaflet'): + if (not self.options.resume or not + os.path.exists(os.path.join(self.output, 'leaflet.html'))): + f = open(os.path.join(self.output, 'leaflet.html'), 'wb') + f.write(self.generate_leaflet().encode('utf-8')) + f.close() + + elif self.options.profile == 'geodetic': + + west, south = self.ominx, self.ominy + east, north = self.omaxx, self.omaxy + south, west = max(-90.0, south), max(-180.0, west) + north, east = min(90.0, north), min(180.0, east) + self.swne = (south, west, north, east) + + # Generate openlayers.html + if self.options.webviewer in ('all', 'openlayers'): + if (not self.options.resume or not + os.path.exists(os.path.join(self.output, 'openlayers.html'))): + f = open(os.path.join(self.output, 'openlayers.html'), 'wb') + f.write(self.generate_openlayers().encode('utf-8')) + f.close() + + elif self.options.profile 
== 'raster': + + west, south = self.ominx, self.ominy + east, north = self.omaxx, self.omaxy + + # MMGIS + if self.isRasterBounded: + west = self.fminx + east = self.fmaxx + south = self.fminy + north = self.fmaxy + + self.swne = (south, west, north, east) + + # Generate openlayers.html + if self.options.webviewer in ('all', 'openlayers'): + if (not self.options.resume or not + os.path.exists(os.path.join(self.output, 'openlayers.html'))): + f = open(os.path.join(self.output, 'openlayers.html'), 'wb') + f.write(self.generate_openlayers().encode('utf-8')) + f.close() + + # Generate tilemapresource.xml. + if not self.options.resume or not os.path.exists(os.path.join(self.output, 'tilemapresource.xml')): + f = open(os.path.join(self.output, 'tilemapresource.xml'), 'wb') + f.write(self.generate_tilemapresource().encode('utf-8')) + f.close() + + if self.kml: + # TODO: Maybe problem for not automatically generated tminz + # The root KML should contain links to all tiles in the tminz level + children = [] + xmin, ymin, xmax, ymax = self.tminmax[self.tminz] + for x in range(xmin, xmax+1): + for y in range(ymin, ymax+1): + children.append([x, y, self.tminz]) + # Generate Root KML + if self.kml: + if (not self.options.resume or not + os.path.exists(os.path.join(self.output, 'doc.kml'))): + f = open(os.path.join(self.output, 'doc.kml'), 'wb') + f.write(self.generate_kml( + None, None, None, children).encode('utf-8')) + f.close() + + def generate_base_tiles(self, tz): + """ + Generation of the base tiles (the lowest in the pyramid) directly from the input raster + """ + + if self.isDEMtile: + print("Generating Tiles at Zoom " + str(tz) + ": ") + + if not self.options.quiet: + print("Generating Base Tiles:") + + if self.options.verbose: + print('') + print("Tiles generated from the max zoom level:") + print("----------------------------------------") + print('') + + ds = self.out_ds + + querysize = self.querysize + + # 1bto4b + if self.isDEMtile: + tilebands = 4 + querysize = 
self.tilesize + else: + tilebands = self.dataBandsCount + 1 + tz = self.tmaxz + + try: + self.tminmax[tz] + except IndexError: + print(" Won't make zoom level " + str(tz)) + return + + # Set the bounds + tminx, tminy, tmaxx, tmaxy = self.tminmax[tz] + + if self.options.verbose: + print("dataBandsCount: ", self.dataBandsCount) + print("tilebands: ", tilebands) + + tcount = (1+abs(tmaxx-tminx)) * (1+abs(tmaxy-tminy)) + ti = 0 + + for ty in range(tmaxy, tminy-1, -1): + for tx in range(tminx, tmaxx+1): + + if self.stopped: + break + ti += 1 + tilefilename = os.path.join( + self.output, str(tz), str(tx), "%s.%s" % (ty, self.tileext)) + if self.options.verbose: + print(ti, '/', tcount, tilefilename) + + if self.options.resume and os.path.exists(tilefilename): + if self.options.verbose: + print("Tile generation skipped because of --resume") + else: + self.progressbar(ti / float(tcount)) + continue + + # Create directories for the tile + if not os.path.exists(os.path.dirname(tilefilename)): + os.makedirs(os.path.dirname(tilefilename)) + + if self.options.profile == 'mercator': + # Tile bounds in EPSG:3857 + b = self.mercator.TileBounds(tx, ty, tz) + elif self.options.profile == 'geodetic': + b = self.geodetic.TileBounds(tx, ty, tz) + + # Don't scale up by nearest neighbour, better change the querysize + # to the native resolution (and return smaller query tile) for scaling + + if self.options.profile in ('mercator', 'geodetic'): + rb, wb = self.geo_query(ds, b[0], b[3], b[2], b[1]) + + # Pixel size in the raster covering query geo extent + nativesize = wb[0] + wb[2] + if self.options.verbose: + print("\tNative Extent (querysize", + nativesize, "): ", rb, wb) + + # Tile bounds in raster coordinates for ReadRaster query + rb, wb = self.geo_query( + ds, b[0], b[3], b[2], b[1], querysize=querysize) + + rx, ry, rxsize, rysize = rb + wx, wy, wxsize, wysize = wb + wxsize -= 1 # 1bto4b + wysize -= 1 # 1bto4b + + # MMGIS + elif self.isRasterBounded: # 'raster' profile: + + # 
tilesize in raster coordinates for actual zoom + tsize = int(self.tsize[tz]) + xsize = self.out_ds.fWorldXSize + ysize = self.out_ds.fWorldYSize + if tz >= self.tmaxz: + querysize = self.tilesize + + rx = (tx) * tsize - self.out_ds.fRasterXOriginWorld + #print("rx", rx) + rxsize = 0 + rxsize = tsize + + rysize = 0 + rysize = tsize + + ry = ysize - (ty * tsize) - rysize - \ + self.out_ds.fRasterYOriginWorld + + wx, wy = 0, 0 + wxsize = int(rxsize/float(tsize) * self.tilesize) + wysize = int(rysize/float(tsize) * self.tilesize) + if wysize != self.tilesize: + wy = self.tilesize - wysize + + if rx < 0: + rxsize = tsize + rx + wx = -rx + wxsize = int(rxsize/float(tsize) * self.tilesize) + rx = 0 + if ry < 0: + rysize = tsize + ry + wy = -ry + wysize = int(rysize/float(tsize) * self.tilesize) + ry = 0 + if rx + rxsize > self.out_ds.fRasterXSizeWorld: + rxsize = self.out_ds.fRasterXSizeWorld - rx + wxsize = int(rxsize/float(tsize) * self.tilesize) + if ry + rysize > self.out_ds.fRasterYSizeWorld: + rysize = self.out_ds.fRasterYSizeWorld - ry + wysize = int(rysize/float(tsize) * self.tilesize) + + # Convert rx, ry back to non-world coordinates + rx = int(float(self.out_ds.RasterXSize) * + (float(rx) / self.out_ds.fRasterXSizeWorld)) + ry = int(float(self.out_ds.RasterYSize) * + (float(ry) / self.out_ds.fRasterYSizeWorld)) + rxsize = int(float(self.out_ds.RasterXSize) * + (float(rxsize) / self.out_ds.fRasterXSizeWorld)) + rysize = int(float(self.out_ds.RasterYSize) * + (float(rysize) / self.out_ds.fRasterYSizeWorld)) + + wxsize -= 1 # 1bto4b + wysize -= 1 # 1bto4b + + #print("Extent: ", (tx, ty, tz, tsize), (rx, ry, rxsize, rysize), (wx, wy, wxsize, wysize), (self.out_ds.fRasterXOrigin, self.out_ds.fRasterYOrigin)) + else: # 'raster' profile: + # tilesize in raster coordinates for actual zoom + tsize = int(self.tsize[tz]) + xsize = self.out_ds.RasterXSize # size of the raster in pixels + ysize = self.out_ds.RasterYSize + if tz >= self.tmaxz: + querysize = self.tilesize + + 
rx = (tx) * tsize + rxsize = 0 + if tx == tmaxx: + rxsize = xsize % tsize + if rxsize == 0: + rxsize = tsize + + rysize = 0 + if ty == tmaxy: + rysize = ysize % tsize + if rysize == 0: + rysize = tsize + ry = ysize - (ty * tsize) - rysize + + wx, wy = 0, 0 + wxsize = int(rxsize/float(tsize) * self.tilesize) + wysize = int(rysize/float(tsize) * self.tilesize) + if wysize != self.tilesize: + wy = self.tilesize - wysize + + if self.options.verbose: + print("\tReadRaster Extent: ", + (rx, ry, rxsize, rysize), (wx, wy, wxsize, wysize)) + + # Query is in 'nearest neighbour' but can be bigger in then the tilesize + # We scale down the query to the tilesize by supplied algorithm. + + # Tile dataset in memory + + # 1bto4b + if self.isDEMtile: + dstile = self.mem_drv.Create( + '', self.tilesize, self.tilesize, tilebands, gdal.GDT_Byte) + else: + dstile = self.mem_drv.Create( + '', self.tilesize, self.tilesize, tilebands) + + data = alpha = None + # Read the source raster if anything is going inside the tile as per the computed + # geo_query + if rxsize != 0 and rysize != 0 and wxsize != 0 and wysize != 0: + # 1bto4b + if self.isDEMtile: + data = ds.GetRasterBand(1).ReadRaster( + rx, ry, rxsize, rysize, wxsize, wysize, buf_type=gdal.GDT_Float32) + else: + data = ds.ReadRaster(rx, ry, rxsize, rysize, wxsize, wysize, + band_list=list(range(1, self.dataBandsCount+1))) + alpha = self.alphaband.ReadRaster( + rx, ry, rxsize, rysize, wxsize, wysize) + + # The tile in memory is a transparent file by default. 
Write pixel values into it if + # any + if data: + # 1bto4b - both this full if and else + if self.isDEMtile: + if (wxsize * wysize) > 0: + data = struct.unpack('f' * wxsize * wysize, data) + else: + return + + if self.tilesize == querysize: + # Interpolate the values from four surrounding + + # This takes our 1d list of WxH data and pads it with a rect of none values + dataPad = list(data) + for i in reversed(range(1, wysize)): + dataPad.insert(wxsize * i, 0) + dataPad.insert(wxsize * i, 0) + for i in range(wxsize + 3): + dataPad.insert(0, 0) + for i in range(wxsize + 3): + dataPad.append(0) + + dataIn = [] + # Resample based on average of four + # averaging over: i, i + 1, i + wxsize, i + wxsize + 1 + for y in range(wysize+2 - 1): + for x in range(wxsize+2 - 1): + i = x+(y*(wxsize+2)) + nW = dataPad[i] + nE = dataPad[i+1] + sW = dataPad[i+(wxsize+2)] + sE = dataPad[i+(wxsize+2)+1] + dataIn.append((nW + nE + sW + sE)/float(4)) + + # Get the surrounding eight tiles + # Get NW + if tx - 1 >= tminx and ty + 1 <= tmaxy: + rxNW, ryNW, rxsizeNW, rysizeNW, wxsizeNW, wysizeNW = getTilePxBounds(self, + tx - 1, ty + 1, tz, ds) + wxsizeNW -= 1 + wysizeNW -= 1 + if wxsizeNW != 0 and wysizeNW != 0: + dataNW = ds.GetRasterBand(1).ReadRaster( + rxNW, ryNW, rxsizeNW, rysizeNW, wxsizeNW, wysizeNW, buf_type=gdal.GDT_Float32) + if dataNW is not None and (wxsizeNW * wysizeNW) > 0: + dataNW = struct.unpack( + 'f' * wxsizeNW * wysizeNW, dataNW) + else: + dataNW = None + else: + dataNW = None + + # Get N + if ty + 1 <= tmaxy: + rxN, ryN, rxsizeN, rysizeN, wxsizeN, wysizeN = getTilePxBounds( + self, tx, ty + 1, tz, ds) + wxsizeN -= 1 + wysizeN -= 1 + if wxsizeN != 0 and wysizeN != 0: + dataN = ds.GetRasterBand(1).ReadRaster( + rxN, ryN, rxsizeN, rysizeN, wxsizeN, wysizeN, buf_type=gdal.GDT_Float32) + if dataN is not None and (wxsizeN * wysizeN) > 0: + dataN = struct.unpack( + 'f' * wxsizeN * wysizeN, dataN) + else: + dataN = None + else: + dataN = None + # Get NE + if tx + 1 <= tmaxx and 
ty + 1 <= tmaxy: + rxNE, ryNE, rxsizeNE, rysizeNE, wxsizeNE, wysizeNE = getTilePxBounds( + self, tx + 1, ty + 1, tz, ds) + wxsizeNE -= 1 + wysizeNE -= 1 + if wxsizeNE != 0 and wysizeNE != 0: + dataNE = ds.GetRasterBand(1).ReadRaster( + rxNE, ryNE, rxsizeNE, rysizeNE, wxsizeNE, wysizeNE, buf_type=gdal.GDT_Float32) + if dataNE is not None and (wxsizeNE * wysizeNE) > 0: + dataNE = struct.unpack( + 'f' * wxsizeNE * wysizeNE, dataNE) + else: + dataNE = None + else: + dataNE = None + # Get E + if tx + 1 <= tmaxx: + rxE, ryE, rxsizeE, rysizeE, wxsizeE, wysizeE = getTilePxBounds( + self, tx + 1, ty, tz, ds) + wxsizeE -= 1 + wysizeE -= 1 + if wxsizeE != 0 and wysizeE != 0: + dataE = ds.GetRasterBand(1).ReadRaster( + rxE, ryE, rxsizeE, rysizeE, wxsizeE, wysizeE, buf_type=gdal.GDT_Float32) + if dataE is not None and (wxsizeE * wysizeE) > 0: + dataE = struct.unpack( + 'f' * wxsizeE * wysizeE, dataE) + else: + dataE = None + else: + dataE = None + # Get SE + if tx + 1 <= tmaxx and ty - 1 >= tminy: + rxSE, rySE, rxsizeSE, rysizeSE, wxsizeSE, wysizeSE = getTilePxBounds( + self, tx + 1, ty - 1, tz, ds) + wxsizeSE -= 1 + wysizeSE -= 1 + if wxsizeSE != 0 and wysizeSE != 0: + dataSE = ds.GetRasterBand(1).ReadRaster( + rxSE, rySE, rxsizeSE, rysizeSE, wxsizeSE, wysizeSE, buf_type=gdal.GDT_Float32) + if dataSE is not None and (wxsizeSE * wysizeSE) > 0: + dataSE = struct.unpack( + 'f' * wxsizeSE * wysizeSE, dataSE) + else: + dataSE = None + else: + dataSE = None + # Get S + if ty - 1 >= tminy: + rxS, ryS, rxsizeS, rysizeS, wxsizeS, wysizeS = getTilePxBounds( + self, tx, ty - 1, tz, ds) + wxsizeS -= 1 + wysizeS -= 1 + if wxsizeS != 0 and wysizeS != 0: + dataS = ds.GetRasterBand(1).ReadRaster( + rxS, ryS, rxsizeS, rysizeS, wxsizeS, wysizeS, buf_type=gdal.GDT_Float32) + if dataS is not None and (wxsizeS * wysizeS) > 0: + dataS = struct.unpack( + 'f' * wxsizeS * wysizeS, dataS) + else: + dataS = None + else: + dataS = None + # Get SW + if tx - 1 >= tminx and ty - 1 >= tminy: + rxSW, rySW, 
rxsizeSW, rysizeSW, wxsizeSW, wysizeSW = getTilePxBounds( + self, tx - 1, ty - 1, tz, ds) + wxsizeSW -= 1 + wysizeSW -= 1 + if wxsizeSW != 0 and wysizeSW != 0: + dataSW = ds.GetRasterBand(1).ReadRaster( + rxSW, rySW, rxsizeSW, rysizeSW, wxsizeSW, wysizeSW, buf_type=gdal.GDT_Float32) + if dataSW is not None and (wxsizeSW * wysizeSW) > 0: + dataSW = struct.unpack( + 'f' * wxsizeSW * wysizeSW, dataSW) + else: + dataSW = None + else: + dataSW = None + # Get W + if tx - 1 >= tminx: + rxW, ryW, rxsizeW, rysizeW, wxsizeW, wysizeW = getTilePxBounds( + self, tx - 1, ty, tz, ds) + wxsizeW -= 1 + wysizeW -= 1 + if wxsizeW != 0 and wysizeW != 0: + dataW = ds.GetRasterBand(1).ReadRaster( + rxW, ryW, rxsizeW, rysizeW, wxsizeW, wysizeW, buf_type=gdal.GDT_Float32) + if dataW is not None and (wxsizeW * wysizeW) > 0: + dataW = struct.unpack( + 'f' * wxsizeW * wysizeW, dataW) + else: + dataW = None + else: + dataW = None + + # NW (uses N, NW, W) + fN = fNE = fE = fSE = fS = fSW = fW = fNW = 0 + values = 1 + if dataN is not None: + fN = dataN[len(dataN)-wxsizeN] + values = values + 1 + if dataNW is not None: + fNW = dataNW[len(dataNW)-1] + values = values + 1 + if dataW is not None: + fW = dataW[wxsizeW-1] + values = values + 1 + dataIn[0] = ((dataIn[0]*4) + fN + + fNW + fW)/float(values) + + # NE (uses N, NE, E) + fN = fNE = fE = fSE = fS = fSW = fW = fNW = 0 + values = 1 + if dataN is not None: + fN = dataN[len(dataN)-1] + values = values + 1 + if dataNE is not None: + fNE = dataNE[len(dataNE)-wxsizeNE] + values = values + 1 + if dataE is not None: + fE = dataE[0] + values = values + 1 + dataIn[wxsize] = ( + (dataIn[wxsize]*4) + fN + fNE + fE)/float(values) + + # SE (uses S, SE, E) + fN = fNE = fE = fSE = fS = fSW = fW = fNW = 0 + values = 1 + if dataS is not None: + fS = dataS[wxsizeS-1] + values = values + 1 + if dataSE is not None: + fSE = dataSE[0] + values = values + 1 + if dataE is not None: + fE = dataE[len(dataE)-wxsizeE] + values = values + 1 + dataIn[len(dataIn)-1] = 
((dataIn[len(dataIn)-1] + * 4) + fS + fSE + fE)/float(values) + + # SW (uses S, SW, W) + fN = fNE = fE = fSE = fS = fSW = fW = fNW = 0 + values = 1 + if dataS is not None: + fS = dataS[0] + values = values + 1 + if dataSW is not None: + fSW = dataSW[wxsizeSW-1] + values = values + 1 + if dataW is not None: + fW = dataW[len(dataW)-1] + values = values + 1 + dataIn[len( + dataIn)-wxsize-1] = ((dataIn[len(dataIn)-wxsize-1]*4) + fS + fSW + fW)/float(values) + + # Then the edges minus corners + # N + if dataN is not None: + for i in range(1, wxsize): + dataIn[i] = ( + (dataIn[i]*4) + dataN[len(dataN)-wxsizeN-1+i] + dataN[len(dataN)-wxsizeN-1+i+1])/float(4) + else: + for i in range(1, wxsize): + dataIn[i] = (dataIn[i]*4)/float(2) + + # E + if dataE is not None: + for i in range(1, wysize): + dataIn[((i+1)*(wxsize+1)-1)] = ((dataIn[((i+1)*(wxsize+1)-1)] + * 4) + dataE[(i-1)*wxsizeE] + dataE[i*wxsizeE])/float(4) + else: + for i in range(1, wysize): + dataIn[( + (i+1)*(wxsize+1)-1)] = (dataIn[((i+1)*(wxsize+1)-1)]*4)/float(2) + + # S + if dataS is not None: + for i in range(1, wxsize): + dataIn[len(dataIn)-wxsize-1+i] = ( + (dataIn[len(dataIn)-wxsize-1+i]*4) + dataS[i-1] + dataS[i])/float(4) + else: + for i in range(1, wxsize): + dataIn[len( + dataIn)-wxsize-1+i] = (dataIn[len(dataIn)-wxsize-1+i]*4)/float(2) + + # W + if dataW is not None: + for i in range(1, wysize): + dataIn[(i)*(wxsize+1)] = ((dataIn[(i)*(wxsize+1)]*4) + + dataW[i*wxsizeW-1] + dataW[(i+1)*wxsizeW-1])/float(4) + else: + for i in range(1, wysize): + dataIn[(i)*(wxsize+1)] = (dataIn[(i) + * (wxsize+1)]*4)/float(2) + + data1 = [] + data2 = [] + data3 = [] + data4 = [] + for f in dataIn: + f = str(binary(f)) + data1.append(int(f[:8], 2)) + data2.append(int(f[8:16], 2)) + data3.append(int(f[16:24], 2)) + data4.append(int(f[24:], 2)) + + data1s = '' + data2s = '' + data3s = '' + data4s = '' + indx = 0 + for v in data1: + data1s += struct.pack('B', data1[indx]) + data2s += struct.pack('B', data2[indx]) + data3s 
+= struct.pack('B', data3[indx]) + data4s += struct.pack('B', data4[indx]) + indx += 1 + dstile.GetRasterBand(1).WriteRaster( + wx, wy, wxsize + 1, wysize + 1, data1s, buf_type=gdal.GDT_Byte) + dstile.GetRasterBand(2).WriteRaster( + wx, wy, wxsize + 1, wysize + 1, data2s, buf_type=gdal.GDT_Byte) + dstile.GetRasterBand(3).WriteRaster( + wx, wy, wxsize + 1, wysize + 1, data3s, buf_type=gdal.GDT_Byte) + dstile.GetRasterBand(4).WriteRaster( + wx, wy, wxsize + 1, wysize + 1, data4s, buf_type=gdal.GDT_Byte) + elif wxsize != 0 and wysize != 0: + # Big ReadRaster query in memory scaled to the tilesize - all but 'near' algo + dsquery = self.mem_drv.Create( + '', querysize, querysize, tilebands, gdal.GDT_Byte) # 1bto4b + # TODO: fill the null value in case a tile without alpha is produced (now only png tiles are supported) + # for i in range(1, tilebands+1): + # dsquery.GetRasterBand(1).Fill(tilenodata) + # dsquery.WriteRaster(wx, wy, wxsize, wysize, data, band_list=list(range(1,self.dataBandsCount+1)))###############1bto4b + # dsquery.WriteRaster(wx, wy, wxsize, wysize, alpha, band_list=[tilebands])###############################1bto4b + + # 1bto4b + data = ds.GetRasterBand(1).ReadRaster( + rx, ry, rxsize, rysize, wxsize, wysize, buf_type=gdal.GDT_Float32) + + data = struct.unpack('f' * wxsize * wysize, data) + data1 = [] + data2 = [] + data3 = [] + data4 = [] + for f in data: + f = str(binary(f)) + data1.append(int(f[:8], 2)) + data2.append(int(f[8:16], 2)) + data3.append(int(f[16:24], 2)) + data4.append(int(f[24:], 2)) + + data1s = '' + data2s = '' + data3s = '' + data4s = '' + indx = 0 + for v in data1: + data1s += struct.pack('B', data1[indx]) + data2s += struct.pack('B', data2[indx]) + data3s += struct.pack('B', data3[indx]) + data4s += struct.pack('B', data4[indx]) + indx += 1 + + dsquery.GetRasterBand(1).WriteRaster( + wx, wy, wxsize, wysize, data1s, buf_type=gdal.GDT_Byte) + dsquery.GetRasterBand(2).WriteRaster( + wx, wy, wxsize, wysize, data2s, 
buf_type=gdal.GDT_Byte) + dsquery.GetRasterBand(3).WriteRaster( + wx, wy, wxsize, wysize, data3s, buf_type=gdal.GDT_Byte) + dsquery.GetRasterBand(4).WriteRaster( + wx, wy, wxsize, wysize, data4s, buf_type=gdal.GDT_Byte) + # sys.exit('done') + # 1bto4b + + self.scale_query_to_tile( + dsquery, dstile, tilefilename) + del dsquery + + else: + if self.tilesize == querysize: + # Use the ReadRaster result directly in tiles ('nearest neighbour' query) + dstile.WriteRaster(wx, wy, wxsize, wysize, data, + band_list=list(range(1, self.dataBandsCount+1))) + dstile.WriteRaster( + wx, wy, wxsize, wysize, alpha, band_list=[tilebands]) + + # Note: For source drivers based on WaveLet compression (JPEG2000, ECW, + # MrSID) the ReadRaster function returns high-quality raster (not ugly + # nearest neighbour) + # TODO: Use directly 'near' for WaveLet files + else: + # Big ReadRaster query in memory scaled to the tilesize - all but 'near' + # algo + dsquery = self.mem_drv.Create( + '', querysize, querysize, tilebands) + # TODO: fill the null value in case a tile without alpha is produced (now + # only png tiles are supported) + dsquery.WriteRaster(wx, wy, wxsize, wysize, data, + band_list=list(range(1, self.dataBandsCount+1))) + dsquery.WriteRaster( + wx, wy, wxsize, wysize, alpha, band_list=[tilebands]) + + self.scale_query_to_tile( + dsquery, dstile, tilefilename) + del dsquery + + del data + + if self.options.resampling != 'antialias': + # Write a copy of tile to png/jpg + self.out_drv.CreateCopy(tilefilename, dstile, strict=0) + + del dstile + + # Create a KML file for this tile. 
+ if self.kml: + kmlfilename = os.path.join( + self.output, str(tz), str(tx), '%d.kml' % ty) + if not self.options.resume or not os.path.exists(kmlfilename): + f = open(kmlfilename, 'wb') + f.write(self.generate_kml(tx, ty, tz).encode('utf-8')) + f.close() + + if not self.options.verbose and not self.options.quiet: + self.progressbar(ti / float(tcount)) + + def generate_overview_tiles(self): + """Generation of the overview tiles (higher in the pyramid) based on existing tiles""" + + if not self.options.quiet: + print("Generating Overview Tiles:") + + # 1bto4b + if self.isDEMtile: + tilebands = 4 + else: + tilebands = self.dataBandsCount + 1 + + # Usage of existing tiles: from 4 underlying tiles generate one as overview. + + tcount = 0 + for tz in range(self.tmaxz-1, self.tminz-1, -1): + tminx, tminy, tmaxx, tmaxy = self.tminmax[tz] + tcount += (1+abs(tmaxx-tminx)) * (1+abs(tmaxy-tminy)) + + ti = 0 + + for tz in range(self.tmaxz-1, self.tminz-1, -1): + tminx, tminy, tmaxx, tmaxy = self.tminmax[tz] + for ty in range(tmaxy, tminy-1, -1): + for tx in range(tminx, tmaxx+1): + + if self.stopped: + break + + ti += 1 + tilefilename = os.path.join(self.output, + str(tz), + str(tx), + "%s.%s" % (ty, self.tileext)) + + if self.options.verbose: + print(ti, '/', tcount, tilefilename) + + if self.options.resume and os.path.exists(tilefilename): + if self.options.verbose: + print("Tile generation skipped because of --resume") + else: + self.progressbar(ti / float(tcount)) + continue + + # Create directories for the tile + if not os.path.exists(os.path.dirname(tilefilename)): + os.makedirs(os.path.dirname(tilefilename)) + + dsquery = self.mem_drv.Create( + '', 2*self.tilesize, 2*self.tilesize, tilebands) + # TODO: fill the null value + dstile = self.mem_drv.Create( + '', self.tilesize, self.tilesize, tilebands) + + # TODO: Implement more clever walking on the tiles with cache functionality + # probably walk should start with reading of four tiles from top left corner + # Hilbert 
curve + + children = [] + # Read the tiles and write them to query window + for y in range(2*ty, 2*ty+2): + for x in range(2*tx, 2*tx+2): + minx, miny, maxx, maxy = self.tminmax[tz+1] + if x >= minx and x <= maxx and y >= miny and y <= maxy: + dsquerytile = gdal.Open( + os.path.join(self.output, str(tz+1), str(x), + "%s.%s" % (y, self.tileext)), + gdal.GA_ReadOnly) + if (ty == 0 and y == 1) or (ty != 0 and (y % (2*ty)) != 0): + tileposy = 0 + else: + tileposy = self.tilesize + if tx: + tileposx = x % (2*tx) * self.tilesize + elif tx == 0 and x == 1: + tileposx = self.tilesize + else: + tileposx = 0 + dsquery.WriteRaster( + tileposx, tileposy, self.tilesize, self.tilesize, + dsquerytile.ReadRaster( + 0, 0, self.tilesize, self.tilesize), + band_list=list(range(1, tilebands+1))) + children.append([x, y, tz+1]) + + self.scale_query_to_tile(dsquery, dstile, tilefilename) + # Write a copy of tile to png/jpg + if self.options.resampling != 'antialias': + # Write a copy of tile to png/jpg + self.out_drv.CreateCopy(tilefilename, dstile, strict=0) + + if self.options.verbose: + print("\tbuild from zoom", tz+1, + " tiles:", (2*tx, 2*ty), (2*tx+1, 2*ty), + (2*tx, 2*ty+1), (2*tx+1, 2*ty+1)) + + # Create a KML file for this tile. + if self.kml: + f = open(os.path.join( + self.output, '%d/%d/%d.kml' % (tz, tx, ty)), 'wb') + f.write(self.generate_kml( + tx, ty, tz, children).encode('utf-8')) + f.close() + + if not self.options.verbose and not self.options.quiet: + self.progressbar(ti / float(tcount)) + + def geo_query(self, ds, ulx, uly, lrx, lry, querysize=0): + """ + For given dataset and query in cartographic coordinates returns parameters for ReadRaster() + in raster coordinates and x/y shifts (for border tiles). If the querysize is not given, the + extent is returned in the native resolution of dataset ds. 
+ + raises Gdal2TilesError if the dataset does not contain anything inside this geo_query + """ + geotran = ds.GetGeoTransform() + rx = int((ulx - geotran[0]) / geotran[1] + 0.001) + ry = int((uly - geotran[3]) / geotran[5] + 0.001) + rxsize = int((lrx - ulx) / geotran[1] + 0.5) + rysize = int((lry - uly) / geotran[5] + 0.5) + + if not querysize: + wxsize, wysize = rxsize, rysize + else: + wxsize, wysize = querysize, querysize + + # Coordinates should not go out of the bounds of the raster + wx = 0 + if rx < 0: + rxshift = abs(rx) + wx = int(wxsize * (float(rxshift) / rxsize)) + wxsize = wxsize - wx + rxsize = rxsize - int(rxsize * (float(rxshift) / rxsize)) + rx = 0 + if rx+rxsize > ds.RasterXSize: + wxsize = int(wxsize * (float(ds.RasterXSize - rx) / rxsize)) + rxsize = ds.RasterXSize - rx + + wy = 0 + if ry < 0: + ryshift = abs(ry) + wy = int(wysize * (float(ryshift) / rysize)) + wysize = wysize - wy + rysize = rysize - int(rysize * (float(ryshift) / rysize)) + ry = 0 + if ry+rysize > ds.RasterYSize: + wysize = int(wysize * (float(ds.RasterYSize - ry) / rysize)) + rysize = ds.RasterYSize - ry + + return (rx, ry, rxsize, rysize), (wx, wy, wxsize, wysize) + + def scale_query_to_tile(self, dsquery, dstile, tilefilename=''): + """Scales down query dataset to the tile dataset""" + + querysize = dsquery.RasterXSize + tilesize = dstile.RasterXSize + tilebands = dstile.RasterCount + + if self.options.resampling == 'average': + + # Function: gdal.RegenerateOverview() + for i in range(1, tilebands+1): + # Black border around NODATA + res = gdal.RegenerateOverview(dsquery.GetRasterBand(i), dstile.GetRasterBand(i), + 'average') + if res != 0: + self.error("RegenerateOverview() failed on %s, error %d" % ( + tilefilename, res)) + + elif self.options.resampling == 'antialias': + + # Scaling by PIL (Python Imaging Library) - improved Lanczos + array = numpy.zeros((querysize, querysize, tilebands), numpy.uint8) + for i in range(tilebands): + array[:, :, i] = 
gdalarray.BandReadAsArray(dsquery.GetRasterBand(i+1), + 0, 0, querysize, querysize) + im = Image.fromarray(array, 'RGBA') # Always four bands + im1 = im.resize((tilesize, tilesize), Image.ANTIALIAS) + if os.path.exists(tilefilename): + im0 = Image.open(tilefilename) + im1 = Image.composite(im1, im0, im1) + im1.save(tilefilename, self.tiledriver) + + else: + + # Other algorithms are implemented by gdal.ReprojectImage(). + dsquery.SetGeoTransform((0.0, tilesize / float(querysize), 0.0, 0.0, 0.0, + tilesize / float(querysize))) + dstile.SetGeoTransform((0.0, 1.0, 0.0, 0.0, 0.0, 1.0)) + + res = gdal.ReprojectImage( + dsquery, dstile, None, None, self.resampling) + if res != 0: + self.error("ReprojectImage() failed on %s, error %d" % + (tilefilename, res)) + + def generate_tilemapresource(self): + """ + Template for tilemapresource.xml. Returns filled string. Expected variables: + title, north, south, east, west, isepsg4326, projection, publishurl, + zoompixels, tilesize, tileformat, profile + """ + + args = {} + args['title'] = self.options.title + args['south'], args['west'], args['north'], args['east'] = self.swne + args['tilesize'] = self.tilesize + args['tileformat'] = self.tileext + args['publishurl'] = self.options.url + args['profile'] = self.options.profile + + if self.options.profile == 'mercator': + args['srs'] = "EPSG:3857" + elif self.options.profile == 'geodetic': + args['srs'] = "EPSG:4326" + elif self.options.s_srs: + args['srs'] = self.options.s_srs + elif self.out_srs: + args['srs'] = self.out_srs.ExportToWkt() + else: + args['srs'] = "" + + s = """ + + %(title)s + + %(srs)s + + + + +""" % args # noqa + for z in range(self.tminz, self.tmaxz+1): + if self.options.profile == 'raster': + s += """ \n""" % ( + args['publishurl'], z, (2**(self.nativezoom-z) * self.out_gt[1]), z) + elif self.options.profile == 'mercator': + s += """ \n""" % ( + args['publishurl'], z, 156543.0339/2**z, z) + elif self.options.profile == 'geodetic': + s += """ \n""" % ( + 
args['publishurl'], z, 0.703125/2**z, z) + s += """ + + """ + return s + + def generate_kml(self, tx, ty, tz, children=None, **args): + """ + Template for the KML. Returns filled string. + """ + if not children: + children = [] + + args['tx'], args['ty'], args['tz'] = tx, ty, tz + args['tileformat'] = self.tileext + if 'tilesize' not in args: + args['tilesize'] = self.tilesize + + if 'minlodpixels' not in args: + args['minlodpixels'] = int(args['tilesize'] / 2) + if 'maxlodpixels' not in args: + args['maxlodpixels'] = int(args['tilesize'] * 8) + if children == []: + args['maxlodpixels'] = -1 + + if tx is None: + tilekml = False + args['title'] = self.options.title + else: + tilekml = True + args['title'] = "%d/%d/%d.kml" % (tz, tx, ty) + args['south'], args['west'], args['north'], args['east'] = self.tileswne( + tx, ty, tz) + + if tx == 0: + args['drawOrder'] = 2 * tz + 1 + elif tx is not None: + args['drawOrder'] = 2 * tz + else: + args['drawOrder'] = 0 + + url = self.options.url + if not url: + if tilekml: + url = "../../" + else: + url = "" + + s = """ + + + %(title)s + + """ % args + if tilekml: + s += """ + + + %(north).14f + %(south).14f + %(east).14f + %(west).14f + + + %(minlodpixels)d + %(maxlodpixels)d + + + + %(drawOrder)d + + %(ty)d.%(tileformat)s + + + %(north).14f + %(south).14f + %(east).14f + %(west).14f + + + """ % args + + for cx, cy, cz in children: + csouth, cwest, cnorth, ceast = self.tileswne(cx, cy, cz) + s += """ + + %d/%d/%d.%s + + + %.14f + %.14f + %.14f + %.14f + + + %d + -1 + + + + %s%d/%d/%d.kml + onRegion + + + + """ % (cz, cx, cy, args['tileformat'], cnorth, csouth, ceast, cwest, + args['minlodpixels'], url, cz, cx, cy) + + s += """ + + """ + return s + + def generate_googlemaps(self): + """ + Template for googlemaps.html implementing Overlay of tiles for 'mercator' profile. + It returns filled string. 
Expected variables: + title, googlemapskey, north, south, east, west, minzoom, maxzoom, tilesize, tileformat, + publishurl + """ + args = {} + args['title'] = self.options.title + args['googlemapskey'] = self.options.googlekey + args['south'], args['west'], args['north'], args['east'] = self.swne + args['minzoom'] = self.tminz + args['maxzoom'] = self.tmaxz + args['tilesize'] = self.tilesize + args['tileformat'] = self.tileext + args['publishurl'] = self.options.url + args['copyright'] = self.options.copyright + + s = r""" + + + %(title)s + + + + + + + + +
Generated by GDAL2Tiles, Copyright © 2008 Klokan Petr Pridal, GDAL & OSGeo GSoC + +
+
+ + + """ % args # noqa + + return s + + def generate_leaflet(self): + """ + Template for leaflet.html implementing overlay of tiles for 'mercator' profile. + It returns filled string. Expected variables: + title, north, south, east, west, minzoom, maxzoom, tilesize, tileformat, publishurl + """ + + args = {} + args['title'] = self.options.title.replace('"', '\\"') + args['htmltitle'] = self.options.title + args['south'], args['west'], args['north'], args['east'] = self.swne + args['centerlon'] = (args['north'] + args['south']) / 2. + args['centerlat'] = (args['west'] + args['east']) / 2. + args['minzoom'] = self.tminz + args['maxzoom'] = self.tmaxz + args['beginzoom'] = self.tmaxz + args['tilesize'] = self.tilesize # not used + args['tileformat'] = self.tileext + args['publishurl'] = self.options.url # not used + args['copyright'] = self.options.copyright.replace('"', '\\"') + + s = """ + + + + + %(htmltitle)s + + + + + + + + + + +
+ + + + + + + """ % args # noqa + + return s + + def generate_openlayers(self): + """ + Template for openlayers.html implementing overlay of available Spherical Mercator layers. + + It returns filled string. Expected variables: + title, bingkey, north, south, east, west, minzoom, maxzoom, tilesize, tileformat, publishurl + """ + + args = {} + args['title'] = self.options.title + args['bingkey'] = self.options.bingkey + args['south'], args['west'], args['north'], args['east'] = self.swne + args['minzoom'] = self.tminz + args['maxzoom'] = self.tmaxz + args['tilesize'] = self.tilesize + args['tileformat'] = self.tileext + args['publishurl'] = self.options.url + args['copyright'] = self.options.copyright + if self.options.tmscompatible: + args['tmsoffset'] = "-1" + else: + args['tmsoffset'] = "" + if self.options.profile == 'raster': + args['rasterzoomlevels'] = self.tmaxz+1 + args['rastermaxresolution'] = 2**(self.nativezoom) * self.out_gt[1] + + s = r""" + + %(title)s + + """ % args # noqa + + if self.options.profile == 'mercator': + s += """ + + """ % args + + s += """ + + + + + +
Generated by GDAL2Tiles, Copyright © 2008 Klokan Petr Pridal, GDAL & OSGeo GSoC + +
+
+ + + """ % args # noqa + + return s + + +def main(): + argv = gdal.GeneralCmdLineProcessor(sys.argv) + if argv: + gdal2tiles = GDAL2Tiles(argv[1:]) + gdal2tiles.process() + + +if __name__ == '__main__': + main() + +# vim: set tabstop=4 shiftwidth=4 expandtab: diff --git a/auxiliary/gdal2customtiles/gdal2tiles_3.5.2.py b/auxiliary/gdal2customtiles/legacy/gdal2tiles_3.5.2.py similarity index 100% rename from auxiliary/gdal2customtiles/gdal2tiles_3.5.2.py rename to auxiliary/gdal2customtiles/legacy/gdal2tiles_3.5.2.py diff --git a/auxiliary/gdal2customtiles/rasters2customtiles_3.5.2.py b/auxiliary/gdal2customtiles/legacy/rasters2customtiles_3.5.2.py similarity index 100% rename from auxiliary/gdal2customtiles/rasters2customtiles_3.5.2.py rename to auxiliary/gdal2customtiles/legacy/rasters2customtiles_3.5.2.py diff --git a/auxiliary/gdal2customtiles/legacy/readme.md b/auxiliary/gdal2customtiles/legacy/readme.md new file mode 100644 index 000000000..91152fd28 --- /dev/null +++ b/auxiliary/gdal2customtiles/legacy/readme.md @@ -0,0 +1,68 @@ +# gdal2customtiles + +This wraps together: + +- gdal2tiles4extent.py +- gdal2tiles1bto4b_v3.py + +--- + +## Raster Extents: + +Tile partial world rasters. + +**Requires:** + +- `-p raster` + - This is necessary for generating tiles with a custom extent. +- `-x` OR `--extentworld` followed by values `ulx,uly,lrx,lry,pixel_resolution` + - The extentworld is the full bounding area of the projection for the planetary body. The extentworld is the full bounding area of the projection for the planetary body. Units are in meters using upper left (ul) and lower right (lr) order. These values are reported from gdalinfo. Units are in meters using upper left (ul) and lower right (lr) order. These values are reported from gdalinfo. Values are separated by commas with no spaces. 
+ +**Example:** + +``` +python gdal2customtiles.py -p raster --extentworld -4022404.001,4022036.893,-4022036.893,4022404.001,367.108150109358121 input.tif output_dir +``` + +_Notes:_ + +- Only works if set zoom (-z 0-10) encompasses the native zoom of the raster. +- 'ERROR 5's are expected. + +--- + +## Digital Elevation Model Tiles: + +Generate Digital Elevation Model (DEM) tiles. + +Any 32-bit image data can be encoded into the RGBA channels of a PNG. MMGIS uses this file type to create terrain meshes as well as for a data layer. + +- Certain resampling methods can corrupt `--dem` results. + +**Requires:** + +- `-m` or `--dem` + +**Example:** + +``` +python gdal2customtiles.py -p raster --extentworld -4022404.001,4022036.893,-4022036.893,4022404.001,367.108150109358121 --dem inputdem.tif output_dir +``` + +_Notes:_ + +- Does not include the convenience of rasterstotiles.py yet. +- Can only tile 32-bit images with --dem option. + +## gdal2tiles_3.5.2.py + +- `rasters2customtiles_3.5.2.py` and `gdal2tiles_3.5.2.py` support only the `--dem` option (and not `--raster` yet). `-m` no longer works and must be `--dem`. Tested with gdal 3.4.3. Upgraded to support multi-processes. See `python rasters2customtiles_3.5.2.py --help`. Unlike `gdal2customtiles.py`, does not seam-match DEM tiles (better for Data Layers and Viewshed Tool, bad for 3D Globe). +- Adds the resampling algorithm `near-composite` that uses nearest-neighbor and overlays the new tile onto the old tile (if any in output directory) +- Certain resampling methods can corrupt `--dem` results. +- To support the value 0, all 0 data values get mapped to the value 2^31 (2147483648) (RGBA=79,0,0,0) and then decoded by the MMGIS reader back to 0. This avoids clashes with other nondata-like values writing to 0,0,0,0 in the outputted pngs. 
+ +**Example:** + +``` +python gdal2tiles_3.5.2.py --dem input.tif output_dir --srcnodata=-9999 -r near-composite --tilesize=128 +``` diff --git a/auxiliary/gdal2customtiles/rasters2customtiles.py b/auxiliary/gdal2customtiles/rasters2customtiles.py new file mode 100644 index 000000000..d89444d9f --- /dev/null +++ b/auxiliary/gdal2customtiles/rasters2customtiles.py @@ -0,0 +1,151 @@ +import sys +import subprocess +import optparse +from osgeo import gdal, osr + + +def optparse_init() -> optparse.OptionParser: + """Prepare the option parser for input (argv)""" + + usage = "Usage: %prog [options] input_file [output]" + p = optparse.OptionParser(usage) + p.add_option( + "--dem", + action="store_true", + dest="isDEMtile", + help="Indicate if the input is a Digital Elevation Model" + ) + p.add_option( + "--processes", + dest="processes", + type="int", + help="Number of processes to use for tiling", + ) + p.add_option( + "--tilesize", + dest="tilesize", + metavar="PIXELS", + type="int", + help="Width and height in pixel of a tile. Defaults to 256 (or 32 for --dem)", + ) + p.add_option( + "-z", + "--zoom", + dest="zoom", + help="Zoom levels to render (format:'2-5', '10-' or '10').", + ) + p.add_option( + "-e", + "--resume", + dest="resume", + action="store_true", + help="Resume mode. 
Generate only missing files.", + ) + p.add_option( + "-a", + "--srcnodata", + dest="srcnodata", + metavar="NODATA", + help="Value in the input dataset considered as transparent", + ) + return p + + +def GetExtent(gt, cols, rows): + ''' Return list of corner coordinates from a geotransform + + @type gt: C{tuple/list} + @param gt: geotransform + @type cols: C{int} + @param cols: number of columns in the dataset + @type rows: C{int} + @param rows: number of rows in the dataset + @rtype: C{[float,...,float]} + @return: coordinates of each corner + ''' + ext = [] + xarr = [0, cols] + yarr = [0, rows] + + for px in xarr: + for py in yarr: + x = gt[0]+(px*gt[1])+(py*gt[2]) + y = gt[3]+(px*gt[4])+(py*gt[5]) + ext.append([x, y]) + yarr.reverse() + return ext + + +def ReprojectCoords(coords, src_srs, tgt_srs): + ''' Reproject a list of x,y coordinates. + + @type geom: C{tuple/list} + @param geom: List of [[x,y],...[x,y]] coordinates + @type src_srs: C{osr.SpatialReference} + @param src_srs: OSR SpatialReference object + @type tgt_srs: C{osr.SpatialReference} + @param tgt_srs: OSR SpatialReference object + @rtype: C{tuple/list} + @return: List of transformed [[x,y],...[x,y]] coordinates + ''' + trans_coords = [] + transform = osr.CoordinateTransformation(src_srs, tgt_srs) + for x, y in coords: + x, y, z = transform.TransformPoint(x, y) + trans_coords.append([x, y]) + return trans_coords + + +def AutoGdalTranslate(geo_extent, cols, rows, raster): + gdal_translate = "gdal_translate -of VRT -a_srs EPSG:4326 -gcp 0 0 " + str(geo_extent[0][0]) + " " + str(geo_extent[0][1]) + " -gcp " + str(cols) + " 0 " + str(geo_extent[3][0]) + " " + str( + geo_extent[3][1]) + " -gcp " + str(cols) + " " + str(rows) + " " + str(geo_extent[2][0]) + " " + str(geo_extent[2][1]) + " " + raster + " " + raster[:-4] + ".vrt" + print(f"Running: {gdal_translate}\n") + subprocess.Popen(gdal_translate) + + +def AutoGdal2Tiles(raster, options, outputdir): + dem = "" + if options.isDEMtile is True: + dem = " 
--dem" + processes = "" + if options.processes is not None: + processes = f" --processes={options.processes}" + tilesize = "" + if options.tilesize is not None: + tilesize = f" --tilesize={options.tilesize}" + zoom = "" + if options.zoom is not None: + zoom = f" --zoom={options.zoom}" + resume = "" + if options.resume is True: + resume = " --resume" + srcnodata = " --srcnodata=0,0,0" + if options.srcnodata is not None: + srcnodata = f" --srcnodata={options.srcnodata}" + output = "" + if outputdir is not None: + output = f" {outputdir}" + gdal2tiles = f"python gdal2customtiles.py -n{dem}{processes}{tilesize}{zoom}{resume}{srcnodata} {raster[:-4]}.vrt{output}" + print(f"Running: {gdal2tiles}\n") + subprocess.Popen(gdal2tiles) + + +parser = optparse_init() +options, args = parser.parse_args(args=sys.argv) + +raster = args[1] +ds = gdal.Open(raster) + +gt = ds.GetGeoTransform() +cols = ds.RasterXSize +rows = ds.RasterYSize +extent = GetExtent(gt, cols, rows) + +src_srs = osr.SpatialReference() +src_srs.ImportFromWkt(ds.GetProjection()) +tgt_srs = src_srs.CloneGeogCS() + +geo_extent = ReprojectCoords(extent, src_srs, tgt_srs) + +AutoGdalTranslate(geo_extent, cols, rows, raster) +AutoGdal2Tiles(raster, options, args[2]) diff --git a/auxiliary/gdal2customtiles/readme.md b/auxiliary/gdal2customtiles/readme.md index bc83600f9..da1cab9e7 100644 --- a/auxiliary/gdal2customtiles/readme.md +++ b/auxiliary/gdal2customtiles/readme.md @@ -1,33 +1,38 @@ -# gdal2customtiles +# gdal2customtiles.py -This wraps together: +_Python 3.10.5_ -- gdal2tiles4extent.py -- gdal2tiles1bto4b_v3.py +Accepts all [gdal2tiles.py](https://gdal.org/programs/gdal2tiles.html) options. Built off of GDAL 3.5.2 and tested with GDAL 3.4.3, it adds the following new features and capabilities. --- ## Raster Extents: -Tile partial world rasters. +Tile partial world rasters. Useful for tiling non-mercator and non-geodetic projected data. 
**Requires:** - `-p raster` - This is necessary for generating tiles with a custom extent. -- `-x` OR `--extentworld` followed by values `ulx,uly,lrx,lry,pixel_resolution` +- `--extentworld` followed by values `ulx,uly,lrx,lry,pixel_resolution` - The extentworld is the full bounding area of the projection for the planetary body. The extentworld is the full bounding area of the projection for the planetary body. Units are in meters using upper left (ul) and lower right (lr) order. These values are reported from gdalinfo. Units are in meters using upper left (ul) and lower right (lr) order. These values are reported from gdalinfo. Values are separated by commas with no spaces. -**Example:** +#### Example: ``` -python gdal2customtiles.py -p raster --extentworld -4022404.001,4022036.893,-4022036.893,4022404.001,367.108150109358121 input.tif output_dir +python gdal2customtiles.py -p raster --extentworld -931100.000,931100.000,931100.000,-931100.000,100 inputs/WAC_GLOBAL_P900S0000_100M.tif outputs/WAC_GLOBAL_P900S0000_100M +python gdal2customtiles.py -p raster --extentworld -931100.000,931100.000,931100.000,-931100.000,100 inputs/ldem_87s_5mpp_hillshade.tif outputs/ldem_87s_5mpp_hillshade ``` +- `WAC_GLOBAL_P900S0000_100M.tif` is in Lunar South Polar projection (IAU2000:30120). Its data covers the full bounds of that projection's world-space (it's world extent/"extentworld") thus we use its bounds and pixel resolution directly from its metadata: `--extentworld -931100.000,931100.000,931100.000,-931100.000,100` + + - _Note: If your basemap does not cover the full world-space, you would need to compute the world-space's bounds and its resolution relative to your datasets_ + +- `ldem_87s_5mpp_hillshade.tif` is also in Lunar South Polar projection (IAU2000:30120). Its data only covers a small region of the projection's world-space. 
We still use the previous `--extentworld -931100.000,931100.000,931100.000,-931100.000,100` + _Notes:_ - Only works if set zoom (-z 0-10) encompasses the native zoom of the raster. -- 'ERROR 5's are expected. --- @@ -35,9 +40,7 @@ _Notes:_ Generate Digital Elevation Maps (DEMs) tiles. -Any 32-bit image data can be encoded into the RGBA channels of a PNG. MMGIS uses this file type to create terrain meshes as well as for a data layer. - -- Certain resampling methods can corrupt `--dem` results. +Any 32-bit image data can be encoded into the RGBA channels of a PNG. MMGIS uses this file type to create terrain meshes as well as for Data Layers. **Requires:** @@ -51,18 +54,33 @@ python gdal2customtiles.py -p raster --extentworld -4022404.001,4022036.893,-402 _Notes:_ -- Does not include the convenience of rasterstotiles.py yet. - Can only tile 32-bit images with --dem option. - -## gdal2tiles_3.5.2.py - -- `rasters2customtiles_3.5.2.py` and `gdal2tiles_3.5.2.py` support only the `--dem` option (and not `--raster` yet). `-m` no longer works and must be `--dem`. Tested with gdal 3.4.3. Upgraded to support multi-processes. See `python rasters2customtiles_3.5.2.py --help`. Unlike `gda2customtiles.py`, does not seam-match DEM tiles (better for Data Layers and Viewshed Tool, bad for 3D Globe). -- Adds the resampling algorithm `near-composite` that uses nearest-neighbor and ovarlays the new tile onto the old tile (if any in output directory) +- Current `--dem` tiles do not seam-match tile edges. This may or may not be desired (not seam-matching is better for Data Layers and the Viewshed Tool, but bad for MMGIS' 3D Globe/LithoSphere). If seam-matching is desired use `legacy/gdal2customtiles.py` or `legacy/gdal2customtiles_py27.py` - Certain resampling methods can corrupt `--dem` results. - To support the value 0, all 0 data values get mapped to to the value 2^31 (2147483648) (RGBA=79,0,0,0) and then decoded by the MMGIS reader back to 0. 
This avoids clashes with other nondata-like values writing to 0,0,0,0 in the outputted pngs. +--- + +## Compositing Tiles: + +Adds the resampling algorithm `near-composite` that uses nearest-neighbor resampling and overlays the new tile onto the old tile (if any in output directory). This makes it possible to accumulate or combine tilesets at the individual tile image level. Data in tiles can be overwritten by this process so be cognizant of run order and input extents. + **Example:** ``` -python gdal2tiles_3.5.2.py --dem input.tif output_dir --srcnodata=-9999 -r near-composite --tilesize=128 +python gdal2customtiles.py -r near-composite --srcnodata=-9999 --processes=40 --tilesize=128 --dem input_A.tif output_dir +python gdal2customtiles.py -r near-composite --srcnodata=-9999 --processes=40 --tilesize=128 --dem input_B.tif output_dir ``` + +_Notes:_ + +- Nodata values are treated as transparent and will not overwrite existing pixels in the output tile images. + +--- + +# rasters2customtiles.py + +A convenience script that wraps gdal2customtiles.py. Translates the input data into EPSG:4326 and sets proper ground control points. Might be outdated. Use gdal2customtiles directly for the most control. 
+ +**Usage:** +`rasters2customtiles.py [options] input_file [output]` or see `--help` diff --git a/config/css/config.css b/config/css/config.css index f369b8fa2..fa9b8e5de 100644 --- a/config/css/config.css +++ b/config/css/config.css @@ -312,6 +312,9 @@ textarea { margin: 0px 161px 0px 173px; border-bottom: 2px solid #1565c0 !important; } +.col > .CodeMirror { + margin: 0px; +} #missions { margin: 5px 0px; @@ -442,19 +445,34 @@ textarea { display: flex; justify-content: space-between; } +.modal .layerHelp { + height: 35px; + padding: 0px 6px; + margin-right: 12px; + cursor: pointer; + color: #039be5; + transition: color 0.1s cubic-bezier(0.39, 0.575, 0.565, 1); +} .modal .clone { height: 35px; cursor: pointer; + margin-left: 12px; color: #777; transition: color 0.1s cubic-bezier(0.39, 0.575, 0.565, 1); } .modal .clone:hover { color: #000; } +#modal_uuid { + text-align: center; + margin: 20px 0px; + font-size: 14px; + color: #555; +} #toast-container { pointer-events: none; - top: 110px !important; + top: 48px !important; right: 6px !important; } diff --git a/config/css/jquery-ui.css b/config/css/jquery-ui.css index 90aa9a11e..d394bd2de 100644 --- a/config/css/jquery-ui.css +++ b/config/css/jquery-ui.css @@ -1,1225 +1,1315 @@ -/*! 
jQuery UI - v1.11.4 - 2015-03-11 -* http://jqueryui.com -* Includes: core.css, accordion.css, autocomplete.css, button.css, datepicker.css, dialog.css, draggable.css, menu.css, progressbar.css, resizable.css, selectable.css, selectmenu.css, slider.css, sortable.css, spinner.css, tabs.css, tooltip.css, theme.css -* To view and modify this theme, visit http://jqueryui.com/themeroller/?ffDefault=Verdana%2CArial%2Csans-serif&fwDefault=normal&fsDefault=1.1em&cornerRadius=4px&bgColorHeader=cccccc&bgTextureHeader=highlight_soft&bgImgOpacityHeader=75&borderColorHeader=aaaaaa&fcHeader=222222&iconColorHeader=222222&bgColorContent=ffffff&bgTextureContent=flat&bgImgOpacityContent=75&borderColorContent=aaaaaa&fcContent=222222&iconColorContent=222222&bgColorDefault=e6e6e6&bgTextureDefault=glass&bgImgOpacityDefault=75&borderColorDefault=d3d3d3&fcDefault=555555&iconColorDefault=888888&bgColorHover=dadada&bgTextureHover=glass&bgImgOpacityHover=75&borderColorHover=999999&fcHover=212121&iconColorHover=454545&bgColorActive=ffffff&bgTextureActive=glass&bgImgOpacityActive=65&borderColorActive=aaaaaa&fcActive=212121&iconColorActive=454545&bgColorHighlight=fbf9ee&bgTextureHighlight=glass&bgImgOpacityHighlight=55&borderColorHighlight=fcefa1&fcHighlight=363636&iconColorHighlight=2e83ff&bgColorError=fef1ec&bgTextureError=glass&bgImgOpacityError=95&borderColorError=cd0a0a&fcError=cd0a0a&iconColorError=cd0a0a&bgColorOverlay=aaaaaa&bgTextureOverlay=flat&bgImgOpacityOverlay=0&opacityOverlay=30&bgColorShadow=aaaaaa&bgTextureShadow=flat&bgImgOpacityShadow=0&opacityShadow=30&thicknessShadow=8px&offsetTopShadow=-8px&offsetLeftShadow=-8px&cornerRadiusShadow=8px -* Copyright 2015 jQuery Foundation and other contributors; Licensed MIT */ - -/* Layout helpers -----------------------------------*/ -.ui-helper-hidden { - display: none; -} -.ui-helper-hidden-accessible { - border: 0; - clip: rect(0 0 0 0); - height: 1px; - margin: -1px; - overflow: hidden; - padding: 0; - position: absolute; - width: 1px; 
-} -.ui-helper-reset { - margin: 0; - padding: 0; - border: 0; - outline: 0; - line-height: 1.3; - text-decoration: none; - font-size: 100%; - list-style: none; -} -.ui-helper-clearfix:before, -.ui-helper-clearfix:after { - content: ""; - display: table; - border-collapse: collapse; -} -.ui-helper-clearfix:after { - clear: both; -} -.ui-helper-clearfix { - min-height: 0; /* support: IE7 */ -} -.ui-helper-zfix { - width: 100%; - height: 100%; - top: 0; - left: 0; - position: absolute; - opacity: 0; - filter:Alpha(Opacity=0); /* support: IE8 */ -} - -.ui-front { - z-index: 100; -} - - -/* Interaction Cues -----------------------------------*/ -.ui-state-disabled { - cursor: default !important; -} - - -/* Icons -----------------------------------*/ - -/* states and images */ -.ui-icon { - display: block; - text-indent: -99999px; - overflow: hidden; - background-repeat: no-repeat; -} - - -/* Misc visuals -----------------------------------*/ - -/* Overlays */ -.ui-widget-overlay { - position: fixed; - top: 0; - left: 0; - width: 100%; - height: 100%; -} -.ui-accordion .ui-accordion-header { - display: block; - cursor: pointer; - position: relative; - margin: 2px 0 0 0; - padding: .5em .5em .5em .7em; - min-height: 0; /* support: IE7 */ - font-size: 100%; -} -.ui-accordion .ui-accordion-icons { - padding-left: 2.2em; -} -.ui-accordion .ui-accordion-icons .ui-accordion-icons { - padding-left: 2.2em; -} -.ui-accordion .ui-accordion-header .ui-accordion-header-icon { - position: absolute; - left: .5em; - top: 50%; - margin-top: -8px; -} -.ui-accordion .ui-accordion-content { - padding: 1em 2.2em; - border-top: 0; - overflow: auto; -} -.ui-autocomplete { - position: absolute; - top: 0; - left: 0; - cursor: default; -} -.ui-button { - display: inline-block; - position: relative; - padding: 0; - line-height: normal; - margin-right: .1em; - cursor: pointer; - vertical-align: middle; - text-align: center; - overflow: visible; /* removes extra width in IE */ -} -.ui-button, 
-.ui-button:link, -.ui-button:visited, -.ui-button:hover, -.ui-button:active { - text-decoration: none; -} -/* to make room for the icon, a width needs to be set here */ -.ui-button-icon-only { - width: 2.2em; -} -/* button elements seem to need a little more width */ -button.ui-button-icon-only { - width: 2.4em; -} -.ui-button-icons-only { - width: 3.4em; -} -button.ui-button-icons-only { - width: 3.7em; -} - -/* button text element */ -.ui-button .ui-button-text { - display: block; - line-height: normal; -} -.ui-button-text-only .ui-button-text { - padding: .4em 1em; -} -.ui-button-icon-only .ui-button-text, -.ui-button-icons-only .ui-button-text { - padding: .4em; - text-indent: -9999999px; -} -.ui-button-text-icon-primary .ui-button-text, -.ui-button-text-icons .ui-button-text { - padding: .4em 1em .4em 2.1em; -} -.ui-button-text-icon-secondary .ui-button-text, -.ui-button-text-icons .ui-button-text { - padding: .4em 2.1em .4em 1em; -} -.ui-button-text-icons .ui-button-text { - padding-left: 2.1em; - padding-right: 2.1em; -} -/* no icon support for input elements, provide padding by default */ -input.ui-button { - padding: .4em 1em; -} - -/* button icon element(s) */ -.ui-button-icon-only .ui-icon, -.ui-button-text-icon-primary .ui-icon, -.ui-button-text-icon-secondary .ui-icon, -.ui-button-text-icons .ui-icon, -.ui-button-icons-only .ui-icon { - position: absolute; - top: 50%; - margin-top: -8px; -} -.ui-button-icon-only .ui-icon { - left: 50%; - margin-left: -8px; -} -.ui-button-text-icon-primary .ui-button-icon-primary, -.ui-button-text-icons .ui-button-icon-primary, -.ui-button-icons-only .ui-button-icon-primary { - left: .5em; -} -.ui-button-text-icon-secondary .ui-button-icon-secondary, -.ui-button-text-icons .ui-button-icon-secondary, -.ui-button-icons-only .ui-button-icon-secondary { - right: .5em; -} - -/* button sets */ -.ui-buttonset { - margin-right: 7px; -} -.ui-buttonset .ui-button { - margin-left: 0; - margin-right: -.3em; -} - -/* workarounds */ 
-/* reset extra padding in Firefox, see h5bp.com/l */ -input.ui-button::-moz-focus-inner, -button.ui-button::-moz-focus-inner { - border: 0; - padding: 0; -} -.ui-datepicker { - width: 17em; - padding: .2em .2em 0; - display: none; -} -.ui-datepicker .ui-datepicker-header { - position: relative; - padding: .2em 0; -} -.ui-datepicker .ui-datepicker-prev, -.ui-datepicker .ui-datepicker-next { - position: absolute; - top: 2px; - width: 1.8em; - height: 1.8em; -} -.ui-datepicker .ui-datepicker-prev-hover, -.ui-datepicker .ui-datepicker-next-hover { - top: 1px; -} -.ui-datepicker .ui-datepicker-prev { - left: 2px; -} -.ui-datepicker .ui-datepicker-next { - right: 2px; -} -.ui-datepicker .ui-datepicker-prev-hover { - left: 1px; -} -.ui-datepicker .ui-datepicker-next-hover { - right: 1px; -} -.ui-datepicker .ui-datepicker-prev span, -.ui-datepicker .ui-datepicker-next span { - display: block; - position: absolute; - left: 50%; - margin-left: -8px; - top: 50%; - margin-top: -8px; -} -.ui-datepicker .ui-datepicker-title { - margin: 0 2.3em; - line-height: 1.8em; - text-align: center; -} -.ui-datepicker .ui-datepicker-title select { - font-size: 1em; - margin: 1px 0; -} -.ui-datepicker select.ui-datepicker-month, -.ui-datepicker select.ui-datepicker-year { - width: 45%; -} -.ui-datepicker table { - width: 100%; - font-size: .9em; - border-collapse: collapse; - margin: 0 0 .4em; -} -.ui-datepicker th { - padding: .7em .3em; - text-align: center; - font-weight: bold; - border: 0; -} -.ui-datepicker td { - border: 0; - padding: 1px; -} -.ui-datepicker td span, -.ui-datepicker td a { - display: block; - padding: .2em; - text-align: right; - text-decoration: none; -} -.ui-datepicker .ui-datepicker-buttonpane { - background-image: none; - margin: .7em 0 0 0; - padding: 0 .2em; - border-left: 0; - border-right: 0; - border-bottom: 0; -} -.ui-datepicker .ui-datepicker-buttonpane button { - float: right; - margin: .5em .2em .4em; - cursor: pointer; - padding: .2em .6em .3em .6em; - 
width: auto; - overflow: visible; -} -.ui-datepicker .ui-datepicker-buttonpane button.ui-datepicker-current { - float: left; -} - -/* with multiple calendars */ -.ui-datepicker.ui-datepicker-multi { - width: auto; -} -.ui-datepicker-multi .ui-datepicker-group { - float: left; -} -.ui-datepicker-multi .ui-datepicker-group table { - width: 95%; - margin: 0 auto .4em; -} -.ui-datepicker-multi-2 .ui-datepicker-group { - width: 50%; -} -.ui-datepicker-multi-3 .ui-datepicker-group { - width: 33.3%; -} -.ui-datepicker-multi-4 .ui-datepicker-group { - width: 25%; -} -.ui-datepicker-multi .ui-datepicker-group-last .ui-datepicker-header, -.ui-datepicker-multi .ui-datepicker-group-middle .ui-datepicker-header { - border-left-width: 0; -} -.ui-datepicker-multi .ui-datepicker-buttonpane { - clear: left; -} -.ui-datepicker-row-break { - clear: both; - width: 100%; - font-size: 0; -} - -/* RTL support */ -.ui-datepicker-rtl { - direction: rtl; -} -.ui-datepicker-rtl .ui-datepicker-prev { - right: 2px; - left: auto; -} -.ui-datepicker-rtl .ui-datepicker-next { - left: 2px; - right: auto; -} -.ui-datepicker-rtl .ui-datepicker-prev:hover { - right: 1px; - left: auto; -} -.ui-datepicker-rtl .ui-datepicker-next:hover { - left: 1px; - right: auto; -} -.ui-datepicker-rtl .ui-datepicker-buttonpane { - clear: right; -} -.ui-datepicker-rtl .ui-datepicker-buttonpane button { - float: left; -} -.ui-datepicker-rtl .ui-datepicker-buttonpane button.ui-datepicker-current, -.ui-datepicker-rtl .ui-datepicker-group { - float: right; -} -.ui-datepicker-rtl .ui-datepicker-group-last .ui-datepicker-header, -.ui-datepicker-rtl .ui-datepicker-group-middle .ui-datepicker-header { - border-right-width: 0; - border-left-width: 1px; -} -.ui-dialog { - overflow: hidden; - position: absolute; - top: 0; - left: 0; - padding: .2em; - outline: 0; -} -.ui-dialog .ui-dialog-titlebar { - padding: .4em 1em; - position: relative; -} -.ui-dialog .ui-dialog-title { - float: left; - margin: .1em 0; - white-space: 
nowrap; - width: 90%; - overflow: hidden; - text-overflow: ellipsis; -} -.ui-dialog .ui-dialog-titlebar-close { - position: absolute; - right: .3em; - top: 50%; - width: 20px; - margin: -10px 0 0 0; - padding: 1px; - height: 20px; -} -.ui-dialog .ui-dialog-content { - position: relative; - border: 0; - padding: .5em 1em; - background: none; - overflow: auto; -} -.ui-dialog .ui-dialog-buttonpane { - text-align: left; - border-width: 1px 0 0 0; - background-image: none; - margin-top: .5em; - padding: .3em 1em .5em .4em; -} -.ui-dialog .ui-dialog-buttonpane .ui-dialog-buttonset { - float: right; -} -.ui-dialog .ui-dialog-buttonpane button { - margin: .5em .4em .5em 0; - cursor: pointer; -} -.ui-dialog .ui-resizable-se { - width: 12px; - height: 12px; - right: -5px; - bottom: -5px; - background-position: 16px 16px; -} -.ui-draggable .ui-dialog-titlebar { - cursor: move; -} -.ui-draggable-handle { - -ms-touch-action: none; - touch-action: none; -} -.ui-menu { - list-style: none; - padding: 0; - margin: 0; - display: block; - outline: none; -} -.ui-menu .ui-menu { - position: absolute; -} -.ui-menu .ui-menu-item { - position: relative; - margin: 0; - padding: 3px 1em 3px .4em; - cursor: pointer; - min-height: 0; /* support: IE7 */ - /* support: IE10, see #8844 */ - list-style-image: url("data:image/gif;base64,R0lGODlhAQABAIAAAAAAAP///yH5BAEAAAAALAAAAAABAAEAAAIBRAA7"); -} -.ui-menu .ui-menu-divider { - margin: 5px 0; - height: 0; - font-size: 0; - line-height: 0; - border-width: 1px 0 0 0; -} -.ui-menu .ui-state-focus, -.ui-menu .ui-state-active { - margin: -1px; -} - -/* icon support */ -.ui-menu-icons { - position: relative; -} -.ui-menu-icons .ui-menu-item { - padding-left: 2em; -} - -/* left-aligned */ -.ui-menu .ui-icon { - position: absolute; - top: 0; - bottom: 0; - left: .2em; - margin: auto 0; -} - -/* right-aligned */ -.ui-menu .ui-menu-icon { - left: auto; - right: 0; -} -.ui-progressbar { - height: 2em; - text-align: left; - overflow: hidden; -} 
-.ui-progressbar .ui-progressbar-value { - margin: -1px; - height: 100%; -} -.ui-progressbar .ui-progressbar-overlay { - background: url("data:image/gif;base64,R0lGODlhKAAoAIABAAAAAP///yH/C05FVFNDQVBFMi4wAwEAAAAh+QQJAQABACwAAAAAKAAoAAACkYwNqXrdC52DS06a7MFZI+4FHBCKoDeWKXqymPqGqxvJrXZbMx7Ttc+w9XgU2FB3lOyQRWET2IFGiU9m1frDVpxZZc6bfHwv4c1YXP6k1Vdy292Fb6UkuvFtXpvWSzA+HycXJHUXiGYIiMg2R6W459gnWGfHNdjIqDWVqemH2ekpObkpOlppWUqZiqr6edqqWQAAIfkECQEAAQAsAAAAACgAKAAAApSMgZnGfaqcg1E2uuzDmmHUBR8Qil95hiPKqWn3aqtLsS18y7G1SzNeowWBENtQd+T1JktP05nzPTdJZlR6vUxNWWjV+vUWhWNkWFwxl9VpZRedYcflIOLafaa28XdsH/ynlcc1uPVDZxQIR0K25+cICCmoqCe5mGhZOfeYSUh5yJcJyrkZWWpaR8doJ2o4NYq62lAAACH5BAkBAAEALAAAAAAoACgAAAKVDI4Yy22ZnINRNqosw0Bv7i1gyHUkFj7oSaWlu3ovC8GxNso5fluz3qLVhBVeT/Lz7ZTHyxL5dDalQWPVOsQWtRnuwXaFTj9jVVh8pma9JjZ4zYSj5ZOyma7uuolffh+IR5aW97cHuBUXKGKXlKjn+DiHWMcYJah4N0lYCMlJOXipGRr5qdgoSTrqWSq6WFl2ypoaUAAAIfkECQEAAQAsAAAAACgAKAAAApaEb6HLgd/iO7FNWtcFWe+ufODGjRfoiJ2akShbueb0wtI50zm02pbvwfWEMWBQ1zKGlLIhskiEPm9R6vRXxV4ZzWT2yHOGpWMyorblKlNp8HmHEb/lCXjcW7bmtXP8Xt229OVWR1fod2eWqNfHuMjXCPkIGNileOiImVmCOEmoSfn3yXlJWmoHGhqp6ilYuWYpmTqKUgAAIfkECQEAAQAsAAAAACgAKAAAApiEH6kb58biQ3FNWtMFWW3eNVcojuFGfqnZqSebuS06w5V80/X02pKe8zFwP6EFWOT1lDFk8rGERh1TTNOocQ61Hm4Xm2VexUHpzjymViHrFbiELsefVrn6XKfnt2Q9G/+Xdie499XHd2g4h7ioOGhXGJboGAnXSBnoBwKYyfioubZJ2Hn0RuRZaflZOil56Zp6iioKSXpUAAAh+QQJAQABACwAAAAAKAAoAAACkoQRqRvnxuI7kU1a1UU5bd5tnSeOZXhmn5lWK3qNTWvRdQxP8qvaC+/yaYQzXO7BMvaUEmJRd3TsiMAgswmNYrSgZdYrTX6tSHGZO73ezuAw2uxuQ+BbeZfMxsexY35+/Qe4J1inV0g4x3WHuMhIl2jXOKT2Q+VU5fgoSUI52VfZyfkJGkha6jmY+aaYdirq+lQAACH5BAkBAAEALAAAAAAoACgAAAKWBIKpYe0L3YNKToqswUlvznigd4wiR4KhZrKt9Upqip61i9E3vMvxRdHlbEFiEXfk9YARYxOZZD6VQ2pUunBmtRXo1Lf8hMVVcNl8JafV38aM2/Fu5V16Bn63r6xt97j09+MXSFi4BniGFae3hzbH9+hYBzkpuUh5aZmHuanZOZgIuvbGiNeomCnaxxap2upaCZsq+1kAACH5BAkBAAEALAAAAAAoACgAAAKXjI8By5zf4kOxTVrXNVlv1X0d8IGZGKLnNpYtm8Lr9cqVeuOSvfOW79D9aDHizNhDJidFZhNydEahOaDH6nomtJjp1tutKoNWkvA6JqfRVLHU/QUfau9l2x7G54d1fl995xcIGAdXqMfBNadoYrhH+Mg2KBlpVpbluCiXmMnZ2Sh4GBqJ+ckI
OqqJ6LmKSllZmsoq6wpQAAAh+QQJAQABACwAAAAAKAAoAAAClYx/oLvoxuJDkU1a1YUZbJ59nSd2ZXhWqbRa2/gF8Gu2DY3iqs7yrq+xBYEkYvFSM8aSSObE+ZgRl1BHFZNr7pRCavZ5BW2142hY3AN/zWtsmf12p9XxxFl2lpLn1rseztfXZjdIWIf2s5dItwjYKBgo9yg5pHgzJXTEeGlZuenpyPmpGQoKOWkYmSpaSnqKileI2FAAACH5BAkBAAEALAAAAAAoACgAAAKVjB+gu+jG4kORTVrVhRlsnn2dJ3ZleFaptFrb+CXmO9OozeL5VfP99HvAWhpiUdcwkpBH3825AwYdU8xTqlLGhtCosArKMpvfa1mMRae9VvWZfeB2XfPkeLmm18lUcBj+p5dnN8jXZ3YIGEhYuOUn45aoCDkp16hl5IjYJvjWKcnoGQpqyPlpOhr3aElaqrq56Bq7VAAAOw=="); - height: 100%; - filter: alpha(opacity=25); /* support: IE8 */ - opacity: 0.25; -} -.ui-progressbar-indeterminate .ui-progressbar-value { - background-image: none; -} -.ui-resizable { - position: relative; -} -.ui-resizable-handle { - position: absolute; - font-size: 0.1px; - display: block; - -ms-touch-action: none; - touch-action: none; -} -.ui-resizable-disabled .ui-resizable-handle, -.ui-resizable-autohide .ui-resizable-handle { - display: none; -} -.ui-resizable-n { - cursor: n-resize; - height: 7px; - width: 100%; - top: -5px; - left: 0; -} -.ui-resizable-s { - cursor: s-resize; - height: 7px; - width: 100%; - bottom: -5px; - left: 0; -} -.ui-resizable-e { - cursor: e-resize; - width: 7px; - right: -5px; - top: 0; - height: 100%; -} -.ui-resizable-w { - cursor: w-resize; - width: 7px; - left: -5px; - top: 0; - height: 100%; -} -.ui-resizable-se { - cursor: se-resize; - width: 12px; - height: 12px; - right: 1px; - bottom: 1px; -} -.ui-resizable-sw { - cursor: sw-resize; - width: 9px; - height: 9px; - left: -5px; - bottom: -5px; -} -.ui-resizable-nw { - cursor: nw-resize; - width: 9px; - height: 9px; - left: -5px; - top: -5px; -} -.ui-resizable-ne { - cursor: ne-resize; - width: 9px; - height: 9px; - right: -5px; - top: -5px; -} -.ui-selectable { - -ms-touch-action: none; - touch-action: none; -} -.ui-selectable-helper { - position: absolute; - z-index: 100; - border: 1px dotted black; -} -.ui-selectmenu-menu { - padding: 0; - margin: 0; - position: absolute; - top: 0; - left: 0; - 
display: none; -} -.ui-selectmenu-menu .ui-menu { - overflow: auto; - /* Support: IE7 */ - overflow-x: hidden; - padding-bottom: 1px; -} -.ui-selectmenu-menu .ui-menu .ui-selectmenu-optgroup { - font-size: 1em; - font-weight: bold; - line-height: 1.5; - padding: 2px 0.4em; - margin: 0.5em 0 0 0; - height: auto; - border: 0; -} -.ui-selectmenu-open { - display: block; -} -.ui-selectmenu-button { - display: inline-block; - overflow: hidden; - position: relative; - text-decoration: none; - cursor: pointer; -} -.ui-selectmenu-button span.ui-icon { - right: 0.5em; - left: auto; - margin-top: -8px; - position: absolute; - top: 50%; -} -.ui-selectmenu-button span.ui-selectmenu-text { - text-align: left; - padding: 0.4em 2.1em 0.4em 1em; - display: block; - line-height: 1.4; - overflow: hidden; - text-overflow: ellipsis; - white-space: nowrap; -} -.ui-slider { - position: relative; - text-align: left; -} -.ui-slider .ui-slider-handle { - position: absolute; - z-index: 2; - width: 1.2em; - height: 1.2em; - cursor: default; - -ms-touch-action: none; - touch-action: none; -} -.ui-slider .ui-slider-range { - position: absolute; - z-index: 1; - font-size: .7em; - display: block; - border: 0; - background-position: 0 0; -} - -/* support: IE8 - See #6727 */ -.ui-slider.ui-state-disabled .ui-slider-handle, -.ui-slider.ui-state-disabled .ui-slider-range { - filter: inherit; -} - -.ui-slider-horizontal { - height: .8em; -} -.ui-slider-horizontal .ui-slider-handle { - top: -.3em; - margin-left: -.6em; -} -.ui-slider-horizontal .ui-slider-range { - top: 0; - height: 100%; -} -.ui-slider-horizontal .ui-slider-range-min { - left: 0; -} -.ui-slider-horizontal .ui-slider-range-max { - right: 0; -} - -.ui-slider-vertical { - width: .8em; - height: 100px; -} -.ui-slider-vertical .ui-slider-handle { - left: -.3em; - margin-left: 0; - margin-bottom: -.6em; -} -.ui-slider-vertical .ui-slider-range { - left: 0; - width: 100%; -} -.ui-slider-vertical .ui-slider-range-min { - bottom: 0; -} 
-.ui-slider-vertical .ui-slider-range-max { - top: 0; -} -.ui-sortable-handle { - -ms-touch-action: none; - touch-action: none; -} -.ui-spinner { - position: relative; - display: inline-block; - overflow: hidden; - padding: 0; - vertical-align: middle; -} -.ui-spinner-input { - border: none; - background: none; - color: inherit; - padding: 0; - margin: .2em 0; - vertical-align: middle; - margin-left: .4em; - margin-right: 22px; -} -.ui-spinner-button { - width: 16px; - height: 50%; - font-size: .5em; - padding: 0; - margin: 0; - text-align: center; - position: absolute; - cursor: default; - display: block; - overflow: hidden; - right: 0; -} -/* more specificity required here to override default borders */ -.ui-spinner a.ui-spinner-button { - border-top: none; - border-bottom: none; - border-right: none; -} -/* vertically center icon */ -.ui-spinner .ui-icon { - position: absolute; - margin-top: -8px; - top: 50%; - left: 0; -} -.ui-spinner-up { - top: 0; -} -.ui-spinner-down { - bottom: 0; -} - -/* TR overrides */ -.ui-spinner .ui-icon-triangle-1-s { - /* need to fix icons sprite */ - background-position: -65px -16px; -} -.ui-tabs { - position: relative;/* position: relative prevents IE scroll bug (element with position: relative inside container with overflow: auto appear as "fixed") */ - padding: .2em; -} -.ui-tabs .ui-tabs-nav { - margin: 0; - padding: .2em .2em 0; -} -.ui-tabs .ui-tabs-nav li { - list-style: none; - float: left; - position: relative; - top: 0; - margin: 1px .2em 0 0; - border-bottom-width: 0; - padding: 0; - white-space: nowrap; -} -.ui-tabs .ui-tabs-nav .ui-tabs-anchor { - float: left; - padding: .5em 1em; - text-decoration: none; -} -.ui-tabs .ui-tabs-nav li.ui-tabs-active { - margin-bottom: -1px; - padding-bottom: 1px; -} -.ui-tabs .ui-tabs-nav li.ui-tabs-active .ui-tabs-anchor, -.ui-tabs .ui-tabs-nav li.ui-state-disabled .ui-tabs-anchor, -.ui-tabs .ui-tabs-nav li.ui-tabs-loading .ui-tabs-anchor { - cursor: text; -} -.ui-tabs-collapsible 
.ui-tabs-nav li.ui-tabs-active .ui-tabs-anchor { - cursor: pointer; -} -.ui-tabs .ui-tabs-panel { - display: block; - border-width: 0; - padding: 1em 1.4em; - background: none; -} -.ui-tooltip { - padding: 8px; - position: absolute; - z-index: 9999; - max-width: 300px; - -webkit-box-shadow: 0 0 5px #aaa; - box-shadow: 0 0 5px #aaa; -} -body .ui-tooltip { - border-width: 2px; -} - -/* Component containers -----------------------------------*/ -.ui-widget { - font-family: Verdana,Arial,sans-serif; - font-size: 1.1em; -} -.ui-widget .ui-widget { - font-size: 1em; -} -.ui-widget input, -.ui-widget select, -.ui-widget textarea, -.ui-widget button { - font-family: Verdana,Arial,sans-serif; - font-size: 1em; -} -.ui-widget-content { - border: 1px solid #aaaaaa; - background: #ffffff url("images/ui-bg_flat_75_ffffff_40x100.png") 50% 50% repeat-x; - color: #222222; -} -.ui-widget-content a { - color: #222222; -} -.ui-widget-header { - border: 1px solid #aaaaaa; - background: #cccccc url("images/ui-bg_highlight-soft_75_cccccc_1x100.png") 50% 50% repeat-x; - color: #222222; - font-weight: bold; -} -.ui-widget-header a { - color: #222222; -} - -/* Interaction states -----------------------------------*/ -.ui-state-default, -.ui-widget-content .ui-state-default, -.ui-widget-header .ui-state-default { - border: 1px solid #d3d3d3; - background: #e6e6e6 url("images/ui-bg_glass_75_e6e6e6_1x400.png") 50% 50% repeat-x; - font-weight: normal; - color: #555555; -} -.ui-state-default a, -.ui-state-default a:link, -.ui-state-default a:visited { - color: #555555; - text-decoration: none; -} -.ui-state-hover, -.ui-widget-content .ui-state-hover, -.ui-widget-header .ui-state-hover, -.ui-state-focus, -.ui-widget-content .ui-state-focus, -.ui-widget-header .ui-state-focus { - border: 1px solid #999999; - background: #dadada url("images/ui-bg_glass_75_dadada_1x400.png") 50% 50% repeat-x; - font-weight: normal; - color: #212121; -} -.ui-state-hover a, -.ui-state-hover a:hover, -.ui-state-hover 
a:link, -.ui-state-hover a:visited, -.ui-state-focus a, -.ui-state-focus a:hover, -.ui-state-focus a:link, -.ui-state-focus a:visited { - color: #212121; - text-decoration: none; -} -.ui-state-active, -.ui-widget-content .ui-state-active, -.ui-widget-header .ui-state-active { - border: 1px solid #aaaaaa; - background: #ffffff url("images/ui-bg_glass_65_ffffff_1x400.png") 50% 50% repeat-x; - font-weight: normal; - color: #212121; -} -.ui-state-active a, -.ui-state-active a:link, -.ui-state-active a:visited { - color: #212121; - text-decoration: none; -} - -/* Interaction Cues -----------------------------------*/ -.ui-state-highlight, -.ui-widget-content .ui-state-highlight, -.ui-widget-header .ui-state-highlight { - border: 1px solid #fcefa1; - background: #fbf9ee url("images/ui-bg_glass_55_fbf9ee_1x400.png") 50% 50% repeat-x; - color: #363636; -} -.ui-state-highlight a, -.ui-widget-content .ui-state-highlight a, -.ui-widget-header .ui-state-highlight a { - color: #363636; -} -.ui-state-error, -.ui-widget-content .ui-state-error, -.ui-widget-header .ui-state-error { - border: 1px solid #cd0a0a; - background: #fef1ec url("images/ui-bg_glass_95_fef1ec_1x400.png") 50% 50% repeat-x; - color: #cd0a0a; -} -.ui-state-error a, -.ui-widget-content .ui-state-error a, -.ui-widget-header .ui-state-error a { - color: #cd0a0a; -} -.ui-state-error-text, -.ui-widget-content .ui-state-error-text, -.ui-widget-header .ui-state-error-text { - color: #cd0a0a; -} -.ui-priority-primary, -.ui-widget-content .ui-priority-primary, -.ui-widget-header .ui-priority-primary { - font-weight: bold; -} -.ui-priority-secondary, -.ui-widget-content .ui-priority-secondary, -.ui-widget-header .ui-priority-secondary { - opacity: .7; - filter:Alpha(Opacity=70); /* support: IE8 */ - font-weight: normal; -} -.ui-state-disabled, -.ui-widget-content .ui-state-disabled, -.ui-widget-header .ui-state-disabled { - opacity: .35; - filter:Alpha(Opacity=35); /* support: IE8 */ - background-image: none; -} 
-.ui-state-disabled .ui-icon { - filter:Alpha(Opacity=35); /* support: IE8 - See #6059 */ -} - -/* Icons -----------------------------------*/ - -/* states and images */ -.ui-icon { - width: 16px; - height: 16px; -} -.ui-icon, -.ui-widget-content .ui-icon { - background-image: url("images/ui-icons_222222_256x240.png"); -} -.ui-widget-header .ui-icon { - background-image: url("images/ui-icons_222222_256x240.png"); -} -.ui-state-default .ui-icon { - background-image: url("images/ui-icons_888888_256x240.png"); -} -.ui-state-hover .ui-icon, -.ui-state-focus .ui-icon { - background-image: url("images/ui-icons_454545_256x240.png"); -} -.ui-state-active .ui-icon { - background-image: url("images/ui-icons_454545_256x240.png"); -} -.ui-state-highlight .ui-icon { - background-image: url("images/ui-icons_2e83ff_256x240.png"); -} -.ui-state-error .ui-icon, -.ui-state-error-text .ui-icon { - background-image: url("images/ui-icons_cd0a0a_256x240.png"); -} - -/* positioning */ -.ui-icon-blank { background-position: 16px 16px; } -.ui-icon-carat-1-n { background-position: 0 0; } -.ui-icon-carat-1-ne { background-position: -16px 0; } -.ui-icon-carat-1-e { background-position: -32px 0; } -.ui-icon-carat-1-se { background-position: -48px 0; } -.ui-icon-carat-1-s { background-position: -64px 0; } -.ui-icon-carat-1-sw { background-position: -80px 0; } -.ui-icon-carat-1-w { background-position: -96px 0; } -.ui-icon-carat-1-nw { background-position: -112px 0; } -.ui-icon-carat-2-n-s { background-position: -128px 0; } -.ui-icon-carat-2-e-w { background-position: -144px 0; } -.ui-icon-triangle-1-n { background-position: 0 -16px; } -.ui-icon-triangle-1-ne { background-position: -16px -16px; } -.ui-icon-triangle-1-e { background-position: -32px -16px; } -.ui-icon-triangle-1-se { background-position: -48px -16px; } -.ui-icon-triangle-1-s { background-position: -64px -16px; } -.ui-icon-triangle-1-sw { background-position: -80px -16px; } -.ui-icon-triangle-1-w { background-position: -96px -16px; 
} -.ui-icon-triangle-1-nw { background-position: -112px -16px; } -.ui-icon-triangle-2-n-s { background-position: -128px -16px; } -.ui-icon-triangle-2-e-w { background-position: -144px -16px; } -.ui-icon-arrow-1-n { background-position: 0 -32px; } -.ui-icon-arrow-1-ne { background-position: -16px -32px; } -.ui-icon-arrow-1-e { background-position: -32px -32px; } -.ui-icon-arrow-1-se { background-position: -48px -32px; } -.ui-icon-arrow-1-s { background-position: -64px -32px; } -.ui-icon-arrow-1-sw { background-position: -80px -32px; } -.ui-icon-arrow-1-w { background-position: -96px -32px; } -.ui-icon-arrow-1-nw { background-position: -112px -32px; } -.ui-icon-arrow-2-n-s { background-position: -128px -32px; } -.ui-icon-arrow-2-ne-sw { background-position: -144px -32px; } -.ui-icon-arrow-2-e-w { background-position: -160px -32px; } -.ui-icon-arrow-2-se-nw { background-position: -176px -32px; } -.ui-icon-arrowstop-1-n { background-position: -192px -32px; } -.ui-icon-arrowstop-1-e { background-position: -208px -32px; } -.ui-icon-arrowstop-1-s { background-position: -224px -32px; } -.ui-icon-arrowstop-1-w { background-position: -240px -32px; } -.ui-icon-arrowthick-1-n { background-position: 0 -48px; } -.ui-icon-arrowthick-1-ne { background-position: -16px -48px; } -.ui-icon-arrowthick-1-e { background-position: -32px -48px; } -.ui-icon-arrowthick-1-se { background-position: -48px -48px; } -.ui-icon-arrowthick-1-s { background-position: -64px -48px; } -.ui-icon-arrowthick-1-sw { background-position: -80px -48px; } -.ui-icon-arrowthick-1-w { background-position: -96px -48px; } -.ui-icon-arrowthick-1-nw { background-position: -112px -48px; } -.ui-icon-arrowthick-2-n-s { background-position: -128px -48px; } -.ui-icon-arrowthick-2-ne-sw { background-position: -144px -48px; } -.ui-icon-arrowthick-2-e-w { background-position: -160px -48px; } -.ui-icon-arrowthick-2-se-nw { background-position: -176px -48px; } -.ui-icon-arrowthickstop-1-n { background-position: -192px -48px; } 
-.ui-icon-arrowthickstop-1-e { background-position: -208px -48px; } -.ui-icon-arrowthickstop-1-s { background-position: -224px -48px; } -.ui-icon-arrowthickstop-1-w { background-position: -240px -48px; } -.ui-icon-arrowreturnthick-1-w { background-position: 0 -64px; } -.ui-icon-arrowreturnthick-1-n { background-position: -16px -64px; } -.ui-icon-arrowreturnthick-1-e { background-position: -32px -64px; } -.ui-icon-arrowreturnthick-1-s { background-position: -48px -64px; } -.ui-icon-arrowreturn-1-w { background-position: -64px -64px; } -.ui-icon-arrowreturn-1-n { background-position: -80px -64px; } -.ui-icon-arrowreturn-1-e { background-position: -96px -64px; } -.ui-icon-arrowreturn-1-s { background-position: -112px -64px; } -.ui-icon-arrowrefresh-1-w { background-position: -128px -64px; } -.ui-icon-arrowrefresh-1-n { background-position: -144px -64px; } -.ui-icon-arrowrefresh-1-e { background-position: -160px -64px; } -.ui-icon-arrowrefresh-1-s { background-position: -176px -64px; } -.ui-icon-arrow-4 { background-position: 0 -80px; } -.ui-icon-arrow-4-diag { background-position: -16px -80px; } -.ui-icon-extlink { background-position: -32px -80px; } -.ui-icon-newwin { background-position: -48px -80px; } -.ui-icon-refresh { background-position: -64px -80px; } -.ui-icon-shuffle { background-position: -80px -80px; } -.ui-icon-transfer-e-w { background-position: -96px -80px; } -.ui-icon-transferthick-e-w { background-position: -112px -80px; } -.ui-icon-folder-collapsed { background-position: 0 -96px; } -.ui-icon-folder-open { background-position: -16px -96px; } -.ui-icon-document { background-position: -32px -96px; } -.ui-icon-document-b { background-position: -48px -96px; } -.ui-icon-note { background-position: -64px -96px; } -.ui-icon-mail-closed { background-position: -80px -96px; } -.ui-icon-mail-open { background-position: -96px -96px; } -.ui-icon-suitcase { background-position: -112px -96px; } -.ui-icon-comment { background-position: -128px -96px; } 
-.ui-icon-person { background-position: -144px -96px; } -.ui-icon-print { background-position: -160px -96px; } -.ui-icon-trash { background-position: -176px -96px; } -.ui-icon-locked { background-position: -192px -96px; } -.ui-icon-unlocked { background-position: -208px -96px; } -.ui-icon-bookmark { background-position: -224px -96px; } -.ui-icon-tag { background-position: -240px -96px; } -.ui-icon-home { background-position: 0 -112px; } -.ui-icon-flag { background-position: -16px -112px; } -.ui-icon-calendar { background-position: -32px -112px; } -.ui-icon-cart { background-position: -48px -112px; } -.ui-icon-pencil { background-position: -64px -112px; } -.ui-icon-clock { background-position: -80px -112px; } -.ui-icon-disk { background-position: -96px -112px; } -.ui-icon-calculator { background-position: -112px -112px; } -.ui-icon-zoomin { background-position: -128px -112px; } -.ui-icon-zoomout { background-position: -144px -112px; } -.ui-icon-search { background-position: -160px -112px; } -.ui-icon-wrench { background-position: -176px -112px; } -.ui-icon-gear { background-position: -192px -112px; } -.ui-icon-heart { background-position: -208px -112px; } -.ui-icon-star { background-position: -224px -112px; } -.ui-icon-link { background-position: -240px -112px; } -.ui-icon-cancel { background-position: 0 -128px; } -.ui-icon-plus { background-position: -16px -128px; } -.ui-icon-plusthick { background-position: -32px -128px; } -.ui-icon-minus { background-position: -48px -128px; } -.ui-icon-minusthick { background-position: -64px -128px; } -.ui-icon-close { background-position: -80px -128px; } -.ui-icon-closethick { background-position: -96px -128px; } -.ui-icon-key { background-position: -112px -128px; } -.ui-icon-lightbulb { background-position: -128px -128px; } -.ui-icon-scissors { background-position: -144px -128px; } -.ui-icon-clipboard { background-position: -160px -128px; } -.ui-icon-copy { background-position: -176px -128px; } -.ui-icon-contact { 
background-position: -192px -128px; } -.ui-icon-image { background-position: -208px -128px; } -.ui-icon-video { background-position: -224px -128px; } -.ui-icon-script { background-position: -240px -128px; } -.ui-icon-alert { background-position: 0 -144px; } -.ui-icon-info { background-position: -16px -144px; } -.ui-icon-notice { background-position: -32px -144px; } -.ui-icon-help { background-position: -48px -144px; } -.ui-icon-check { background-position: -64px -144px; } -.ui-icon-bullet { background-position: -80px -144px; } -.ui-icon-radio-on { background-position: -96px -144px; } -.ui-icon-radio-off { background-position: -112px -144px; } -.ui-icon-pin-w { background-position: -128px -144px; } -.ui-icon-pin-s { background-position: -144px -144px; } -.ui-icon-play { background-position: 0 -160px; } -.ui-icon-pause { background-position: -16px -160px; } -.ui-icon-seek-next { background-position: -32px -160px; } -.ui-icon-seek-prev { background-position: -48px -160px; } -.ui-icon-seek-end { background-position: -64px -160px; } -.ui-icon-seek-start { background-position: -80px -160px; } -/* ui-icon-seek-first is deprecated, use ui-icon-seek-start instead */ -.ui-icon-seek-first { background-position: -80px -160px; } -.ui-icon-stop { background-position: -96px -160px; } -.ui-icon-eject { background-position: -112px -160px; } -.ui-icon-volume-off { background-position: -128px -160px; } -.ui-icon-volume-on { background-position: -144px -160px; } -.ui-icon-power { background-position: 0 -176px; } -.ui-icon-signal-diag { background-position: -16px -176px; } -.ui-icon-signal { background-position: -32px -176px; } -.ui-icon-battery-0 { background-position: -48px -176px; } -.ui-icon-battery-1 { background-position: -64px -176px; } -.ui-icon-battery-2 { background-position: -80px -176px; } -.ui-icon-battery-3 { background-position: -96px -176px; } -.ui-icon-circle-plus { background-position: 0 -192px; } -.ui-icon-circle-minus { background-position: -16px -192px; } 
-.ui-icon-circle-close { background-position: -32px -192px; } -.ui-icon-circle-triangle-e { background-position: -48px -192px; } -.ui-icon-circle-triangle-s { background-position: -64px -192px; } -.ui-icon-circle-triangle-w { background-position: -80px -192px; } -.ui-icon-circle-triangle-n { background-position: -96px -192px; } -.ui-icon-circle-arrow-e { background-position: -112px -192px; } -.ui-icon-circle-arrow-s { background-position: -128px -192px; } -.ui-icon-circle-arrow-w { background-position: -144px -192px; } -.ui-icon-circle-arrow-n { background-position: -160px -192px; } -.ui-icon-circle-zoomin { background-position: -176px -192px; } -.ui-icon-circle-zoomout { background-position: -192px -192px; } -.ui-icon-circle-check { background-position: -208px -192px; } -.ui-icon-circlesmall-plus { background-position: 0 -208px; } -.ui-icon-circlesmall-minus { background-position: -16px -208px; } -.ui-icon-circlesmall-close { background-position: -32px -208px; } -.ui-icon-squaresmall-plus { background-position: -48px -208px; } -.ui-icon-squaresmall-minus { background-position: -64px -208px; } -.ui-icon-squaresmall-close { background-position: -80px -208px; } -.ui-icon-grip-dotted-vertical { background-position: 0 -224px; } -.ui-icon-grip-dotted-horizontal { background-position: -16px -224px; } -.ui-icon-grip-solid-vertical { background-position: -32px -224px; } -.ui-icon-grip-solid-horizontal { background-position: -48px -224px; } -.ui-icon-gripsmall-diagonal-se { background-position: -64px -224px; } -.ui-icon-grip-diagonal-se { background-position: -80px -224px; } - - -/* Misc visuals -----------------------------------*/ - -/* Corner radius */ -.ui-corner-all, -.ui-corner-top, -.ui-corner-left, -.ui-corner-tl { - border-top-left-radius: 4px; -} -.ui-corner-all, -.ui-corner-top, -.ui-corner-right, -.ui-corner-tr { - border-top-right-radius: 4px; -} -.ui-corner-all, -.ui-corner-bottom, -.ui-corner-left, -.ui-corner-bl { - border-bottom-left-radius: 4px; -} 
-.ui-corner-all, -.ui-corner-bottom, -.ui-corner-right, -.ui-corner-br { - border-bottom-right-radius: 4px; -} - -/* Overlays */ -.ui-widget-overlay { - background: #aaaaaa url("images/ui-bg_flat_0_aaaaaa_40x100.png") 50% 50% repeat-x; - opacity: .3; - filter: Alpha(Opacity=30); /* support: IE8 */ -} -.ui-widget-shadow { - margin: -8px 0 0 -8px; - padding: 8px; - background: #aaaaaa url("images/ui-bg_flat_0_aaaaaa_40x100.png") 50% 50% repeat-x; - opacity: .3; - filter: Alpha(Opacity=30); /* support: IE8 */ - border-radius: 8px; -} +/*! jQuery UI - v1.13.2 - 2022-07-14 +* http://jqueryui.com +* Includes: core.css, accordion.css, autocomplete.css, menu.css, button.css, controlgroup.css, checkboxradio.css, datepicker.css, dialog.css, draggable.css, resizable.css, progressbar.css, selectable.css, selectmenu.css, slider.css, sortable.css, spinner.css, tabs.css, tooltip.css, theme.css +* To view and modify this theme, visit http://jqueryui.com/themeroller/?bgShadowXPos=&bgOverlayXPos=&bgErrorXPos=&bgHighlightXPos=&bgContentXPos=&bgHeaderXPos=&bgActiveXPos=&bgHoverXPos=&bgDefaultXPos=&bgShadowYPos=&bgOverlayYPos=&bgErrorYPos=&bgHighlightYPos=&bgContentYPos=&bgHeaderYPos=&bgActiveYPos=&bgHoverYPos=&bgDefaultYPos=&bgShadowRepeat=&bgOverlayRepeat=&bgErrorRepeat=&bgHighlightRepeat=&bgContentRepeat=&bgHeaderRepeat=&bgActiveRepeat=&bgHoverRepeat=&bgDefaultRepeat=&iconsHover=url(%22images%2Fui-icons_555555_256x240.png%22)&iconsHighlight=url(%22images%2Fui-icons_777620_256x240.png%22)&iconsHeader=url(%22images%2Fui-icons_444444_256x240.png%22)&iconsError=url(%22images%2Fui-icons_cc0000_256x240.png%22)&iconsDefault=url(%22images%2Fui-icons_777777_256x240.png%22)&iconsContent=url(%22images%2Fui-icons_444444_256x240.png%22)&iconsActive=url(%22images%2Fui-icons_ffffff_256x240.png%22)&bgImgUrlShadow=&bgImgUrlOverlay=&bgImgUrlHover=&bgImgUrlHighlight=&bgImgUrlHeader=&bgImgUrlError=&bgImgUrlDefault=&bgImgUrlContent=&bgImgUrlActive=&opacityFilterShadow=Alpha(Opacity%3D30)&opacityFilterOve
rlay=Alpha(Opacity%3D30)&opacityShadowPerc=30&opacityOverlayPerc=30&iconColorHover=%23555555&iconColorHighlight=%23777620&iconColorHeader=%23444444&iconColorError=%23cc0000&iconColorDefault=%23777777&iconColorContent=%23444444&iconColorActive=%23ffffff&bgImgOpacityShadow=0&bgImgOpacityOverlay=0&bgImgOpacityError=95&bgImgOpacityHighlight=55&bgImgOpacityContent=75&bgImgOpacityHeader=75&bgImgOpacityActive=65&bgImgOpacityHover=75&bgImgOpacityDefault=75&bgTextureShadow=flat&bgTextureOverlay=flat&bgTextureError=flat&bgTextureHighlight=flat&bgTextureContent=flat&bgTextureHeader=flat&bgTextureActive=flat&bgTextureHover=flat&bgTextureDefault=flat&cornerRadius=3px&fwDefault=normal&ffDefault=Arial%2CHelvetica%2Csans-serif&fsDefault=1em&cornerRadiusShadow=8px&thicknessShadow=5px&offsetLeftShadow=0px&offsetTopShadow=0px&opacityShadow=.3&bgColorShadow=%23666666&opacityOverlay=.3&bgColorOverlay=%23aaaaaa&fcError=%235f3f3f&borderColorError=%23f1a899&bgColorError=%23fddfdf&fcHighlight=%23777620&borderColorHighlight=%23dad55e&bgColorHighlight=%23fffa90&fcContent=%23333333&borderColorContent=%23dddddd&bgColorContent=%23ffffff&fcHeader=%23333333&borderColorHeader=%23dddddd&bgColorHeader=%23e9e9e9&fcActive=%23ffffff&borderColorActive=%23003eff&bgColorActive=%23007fff&fcHover=%232b2b2b&borderColorHover=%23cccccc&bgColorHover=%23ededed&fcDefault=%23454545&borderColorDefault=%23c5c5c5&bgColorDefault=%23f6f6f6 +* Copyright jQuery Foundation and other contributors; Licensed MIT */ + +/* Layout helpers +----------------------------------*/ +.ui-helper-hidden { + display: none; +} +.ui-helper-hidden-accessible { + border: 0; + clip: rect(0 0 0 0); + height: 1px; + margin: -1px; + overflow: hidden; + padding: 0; + position: absolute; + width: 1px; +} +.ui-helper-reset { + margin: 0; + padding: 0; + border: 0; + outline: 0; + line-height: 1.3; + text-decoration: none; + font-size: 100%; + list-style: none; +} +.ui-helper-clearfix:before, +.ui-helper-clearfix:after { + content: ""; + display: 
table; + border-collapse: collapse; +} +.ui-helper-clearfix:after { + clear: both; +} +.ui-helper-zfix { + width: 100%; + height: 100%; + top: 0; + left: 0; + position: absolute; + opacity: 0; + -ms-filter: "alpha(opacity=0)"; /* support: IE8 */ +} + +.ui-front { + z-index: 100; +} + + +/* Interaction Cues +----------------------------------*/ +.ui-state-disabled { + cursor: default !important; + pointer-events: none; +} + + +/* Icons +----------------------------------*/ +.ui-icon { + display: inline-block; + vertical-align: middle; + margin-top: -.25em; + position: relative; + text-indent: -99999px; + overflow: hidden; + background-repeat: no-repeat; +} + +.ui-widget-icon-block { + left: 50%; + margin-left: -8px; + display: block; +} + +/* Misc visuals +----------------------------------*/ + +/* Overlays */ +.ui-widget-overlay { + position: fixed; + top: 0; + left: 0; + width: 100%; + height: 100%; +} +.ui-accordion .ui-accordion-header { + display: block; + cursor: pointer; + position: relative; + margin: 2px 0 0 0; + padding: .5em .5em .5em .7em; + font-size: 100%; +} +.ui-accordion .ui-accordion-content { + padding: 1em 2.2em; + border-top: 0; + overflow: auto; +} +.ui-autocomplete { + position: absolute; + top: 0; + left: 0; + cursor: default; +} +.ui-menu { + list-style: none; + padding: 0; + margin: 0; + display: block; + outline: 0; +} +.ui-menu .ui-menu { + position: absolute; +} +.ui-menu .ui-menu-item { + margin: 0; + cursor: pointer; + /* support: IE10, see #8844 */ + list-style-image: url("data:image/gif;base64,R0lGODlhAQABAIAAAAAAAP///yH5BAEAAAAALAAAAAABAAEAAAIBRAA7"); +} +.ui-menu .ui-menu-item-wrapper { + position: relative; + padding: 3px 1em 3px .4em; +} +.ui-menu .ui-menu-divider { + margin: 5px 0; + height: 0; + font-size: 0; + line-height: 0; + border-width: 1px 0 0 0; +} +.ui-menu .ui-state-focus, +.ui-menu .ui-state-active { + margin: -1px; +} + +/* icon support */ +.ui-menu-icons { + position: relative; +} +.ui-menu-icons 
.ui-menu-item-wrapper { + padding-left: 2em; +} + +/* left-aligned */ +.ui-menu .ui-icon { + position: absolute; + top: 0; + bottom: 0; + left: .2em; + margin: auto 0; +} + +/* right-aligned */ +.ui-menu .ui-menu-icon { + left: auto; + right: 0; +} +.ui-button { + padding: .4em 1em; + display: inline-block; + position: relative; + line-height: normal; + margin-right: .1em; + cursor: pointer; + vertical-align: middle; + text-align: center; + -webkit-user-select: none; + -moz-user-select: none; + -ms-user-select: none; + user-select: none; + + /* Support: IE <= 11 */ + overflow: visible; +} + +.ui-button, +.ui-button:link, +.ui-button:visited, +.ui-button:hover, +.ui-button:active { + text-decoration: none; +} + +/* to make room for the icon, a width needs to be set here */ +.ui-button-icon-only { + width: 2em; + box-sizing: border-box; + text-indent: -9999px; + white-space: nowrap; +} + +/* no icon support for input elements */ +input.ui-button.ui-button-icon-only { + text-indent: 0; +} + +/* button icon element(s) */ +.ui-button-icon-only .ui-icon { + position: absolute; + top: 50%; + left: 50%; + margin-top: -8px; + margin-left: -8px; +} + +.ui-button.ui-icon-notext .ui-icon { + padding: 0; + width: 2.1em; + height: 2.1em; + text-indent: -9999px; + white-space: nowrap; + +} + +input.ui-button.ui-icon-notext .ui-icon { + width: auto; + height: auto; + text-indent: 0; + white-space: normal; + padding: .4em 1em; +} + +/* workarounds */ +/* Support: Firefox 5 - 40 */ +input.ui-button::-moz-focus-inner, +button.ui-button::-moz-focus-inner { + border: 0; + padding: 0; +} +.ui-controlgroup { + vertical-align: middle; + display: inline-block; +} +.ui-controlgroup > .ui-controlgroup-item { + float: left; + margin-left: 0; + margin-right: 0; +} +.ui-controlgroup > .ui-controlgroup-item:focus, +.ui-controlgroup > .ui-controlgroup-item.ui-visual-focus { + z-index: 9999; +} +.ui-controlgroup-vertical > .ui-controlgroup-item { + display: block; + float: none; + width: 100%; + 
margin-top: 0; + margin-bottom: 0; + text-align: left; +} +.ui-controlgroup-vertical .ui-controlgroup-item { + box-sizing: border-box; +} +.ui-controlgroup .ui-controlgroup-label { + padding: .4em 1em; +} +.ui-controlgroup .ui-controlgroup-label span { + font-size: 80%; +} +.ui-controlgroup-horizontal .ui-controlgroup-label + .ui-controlgroup-item { + border-left: none; +} +.ui-controlgroup-vertical .ui-controlgroup-label + .ui-controlgroup-item { + border-top: none; +} +.ui-controlgroup-horizontal .ui-controlgroup-label.ui-widget-content { + border-right: none; +} +.ui-controlgroup-vertical .ui-controlgroup-label.ui-widget-content { + border-bottom: none; +} + +/* Spinner specific style fixes */ +.ui-controlgroup-vertical .ui-spinner-input { + + /* Support: IE8 only, Android < 4.4 only */ + width: 75%; + width: calc( 100% - 2.4em ); +} +.ui-controlgroup-vertical .ui-spinner .ui-spinner-up { + border-top-style: solid; +} + +.ui-checkboxradio-label .ui-icon-background { + box-shadow: inset 1px 1px 1px #ccc; + border-radius: .12em; + border: none; +} +.ui-checkboxradio-radio-label .ui-icon-background { + width: 16px; + height: 16px; + border-radius: 1em; + overflow: visible; + border: none; +} +.ui-checkboxradio-radio-label.ui-checkboxradio-checked .ui-icon, +.ui-checkboxradio-radio-label.ui-checkboxradio-checked:hover .ui-icon { + background-image: none; + width: 8px; + height: 8px; + border-width: 4px; + border-style: solid; +} +.ui-checkboxradio-disabled { + pointer-events: none; +} +.ui-datepicker { + width: 17em; + padding: .2em .2em 0; + display: none; +} +.ui-datepicker .ui-datepicker-header { + position: relative; + padding: .2em 0; +} +.ui-datepicker .ui-datepicker-prev, +.ui-datepicker .ui-datepicker-next { + position: absolute; + top: 2px; + width: 1.8em; + height: 1.8em; +} +.ui-datepicker .ui-datepicker-prev-hover, +.ui-datepicker .ui-datepicker-next-hover { + top: 1px; +} +.ui-datepicker .ui-datepicker-prev { + left: 2px; +} +.ui-datepicker 
.ui-datepicker-next { + right: 2px; +} +.ui-datepicker .ui-datepicker-prev-hover { + left: 1px; +} +.ui-datepicker .ui-datepicker-next-hover { + right: 1px; +} +.ui-datepicker .ui-datepicker-prev span, +.ui-datepicker .ui-datepicker-next span { + display: block; + position: absolute; + left: 50%; + margin-left: -8px; + top: 50%; + margin-top: -8px; +} +.ui-datepicker .ui-datepicker-title { + margin: 0 2.3em; + line-height: 1.8em; + text-align: center; +} +.ui-datepicker .ui-datepicker-title select { + font-size: 1em; + margin: 1px 0; +} +.ui-datepicker select.ui-datepicker-month, +.ui-datepicker select.ui-datepicker-year { + width: 45%; +} +.ui-datepicker table { + width: 100%; + font-size: .9em; + border-collapse: collapse; + margin: 0 0 .4em; +} +.ui-datepicker th { + padding: .7em .3em; + text-align: center; + font-weight: bold; + border: 0; +} +.ui-datepicker td { + border: 0; + padding: 1px; +} +.ui-datepicker td span, +.ui-datepicker td a { + display: block; + padding: .2em; + text-align: right; + text-decoration: none; +} +.ui-datepicker .ui-datepicker-buttonpane { + background-image: none; + margin: .7em 0 0 0; + padding: 0 .2em; + border-left: 0; + border-right: 0; + border-bottom: 0; +} +.ui-datepicker .ui-datepicker-buttonpane button { + float: right; + margin: .5em .2em .4em; + cursor: pointer; + padding: .2em .6em .3em .6em; + width: auto; + overflow: visible; +} +.ui-datepicker .ui-datepicker-buttonpane button.ui-datepicker-current { + float: left; +} + +/* with multiple calendars */ +.ui-datepicker.ui-datepicker-multi { + width: auto; +} +.ui-datepicker-multi .ui-datepicker-group { + float: left; +} +.ui-datepicker-multi .ui-datepicker-group table { + width: 95%; + margin: 0 auto .4em; +} +.ui-datepicker-multi-2 .ui-datepicker-group { + width: 50%; +} +.ui-datepicker-multi-3 .ui-datepicker-group { + width: 33.3%; +} +.ui-datepicker-multi-4 .ui-datepicker-group { + width: 25%; +} +.ui-datepicker-multi .ui-datepicker-group-last .ui-datepicker-header, 
+.ui-datepicker-multi .ui-datepicker-group-middle .ui-datepicker-header { + border-left-width: 0; +} +.ui-datepicker-multi .ui-datepicker-buttonpane { + clear: left; +} +.ui-datepicker-row-break { + clear: both; + width: 100%; + font-size: 0; +} + +/* RTL support */ +.ui-datepicker-rtl { + direction: rtl; +} +.ui-datepicker-rtl .ui-datepicker-prev { + right: 2px; + left: auto; +} +.ui-datepicker-rtl .ui-datepicker-next { + left: 2px; + right: auto; +} +.ui-datepicker-rtl .ui-datepicker-prev:hover { + right: 1px; + left: auto; +} +.ui-datepicker-rtl .ui-datepicker-next:hover { + left: 1px; + right: auto; +} +.ui-datepicker-rtl .ui-datepicker-buttonpane { + clear: right; +} +.ui-datepicker-rtl .ui-datepicker-buttonpane button { + float: left; +} +.ui-datepicker-rtl .ui-datepicker-buttonpane button.ui-datepicker-current, +.ui-datepicker-rtl .ui-datepicker-group { + float: right; +} +.ui-datepicker-rtl .ui-datepicker-group-last .ui-datepicker-header, +.ui-datepicker-rtl .ui-datepicker-group-middle .ui-datepicker-header { + border-right-width: 0; + border-left-width: 1px; +} + +/* Icons */ +.ui-datepicker .ui-icon { + display: block; + text-indent: -99999px; + overflow: hidden; + background-repeat: no-repeat; + left: .5em; + top: .3em; +} +.ui-dialog { + position: absolute; + top: 0; + left: 0; + padding: .2em; + outline: 0; +} +.ui-dialog .ui-dialog-titlebar { + padding: .4em 1em; + position: relative; +} +.ui-dialog .ui-dialog-title { + float: left; + margin: .1em 0; + white-space: nowrap; + width: 90%; + overflow: hidden; + text-overflow: ellipsis; +} +.ui-dialog .ui-dialog-titlebar-close { + position: absolute; + right: .3em; + top: 50%; + width: 20px; + margin: -10px 0 0 0; + padding: 1px; + height: 20px; +} +.ui-dialog .ui-dialog-content { + position: relative; + border: 0; + padding: .5em 1em; + background: none; + overflow: auto; +} +.ui-dialog .ui-dialog-buttonpane { + text-align: left; + border-width: 1px 0 0 0; + background-image: none; + margin-top: .5em; + 
padding: .3em 1em .5em .4em; +} +.ui-dialog .ui-dialog-buttonpane .ui-dialog-buttonset { + float: right; +} +.ui-dialog .ui-dialog-buttonpane button { + margin: .5em .4em .5em 0; + cursor: pointer; +} +.ui-dialog .ui-resizable-n { + height: 2px; + top: 0; +} +.ui-dialog .ui-resizable-e { + width: 2px; + right: 0; +} +.ui-dialog .ui-resizable-s { + height: 2px; + bottom: 0; +} +.ui-dialog .ui-resizable-w { + width: 2px; + left: 0; +} +.ui-dialog .ui-resizable-se, +.ui-dialog .ui-resizable-sw, +.ui-dialog .ui-resizable-ne, +.ui-dialog .ui-resizable-nw { + width: 7px; + height: 7px; +} +.ui-dialog .ui-resizable-se { + right: 0; + bottom: 0; +} +.ui-dialog .ui-resizable-sw { + left: 0; + bottom: 0; +} +.ui-dialog .ui-resizable-ne { + right: 0; + top: 0; +} +.ui-dialog .ui-resizable-nw { + left: 0; + top: 0; +} +.ui-draggable .ui-dialog-titlebar { + cursor: move; +} +.ui-draggable-handle { + -ms-touch-action: none; + touch-action: none; +} +.ui-resizable { + position: relative; +} +.ui-resizable-handle { + position: absolute; + font-size: 0.1px; + display: block; + -ms-touch-action: none; + touch-action: none; +} +.ui-resizable-disabled .ui-resizable-handle, +.ui-resizable-autohide .ui-resizable-handle { + display: none; +} +.ui-resizable-n { + cursor: n-resize; + height: 7px; + width: 100%; + top: -5px; + left: 0; +} +.ui-resizable-s { + cursor: s-resize; + height: 7px; + width: 100%; + bottom: -5px; + left: 0; +} +.ui-resizable-e { + cursor: e-resize; + width: 7px; + right: -5px; + top: 0; + height: 100%; +} +.ui-resizable-w { + cursor: w-resize; + width: 7px; + left: -5px; + top: 0; + height: 100%; +} +.ui-resizable-se { + cursor: se-resize; + width: 12px; + height: 12px; + right: 1px; + bottom: 1px; +} +.ui-resizable-sw { + cursor: sw-resize; + width: 9px; + height: 9px; + left: -5px; + bottom: -5px; +} +.ui-resizable-nw { + cursor: nw-resize; + width: 9px; + height: 9px; + left: -5px; + top: -5px; +} +.ui-resizable-ne { + cursor: ne-resize; + width: 9px; + height: 
9px; + right: -5px; + top: -5px; +} +.ui-progressbar { + height: 2em; + text-align: left; + overflow: hidden; +} +.ui-progressbar .ui-progressbar-value { + margin: -1px; + height: 100%; +} +.ui-progressbar .ui-progressbar-overlay { + background: url("data:image/gif;base64,R0lGODlhKAAoAIABAAAAAP///yH/C05FVFNDQVBFMi4wAwEAAAAh+QQJAQABACwAAAAAKAAoAAACkYwNqXrdC52DS06a7MFZI+4FHBCKoDeWKXqymPqGqxvJrXZbMx7Ttc+w9XgU2FB3lOyQRWET2IFGiU9m1frDVpxZZc6bfHwv4c1YXP6k1Vdy292Fb6UkuvFtXpvWSzA+HycXJHUXiGYIiMg2R6W459gnWGfHNdjIqDWVqemH2ekpObkpOlppWUqZiqr6edqqWQAAIfkECQEAAQAsAAAAACgAKAAAApSMgZnGfaqcg1E2uuzDmmHUBR8Qil95hiPKqWn3aqtLsS18y7G1SzNeowWBENtQd+T1JktP05nzPTdJZlR6vUxNWWjV+vUWhWNkWFwxl9VpZRedYcflIOLafaa28XdsH/ynlcc1uPVDZxQIR0K25+cICCmoqCe5mGhZOfeYSUh5yJcJyrkZWWpaR8doJ2o4NYq62lAAACH5BAkBAAEALAAAAAAoACgAAAKVDI4Yy22ZnINRNqosw0Bv7i1gyHUkFj7oSaWlu3ovC8GxNso5fluz3qLVhBVeT/Lz7ZTHyxL5dDalQWPVOsQWtRnuwXaFTj9jVVh8pma9JjZ4zYSj5ZOyma7uuolffh+IR5aW97cHuBUXKGKXlKjn+DiHWMcYJah4N0lYCMlJOXipGRr5qdgoSTrqWSq6WFl2ypoaUAAAIfkECQEAAQAsAAAAACgAKAAAApaEb6HLgd/iO7FNWtcFWe+ufODGjRfoiJ2akShbueb0wtI50zm02pbvwfWEMWBQ1zKGlLIhskiEPm9R6vRXxV4ZzWT2yHOGpWMyorblKlNp8HmHEb/lCXjcW7bmtXP8Xt229OVWR1fod2eWqNfHuMjXCPkIGNileOiImVmCOEmoSfn3yXlJWmoHGhqp6ilYuWYpmTqKUgAAIfkECQEAAQAsAAAAACgAKAAAApiEH6kb58biQ3FNWtMFWW3eNVcojuFGfqnZqSebuS06w5V80/X02pKe8zFwP6EFWOT1lDFk8rGERh1TTNOocQ61Hm4Xm2VexUHpzjymViHrFbiELsefVrn6XKfnt2Q9G/+Xdie499XHd2g4h7ioOGhXGJboGAnXSBnoBwKYyfioubZJ2Hn0RuRZaflZOil56Zp6iioKSXpUAAAh+QQJAQABACwAAAAAKAAoAAACkoQRqRvnxuI7kU1a1UU5bd5tnSeOZXhmn5lWK3qNTWvRdQxP8qvaC+/yaYQzXO7BMvaUEmJRd3TsiMAgswmNYrSgZdYrTX6tSHGZO73ezuAw2uxuQ+BbeZfMxsexY35+/Qe4J1inV0g4x3WHuMhIl2jXOKT2Q+VU5fgoSUI52VfZyfkJGkha6jmY+aaYdirq+lQAACH5BAkBAAEALAAAAAAoACgAAAKWBIKpYe0L3YNKToqswUlvznigd4wiR4KhZrKt9Upqip61i9E3vMvxRdHlbEFiEXfk9YARYxOZZD6VQ2pUunBmtRXo1Lf8hMVVcNl8JafV38aM2/Fu5V16Bn63r6xt97j09+MXSFi4BniGFae3hzbH9+hYBzkpuUh5aZmHuanZOZgIuvbGiNeomCnaxxap2upaCZsq+1kAACH5BAkBAAEALAAAAAAoACgAAAKXjI8By5zf4kOxTVrXNVlv1X0d8IGZGKLnNpYtm8Lr9cqVeuOSvfOW79D9aDHizNhDJid
FZhNydEahOaDH6nomtJjp1tutKoNWkvA6JqfRVLHU/QUfau9l2x7G54d1fl995xcIGAdXqMfBNadoYrhH+Mg2KBlpVpbluCiXmMnZ2Sh4GBqJ+ckIOqqJ6LmKSllZmsoq6wpQAAAh+QQJAQABACwAAAAAKAAoAAAClYx/oLvoxuJDkU1a1YUZbJ59nSd2ZXhWqbRa2/gF8Gu2DY3iqs7yrq+xBYEkYvFSM8aSSObE+ZgRl1BHFZNr7pRCavZ5BW2142hY3AN/zWtsmf12p9XxxFl2lpLn1rseztfXZjdIWIf2s5dItwjYKBgo9yg5pHgzJXTEeGlZuenpyPmpGQoKOWkYmSpaSnqKileI2FAAACH5BAkBAAEALAAAAAAoACgAAAKVjB+gu+jG4kORTVrVhRlsnn2dJ3ZleFaptFrb+CXmO9OozeL5VfP99HvAWhpiUdcwkpBH3825AwYdU8xTqlLGhtCosArKMpvfa1mMRae9VvWZfeB2XfPkeLmm18lUcBj+p5dnN8jXZ3YIGEhYuOUn45aoCDkp16hl5IjYJvjWKcnoGQpqyPlpOhr3aElaqrq56Bq7VAAAOw=="); + height: 100%; + -ms-filter: "alpha(opacity=25)"; /* support: IE8 */ + opacity: 0.25; +} +.ui-progressbar-indeterminate .ui-progressbar-value { + background-image: none; +} +.ui-selectable { + -ms-touch-action: none; + touch-action: none; +} +.ui-selectable-helper { + position: absolute; + z-index: 100; + border: 1px dotted black; +} +.ui-selectmenu-menu { + padding: 0; + margin: 0; + position: absolute; + top: 0; + left: 0; + display: none; +} +.ui-selectmenu-menu .ui-menu { + overflow: auto; + overflow-x: hidden; + padding-bottom: 1px; +} +.ui-selectmenu-menu .ui-menu .ui-selectmenu-optgroup { + font-size: 1em; + font-weight: bold; + line-height: 1.5; + padding: 2px 0.4em; + margin: 0.5em 0 0 0; + height: auto; + border: 0; +} +.ui-selectmenu-open { + display: block; +} +.ui-selectmenu-text { + display: block; + margin-right: 20px; + overflow: hidden; + text-overflow: ellipsis; +} +.ui-selectmenu-button.ui-button { + text-align: left; + white-space: nowrap; + width: 14em; +} +.ui-selectmenu-icon.ui-icon { + float: right; + margin-top: 0; +} +.ui-slider { + position: relative; + text-align: left; +} +.ui-slider .ui-slider-handle { + position: absolute; + z-index: 2; + width: 1.2em; + height: 1.2em; + cursor: pointer; + -ms-touch-action: none; + touch-action: none; +} +.ui-slider .ui-slider-range { + position: absolute; + z-index: 1; + font-size: .7em; + display: block; + 
border: 0; + background-position: 0 0; +} + +/* support: IE8 - See #6727 */ +.ui-slider.ui-state-disabled .ui-slider-handle, +.ui-slider.ui-state-disabled .ui-slider-range { + filter: inherit; +} + +.ui-slider-horizontal { + height: .8em; +} +.ui-slider-horizontal .ui-slider-handle { + top: -.3em; + margin-left: -.6em; +} +.ui-slider-horizontal .ui-slider-range { + top: 0; + height: 100%; +} +.ui-slider-horizontal .ui-slider-range-min { + left: 0; +} +.ui-slider-horizontal .ui-slider-range-max { + right: 0; +} + +.ui-slider-vertical { + width: .8em; + height: 100px; +} +.ui-slider-vertical .ui-slider-handle { + left: -.3em; + margin-left: 0; + margin-bottom: -.6em; +} +.ui-slider-vertical .ui-slider-range { + left: 0; + width: 100%; +} +.ui-slider-vertical .ui-slider-range-min { + bottom: 0; +} +.ui-slider-vertical .ui-slider-range-max { + top: 0; +} +.ui-sortable-handle { + -ms-touch-action: none; + touch-action: none; +} +.ui-spinner { + position: relative; + display: inline-block; + overflow: hidden; + padding: 0; + vertical-align: middle; +} +.ui-spinner-input { + border: none; + background: none; + color: inherit; + padding: .222em 0; + margin: .2em 0; + vertical-align: middle; + margin-left: .4em; + margin-right: 2em; +} +.ui-spinner-button { + width: 1.6em; + height: 50%; + font-size: .5em; + padding: 0; + margin: 0; + text-align: center; + position: absolute; + cursor: default; + display: block; + overflow: hidden; + right: 0; +} +/* more specificity required here to override default borders */ +.ui-spinner a.ui-spinner-button { + border-top-style: none; + border-bottom-style: none; + border-right-style: none; +} +.ui-spinner-up { + top: 0; +} +.ui-spinner-down { + bottom: 0; +} +.ui-tabs { + position: relative;/* position: relative prevents IE scroll bug (element with position: relative inside container with overflow: auto appear as "fixed") */ + padding: .2em; +} +.ui-tabs .ui-tabs-nav { + margin: 0; + padding: .2em .2em 0; +} +.ui-tabs .ui-tabs-nav li { 
+ list-style: none; + float: left; + position: relative; + top: 0; + margin: 1px .2em 0 0; + border-bottom-width: 0; + padding: 0; + white-space: nowrap; +} +.ui-tabs .ui-tabs-nav .ui-tabs-anchor { + float: left; + padding: .5em 1em; + text-decoration: none; +} +.ui-tabs .ui-tabs-nav li.ui-tabs-active { + margin-bottom: -1px; + padding-bottom: 1px; +} +.ui-tabs .ui-tabs-nav li.ui-tabs-active .ui-tabs-anchor, +.ui-tabs .ui-tabs-nav li.ui-state-disabled .ui-tabs-anchor, +.ui-tabs .ui-tabs-nav li.ui-tabs-loading .ui-tabs-anchor { + cursor: text; +} +.ui-tabs-collapsible .ui-tabs-nav li.ui-tabs-active .ui-tabs-anchor { + cursor: pointer; +} +.ui-tabs .ui-tabs-panel { + display: block; + border-width: 0; + padding: 1em 1.4em; + background: none; +} +.ui-tooltip { + padding: 8px; + position: absolute; + z-index: 9999; + max-width: 300px; +} +body .ui-tooltip { + border-width: 2px; +} + +/* Component containers +----------------------------------*/ +.ui-widget { + font-family: Arial,Helvetica,sans-serif; + font-size: 1em; +} +.ui-widget .ui-widget { + font-size: 1em; +} +.ui-widget input, +.ui-widget select, +.ui-widget textarea, +.ui-widget button { + font-family: Arial,Helvetica,sans-serif; + font-size: 1em; +} +.ui-widget.ui-widget-content { + border: 1px solid #c5c5c5; +} +.ui-widget-content { + border: 1px solid #dddddd; + background: #ffffff; + color: #333333; +} +.ui-widget-content a { + color: #333333; +} +.ui-widget-header { + border: 1px solid #dddddd; + background: #e9e9e9; + color: #333333; + font-weight: bold; +} +.ui-widget-header a { + color: #333333; +} + +/* Interaction states +----------------------------------*/ +.ui-state-default, +.ui-widget-content .ui-state-default, +.ui-widget-header .ui-state-default, +.ui-button, + +/* We use html here because we need a greater specificity to make sure disabled +works properly when clicked or hovered */ +html .ui-button.ui-state-disabled:hover, +html .ui-button.ui-state-disabled:active { + border: 1px solid 
#c5c5c5; + background: #f6f6f6; + font-weight: normal; + color: #454545; +} +.ui-state-default a, +.ui-state-default a:link, +.ui-state-default a:visited, +a.ui-button, +a:link.ui-button, +a:visited.ui-button, +.ui-button { + color: #454545; + text-decoration: none; +} +.ui-state-hover, +.ui-widget-content .ui-state-hover, +.ui-widget-header .ui-state-hover, +.ui-state-focus, +.ui-widget-content .ui-state-focus, +.ui-widget-header .ui-state-focus, +.ui-button:hover, +.ui-button:focus { + border: 1px solid #cccccc; + background: #ededed; + font-weight: normal; + color: #2b2b2b; +} +.ui-state-hover a, +.ui-state-hover a:hover, +.ui-state-hover a:link, +.ui-state-hover a:visited, +.ui-state-focus a, +.ui-state-focus a:hover, +.ui-state-focus a:link, +.ui-state-focus a:visited, +a.ui-button:hover, +a.ui-button:focus { + color: #2b2b2b; + text-decoration: none; +} + +.ui-visual-focus { + box-shadow: 0 0 3px 1px rgb(94, 158, 214); +} +.ui-state-active, +.ui-widget-content .ui-state-active, +.ui-widget-header .ui-state-active, +a.ui-button:active, +.ui-button:active, +.ui-button.ui-state-active:hover { + border: 1px solid #003eff; + background: #007fff; + font-weight: normal; + color: #ffffff; +} +.ui-icon-background, +.ui-state-active .ui-icon-background { + border: #003eff; + background-color: #ffffff; +} +.ui-state-active a, +.ui-state-active a:link, +.ui-state-active a:visited { + color: #ffffff; + text-decoration: none; +} + +/* Interaction Cues +----------------------------------*/ +.ui-state-highlight, +.ui-widget-content .ui-state-highlight, +.ui-widget-header .ui-state-highlight { + border: 1px solid #dad55e; + background: #fffa90; + color: #777620; +} +.ui-state-checked { + border: 1px solid #dad55e; + background: #fffa90; +} +.ui-state-highlight a, +.ui-widget-content .ui-state-highlight a, +.ui-widget-header .ui-state-highlight a { + color: #777620; +} +.ui-state-error, +.ui-widget-content .ui-state-error, +.ui-widget-header .ui-state-error { + border: 1px 
solid #f1a899; + background: #fddfdf; + color: #5f3f3f; +} +.ui-state-error a, +.ui-widget-content .ui-state-error a, +.ui-widget-header .ui-state-error a { + color: #5f3f3f; +} +.ui-state-error-text, +.ui-widget-content .ui-state-error-text, +.ui-widget-header .ui-state-error-text { + color: #5f3f3f; +} +.ui-priority-primary, +.ui-widget-content .ui-priority-primary, +.ui-widget-header .ui-priority-primary { + font-weight: bold; +} +.ui-priority-secondary, +.ui-widget-content .ui-priority-secondary, +.ui-widget-header .ui-priority-secondary { + opacity: .7; + -ms-filter: "alpha(opacity=70)"; /* support: IE8 */ + font-weight: normal; +} +.ui-state-disabled, +.ui-widget-content .ui-state-disabled, +.ui-widget-header .ui-state-disabled { + opacity: .35; + -ms-filter: "alpha(opacity=35)"; /* support: IE8 */ + background-image: none; +} +.ui-state-disabled .ui-icon { + -ms-filter: "alpha(opacity=35)"; /* support: IE8 - See #6059 */ +} + +/* Icons +----------------------------------*/ + +/* states and images */ +.ui-icon { + width: 16px; + height: 16px; +} +.ui-icon, +.ui-widget-content .ui-icon { + background-image: url("images/ui-icons_444444_256x240.png"); +} +.ui-widget-header .ui-icon { + background-image: url("images/ui-icons_444444_256x240.png"); +} +.ui-state-hover .ui-icon, +.ui-state-focus .ui-icon, +.ui-button:hover .ui-icon, +.ui-button:focus .ui-icon { + background-image: url("images/ui-icons_555555_256x240.png"); +} +.ui-state-active .ui-icon, +.ui-button:active .ui-icon { + background-image: url("images/ui-icons_ffffff_256x240.png"); +} +.ui-state-highlight .ui-icon, +.ui-button .ui-state-highlight.ui-icon { + background-image: url("images/ui-icons_777620_256x240.png"); +} +.ui-state-error .ui-icon, +.ui-state-error-text .ui-icon { + background-image: url("images/ui-icons_cc0000_256x240.png"); +} +.ui-button .ui-icon { + background-image: url("images/ui-icons_777777_256x240.png"); +} + +/* positioning */ +/* Three classes needed to override 
`.ui-button:hover .ui-icon` */ +.ui-icon-blank.ui-icon-blank.ui-icon-blank { + background-image: none; +} +.ui-icon-caret-1-n { background-position: 0 0; } +.ui-icon-caret-1-ne { background-position: -16px 0; } +.ui-icon-caret-1-e { background-position: -32px 0; } +.ui-icon-caret-1-se { background-position: -48px 0; } +.ui-icon-caret-1-s { background-position: -65px 0; } +.ui-icon-caret-1-sw { background-position: -80px 0; } +.ui-icon-caret-1-w { background-position: -96px 0; } +.ui-icon-caret-1-nw { background-position: -112px 0; } +.ui-icon-caret-2-n-s { background-position: -128px 0; } +.ui-icon-caret-2-e-w { background-position: -144px 0; } +.ui-icon-triangle-1-n { background-position: 0 -16px; } +.ui-icon-triangle-1-ne { background-position: -16px -16px; } +.ui-icon-triangle-1-e { background-position: -32px -16px; } +.ui-icon-triangle-1-se { background-position: -48px -16px; } +.ui-icon-triangle-1-s { background-position: -65px -16px; } +.ui-icon-triangle-1-sw { background-position: -80px -16px; } +.ui-icon-triangle-1-w { background-position: -96px -16px; } +.ui-icon-triangle-1-nw { background-position: -112px -16px; } +.ui-icon-triangle-2-n-s { background-position: -128px -16px; } +.ui-icon-triangle-2-e-w { background-position: -144px -16px; } +.ui-icon-arrow-1-n { background-position: 0 -32px; } +.ui-icon-arrow-1-ne { background-position: -16px -32px; } +.ui-icon-arrow-1-e { background-position: -32px -32px; } +.ui-icon-arrow-1-se { background-position: -48px -32px; } +.ui-icon-arrow-1-s { background-position: -65px -32px; } +.ui-icon-arrow-1-sw { background-position: -80px -32px; } +.ui-icon-arrow-1-w { background-position: -96px -32px; } +.ui-icon-arrow-1-nw { background-position: -112px -32px; } +.ui-icon-arrow-2-n-s { background-position: -128px -32px; } +.ui-icon-arrow-2-ne-sw { background-position: -144px -32px; } +.ui-icon-arrow-2-e-w { background-position: -160px -32px; } +.ui-icon-arrow-2-se-nw { background-position: -176px -32px; } 
+.ui-icon-arrowstop-1-n { background-position: -192px -32px; } +.ui-icon-arrowstop-1-e { background-position: -208px -32px; } +.ui-icon-arrowstop-1-s { background-position: -224px -32px; } +.ui-icon-arrowstop-1-w { background-position: -240px -32px; } +.ui-icon-arrowthick-1-n { background-position: 1px -48px; } +.ui-icon-arrowthick-1-ne { background-position: -16px -48px; } +.ui-icon-arrowthick-1-e { background-position: -32px -48px; } +.ui-icon-arrowthick-1-se { background-position: -48px -48px; } +.ui-icon-arrowthick-1-s { background-position: -64px -48px; } +.ui-icon-arrowthick-1-sw { background-position: -80px -48px; } +.ui-icon-arrowthick-1-w { background-position: -96px -48px; } +.ui-icon-arrowthick-1-nw { background-position: -112px -48px; } +.ui-icon-arrowthick-2-n-s { background-position: -128px -48px; } +.ui-icon-arrowthick-2-ne-sw { background-position: -144px -48px; } +.ui-icon-arrowthick-2-e-w { background-position: -160px -48px; } +.ui-icon-arrowthick-2-se-nw { background-position: -176px -48px; } +.ui-icon-arrowthickstop-1-n { background-position: -192px -48px; } +.ui-icon-arrowthickstop-1-e { background-position: -208px -48px; } +.ui-icon-arrowthickstop-1-s { background-position: -224px -48px; } +.ui-icon-arrowthickstop-1-w { background-position: -240px -48px; } +.ui-icon-arrowreturnthick-1-w { background-position: 0 -64px; } +.ui-icon-arrowreturnthick-1-n { background-position: -16px -64px; } +.ui-icon-arrowreturnthick-1-e { background-position: -32px -64px; } +.ui-icon-arrowreturnthick-1-s { background-position: -48px -64px; } +.ui-icon-arrowreturn-1-w { background-position: -64px -64px; } +.ui-icon-arrowreturn-1-n { background-position: -80px -64px; } +.ui-icon-arrowreturn-1-e { background-position: -96px -64px; } +.ui-icon-arrowreturn-1-s { background-position: -112px -64px; } +.ui-icon-arrowrefresh-1-w { background-position: -128px -64px; } +.ui-icon-arrowrefresh-1-n { background-position: -144px -64px; } +.ui-icon-arrowrefresh-1-e { 
background-position: -160px -64px; } +.ui-icon-arrowrefresh-1-s { background-position: -176px -64px; } +.ui-icon-arrow-4 { background-position: 0 -80px; } +.ui-icon-arrow-4-diag { background-position: -16px -80px; } +.ui-icon-extlink { background-position: -32px -80px; } +.ui-icon-newwin { background-position: -48px -80px; } +.ui-icon-refresh { background-position: -64px -80px; } +.ui-icon-shuffle { background-position: -80px -80px; } +.ui-icon-transfer-e-w { background-position: -96px -80px; } +.ui-icon-transferthick-e-w { background-position: -112px -80px; } +.ui-icon-folder-collapsed { background-position: 0 -96px; } +.ui-icon-folder-open { background-position: -16px -96px; } +.ui-icon-document { background-position: -32px -96px; } +.ui-icon-document-b { background-position: -48px -96px; } +.ui-icon-note { background-position: -64px -96px; } +.ui-icon-mail-closed { background-position: -80px -96px; } +.ui-icon-mail-open { background-position: -96px -96px; } +.ui-icon-suitcase { background-position: -112px -96px; } +.ui-icon-comment { background-position: -128px -96px; } +.ui-icon-person { background-position: -144px -96px; } +.ui-icon-print { background-position: -160px -96px; } +.ui-icon-trash { background-position: -176px -96px; } +.ui-icon-locked { background-position: -192px -96px; } +.ui-icon-unlocked { background-position: -208px -96px; } +.ui-icon-bookmark { background-position: -224px -96px; } +.ui-icon-tag { background-position: -240px -96px; } +.ui-icon-home { background-position: 0 -112px; } +.ui-icon-flag { background-position: -16px -112px; } +.ui-icon-calendar { background-position: -32px -112px; } +.ui-icon-cart { background-position: -48px -112px; } +.ui-icon-pencil { background-position: -64px -112px; } +.ui-icon-clock { background-position: -80px -112px; } +.ui-icon-disk { background-position: -96px -112px; } +.ui-icon-calculator { background-position: -112px -112px; } +.ui-icon-zoomin { background-position: -128px -112px; } +.ui-icon-zoomout { 
background-position: -144px -112px; } +.ui-icon-search { background-position: -160px -112px; } +.ui-icon-wrench { background-position: -176px -112px; } +.ui-icon-gear { background-position: -192px -112px; } +.ui-icon-heart { background-position: -208px -112px; } +.ui-icon-star { background-position: -224px -112px; } +.ui-icon-link { background-position: -240px -112px; } +.ui-icon-cancel { background-position: 0 -128px; } +.ui-icon-plus { background-position: -16px -128px; } +.ui-icon-plusthick { background-position: -32px -128px; } +.ui-icon-minus { background-position: -48px -128px; } +.ui-icon-minusthick { background-position: -64px -128px; } +.ui-icon-close { background-position: -80px -128px; } +.ui-icon-closethick { background-position: -96px -128px; } +.ui-icon-key { background-position: -112px -128px; } +.ui-icon-lightbulb { background-position: -128px -128px; } +.ui-icon-scissors { background-position: -144px -128px; } +.ui-icon-clipboard { background-position: -160px -128px; } +.ui-icon-copy { background-position: -176px -128px; } +.ui-icon-contact { background-position: -192px -128px; } +.ui-icon-image { background-position: -208px -128px; } +.ui-icon-video { background-position: -224px -128px; } +.ui-icon-script { background-position: -240px -128px; } +.ui-icon-alert { background-position: 0 -144px; } +.ui-icon-info { background-position: -16px -144px; } +.ui-icon-notice { background-position: -32px -144px; } +.ui-icon-help { background-position: -48px -144px; } +.ui-icon-check { background-position: -64px -144px; } +.ui-icon-bullet { background-position: -80px -144px; } +.ui-icon-radio-on { background-position: -96px -144px; } +.ui-icon-radio-off { background-position: -112px -144px; } +.ui-icon-pin-w { background-position: -128px -144px; } +.ui-icon-pin-s { background-position: -144px -144px; } +.ui-icon-play { background-position: 0 -160px; } +.ui-icon-pause { background-position: -16px -160px; } +.ui-icon-seek-next { background-position: -32px 
-160px; } +.ui-icon-seek-prev { background-position: -48px -160px; } +.ui-icon-seek-end { background-position: -64px -160px; } +.ui-icon-seek-start { background-position: -80px -160px; } +/* ui-icon-seek-first is deprecated, use ui-icon-seek-start instead */ +.ui-icon-seek-first { background-position: -80px -160px; } +.ui-icon-stop { background-position: -96px -160px; } +.ui-icon-eject { background-position: -112px -160px; } +.ui-icon-volume-off { background-position: -128px -160px; } +.ui-icon-volume-on { background-position: -144px -160px; } +.ui-icon-power { background-position: 0 -176px; } +.ui-icon-signal-diag { background-position: -16px -176px; } +.ui-icon-signal { background-position: -32px -176px; } +.ui-icon-battery-0 { background-position: -48px -176px; } +.ui-icon-battery-1 { background-position: -64px -176px; } +.ui-icon-battery-2 { background-position: -80px -176px; } +.ui-icon-battery-3 { background-position: -96px -176px; } +.ui-icon-circle-plus { background-position: 0 -192px; } +.ui-icon-circle-minus { background-position: -16px -192px; } +.ui-icon-circle-close { background-position: -32px -192px; } +.ui-icon-circle-triangle-e { background-position: -48px -192px; } +.ui-icon-circle-triangle-s { background-position: -64px -192px; } +.ui-icon-circle-triangle-w { background-position: -80px -192px; } +.ui-icon-circle-triangle-n { background-position: -96px -192px; } +.ui-icon-circle-arrow-e { background-position: -112px -192px; } +.ui-icon-circle-arrow-s { background-position: -128px -192px; } +.ui-icon-circle-arrow-w { background-position: -144px -192px; } +.ui-icon-circle-arrow-n { background-position: -160px -192px; } +.ui-icon-circle-zoomin { background-position: -176px -192px; } +.ui-icon-circle-zoomout { background-position: -192px -192px; } +.ui-icon-circle-check { background-position: -208px -192px; } +.ui-icon-circlesmall-plus { background-position: 0 -208px; } +.ui-icon-circlesmall-minus { background-position: -16px -208px; } 
+.ui-icon-circlesmall-close { background-position: -32px -208px; } +.ui-icon-squaresmall-plus { background-position: -48px -208px; } +.ui-icon-squaresmall-minus { background-position: -64px -208px; } +.ui-icon-squaresmall-close { background-position: -80px -208px; } +.ui-icon-grip-dotted-vertical { background-position: 0 -224px; } +.ui-icon-grip-dotted-horizontal { background-position: -16px -224px; } +.ui-icon-grip-solid-vertical { background-position: -32px -224px; } +.ui-icon-grip-solid-horizontal { background-position: -48px -224px; } +.ui-icon-gripsmall-diagonal-se { background-position: -64px -224px; } +.ui-icon-grip-diagonal-se { background-position: -80px -224px; } + + +/* Misc visuals +----------------------------------*/ + +/* Corner radius */ +.ui-corner-all, +.ui-corner-top, +.ui-corner-left, +.ui-corner-tl { + border-top-left-radius: 3px; +} +.ui-corner-all, +.ui-corner-top, +.ui-corner-right, +.ui-corner-tr { + border-top-right-radius: 3px; +} +.ui-corner-all, +.ui-corner-bottom, +.ui-corner-left, +.ui-corner-bl { + border-bottom-left-radius: 3px; +} +.ui-corner-all, +.ui-corner-bottom, +.ui-corner-right, +.ui-corner-br { + border-bottom-right-radius: 3px; +} + +/* Overlays */ +.ui-widget-overlay { + background: #aaaaaa; + opacity: .003; + -ms-filter: Alpha(Opacity=.3); /* support: IE8 */ +} +.ui-widget-shadow { + -webkit-box-shadow: 0px 0px 5px #666666; + box-shadow: 0px 0px 5px #666666; +} diff --git a/config/js/config.js b/config/js/config.js index bf7920ada..d9d492cde 100644 --- a/config/js/config.js +++ b/config/js/config.js @@ -1,11 +1,15 @@ //So that each layer bar will always have a unique id var grandLayerCounter = 0; var mission = ""; +var configId = -1; +var lockConfig = false; +var lockConfigCount = false; //The active mission filepath var missionPath = ""; var tData; var editors; var layerEditors; +var tabEditors; var usingCustomProjection; var availableKinds = []; @@ -27,7 +31,9 @@ function initialize() { url: calls.logout.url, data: {}, 
success: function (data) { - window.location = "/"; + // Remove last directory from pathname + const path = window.location.pathname.split("/"); + window.location.href = path.slice(0, path.length - 1).join("/") || "/"; }, }); }); @@ -130,6 +136,7 @@ function initialize() { editors = {}; layerEditors = {}; + tabEditors = {}; for (var i = 0; i < tData.length; i++) { // prettier-ignore @@ -197,6 +204,39 @@ function initialize() { } } + // Setup tabEditors + tabEditors["coordinatesVariables"] = CodeMirror.fromTextArea( + document.getElementById("coordinatesVariables"), + { + path: "js/codemirror/codemirror-5.19.0/", + mode: "javascript", + theme: "elegant", + viewportMargin: Infinity, + lineNumbers: true, + autoRefresh: true, + matchBrackets: true, + } + ); + $("#coordinatesVariables_example").html( + JSON.stringify( + { + rightClickMenuActions: [ + { + name: "The text for this menu entry when users right-click", + link: "https://domain?I={ll[0]}&will={ll[1]}&replace={ll[2]}&these={en[0]}&brackets={en[1]}&for={cproj[0]}&you={sproj[0]}&with={rxy[0]}&coordinates={site[2]}", + }, + { + name: "WKT text insertions. 
Do so only for polygons.", + link: "https://domain?regularWKT={wkt}&wkt_where_commas_are_replaced_with_underscores={wkt_}", + for: "polygon", + }, + ], + }, + null, + 4 + ) || "" + ); + //Make materialize initialize tabs $("ul.tabs#missions").tabs(); @@ -228,6 +268,7 @@ function initialize() { mission = $(this).find("a").html(); missionPath = calls.missionPath + mission + "/config.json"; + configId = parseInt(Math.random() * 100000); $.ajax({ type: calls.get.type, @@ -240,6 +281,12 @@ function initialize() { if (data.status == "success") { var cData = data.config; + clearLockConfig(); + + for (var e in tabEditors) { + tabEditors[e].setValue(""); + } + //overall $("#overall_mission_name").text(mission); @@ -492,6 +539,11 @@ function initialize() { $( `.coordinates_coordMain[value="${cData.coordinates?.coordmain}"]` ).prop("checked", true); + tabEditors["coordinatesVariables"].setValue( + cData.coordinates?.variables + ? JSON.stringify(cData.coordinates?.variables, null, 4) + : "" + ); //look $("#tab_look #look_pagename").val("MMGIS"); @@ -499,15 +551,33 @@ function initialize() { $("#tab_look #look_pagename").val(cData.look.pagename); } $("#tab_look input").prop("checked", false); - if (cData.look && cData.look.minimalist == true) { + if (cData.look && cData.look.minimalist != false) { $("#tab_look #look_minimalist").prop("checked", true); } - if (cData.look && cData.look.zoomcontrol == true) { + if (cData.look && cData.look.topbar != false) { + $("#tab_look #look_topbar").prop("checked", true); + } + if (cData.look && cData.look.toolbar != false) { + $("#tab_look #look_toolbar").prop("checked", true); + } + if (cData.look && cData.look.scalebar != false) { + $("#tab_look #look_scalebar").prop("checked", true); + } + if (cData.look && cData.look.coordinates != false) { + $("#tab_look #look_coordinates").prop("checked", true); + } + if (cData.look && cData.look.zoomcontrol != false) { $("#tab_look #look_zoomcontrol").prop("checked", true); } - if (cData.look && 
cData.look.graticule == true) { + if (cData.look && cData.look.graticule != false) { $("#tab_look #look_graticule").prop("checked", true); } + if (cData.look && cData.look.miscellaneous != false) { + $("#tab_look #look_miscellaneous").prop("checked", true); + } + if (cData.look && cData.look.settings != false) { + $("#tab_look #look_settings").prop("checked", true); + } //look colors $("#tab_look #look_primarycolor").val( @@ -558,6 +628,12 @@ function initialize() { ) { $("#tab_look #look_fullscreen").prop("checked", true); } + if (cData.look && cData.look.info == true) { + $("#tab_look #look_info").prop("checked", true); + } + $("#tab_look #look_infourl").val( + cData.look ? cData.look.infourl : "" + ); if ( cData.look && (cData.look.help == true || cData.look.help == null) @@ -601,10 +677,26 @@ function initialize() { "checked", cData.time.visible ? true : false ); + $("#tab_time #time_initiallyOpen").prop( + "checked", + cData.time.initiallyOpen ? true : false + ); } $("#tab_time #time_format").val( cData.time ? cData.time.format : "%Y-%m-%dT%H:%M:%SZ" ); + $("#tab_time #time_initialstart").val( + cData.time ? cData.time.initialstart : "" + ); + $("#tab_time #time_initialend").val( + cData.time ? cData.time.initialend : "now" + ); + $("#tab_time #time_initialwindowstart").val( + cData.time ? cData.time.initialwindowstart : "" + ); + $("#tab_time #time_initialwindowend").val( + cData.time ? 
cData.time.initialwindowend : "now" + ); //tools //uncheck all tools @@ -786,6 +878,17 @@ function initialize() { $("#deleteMissionName").val(""); }); + //Download working config button + $("#download_working_config").on("click", function () { + downloadObject( + save("returnJSON"), + mission + "_config_WORKING", + ".json", + true + ); + toast("success", "Download Successful."); + }); + //Save changes button $("#save_changes").on("click", save); } @@ -870,7 +973,10 @@ function makeLayerBarAndModal(d, level, options) { vtLayerSetStylesEl = "block", timeEl = "block", timeTypeEl = "block", + timeStartPropEl = "block", + timeEndPropEl = "block", timeFormatEl = "block", + timeCompositeTileEl = "block", timeRefreshEl = "none", timeIncrementEl = "none", shapeEl = "none", @@ -889,11 +995,11 @@ function makeLayerBarAndModal(d, level, options) { maxnzEl = "none"; maxzEl = "none"; strcolEl = "none"; filcolEl = "none"; weightEl = "none"; opacityEl = "none"; radiusEl = "none"; variableEl = "none"; xmlEl = "none"; bbEl = "none"; vtLayerEl = "none"; vtIdEl = "none"; vtKeyEl = "none"; vtLayerSetStylesEl = "none"; - timeEl = "none"; timeTypeEl = "none"; timeFormatEl = "none"; timeRefreshEl = "none"; timeIncrementEl = "none"; shapeEl = "none"; + timeEl = "none"; timeTypeEl = "none"; timeStartPropEl = "none"; timeEndPropEl = "none"; timeFormatEl = "none"; timeCompositeTileEl = "none"; timeRefreshEl = "none"; timeIncrementEl = "none"; shapeEl = "none"; queryEndpointEl = "none"; queryTypeEl = "none"; break; case "tile": - nameEl = "block"; kindEl = "none"; typeEl = "block"; urlEl = "block"; demtileurlEl = "block"; demparserEl = "block"; controlledEl = "none"; + nameEl = "block"; kindEl = "none"; typeEl = "block"; urlEl = "block"; demtileurlEl = "block"; demparserEl = "block"; controlledEl = "block"; descriptionEl = "block"; tagsEl = "block"; legendEl = "block"; visEl = "block"; viscutEl = "none"; initOpacEl = "block"; togwheadEl = "block"; minzEl = "block"; layer3dEl = "none"; 
tileformatEl = "block"; @@ -902,7 +1008,7 @@ function makeLayerBarAndModal(d, level, options) { maxnzEl = "block"; maxzEl = "block"; strcolEl = "none"; filcolEl = "none"; weightEl = "none"; opacityEl = "none"; radiusEl = "none"; variableEl = "none"; xmlEl = "block"; bbEl = "block"; vtLayerEl = "none"; vtIdEl = "none"; vtKeyEl = "none"; vtLayerSetStylesEl = "none"; - timeEl = "block"; timeTypeEl = "block"; timeFormatEl = "block"; timeRefreshEl = "none"; timeIncrementEl = "none"; shapeEl = "none"; + timeEl = "block"; timeTypeEl = "block"; timeStartPropEl = "none"; timeEndPropEl = "none"; timeFormatEl = "block"; timeCompositeTileEl = "block"; timeRefreshEl = "none"; timeIncrementEl = "none"; shapeEl = "none"; queryEndpointEl = "none"; queryTypeEl = "none"; break; case "vectortile": @@ -915,7 +1021,7 @@ function makeLayerBarAndModal(d, level, options) { maxnzEl = "block"; maxzEl = "block"; strcolEl = "none"; filcolEl = "none"; weightEl = "none"; opacityEl = "none"; radiusEl = "none"; variableEl = "block"; xmlEl = "none"; bbEl = "none"; vtLayerEl = "block"; vtIdEl = "block"; vtKeyEl = "block"; vtLayerSetStylesEl = "block"; - timeEl = "block"; timeTypeEl = "block"; timeFormatEl = "block"; timeRefreshEl = "none"; timeIncrementEl = "none"; shapeEl = "block"; + timeEl = "block"; timeTypeEl = "block"; timeStartPropEl = "none"; timeEndPropEl = "none"; timeFormatEl = "block"; timeCompositeTileEl = "none"; timeRefreshEl = "none"; timeIncrementEl = "none"; shapeEl = "block"; queryEndpointEl = "none"; queryTypeEl = "none"; break; case "data": @@ -928,7 +1034,7 @@ function makeLayerBarAndModal(d, level, options) { maxnzEl = "block"; maxzEl = "block"; strcolEl = "none"; filcolEl = "none"; weightEl = "none"; opacityEl = "none"; radiusEl = "none"; variableEl = "block"; xmlEl = "block"; bbEl = "block"; vtLayerEl = "none"; vtIdEl = "none"; vtKeyEl = "none"; vtLayerSetStylesEl = "none"; - timeEl = "block"; timeTypeEl = "block"; timeFormatEl = "block"; timeRefreshEl = "none"; 
timeIncrementEl = "none"; shapeEl = "none"; + timeEl = "block"; timeTypeEl = "block"; timeStartPropEl = "none"; timeEndPropEl = "none"; timeFormatEl = "block"; timeCompositeTileEl = "none"; timeRefreshEl = "none"; timeIncrementEl = "none"; shapeEl = "none"; queryEndpointEl = "none"; queryTypeEl = "none"; break; case "query": @@ -941,7 +1047,7 @@ function makeLayerBarAndModal(d, level, options) { maxnzEl = "none"; maxzEl = "none"; strcolEl = "block"; filcolEl = "block"; weightEl = "block"; opacityEl = "block"; radiusEl = "block"; variableEl = "block"; xmlEl = "none"; bbEl = "none"; vtLayerEl = "none"; vtIdEl = "none"; vtKeyEl = "none"; vtLayerSetStylesEl = "none"; - timeEl = "none"; timeTypeEl = "none"; timeFormatEl = "none"; timeRefreshEl = "none"; timeIncrementEl = "none"; shapeEl = "none"; + timeEl = "none"; timeTypeEl = "none"; timeStartPropEl = "none"; timeEndPropEl = "none"; timeFormatEl = "none"; timeCompositeTileEl = "none"; timeRefreshEl = "none"; timeIncrementEl = "none"; shapeEl = "none"; queryEndpointEl = "block"; queryTypeEl = "block"; break; case "vector": @@ -954,7 +1060,7 @@ function makeLayerBarAndModal(d, level, options) { maxnzEl = "none"; maxzEl = "block"; strcolEl = "block"; filcolEl = "block"; weightEl = "block"; opacityEl = "block"; radiusEl = "block"; variableEl = "block"; xmlEl = "none"; bbEl = "none"; vtLayerEl = "none"; vtIdEl = "none"; vtKeyEl = "none"; vtLayerSetStylesEl = "none"; - timeEl = "block"; timeTypeEl = "block"; timeFormatEl = "block"; timeRefreshEl = "none"; timeIncrementEl = "none"; shapeEl = "block"; + timeEl = "block"; timeTypeEl = "block"; timeStartPropEl = "block"; timeEndPropEl = "block"; timeFormatEl = "block"; timeCompositeTileEl = "none"; timeRefreshEl = "none"; timeIncrementEl = "none"; shapeEl = "block"; break; case "model": nameEl = "block"; kindEl = "none"; typeEl = "block"; urlEl = "block"; demtileurlEl = "none"; demparserEl = "none"; controlledEl = "none"; @@ -966,7 +1072,7 @@ function makeLayerBarAndModal(d, 
level, options) { maxnzEl = "none"; maxzEl = "none"; strcolEl = "none"; filcolEl = "none"; weightEl = "none"; opacityEl = "none"; radiusEl = "none"; variableEl = "none"; xmlEl = "none"; bbEl = "none"; vtLayerEl = "none"; vtIdEl = "none"; vtKeyEl = "none"; vtLayerSetStylesEl = "none"; - timeEl = "block"; timeTypeEl = "block"; timeFormatEl = "block"; timeRefreshEl = "none"; timeIncrementEl = "none"; shapeEl = "none"; + timeEl = "block"; timeTypeEl = "block"; timeStartPropEl = "none"; timeEndPropEl = "none"; timeFormatEl = "block"; timeCompositeTileEl = "none"; timeRefreshEl = "none"; timeIncrementEl = "none"; shapeEl = "none"; queryEndpointEl = "none"; queryTypeEl = "none"; break; default: @@ -1097,20 +1203,36 @@ function makeLayerBarAndModal(d, level, options) { timeFalseSel = "selected"; } - var timeGlobalSel = "", - timeIndividualSel = ""; + var timeRequerySel = "", + timeLocalSel = ""; if (typeof d.time != "undefined") { switch (d.time.type) { - case "global": - timeGlobalSel = "selected"; + case "requery": + timeRequerySel = "selected"; break; - case "individual": - timeIndividualSel = "selected"; + case "local": + timeLocalSel = "selected"; + break; + default: + } + } else { + timeRequerySel = "selected"; + } + + var timeCompositeTileTrueSel = "", + timeCompositeTileFalseSel = ""; + if (typeof d.time != "undefined") { + switch (d.time.compositeTile) { + case true: + case "true": + timeCompositeTileTrueSel = "selected"; break; default: + timeCompositeTileFalseSel = "selected"; + break; } } else { - timeGlobalSel = "selected"; + timeCompositeTileFalseSel = "selected"; } var togwheadTrueSel = "", @@ -1219,23 +1341,26 @@ function makeLayerBarAndModal(d, level, options) { "