aboutsummaryrefslogtreecommitdiff
path: root/scripts
diff options
context:
space:
mode:
authorEven Rouault <even.rouault@spatialys.com>2018-11-14 17:40:42 +0100
committerEven Rouault <even.rouault@spatialys.com>2018-11-14 22:48:29 +0100
commitd928db15d53805d9b728b440079756081961c536 (patch)
treee862a961d26bedb34c58e4f28ef0bdeedb5f3225 /scripts
parent330e8bf686f9c4524075ca1ff50cbca6c9e091da (diff)
downloadPROJ-d928db15d53805d9b728b440079756081961c536.tar.gz
PROJ-d928db15d53805d9b728b440079756081961c536.zip
Implement RFC 2: Initial integration of "GDAL SRS barn" work
This work mostly consists of: - a C++ implementation of the ISO-19111:2018 / OGC Topic 2 "Referencing by coordinates" classes to represent Datums, Coordinate systems, CRSs (Coordinate Reference Systems) and Coordinate Operations. - methods to convert between this C++ modeling and WKT1, WKT2 and PROJ string representations of those objects - management and query of a SQLite3 database of CRS and Coordinate Operation definition - a C API binding part of those capabilities This is all-in-one squashed commit of the work of https://github.com/OSGeo/proj.4/pull/1040
Diffstat (limited to 'scripts')
-rwxr-xr-xscripts/build_db.py589
-rwxr-xr-xscripts/build_db_create_ignf.py527
-rwxr-xr-xscripts/build_db_create_ignf_from_xml.py1071
-rwxr-xr-xscripts/build_db_from_esri.py1339
-rw-r--r--scripts/build_esri_projection_mapping.py738
-rwxr-xr-xscripts/build_grid_alternatives_generated.py157
-rwxr-xr-xscripts/cppcheck.sh5
-rwxr-xr-xscripts/create_c_api_projections.py159
-rwxr-xr-xscripts/create_proj_symbol_rename.sh91
-rwxr-xr-xscripts/doxygen.sh60
-rwxr-xr-xscripts/filter_lcov_info.py26
-rwxr-xr-xscripts/fix_typos.sh2
-rwxr-xr-xscripts/gen_html_coverage.sh40
-rw-r--r--scripts/generate_breathe_friendly_general_doc.py130
-rwxr-xr-xscripts/reformat.sh17
-rwxr-xr-xscripts/reformat_cpp.sh22
16 files changed, 4969 insertions, 4 deletions
diff --git a/scripts/build_db.py b/scripts/build_db.py
new file mode 100755
index 00000000..2626f286
--- /dev/null
+++ b/scripts/build_db.py
@@ -0,0 +1,589 @@
+#!/usr/bin/env python
+###############################################################################
+# $Id$
+#
+# Project: PROJ
+# Purpose: Build SRS and coordinate transform database
+# Author: Even Rouault <even.rouault at spatialys.com>
+#
+###############################################################################
+# Copyright (c) 2018, Even Rouault <even.rouault at spatialys.com>
+#
+# Permission is hereby granted, free of charge, to any person obtaining a
+# copy of this software and associated documentation files (the "Software"),
+# to deal in the Software without restriction, including without limitation
+# the rights to use, copy, modify, merge, publish, distribute, sublicense,
+# and/or sell copies of the Software, and to permit persons to whom the
+# Software is furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
+# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+# DEALINGS IN THE SOFTWARE.
+###############################################################################
+
+import os
+import sqlite3
+
+EPSG_AUTHORITY = 'EPSG'
+
+def ingest_sqlite_dump(cursor, filename):
+    """Execute every complete SQL statement found in *filename* on *cursor*.
+
+    Lines are accumulated until sqlite3.complete_statement() reports a whole
+    statement.  'COMMIT;' statements are skipped so the caller keeps control
+    of transaction boundaries.
+    """
+    # NOTE(review): the handle returned by open() is never closed explicitly;
+    # consider a 'with' block.
+    sql = ''
+    for line in open(filename, 'rt').readlines():
+        sql += line
+        if sqlite3.complete_statement(sql):
+            sql = sql.strip()
+            if sql != 'COMMIT;':
+                cursor.execute(sql)
+            sql = ''
+
+
+def ingest_epsg():
+    """Load the EPSG PostgreSQL dump scripts into a temporary SQLite DB.
+
+    Expects PostgreSQL_Table_Script.sql and PostgreSQL_Data_Script.sql in the
+    current working directory.  Returns (connection, tmp_db_filename); the
+    caller is responsible for removing the temporary file.
+    """
+
+    for f in ['PostgreSQL_Data_Script.sql', 'PostgreSQL_Table_Script.sql']:
+        if not os.path.exists(f):
+            raise Exception('Missing file: ' + f)
+
+    epsg_tmp_db_filename = 'tmp_epsg.db'
+
+    # Always start from a fresh database
+    if os.path.exists(epsg_tmp_db_filename):
+        os.unlink(epsg_tmp_db_filename)
+
+    conn = sqlite3.connect(epsg_tmp_db_filename)
+    cursor = conn.cursor()
+    # Journaling is useless for a throw-away database: speeds up ingestion
+    cursor.execute('PRAGMA journal_mode = OFF;')
+    ingest_sqlite_dump(cursor, 'PostgreSQL_Table_Script.sql')
+    ingest_sqlite_dump(cursor, 'PostgreSQL_Data_Script.sql')
+    cursor.close()
+    conn.commit()
+
+    return (conn, epsg_tmp_db_filename)
+
+
+def fill_unit_of_measure(proj_db_cursor):
+    """Copy EPSG units of measure; conversion factor is factor_b / factor_c."""
+    proj_db_cursor.execute(
+        "INSERT INTO unit_of_measure SELECT ?, uom_code, unit_of_meas_name, unit_of_meas_type, factor_b / factor_c, deprecated FROM epsg.epsg_unitofmeasure", (EPSG_AUTHORITY,))
+
+
+def fill_ellipsoid(proj_db_cursor):
+    """Copy EPSG ellipsoids; body is hard-coded to ('PROJ', 'EARTH')."""
+    proj_db_cursor.execute(
+        "INSERT INTO ellipsoid SELECT ?, ellipsoid_code, ellipsoid_name, NULL, 'PROJ', 'EARTH', semi_major_axis, ?, uom_code, inv_flattening, semi_minor_axis, deprecated FROM epsg.epsg_ellipsoid", (EPSG_AUTHORITY, EPSG_AUTHORITY))
+
+
+def fill_area(proj_db_cursor):
+    """Copy EPSG areas of use (name, description, lat/lon bounding box)."""
+    proj_db_cursor.execute(
+        "INSERT INTO area SELECT ?, area_code, area_name, area_of_use, area_south_bound_lat, area_north_bound_lat, area_west_bound_lon, area_east_bound_lon, deprecated FROM epsg.epsg_area", (EPSG_AUTHORITY,))
+
+
+def fill_prime_meridian(proj_db_cursor):
+    """Copy EPSG prime meridians into prime_meridian."""
+    proj_db_cursor.execute(
+        "INSERT INTO prime_meridian SELECT ?, prime_meridian_code, prime_meridian_name, greenwich_longitude, ?, uom_code, deprecated FROM epsg.epsg_primemeridian", (EPSG_AUTHORITY, EPSG_AUTHORITY))
+
+
+def fill_geodetic_datum(proj_db_cursor):
+    """Copy EPSG geodetic datums into geodetic_datum.
+
+    Raises if epsg_datum contains a datum_type other than
+    geodetic/vertical/engineering, so new EPSG kinds are noticed.
+    """
+    proj_db_cursor.execute(
+        "SELECT DISTINCT * FROM epsg.epsg_datum WHERE datum_type NOT IN ('geodetic', 'vertical', 'engineering')")
+    res = proj_db_cursor.fetchall()
+    if res:
+        raise Exception('Found unexpected datum_type in epsg_datum: %s' % str(res))
+
+    proj_db_cursor.execute(
+        "INSERT INTO geodetic_datum SELECT ?, datum_code, datum_name, NULL, NULL, ?, ellipsoid_code, ?, prime_meridian_code, ?, area_of_use_code, deprecated FROM epsg.epsg_datum WHERE datum_type = 'geodetic'", (EPSG_AUTHORITY, EPSG_AUTHORITY, EPSG_AUTHORITY, EPSG_AUTHORITY))
+
+
+def fill_vertical_datum(proj_db_cursor):
+    """Copy EPSG datums of type 'vertical' into vertical_datum."""
+    proj_db_cursor.execute(
+        "INSERT INTO vertical_datum SELECT ?, datum_code, datum_name, NULL, NULL, ?, area_of_use_code, deprecated FROM epsg.epsg_datum WHERE datum_type = 'vertical'", (EPSG_AUTHORITY,EPSG_AUTHORITY))
+
+
+def fill_coordinate_system(proj_db_cursor):
+    """Copy EPSG coordinate systems (type and dimension) into coordinate_system."""
+    proj_db_cursor.execute(
+        "INSERT INTO coordinate_system SELECT ?, coord_sys_code, coord_sys_type, dimension FROM epsg.epsg_coordinatesystem", (EPSG_AUTHORITY,))
+
+
+def fill_axis(proj_db_cursor):
+    """Copy coordinate-system axes, resolving axis names via epsg_coordinateaxisname."""
+    proj_db_cursor.execute("INSERT INTO axis SELECT ?, coord_axis_code, coord_axis_name, coord_axis_abbreviation, coord_axis_orientation, ?, coord_sys_code, coord_axis_order, ?, uom_code FROM epsg.epsg_coordinateaxis ca LEFT JOIN epsg.epsg_coordinateaxisname can ON ca.coord_axis_name_code = can.coord_axis_name_code", (EPSG_AUTHORITY, EPSG_AUTHORITY, EPSG_AUTHORITY))
+
+
+def fill_geodetic_crs(proj_db_cursor):
+    """Copy geographic 2D/3D and geocentric CRS into geodetic_crs.
+
+    Raises if epsg_coordinatereferencesystem contains an unexpected
+    coord_ref_sys_kind, so new EPSG kinds are noticed.  CRS without a
+    datum_code (datum taken from another CRS) are skipped here.
+    """
+    proj_db_cursor.execute(
+        "SELECT DISTINCT * FROM epsg.epsg_coordinatereferencesystem WHERE coord_ref_sys_kind NOT IN ('projected', 'geographic 2D', 'geographic 3D', 'geocentric', 'vertical', 'compound', 'engineering')")
+    res = proj_db_cursor.fetchall()
+    if res:
+        raise Exception('Found unexpected coord_ref_sys_kind in epsg_coordinatereferencesystem: %s' % str(res))
+
+    #proj_db_cursor.execute(
+    #    "INSERT INTO crs SELECT ?, coord_ref_sys_code, coord_ref_sys_kind FROM epsg.epsg_coordinatereferencesystem WHERE coord_ref_sys_kind IN ('geographic 2D', 'geographic 3D', 'geocentric') AND datum_code IS NOT NULL", (EPSG_AUTHORITY,))
+    proj_db_cursor.execute("INSERT INTO geodetic_crs SELECT ?, coord_ref_sys_code, coord_ref_sys_name, NULL, NULL, coord_ref_sys_kind, ?, coord_sys_code, ?, datum_code, ?, area_of_use_code, NULL, deprecated FROM epsg.epsg_coordinatereferencesystem WHERE coord_ref_sys_kind IN ('geographic 2D', 'geographic 3D', 'geocentric') AND datum_code IS NOT NULL", (EPSG_AUTHORITY, EPSG_AUTHORITY, EPSG_AUTHORITY, EPSG_AUTHORITY))
+
+
+def fill_vertical_crs(proj_db_cursor):
+    """Copy vertical CRS (with their own datum) into vertical_crs."""
+    #proj_db_cursor.execute(
+    #    "INSERT INTO crs SELECT ?, coord_ref_sys_code, coord_ref_sys_kind FROM epsg.epsg_coordinatereferencesystem WHERE coord_ref_sys_kind IN ('vertical') AND datum_code IS NOT NULL", (EPSG_AUTHORITY,))
+    proj_db_cursor.execute("INSERT INTO vertical_crs SELECT ?, coord_ref_sys_code, coord_ref_sys_name, NULL, NULL, ?, coord_sys_code, ?, datum_code, ?, area_of_use_code, deprecated FROM epsg.epsg_coordinatereferencesystem WHERE coord_ref_sys_kind IN ('vertical') AND datum_code IS NOT NULL", (EPSG_AUTHORITY, EPSG_AUTHORITY, EPSG_AUTHORITY, EPSG_AUTHORITY))
+
+def fill_conversion(proj_db_cursor):
+
+ already_mapped_methods = set()
+ trigger_sql = """
+CREATE TRIGGER conversion_method_check_insert_trigger
+BEFORE INSERT ON conversion
+FOR EACH ROW BEGIN
+"""
+
+ proj_db_cursor.execute("SELECT coord_op_code, coord_op_name, area_of_use_code, coord_op_method_code, coord_op_method_name, epsg_coordoperation.deprecated FROM epsg.epsg_coordoperation LEFT JOIN epsg.epsg_coordoperationmethod USING (coord_op_method_code) WHERE coord_op_type = 'conversion' AND coord_op_name NOT LIKE '%to DMSH'")
+ for (code, name, area_of_use_code, method_code, method_name, deprecated) in proj_db_cursor.fetchall():
+ expected_order = 1
+ max_n_params = 7
+ param_auth_name = [None for i in range(max_n_params)]
+ param_code = [None for i in range(max_n_params)]
+ param_name = [None for i in range(max_n_params)]
+ param_value = [None for i in range(max_n_params)]
+ param_uom_auth_name = [None for i in range(max_n_params)]
+ param_uom_code = [None for i in range(max_n_params)]
+ param_uom_type = [None for i in range(max_n_params)]
+
+ iterator = proj_db_cursor.execute("SELECT sort_order, cop.parameter_code, parameter_name, parameter_value, uom_code, uom.unit_of_meas_type FROM epsg_coordoperationparam cop LEFT JOIN epsg_coordoperationparamvalue copv LEFT JOIN epsg_unitofmeasure uom USING (uom_code) LEFT JOIN epsg_coordoperationparamusage copu ON cop.parameter_code = copv.parameter_code AND copu.parameter_code = copv.parameter_code WHERE copu.coord_op_method_code = copv.coord_op_method_code AND coord_op_code = ? AND copv.coord_op_method_code = ? ORDER BY sort_order", (code, method_code))
+ for (order, parameter_code, parameter_name, parameter_value, uom_code, uom_type) in iterator:
+ # Modified Krovak and Krovak North Oriented: keep only the 7 first parameters
+ if order == max_n_params + 1 and method_code in (1042, 1043):
+ break
+ assert order <= max_n_params
+ assert order == expected_order
+ param_auth_name[order - 1] = EPSG_AUTHORITY
+ param_code[order - 1] = parameter_code
+ param_name[order - 1] = parameter_name
+ param_value[order - 1] = parameter_value
+ param_uom_auth_name[order - 1] = EPSG_AUTHORITY if uom_code else None
+ param_uom_code[order - 1] = uom_code
+ param_uom_type[order - 1] = uom_type
+ expected_order += 1
+
+ if method_code not in already_mapped_methods:
+ already_mapped_methods.add(method_code)
+ trigger_sql += """
+ SELECT RAISE(ABORT, 'insert on conversion violates constraint: bad parameters for %(method_name)s')
+ WHERE NEW.deprecated != 1 AND NEW.method_auth_name = 'EPSG' AND NEW.method_code = '%(method_code)s' AND (NEW.method_name != '%(method_name)s'""" % {'method_name': method_name, 'method_code' : method_code}
+ for i in range(expected_order-1):
+ trigger_sql += " OR NEW.param%(n)d_auth_name != 'EPSG' OR NEW.param%(n)d_code != '%(code)d' OR NEW.param%(n)d_name != '%(param_name)s'" % {'n': i+1, 'code': param_code[i], 'param_name': param_name[i]}
+
+ if method_name in ('Change of Vertical Unit'):
+ trigger_sql += " OR (NOT((NEW.param%(n)d_value IS NULL AND NEW.param%(n)d_uom_auth_name IS NULL AND NEW.param%(n)d_uom_code IS NULL) OR (NEW.param%(n)d_value IS NOT NULL AND (SELECT type FROM unit_of_measure WHERE auth_name = NEW.param%(n)s_uom_auth_name AND code = NEW.param%(n)s_uom_code) = 'scale')))" % {'n': i+1, 'param_name': param_name[i]}
+ else:
+ trigger_sql += " OR NEW.param%(n)d_value IS NULL OR NEW.param%(n)d_uom_auth_name IS NULL OR NEW.param%(n)d_uom_code IS NULL" % {'n': i+1, 'param_name': param_name[i]}
+
+ if param_uom_type[i]:
+ trigger_sql += " OR (SELECT type FROM unit_of_measure WHERE auth_name = NEW.param%(n)s_uom_auth_name AND code = NEW.param%(n)s_uom_code) != '%(uom_type)s'" % {'n': i+1, 'uom_type': param_uom_type[i]}
+ for i in range(expected_order-1, max_n_params):
+ trigger_sql += " OR NEW.param%(n)d_auth_name IS NOT NULL OR NEW.param%(n)d_code IS NOT NULL OR NEW.param%(n)d_name IS NOT NULL OR NEW.param%(n)d_value IS NOT NULL OR NEW.param%(n)d_uom_auth_name IS NOT NULL OR NEW.param%(n)d_uom_code IS NOT NULL" % {'n': i+1}
+ trigger_sql += ");\n"
+
+ arg = (EPSG_AUTHORITY, code, name,
+ None, None, # description + scope
+ EPSG_AUTHORITY, area_of_use_code,
+ EPSG_AUTHORITY, method_code, method_name,
+ param_auth_name[0], param_code[0], param_name[0],
+ param_value[0], param_uom_auth_name[0], param_uom_code[0],
+ param_auth_name[1], param_code[1], param_name[1], param_value[1],
+ param_uom_auth_name[1], param_uom_code[1], param_auth_name[2],
+ param_code[2], param_name[2], param_value[2],
+ param_uom_auth_name[2], param_uom_code[2],
+ param_auth_name[3], param_code[3], param_name[3], param_value[3],
+ param_uom_auth_name[3], param_uom_code[3], param_auth_name[4],
+ param_code[4], param_name[4], param_value[4],
+ param_uom_auth_name[4], param_uom_code[4], param_auth_name[5],
+ param_code[5], param_name[5], param_value[5],
+ param_uom_auth_name[5], param_uom_code[5], param_auth_name[6],
+ param_code[6], param_name[6], param_value[6],
+ param_uom_auth_name[6], param_uom_code[6],
+ deprecated)
+
+ #proj_db_cursor.execute("INSERT INTO coordinate_operation VALUES (?,?,'conversion')", (EPSG_AUTHORITY, code))
+ proj_db_cursor.execute('INSERT INTO conversion VALUES (' +
+ '?,?,?, ?,?, ?,?, ?,?,?, ?,?,?,?,?,?, ?,?,?,?,?,?, ?,?,?,?,?,?, ' +
+ '?,?,?,?,?,?, ?,?,?,?,?,?, ?,?,?,?,?,?, ?,?,?,?,?,?, ?)', arg)
+
+ trigger_sql += "END;";
+ #print(trigger_sql)
+ proj_db_cursor.execute(trigger_sql)
+
+
+def fill_projected_crs(proj_db_cursor):
+    """Copy projected CRS into projected_crs.
+
+    A row is only inserted when its base CRS (source_geogcrs_code) really is
+    a geographic 2D/3D or geocentric CRS, to satisfy foreign keys.
+    """
+    #proj_db_cursor.execute(
+    #    "INSERT INTO crs SELECT 'EPSG', coord_ref_sys_code, coord_ref_sys_kind FROM epsg.epsg_coordinatereferencesystem WHERE coord_ref_sys_kind IN ('projected')")
+    #proj_db_cursor.execute("INSERT INTO projected_crs SELECT 'EPSG', coord_ref_sys_code, coord_ref_sys_name, 'EPSG', coord_sys_code, 'EPSG', source_geogcrs_code, 'EPSG', projection_conv_code, 'EPSG', area_of_use_code, deprecated FROM epsg.epsg_coordinatereferencesystem WHERE coord_ref_sys_kind IN ('projected')")
+    proj_db_cursor.execute("SELECT ?, coord_ref_sys_code, coord_ref_sys_name, NULL, NULL, ?, coord_sys_code, ?, source_geogcrs_code, ?, projection_conv_code, ?, area_of_use_code, deprecated FROM epsg.epsg_coordinatereferencesystem WHERE coord_ref_sys_kind IN ('projected')", (EPSG_AUTHORITY, EPSG_AUTHORITY, EPSG_AUTHORITY, EPSG_AUTHORITY, EPSG_AUTHORITY))
+    for row in proj_db_cursor.fetchall():
+        (auth_name, code, name, description, scope, coordinate_system_auth_name, coordinate_system_code, geodetic_crs_auth_name, geodetic_crs_code, conversion_auth_name, conversion_code, area_of_use_auth_name, area_of_use_code, deprecated) = row
+        proj_db_cursor.execute("SELECT 1 FROM epsg.epsg_coordinatereferencesystem WHERE coord_ref_sys_code = ? AND coord_ref_sys_kind IN ('geographic 2D', 'geographic 3D', 'geocentric')", (geodetic_crs_code,))
+        if proj_db_cursor.fetchone():
+            #proj_db_cursor.execute("INSERT INTO crs VALUES (?, ?, 'projected')", (EPSG_AUTHORITY, code))
+            proj_db_cursor.execute("INSERT INTO projected_crs VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?,NULL,?)", row)
+
+def fill_compound_crs(proj_db_cursor):
+    """Copy compound CRS (horizontal + vertical components) into compound_crs."""
+    #proj_db_cursor.execute(
+    #    "INSERT INTO crs SELECT ?, coord_ref_sys_code, coord_ref_sys_kind FROM epsg.epsg_coordinatereferencesystem WHERE coord_ref_sys_kind IN ('compound')", (EPSG_AUTHORITY,))
+    proj_db_cursor.execute("INSERT INTO compound_crs SELECT ?, coord_ref_sys_code, coord_ref_sys_name, NULL, NULL, ?, cmpd_horizcrs_code, ?, cmpd_vertcrs_code, ?, area_of_use_code, deprecated FROM epsg.epsg_coordinatereferencesystem WHERE coord_ref_sys_kind IN ('compound')", (EPSG_AUTHORITY, EPSG_AUTHORITY, EPSG_AUTHORITY, EPSG_AUTHORITY))
+
+def fill_helmert_transformation(proj_db_cursor):
+    """Import EPSG Helmert-family transformations into helmert_transformation.
+
+    Handles 3-parameter (geocentric translations), 7-parameter
+    (position vector / coordinate frame), 8-parameter (time-specific),
+    15-parameter (time-dependent) and 10-parameter (Molodensky-Badekas)
+    variants; the filter is the explicit list of method codes in the query.
+    Parameters arrive ordered by sort_order and are re-mapped into the fixed
+    column layout of the helmert_transformation table.
+    """
+    proj_db_cursor.execute("SELECT coord_op_code, coord_op_name, coord_op_method_code, coord_op_method_name, source_crs_code, target_crs_code, area_of_use_code, coord_op_accuracy, epsg_coordoperation.deprecated FROM epsg.epsg_coordoperation LEFT JOIN epsg.epsg_coordoperationmethod USING (coord_op_method_code) WHERE coord_op_type = 'transformation' AND coord_op_method_code IN (1031, 1032, 1033, 1034, 1035, 1037, 1038, 1039, 1053, 1054, 1055, 1056, 1057, 1058, 1061, 1062, 1063, 1065, 1066, 9603, 9606, 9607, 9636) ")
+    for (code, name, method_code, method_name, source_crs_code, target_crs_code, area_of_use_code, coord_op_accuracy, deprecated) in proj_db_cursor.fetchall():
+        expected_order = 1
+        max_n_params = 15
+        param_auth_name = [None for i in range(max_n_params)]
+        param_code = [None for i in range(max_n_params)]
+        param_name = [None for i in range(max_n_params)]
+        param_value = [None for i in range(max_n_params)]
+        param_uom_code = [None for i in range(max_n_params)]
+
+        # Parameters for this (operation, method) pair, in sort_order
+        iterator = proj_db_cursor.execute("SELECT sort_order, cop.parameter_code, parameter_name, parameter_value, uom_code from epsg_coordoperationparam cop LEFT JOIN epsg_coordoperationparamvalue copv LEFT JOIN epsg_coordoperationparamusage copu ON cop.parameter_code = copv.parameter_code AND copu.parameter_code = copv.parameter_code WHERE copu.coord_op_method_code = copv.coord_op_method_code AND coord_op_code = ? AND copv.coord_op_method_code = ? ORDER BY sort_order", (code, method_code))
+        for (order, parameter_code, parameter_name, parameter_value, uom_code) in iterator:
+            assert order <= max_n_params
+            assert order == expected_order
+            param_auth_name[order - 1] = EPSG_AUTHORITY
+            param_code[order - 1] = parameter_code
+            param_name[order - 1] = parameter_name
+            param_value[order - 1] = parameter_value
+            param_uom_code[order - 1] = uom_code
+            expected_order += 1
+        n_params = expected_order - 1
+
+        if param_value[0] is None and deprecated:
+            continue # silently discard non sense deprecated transforms (like EPSG:1076)
+
+        # First three parameters are always the X/Y/Z translations (EPSG
+        # parameter codes 8605/8606/8607), sharing one unit
+        assert param_code[0] == 8605
+        assert param_code[1] == 8606
+        assert param_code[2] == 8607
+        assert param_uom_code[0] == param_uom_code[1]
+        assert param_uom_code[0] == param_uom_code[2]
+        # Molodensky-Badekas pivot point, filled only for that method
+        px = None
+        py = None
+        pz = None
+        pivot_uom_code = None
+        if n_params > 3:
+            # 7+ parameters: rotations (8608/8609/8610) and scale (8611)
+            assert param_code[3] == 8608
+            assert param_code[4] == 8609
+            assert param_code[5] == 8610
+            assert param_code[6] == 8611
+            assert param_uom_code[3] == param_uom_code[4]
+            assert param_uom_code[3] == param_uom_code[5]
+            if n_params == 8: # Time-specific transformation
+                # Parameter 1049 (reference epoch) is stored in slot 15
+                assert param_code[7] == 1049, (code, name, param_code[7])
+                param_value[14] = param_value[7]
+                param_uom_code[14] = param_uom_code[7]
+                param_value[7] = None
+                param_uom_code[7] = None
+
+            elif n_params == 10: # Molodensky-Badekas
+                # 8617/8618/8667: ordinates of the evaluation (pivot) point,
+                # moved into the dedicated px/py/pz columns
+                assert param_code[7] == 8617, (code, name, param_code[7])
+                assert param_code[8] == 8618, (code, name, param_code[8])
+                assert param_code[9] == 8667, (code, name, param_code[9])
+                assert param_uom_code[7] == param_uom_code[8]
+                assert param_uom_code[7] == param_uom_code[9]
+                px = param_value[7]
+                py = param_value[8]
+                pz = param_value[9]
+                pivot_uom_code = param_uom_code[7]
+                param_value[7] = None
+                param_uom_code[7] = None
+                param_value[8] = None
+                param_uom_code[8] = None
+                param_value[9] = None
+                param_uom_code[9] = None
+
+            elif n_params > 7: # Time-dependant transformation
+                # 1040..1047: rates of change of the 7 parameters + epoch
+                assert param_code[7] == 1040, (code, name, param_code[7])
+                assert param_code[8] == 1041
+                assert param_code[9] == 1042
+                assert param_code[10] == 1043
+                assert param_code[11] == 1044
+                assert param_code[12] == 1045
+                assert param_code[13] == 1046
+                assert param_code[14] == 1047
+                assert param_uom_code[7] == param_uom_code[8]
+                assert param_uom_code[7] == param_uom_code[9]
+                assert param_uom_code[10] == param_uom_code[11]
+                assert param_uom_code[10] == param_uom_code[12]
+
+        arg = (EPSG_AUTHORITY, code, name,
+               None, None, # description + scope
+               EPSG_AUTHORITY, method_code, method_name,
+               EPSG_AUTHORITY, source_crs_code,
+               EPSG_AUTHORITY, target_crs_code,
+               EPSG_AUTHORITY, area_of_use_code,
+               coord_op_accuracy,
+               param_value[0], param_value[1], param_value[2], EPSG_AUTHORITY, param_uom_code[0],
+               param_value[3], param_value[4], param_value[5], EPSG_AUTHORITY if param_uom_code[3] else None, param_uom_code[3],
+               param_value[6], EPSG_AUTHORITY if param_uom_code[6] else None, param_uom_code[6],
+               param_value[7], param_value[8], param_value[9], EPSG_AUTHORITY if param_uom_code[7] else None, param_uom_code[7],
+               param_value[10], param_value[11], param_value[12], EPSG_AUTHORITY if param_uom_code[10] else None, param_uom_code[10],
+               param_value[13], EPSG_AUTHORITY if param_uom_code[13] else None, param_uom_code[13],
+               param_value[14], EPSG_AUTHORITY if param_uom_code[14] else None, param_uom_code[14],
+               # NOTE(review): 'if px' is falsy for a 0 ordinate value —
+               # presumably never 0 in practice, but 'is not None' would be safer
+               px, py, pz, EPSG_AUTHORITY if px else None, pivot_uom_code,
+               deprecated
+               )
+
+        #proj_db_cursor.execute("INSERT INTO coordinate_operation VALUES (?,?,'helmert_transformation')", (EPSG_AUTHORITY, code))
+        proj_db_cursor.execute('INSERT INTO helmert_transformation VALUES (' +
+            '?,?,?, ?,?, ?,?,?, ?,?, ?,?, ?,?, ?, ?,?,?,?,?, ?,?,?,?,?, ?,?,?, ?,?,?,?,?, ?,?,?,?,?, ?,?,?, ?,?,?, ?,?,?,?,?, ?)', arg)
+
+def fill_grid_transformation(proj_db_cursor):
+    """Import grid-based transformations into grid_transformation.
+
+    Selected by method name patterns (Geographic3D/Geog3D to ..., point
+    motion by grid, vertical offset by grid interpolation) or by name
+    (NADCON, NTv1, NTv2, VERTCON).  A parameter may carry its value either
+    in parameter_value or in param_value_file_ref (the grid file name).
+    """
+    proj_db_cursor.execute("SELECT coord_op_code, coord_op_name, coord_op_method_code, coord_op_method_name, source_crs_code, target_crs_code, area_of_use_code, coord_op_accuracy, epsg_coordoperation.deprecated FROM epsg.epsg_coordoperation LEFT JOIN epsg.epsg_coordoperationmethod USING (coord_op_method_code) WHERE coord_op_type = 'transformation' AND (coord_op_method_name LIKE 'Geographic3D to%' OR coord_op_method_name LIKE 'Geog3D to%' OR coord_op_method_name LIKE 'Point motion by grid%' OR coord_op_method_name LIKE 'Vertical Offset by Grid Interpolation%' OR coord_op_method_name IN ('NADCON', 'NTv1', 'NTv2', 'VERTCON'))")
+    for (code, name, method_code, method_name, source_crs_code, target_crs_code, area_of_use_code, coord_op_accuracy, deprecated) in proj_db_cursor.fetchall():
+        expected_order = 1
+        max_n_params = 2
+        param_auth_name = [None for i in range(max_n_params)]
+        param_code = [None for i in range(max_n_params)]
+        param_name = [None for i in range(max_n_params)]
+        param_value = [None for i in range(max_n_params)]
+        param_uom_code = [None for i in range(max_n_params)]
+
+        iterator = proj_db_cursor.execute("SELECT sort_order, cop.parameter_code, parameter_name, parameter_value, param_value_file_ref, uom_code from epsg_coordoperationparam cop LEFT JOIN epsg_coordoperationparamvalue copv LEFT JOIN epsg_coordoperationparamusage copu ON cop.parameter_code = copv.parameter_code AND copu.parameter_code = copv.parameter_code WHERE copu.coord_op_method_code = copv.coord_op_method_code AND coord_op_code = ? AND copv.coord_op_method_code = ? ORDER BY sort_order", (code, method_code))
+        for (order, parameter_code, parameter_name, parameter_value, param_value_file_ref, uom_code) in iterator:
+            assert order <= max_n_params
+            assert order == expected_order
+            # parameter_value and param_value_file_ref are mutually exclusive
+            if parameter_value is not None:
+                assert param_value_file_ref is None or len(param_value_file_ref) == 0, (order, parameter_code, parameter_name, parameter_value, param_value_file_ref, uom_code)
+            if param_value_file_ref is not None and len(param_value_file_ref) != 0:
+                assert parameter_value is None, (order, parameter_code, parameter_name, parameter_value, param_value_file_ref, uom_code)
+            param_auth_name[order - 1] = EPSG_AUTHORITY
+            param_code[order - 1] = parameter_code
+            param_name[order - 1] = parameter_name
+            param_value[order - 1] = parameter_value if parameter_value else param_value_file_ref
+            param_uom_code[order - 1] = uom_code
+            expected_order += 1
+        n_params = expected_order - 1
+
+        # First parameter must be one of the known grid-file parameter codes
+        assert param_code[0] in (1050, 8656, 8657, 8666, 8732), (code, param_code[0])
+
+        grid2_param_auth_name = None
+        grid2_param_code = None
+        grid2_param_name = None
+        grid2_value = None
+        interpolation_crs_auth_name = None
+        interpolation_crs_code = None
+
+        if method_code == 9613: # NADCON
+            # NADCON uses a second grid (8658: longitude difference file)
+            assert param_code[1] == 8658, param_code[1]
+            grid2_param_auth_name = EPSG_AUTHORITY
+            grid2_param_code = param_code[1]
+            grid2_param_name = param_name[1]
+            grid2_value = param_value[1]
+        elif method_code == 1071: # Vertical Offset by Grid Interpolation (NZLVD)
+            # Parameter 1048 is the EPSG code of the interpolation CRS
+            assert param_code[1] == 1048, param_code[1]
+            interpolation_crs_auth_name = EPSG_AUTHORITY
+            interpolation_crs_code = str(int(param_value[1])) # needed to avoid codes like XXXX.0
+        else:
+            assert n_params == 1, (code, method_code)
+
+
+        arg = (EPSG_AUTHORITY, code, name,
+               None, None, # description + scope
+               EPSG_AUTHORITY, method_code, method_name,
+               EPSG_AUTHORITY, source_crs_code,
+               EPSG_AUTHORITY, target_crs_code,
+               EPSG_AUTHORITY, area_of_use_code,
+               coord_op_accuracy,
+               EPSG_AUTHORITY, param_code[0], param_name[0], param_value[0],
+               grid2_param_auth_name, grid2_param_code, grid2_param_name, grid2_value,
+               interpolation_crs_auth_name, interpolation_crs_code,
+               deprecated
+               )
+
+        #proj_db_cursor.execute("INSERT INTO coordinate_operation VALUES (?,?,'grid_transformation')", (EPSG_AUTHORITY, code))
+        proj_db_cursor.execute('INSERT INTO grid_transformation VALUES (' +
+            '?,?,?, ?,?, ?,?,?, ?,?, ?,?, ?,?, ?, ?,?,?,?, ?,?,?,?, ?,?, ?)', arg)
+
+def fill_other_transformation(proj_db_cursor):
+    """Import miscellaneous (non-Helmert, non-grid) transformations.
+
+    Covers the fixed set of method codes listed below, storing up to 7
+    generic (code, name, value, unit) parameter slots per operation.
+    """
+    # 9601: Longitude rotation
+    # 9616: Vertical offset
+    # 9618: Geographic2D with Height offsets
+    # 9619: Geographic2D offsets
+    # 9624: Affine Parametric Transformation
+    # 9660: Geographic3D offsets
+    proj_db_cursor.execute("SELECT coord_op_code, coord_op_name, coord_op_method_code, coord_op_method_name, source_crs_code, target_crs_code, area_of_use_code, coord_op_accuracy, epsg_coordoperation.deprecated FROM epsg.epsg_coordoperation LEFT JOIN epsg.epsg_coordoperationmethod USING (coord_op_method_code) WHERE coord_op_type = 'transformation' AND coord_op_method_code IN (9601, 9616, 9618, 9619, 9624, 9660)")
+    for (code, name, method_code, method_name, source_crs_code, target_crs_code, area_of_use_code, coord_op_accuracy, deprecated) in proj_db_cursor.fetchall():
+        expected_order = 1
+        max_n_params = 7
+        param_auth_name = [None for i in range(max_n_params)]
+        param_code = [None for i in range(max_n_params)]
+        param_name = [None for i in range(max_n_params)]
+        param_value = [None for i in range(max_n_params)]
+        param_uom_auth_name = [None for i in range(max_n_params)]
+        param_uom_code = [None for i in range(max_n_params)]
+
+        iterator = proj_db_cursor.execute("SELECT sort_order, cop.parameter_code, parameter_name, parameter_value, uom_code from epsg_coordoperationparam cop LEFT JOIN epsg_coordoperationparamvalue copv LEFT JOIN epsg_coordoperationparamusage copu ON cop.parameter_code = copv.parameter_code AND copu.parameter_code = copv.parameter_code WHERE copu.coord_op_method_code = copv.coord_op_method_code AND coord_op_code = ? AND copv.coord_op_method_code = ? ORDER BY sort_order", (code, method_code))
+        for (order, parameter_code, parameter_name, parameter_value, uom_code) in iterator:
+            assert order <= max_n_params
+            assert order == expected_order
+            param_auth_name[order - 1] = EPSG_AUTHORITY
+            param_code[order - 1] = parameter_code
+            param_name[order - 1] = parameter_name
+            param_value[order - 1] = parameter_value
+            param_uom_auth_name[order - 1] = EPSG_AUTHORITY
+            param_uom_code[order - 1] = uom_code
+            expected_order += 1
+
+        arg = (EPSG_AUTHORITY, code, name,
+               None, None, # description + scope
+               EPSG_AUTHORITY, method_code, method_name,
+               EPSG_AUTHORITY, source_crs_code,
+               EPSG_AUTHORITY, target_crs_code,
+               EPSG_AUTHORITY, area_of_use_code,
+               coord_op_accuracy,
+               param_auth_name[0], param_code[0], param_name[0],
+               param_value[0], param_uom_auth_name[0], param_uom_code[0],
+               param_auth_name[1], param_code[1], param_name[1], param_value[1],
+               param_uom_auth_name[1], param_uom_code[1], param_auth_name[2],
+               param_code[2], param_name[2], param_value[2],
+               param_uom_auth_name[2], param_uom_code[2],
+               param_auth_name[3], param_code[3], param_name[3], param_value[3],
+               param_uom_auth_name[3], param_uom_code[3], param_auth_name[4],
+               param_code[4], param_name[4], param_value[4],
+               param_uom_auth_name[4], param_uom_code[4], param_auth_name[5],
+               param_code[5], param_name[5], param_value[5],
+               param_uom_auth_name[5], param_uom_code[5], param_auth_name[6],
+               param_code[6], param_name[6], param_value[6],
+               param_uom_auth_name[6], param_uom_code[6],
+               deprecated)
+
+        #proj_db_cursor.execute("INSERT INTO coordinate_operation VALUES (?,?,'other_transformation')", (EPSG_AUTHORITY, code))
+        proj_db_cursor.execute('INSERT INTO other_transformation VALUES (' +
+            '?,?,?, ?,?, ?,?,?, ?,?, ?,?, ?,?, ?, ?,?,?,?,?,?, ?,?,?,?,?,?, ?,?,?,?,?,?, ' +
+            '?,?,?,?,?,?, ?,?,?,?,?,?, ?,?,?,?,?,?, ?,?,?,?,?,?, ?)', arg)
+
+def fill_concatenated_operation(proj_db_cursor):
+    """Import EPSG concatenated operations (2 or 3 chained steps).
+
+    An operation is inserted only when every step already exists in
+    coordinate_operation_with_conversion_view, so the step foreign keys
+    resolve.  Operations with no recorded path are silently skipped.
+    """
+    proj_db_cursor.execute("SELECT coord_op_code, coord_op_name, coord_op_method_code, coord_op_method_name, source_crs_code, target_crs_code, area_of_use_code, coord_op_accuracy, epsg_coordoperation.deprecated FROM epsg.epsg_coordoperation LEFT JOIN epsg.epsg_coordoperationmethod USING (coord_op_method_code) WHERE coord_op_type = 'concatenated operation'")
+    for (code, name, method_code, method_name, source_crs_code, target_crs_code, area_of_use_code, coord_op_accuracy, deprecated) in proj_db_cursor.fetchall():
+        expected_order = 1
+        max_n_params = 3
+        step_code = [None for i in range(max_n_params)]
+
+        iterator = proj_db_cursor.execute("SELECT op_path_step, single_operation_code FROM epsg_coordoperationpath WHERE concat_operation_code = ? ORDER BY op_path_step", (code,))
+        for (order, single_operation_code) in iterator:
+            assert order <= max_n_params
+            assert order == expected_order
+            step_code[order - 1] = single_operation_code
+            expected_order += 1
+        n_params = expected_order - 1
+        if n_params == 0: # For example http://www.epsg-registry.org//export.htm?gml=urn:ogc:def:coordinateOperation:EPSG::8658
+            continue
+        assert n_params in (2, 3), (code, n_params)
+
+        arg = (EPSG_AUTHORITY, code, name,
+               None, None, # description + scope
+               EPSG_AUTHORITY, source_crs_code,
+               EPSG_AUTHORITY, target_crs_code,
+               EPSG_AUTHORITY, area_of_use_code,
+               coord_op_accuracy,
+               EPSG_AUTHORITY, step_code[0],
+               EPSG_AUTHORITY, step_code[1],
+               EPSG_AUTHORITY if step_code[2] else None, step_code[2],
+               deprecated
+               )
+
+        # Check that each referenced step was imported by an earlier fill_*()
+        proj_db_cursor.execute("SELECT 1 FROM coordinate_operation_with_conversion_view WHERE code = ?", (step_code[0],))
+        step1_exists = proj_db_cursor.fetchone() is not None
+
+        proj_db_cursor.execute("SELECT 1 FROM coordinate_operation_with_conversion_view WHERE code = ?", (step_code[1],))
+        step2_exists = proj_db_cursor.fetchone() is not None
+
+        step3_exists = True
+        if step_code[2]:
+            proj_db_cursor.execute("SELECT 1 FROM coordinate_operation_with_conversion_view WHERE code = ?", (step_code[2],))
+            step3_exists = proj_db_cursor.fetchone() is not None
+
+        if step1_exists and step2_exists and step3_exists:
+            #proj_db_cursor.execute("INSERT INTO coordinate_operation VALUES (?,?,'concatenated_operation')", (EPSG_AUTHORITY, code))
+            proj_db_cursor.execute('INSERT INTO concatenated_operation VALUES (' +
+                '?,?,?, ?,?, ?,?, ?,?, ?,?, ?, ?,?, ?,?, ?,?, ?)', arg)
+
+
+def report_non_imported_operations(proj_db_cursor):
+    """Print and return EPSG coordinate operations that were not imported.
+
+    Compares epsg_coordoperation against
+    coordinate_operation_with_conversion_view and returns the list of
+    missing rows (empty list if everything was imported).
+    """
+    proj_db_cursor.execute("SELECT coord_op_code, coord_op_type, coord_op_name, coord_op_method_code, coord_op_method_name, source_crs_code, target_crs_code, area_of_use_code, coord_op_accuracy, epsg_coordoperation.deprecated FROM epsg.epsg_coordoperation LEFT JOIN epsg.epsg_coordoperationmethod USING (coord_op_method_code) WHERE coord_op_code NOT IN (SELECT code FROM coordinate_operation_with_conversion_view)")
+    rows = []
+    first = True
+    for row in proj_db_cursor.fetchall():
+        if first:
+            # Header printed lazily so nothing is printed when all imported
+            print('Non imported coordinate_operation:')
+            first = False
+        print(' ' + str(row))
+        rows.append(row)
+    return rows
+
+# ---------------------------------------------------------------------------
+# Main script: ingest the EPSG dump, build the proj.db schema in memory,
+# fill it, then dump each table to data/sql/<table>.sql
+# ---------------------------------------------------------------------------
+epsg_db_conn, epsg_tmp_db_filename = ingest_epsg()
+
+script_dir_name = os.path.dirname(os.path.realpath(__file__))
+sql_dir_name = os.path.join(os.path.dirname(script_dir_name), 'data', 'sql')
+
+proj_db_filename = ':memory:'
+# NOTE(review): the exists/unlink below is dead code while the filename is
+# ':memory:'; presumably a leftover from a file-based build — confirm.
+if os.path.exists(proj_db_filename):
+    os.unlink(proj_db_filename)
+proj_db_conn = sqlite3.connect(proj_db_filename)
+proj_db_cursor = proj_db_conn.cursor()
+# Enforce referential integrity while filling the tables
+proj_db_cursor.execute('PRAGMA foreign_keys = 1;')
+
+ingest_sqlite_dump(proj_db_cursor, os.path.join(sql_dir_name, 'proj_db_table_defs.sql'))
+# Attach the temporary EPSG db so fill_*() can SELECT from epsg.* tables
+proj_db_cursor.execute("ATTACH DATABASE '%s' AS epsg;" % epsg_tmp_db_filename)
+
+# Order matters: referenced objects (units, datums, CRS...) must exist
+# before the objects that point at them are inserted.
+fill_unit_of_measure(proj_db_cursor)
+fill_ellipsoid(proj_db_cursor)
+fill_area(proj_db_cursor)
+fill_prime_meridian(proj_db_cursor)
+fill_geodetic_datum(proj_db_cursor)
+fill_vertical_datum(proj_db_cursor)
+fill_coordinate_system(proj_db_cursor)
+fill_axis(proj_db_cursor)
+fill_geodetic_crs(proj_db_cursor)
+fill_vertical_crs(proj_db_cursor)
+fill_conversion(proj_db_cursor)
+fill_projected_crs(proj_db_cursor)
+fill_compound_crs(proj_db_cursor)
+fill_helmert_transformation(proj_db_cursor)
+fill_grid_transformation(proj_db_cursor)
+fill_other_transformation(proj_db_cursor)
+fill_concatenated_operation(proj_db_cursor)
+non_imported_operations = report_non_imported_operations(proj_db_cursor)
+
+proj_db_cursor.close()
+proj_db_conn.commit()
+
+files = {}
+
+# Dump the generated database and split it one .sql file per table
+for line in proj_db_conn.iterdump():
+    if line.startswith('INSERT INTO "'):
+        table_name = line[len('INSERT INTO "'):]
+        table_name = table_name[0:table_name.find('"')]
+        if table_name in files:
+            f = files[table_name]
+        else:
+            # First row for this table: create the file with a warning header
+            f = open(os.path.join(sql_dir_name, table_name) + '.sql', 'wb')
+            f.write("--- This file has been generated by scripts/build_db.py. DO NOT EDIT !\n\n".encode('UTF-8'))
+            files[table_name] = f
+        f.write((line + '\n').encode('UTF-8'))
+    elif line.startswith('CREATE TRIGGER conversion_method_check_insert_trigger'):
+        # The generated conversion trigger goes into its own file
+        table_name = 'conversion_triggers'
+        if table_name in files:
+            f = files[table_name]
+        else:
+            f = open(os.path.join(sql_dir_name, table_name) + '.sql', 'wb')
+            f.write("--- This file has been generated by scripts/build_db.py. DO NOT EDIT !\n\n".encode('UTF-8'))
+            files[table_name] = f
+        f.write((line + '\n').encode('UTF-8'))
+#f = files['coordinate_operation']
+#for row in non_imported_operations:
+#    f.write(("--- Non imported: " + str(row) + '\n').encode('UTF-8'))
+# Dropping the dict lets the file objects be closed by garbage collection
+del files
+
+proj_db_conn = None
+
+# Clean up the temporary EPSG database
+epsg_db_conn = None
+if os.path.exists(epsg_tmp_db_filename):
+    os.unlink(epsg_tmp_db_filename)
diff --git a/scripts/build_db_create_ignf.py b/scripts/build_db_create_ignf.py
new file mode 100755
index 00000000..97157ec5
--- /dev/null
+++ b/scripts/build_db_create_ignf.py
@@ -0,0 +1,527 @@
+#!/usr/bin/env python
+###############################################################################
+# $Id$
+#
+# Project: PROJ
+# Purpose: Build SRS and coordinate transform database
+# Author: Even Rouault <even.rouault at spatialys.com>
+#
+###############################################################################
+# Copyright (c) 2018, Even Rouault <even.rouault at spatialys.com>
+#
+# Permission is hereby granted, free of charge, to any person obtaining a
+# copy of this software and associated documentation files (the "Software"),
+# to deal in the Software without restriction, including without limitation
+# the rights to use, copy, modify, merge, publish, distribute, sublicense,
+# and/or sell copies of the Software, and to permit persons to whom the
+# Software is furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
+# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+# DEALINGS IN THE SOFTWARE.
+###############################################################################
+
+import copy
+import json
+import os
+import sqlite3
+import sys
+
# Command line: the IGNF PROJ.4 definition file and the proj.db database
# (used to look up EPSG ellipsoids).  Exit early on bad usage.
if len(sys.argv) != 3:
    print('Usage: build_db_create_ignf.py path_to_IGNF_file proj.db')
    sys.exit(1)
+
def escape_literal(x):
    """Double embedded single quotes so *x* can be used in a SQL string literal."""
    return "''".join(x.split("'"))
+
def find_ellipsoid(cursor, line, d):
    """Find the EPSG ellipsoid matching the +a / +rf values of a PROJ string.

    cursor: sqlite3 cursor on a database with an `ellipsoid` table.
    line:   original definition line, used only in assertion messages.
    d:      dict of PROJ.4 key/values; 'a' and 'rf' must be present.

    Matches either an exact inv_flattening, or the inverse flattening
    recomputed from semi_minor_axis with a tolerance relaxed from 1e-10
    down to 1e-6.  Asserts if no ellipsoid matches.
    Returns (ellps_code, ellps_name).
    """
    a = float(d['a'])
    rf = float(d['rf'])
    for prec in (10, 9, 8, 7, 6):
        # The difference against rf must be wrapped in abs(): without it,
        # any recomputed inverse flattening smaller than rf (difference
        # negative) would spuriously satisfy the "< 1e-N" test.
        cursor.execute('SELECT code, name FROM ellipsoid WHERE code != 1024 AND semi_major_axis = ? AND (inv_flattening = ? OR abs(abs(semi_major_axis / (semi_major_axis - semi_minor_axis)) - ?) < 1e-%d)' % prec, (a, rf, rf))
        row = cursor.fetchone()
        if prec > 6 and row is None:
            continue  # retry with a looser tolerance
        assert row, line
        ellps_code, ellps_name = row
        return ellps_code, ellps_name
+
def find_geogcrs_code(line,d):
    """Return the IGNF code of the geographic CRS matching PROJ dict *d*.

    Relies on the module-level variables ``code`` (the IGNF code of the
    definition currently parsed by the main loop) and ``map_geog_crs``
    (geographic CRS already ingested, keyed by a JSON dump of their
    defining parameters).  *line* is only used in assertion messages.
    """
    pm = None
    if 'pm' in d:
        pm = d['pm']
        # The Paris meridian is the only non-Greenwich value expected.
        assert pm == '2.337229167', pm

    key = { 'a' : d['a'], 'rf' : d['rf'], 'towgs84' : d['towgs84'], 'pm': pm }
    key_geog_crs = json.dumps(key)
    # Hardcoded mappings for well-known CRS code families.
    if 'W84' in code:
        geogcrs_code = "WGS84G"
    elif 'RGFG95' in code:
        geogcrs_code = "RGFG95GEO"
    elif 'ETRS89' in code:
        geogcrs_code = "ETRS89GEO"
    elif 'RGF93' in code:
        geogcrs_code = "RGF93G"
    elif 'LAMB93' in code:
        geogcrs_code = "RGF93G"
    elif 'MILLER' in code:
        geogcrs_code = "WGS84G"
    elif code.startswith('GEOPORTAL'):
        geogcrs_code = 'RGF93G'
    elif key_geog_crs not in map_geog_crs:
        # No exact match on the defining parameters: among stored CRS whose
        # key matches once the candidate's code is injected, pick the one
        # whose code shares the longest common prefix with the current code.
        max_count_common = 0
        max_common_code = None
        for k in map_geog_crs:
            temp = copy.copy(key)
            temp['code'] = map_geog_crs[k]['code']
            if temp == json.loads(k):
                # Length of the common prefix between the two codes.
                count_common = 0
                while True:
                    if len(code) <= count_common:
                        break
                    if len(map_geog_crs[k]['code']) <= count_common:
                        break
                    if code[count_common] == map_geog_crs[k]['code'][count_common]:
                        count_common += 1
                    else:
                        break
                if count_common > max_count_common:
                    max_count_common = count_common
                    max_common_code = map_geog_crs[k]['code']
        # Require at least 4 common characters to accept the match.
        assert max_count_common >= 4, (max_common_code, max_count_common, line)
        geogcrs_code = max_common_code
    else:
        geogcrs_code = map_geog_crs[key_geog_crs]['code']
    return geogcrs_code
+
+
# Command line arguments (validated above): IGNF definition file and the
# proj.db database to read EPSG reference data from.
IGNF_file = sys.argv[1]
proj_db = sys.argv[2]

conn = sqlite3.connect(proj_db)
cursor = conn.cursor()

# All generated SQL statements, in emission order.
all_sql = []

# Geocentric CRS title -> IGNF datum code, so that a later geographic CRS
# with the same title reuses the datum instead of creating a duplicate.
geocentric_crs_name_to_datum_code = {}
# JSON-serialized defining parameters -> {code, title} of geographic CRS.
map_geog_crs = {}
+
# Parse each definition line of the IGNF PROJ.4 file.  Lines are of the form
#   <CODE> +proj=... +key=value ... +no_defs <>
# The PROJ string is tokenized into dict d, then SQL statements are emitted
# according to the projection type.
for line in open(IGNF_file, 'rt').readlines():
    line = line.strip('\n')
    if line[0] == '#' or line.startswith('<metadata>'):
        continue
    assert line[0] == '<'
    code = line[1:line.find('>')]
    proj4_string = line[line.find('>')+1:]
    #print(code, proj4_string)
    tokens = proj4_string.split(' +')
    assert tokens[0] == ''
    tokens = tokens[1:]
    d = {}
    for token in tokens:
        if token == 'no_defs <>' or token == 'wktext':
            continue
        pos_equal = token.find('=')
        assert pos_equal > 0, line
        key = token[0:pos_equal]
        value = token[pos_equal+1:]
        d[key] = value
    assert 'proj' in d, line
    assert 'title' in d, line
    proj = d['proj']

    assert 'a' in d
    assert 'rf' in d
    assert (not 'nadgrids' in d) or d['nadgrids'] in ('null', '@null', 'ntf_r93.gsb,null'), line
    assert 'towgs84' in d

    # Geocentric CRS: emit a datum, a geodetic_crs and a Helmert
    # transformation towards EPSG:4978 (WGS 84 geocentric).
    if proj == 'geocent':

        ellps_code, ellps_name = find_ellipsoid(cursor, line, d)

        datum_code = 'DATUM_' + code

        geocentric_crs_name_to_datum_code[d['title']] = datum_code

        sql = """INSERT INTO "geodetic_datum" VALUES('IGNF','%s','%s',NULL,NULL,'EPSG','%s','EPSG','8901','EPSG','1262',0);""" % (datum_code, escape_literal(d['title']), ellps_code)
        all_sql.append(sql)

        #sql = """INSERT INTO "crs" VALUES('IGNF','%s','geocentric');""" % (code)
        #all_sql.append(sql)

        sql = """INSERT INTO "geodetic_crs" VALUES('IGNF','%s','%s',NULL,NULL,'geocentric','EPSG','6500','IGNF','%s','EPSG','1262',NULL,0);""" % (code, escape_literal(d['title']), datum_code)
        all_sql.append(sql)

        #sql = """INSERT INTO "coordinate_operation" VALUES('IGNF','IGNF_%s_TO_EPSG_4978','helmert_transformation');""" % (code)
        #all_sql.append(sql)

        # 3-parameter towgs84 -> geocentric translations; 7-parameter ->
        # position vector transformation (rotation in arc-sec, scale in ppm).
        towgs84 = d['towgs84'].split(',')
        assert len(towgs84) in (3, 7), towgs84
        x = towgs84[0]
        y = towgs84[1]
        z = towgs84[2]
        if len(towgs84) == 3:
            rx = 'NULL'
            ry = 'NULL'
            rz = 'NULL'
            s = 'NULL'
            r_uom_auth_name = 'NULL'
            r_uom_code = 'NULL'
            s_uom_auth_name = 'NULL'
            s_uom_code = 'NULL'
            method_code = "'1031'"
            method_name = "'Geocentric translations (geocentric domain)'"
        else:
            rx = towgs84[3]
            ry = towgs84[4]
            rz = towgs84[5]
            s = towgs84[6]
            r_uom_auth_name = "'EPSG'"
            r_uom_code = "'9104'"
            s_uom_auth_name = "'EPSG'"
            s_uom_code = "'9202'"
            method_code = "'1033'"
            method_name = "'Position Vector transformation (geocentric domain)'"

        sql = """INSERT INTO "helmert_transformation" VALUES('IGNF','IGNF_%s_TO_EPSG_4978','%s to WGS 84',NULL,NULL,'EPSG',%s,%s,'IGNF','%s','EPSG','4978','EPSG','1262',NULL,%s,%s,%s,'EPSG','9001',%s,%s,%s,%s,%s,%s,%s, %s,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,0);""" % (code, escape_literal(d['title']), method_code, method_name, code, x, y, z, rx, ry, rz, r_uom_auth_name, r_uom_code, s, s_uom_auth_name, s_uom_code)
        all_sql.append(sql)

        if 'nadgrids' in d and d['nadgrids'] == 'ntf_r93.gsb,null':

            #sql = """INSERT INTO "coordinate_operation" VALUES('IGNF','IGNF_%s_TO_EPSG_4978_GRID','grid_transformation');""" % (code)
            #all_sql.append(sql)

            sql = """INSERT INTO "grid_transformation" VALUES('IGNF','IGNF_%s_TO_EPSG_4978_GRID','%s to WGS 84 (2)',NULL,NULL,'EPSG','9615','NTv2','IGNF','%s','EPSG','4978','EPSG','1262',NULL,'EPSG','8656','Latitude and longitude difference file','ntf_r93.gsb',NULL,NULL,NULL,NULL,NULL,NULL,0);""" % (code, escape_literal(d['title']), code)
            all_sql.append(sql)

        all_sql.append('')

    # Geographic CRS: emit a datum (unless shared with a geocentric CRS of
    # the same title), a geodetic_crs and transformations to EPSG:4326.
    elif proj == 'longlat':

        pm = None
        pm_code = "'8901'" # Greenwich
        if 'pm' in d:
            pm = d['pm']
            assert pm == '2.337229167', pm
            pm_code = "'8903'" # Paris

        ellps_code, ellps_name = find_ellipsoid(cursor, line, d)

        if d['title'] in geocentric_crs_name_to_datum_code:
            datum_code = geocentric_crs_name_to_datum_code[d['title']]
        else:
            datum_code = 'DATUM_' + code
            sql = """INSERT INTO "geodetic_datum" VALUES('IGNF','%s','%s',NULL,NULL,'EPSG','%s','EPSG',%s,'EPSG','1262',0);""" % (datum_code, escape_literal(d['title']), ellps_code, pm_code)
            all_sql.append(sql)

        #sql = """INSERT INTO "crs" VALUES('IGNF','%s','geographic 2D');""" % (code)
        #all_sql.append(sql)

        cs_code = "'6424'" # Long Lat deg
        if 'grades' in d['title']:
            cs_code = "'6425'" # Long Lat grad

        sql = """INSERT INTO "geodetic_crs" VALUES('IGNF','%s','%s',NULL,NULL,'geographic 2D','EPSG',%s,'IGNF','%s','EPSG','1262',NULL,0);""" % (code, escape_literal(d['title']), cs_code, datum_code)
        all_sql.append(sql)

        # Register this geographic CRS so projected CRS can find it later.
        # The 'code' entry is only added for the WGS84-parameter collision
        # case, to keep the JSON keys unique.
        key = { 'a' : d['a'], 'rf' : d['rf'], 'towgs84' : d['towgs84'], 'pm': pm }
        if "+towgs84=0.0000,0.0000,0.0000 +a=6378137.0000 +rf=298.2572221010000" in line or code == 'WGS84G':
            key['code'] = code
        key_geog_crs = json.dumps(key)
        assert key_geog_crs not in map_geog_crs, (line, map_geog_crs[key_geog_crs])
        map_geog_crs[ key_geog_crs ] = { 'code': code, 'title': d['title'] }

        # Special case: NTF Paris (grades) goes through a longitude rotation
        # to NTF Greenwich, then concatenated operations to WGS 84.
        if code == 'NTFP': # Grades
            assert cs_code == "'6425'" # Long Lat grad

            #sql = """INSERT INTO "coordinate_operation" VALUES('IGNF','IGNF_NTFP_TO_IGNF_NTFG','other_transformation');"""
            #all_sql.append(sql)

            sql = """INSERT INTO "other_transformation" VALUES('IGNF','IGNF_NTFP_TO_IGNF_NTFG','Nouvelle Triangulation Francaise Paris grades to Nouvelle Triangulation Francaise Greenwich degres sexagesimaux',NULL,NULL,'EPSG','9601','Longitude rotation','IGNF','NTFP','IGNF','NTFG','EPSG','1262',0.0,'EPSG','8602','Longitude offset',2.5969213,'EPSG','9105',NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,0);"""
            all_sql.append(sql)

            #sql = """INSERT INTO "coordinate_operation" VALUES('IGNF','IGNF_%s_TO_EPSG_4326','concatenated_operation');""" % (code)
            #all_sql.append(sql)

            sql = """INSERT INTO "concatenated_operation" VALUES('IGNF','IGNF_NTFP_TO_EPSG_4326','Nouvelle Triangulation Francaise Paris grades to WGS 84',NULL,NULL,'IGNF','NTFP','EPSG','4326','EPSG','1262',NULL,'IGNF','IGNF_NTFP_TO_IGNF_NTFG','IGNF','IGNF_NTFG_TO_EPSG_4326',NULL,NULL,0);"""
            all_sql.append(sql)

            #sql = """INSERT INTO "coordinate_operation" VALUES('IGNF','IGNF_%s_TO_EPSG_4326_GRID','concatenated_operation');""" % (code)
            #all_sql.append(sql)

            sql = """INSERT INTO "concatenated_operation" VALUES('IGNF','IGNF_NTFP_TO_EPSG_4326_GRID','Nouvelle Triangulation Francaise Paris grades to WGS 84 (2)',NULL,NULL,'IGNF','NTFP','EPSG','4326','EPSG','1262',NULL,'IGNF','IGNF_NTFP_TO_IGNF_NTFG','IGNF','IGNF_NTFG_TO_EPSG_4326_GRID',NULL,NULL,0);"""
            all_sql.append(sql)

            continue

        assert cs_code == "'6424'" # Long Lat deg

        #sql = """INSERT INTO "coordinate_operation" VALUES('IGNF','IGNF_%s_TO_EPSG_4326','helmert_transformation');""" % (code)
        #all_sql.append(sql)

        # Same 3- vs 7-parameter split as the geocentric branch, but with
        # the geog2D-domain method codes.
        towgs84 = d['towgs84'].split(',')
        assert len(towgs84) in (3, 7), towgs84
        x = towgs84[0]
        y = towgs84[1]
        z = towgs84[2]
        if len(towgs84) == 3:
            rx = 'NULL'
            ry = 'NULL'
            rz = 'NULL'
            s = 'NULL'
            r_uom_auth_name = 'NULL'
            r_uom_code = 'NULL'
            s_uom_auth_name = 'NULL'
            s_uom_code = 'NULL'
            method_code = "'9603'"
            method_name = "'Geocentric translations (geog2D domain)'"

        else:
            rx = towgs84[3]
            ry = towgs84[4]
            rz = towgs84[5]
            s = towgs84[6]
            r_uom_auth_name = "'EPSG'"
            r_uom_code = "'9104'"
            s_uom_auth_name = "'EPSG'"
            s_uom_code = "'9202'"
            method_code = "'9606'"
            method_name = "'Position Vector transformation (geog2D domain)'"

        sql = """INSERT INTO "helmert_transformation" VALUES('IGNF','IGNF_%s_TO_EPSG_4326','%s to WGS 84',NULL,NULL,'EPSG',%s,%s,'IGNF','%s','EPSG','4326','EPSG','1262',NULL,%s,%s,%s,'EPSG','9001',%s,%s,%s,%s,%s,%s,%s, %s,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,0);""" % (code, escape_literal(d['title']), method_code, method_name, code, x, y, z, rx, ry, rz, r_uom_auth_name, r_uom_code, s, s_uom_auth_name, s_uom_code)
        all_sql.append(sql)

        if 'nadgrids' in d and d['nadgrids'] == 'ntf_r93.gsb,null':

            #sql = """INSERT INTO "coordinate_operation" VALUES('IGNF','IGNF_%s_TO_EPSG_4326_GRID','grid_transformation');""" % (code)
            #all_sql.append(sql)

            sql = """INSERT INTO "grid_transformation" VALUES('IGNF','IGNF_%s_TO_EPSG_4326_GRID','%s to WGS 84 (2)',NULL,NULL,'EPSG','9615','NTv2','IGNF','%s','EPSG','4326','EPSG','1262',NULL,'EPSG','8656','Latitude and longitude difference file','ntf_r93.gsb',NULL,NULL,NULL,NULL,NULL,NULL,0);""" % (code, escape_literal(d['title']), code)
            all_sql.append(sql)

        all_sql.append('')

    # Transverse Mercator projected CRS.
    elif proj == 'tmerc':

        geogcrs_code = find_geogcrs_code(line,d)

        assert 'x_0' in d
        assert 'y_0' in d
        assert 'k_0' in d
        assert 'lat_0' in d
        assert 'lon_0' in d
        assert 'units' in d and d['units'] == 'm'

        #sql = """INSERT INTO "coordinate_operation" VALUES('IGNF','CONV_%s','conversion');""" % (code)
        #all_sql.append(sql)

        sql = """INSERT INTO "conversion" VALUES('IGNF','CONV_%s','Conversion for %s',NULL,NULL,'EPSG','1262','EPSG','9807','Transverse Mercator','EPSG','8801','Latitude of natural origin',%s,'EPSG','9102','EPSG','8802','Longitude of natural origin',%s,'EPSG','9102','EPSG','8805','Scale factor at natural origin',%s,'EPSG','9201','EPSG','8806','False easting',%s,'EPSG','9001','EPSG','8807','False northing',%s,'EPSG','9001',NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,0);""" % (code, escape_literal(d['title']), d['lat_0'], d['lon_0'], d['k_0'], d['x_0'], d['y_0'])
        all_sql.append(sql)

        #sql = """INSERT INTO "crs" VALUES('IGNF','%s','projected');""" % (code)
        #all_sql.append(sql)

        sql = """INSERT INTO "projected_crs" VALUES('IGNF','%s','%s',NULL,NULL,'EPSG','4499','IGNF','%s','IGNF','CONV_%s','EPSG','1262',NULL,0);""" % (code, escape_literal(d['title']), geogcrs_code, code)
        all_sql.append(sql)

        all_sql.append('')

    # Lambert Azimuthal Equal Area projected CRS.
    elif proj == 'laea':

        geogcrs_code = find_geogcrs_code(line,d)

        assert 'x_0' in d
        assert 'y_0' in d
        assert 'lat_0' in d
        assert 'lon_0' in d
        assert 'units' in d and d['units'] == 'm'

        #sql = """INSERT INTO "coordinate_operation" VALUES('IGNF','CONV_%s','conversion');""" % (code)
        #all_sql.append(sql)

        sql = """INSERT INTO "conversion" VALUES('IGNF','CONV_%s','Conversion for %s',NULL,NULL,'EPSG','1262','EPSG','9820','Lambert Azimuthal Equal Area','EPSG','8801','Latitude of natural origin',%s,'EPSG','9102','EPSG','8802','Longitude of natural origin',%s,'EPSG','9102','EPSG','8806','False easting',%s,'EPSG','9001','EPSG','8807','False northing',%s,'EPSG','9001',NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,0);""" % (code, escape_literal(d['title']), d['lat_0'], d['lon_0'], d['x_0'], d['y_0'])
        all_sql.append(sql)

        #sql = """INSERT INTO "crs" VALUES('IGNF','%s','projected');""" % (code)
        #all_sql.append(sql)

        sql = """INSERT INTO "projected_crs" VALUES('IGNF','%s','%s',NULL,NULL,'EPSG','4499','IGNF','%s','IGNF','CONV_%s','EPSG','1262',NULL,0);""" % (code, escape_literal(d['title']), geogcrs_code, code)
        all_sql.append(sql)

        all_sql.append('')

    # Lambert Conic Conformal: 2SP when lat_2 is given, otherwise 1SP
    # (in which case lat_0 must equal lat_1).
    elif proj == 'lcc':

        geogcrs_code = find_geogcrs_code(line,d)

        assert 'x_0' in d
        assert 'y_0' in d
        assert 'lat_0' in d
        assert 'lat_1' in d
        assert 'k_0' in d or 'lat_2' in d, line
        assert 'lon_0' in d
        assert 'units' in d and d['units'] == 'm'

        #sql = """INSERT INTO "coordinate_operation" VALUES('IGNF','CONV_%s','conversion');""" % (code)
        #all_sql.append(sql)

        if 'lat_2' in d:
            sql = """INSERT INTO "conversion" VALUES('IGNF','CONV_%s','Conversion for %s',NULL,NULL,'EPSG','1262','EPSG','9802','Lambert Conic Conformal (2SP)','EPSG','8821','Latitude of false origin',%s,'EPSG','9102','EPSG','8822','Longitude of false origin',%s,'EPSG','9102','EPSG','8823','Latitude of 1st standard parallel',%s,'EPSG','9102','EPSG','8824','Latitude of 2nd standard parallel',%s,'EPSG','9102','EPSG','8826','Easting at false origin',%s,'EPSG','9001','EPSG','8827','Northing at false origin',%s,'EPSG','9001',NULL,NULL,NULL,NULL,NULL,NULL,0);""" % (code, escape_literal(d['title']), d['lat_0'], d['lon_0'], d['lat_1'], d['lat_2'], d['x_0'], d['y_0'])
        else:
            assert d['lat_0'] == d['lat_1'], line
            sql = """INSERT INTO "conversion" VALUES('IGNF','CONV_%s','Conversion for %s',NULL,NULL,'EPSG','1262','EPSG','9801','Lambert Conic Conformal (1SP)','EPSG','8801','Latitude of natural origin',%s,'EPSG','9102','EPSG','8802','Longitude of natural origin',%s,'EPSG','9102','EPSG','8805','Scale factor at natural origin',%s,'EPSG','9201','EPSG','8806','False easting',%s,'EPSG','9001','EPSG','8807','False northing',%s,'EPSG','9001',NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,0);""" % (code, escape_literal(d['title']), d['lat_0'], d['lon_0'], d['k_0'], d['x_0'], d['y_0'])

        all_sql.append(sql)

        #sql = """INSERT INTO "crs" VALUES('IGNF','%s','projected');""" % (code)
        #all_sql.append(sql)

        sql = """INSERT INTO "projected_crs" VALUES('IGNF','%s','%s',NULL,NULL,'EPSG','4499','IGNF','%s','IGNF','CONV_%s','EPSG','1262',NULL,0);""" % (code, escape_literal(d['title']), geogcrs_code, code)
        all_sql.append(sql)

        all_sql.append('')

    # Equidistant Cylindrical projected CRS.
    elif proj == 'eqc':

        geogcrs_code = find_geogcrs_code(line,d)

        assert 'x_0' in d, line
        assert 'y_0' in d, line
        assert 'lat_0' in d, line
        assert 'lat_ts' in d, line
        assert 'lon_0' in d, line
        assert 'units' in d and d['units'] == 'm', line
        assert float(d['lat_0']) == 0, line

        #sql = """INSERT INTO "coordinate_operation" VALUES('IGNF','CONV_%s','conversion');""" % (code)
        #all_sql.append(sql)

        sql = """INSERT INTO "conversion" VALUES('IGNF','CONV_%s','Conversion for %s',NULL,NULL,'EPSG','1262','EPSG','1028','Equidistant Cylindrical','EPSG','8823','Latitude of 1st standard parallel',%s,'EPSG','9102','EPSG','8802','Longitude of natural origin',%s,'EPSG','9102','EPSG','8806','False easting',%s,'EPSG','9001','EPSG','8807','False northing',%s,'EPSG','9001',NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,0);""" % (code, escape_literal(d['title']), d['lat_ts'], d['lon_0'], d['x_0'], d['y_0'])
        all_sql.append(sql)

        #sql = """INSERT INTO "crs" VALUES('IGNF','%s','projected');""" % (code)
        #all_sql.append(sql)

        sql = """INSERT INTO "projected_crs" VALUES('IGNF','%s','%s',NULL,NULL,'EPSG','4499','IGNF','%s','IGNF','CONV_%s','EPSG','1262',NULL,0);""" % (code, escape_literal(d['title']), geogcrs_code, code)
        all_sql.append(sql)

        all_sql.append('')

    # Gauss Schreiber Transverse Mercator (no EPSG method code: method
    # authority/code left NULL on purpose).
    elif proj == 'gstmerc':

        geogcrs_code = find_geogcrs_code(line,d)

        assert 'x_0' in d
        assert 'y_0' in d
        assert 'k_0' in d
        assert 'lat_0' in d
        assert 'lon_0' in d
        assert 'units' in d and d['units'] == 'm'

        #sql = """INSERT INTO "coordinate_operation" VALUES('IGNF','CONV_%s','conversion');""" % (code)
        #all_sql.append(sql)

        sql = """INSERT INTO "conversion" VALUES('IGNF','CONV_%s','Conversion for %s',NULL,NULL,'EPSG','1262',NULL,NULL,'Gauss Schreiber Transverse Mercator','EPSG','8801','Latitude of natural origin',%s,'EPSG','9102','EPSG','8802','Longitude of natural origin',%s,'EPSG','9102','EPSG','8805','Scale factor at natural origin',%s,'EPSG','9201','EPSG','8806','False easting',%s,'EPSG','9001','EPSG','8807','False northing',%s,'EPSG','9001',NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,0);""" % (code, escape_literal(d['title']), d['lat_0'], d['lon_0'], d['k_0'], d['x_0'], d['y_0'])
        all_sql.append(sql)

        #sql = """INSERT INTO "crs" VALUES('IGNF','%s','projected');""" % (code)
        #all_sql.append(sql)

        sql = """INSERT INTO "projected_crs" VALUES('IGNF','%s','%s',NULL,NULL,'EPSG','4499','IGNF','%s','IGNF','CONV_%s','EPSG','1262',NULL,0);""" % (code, escape_literal(d['title']), geogcrs_code, code)
        all_sql.append(sql)

        all_sql.append('')

    # Polar Stereographic (variant B); the asserts pin the single expected
    # instance (South pole, lon_0 = 140).
    elif proj == 'stere':

        geogcrs_code = find_geogcrs_code(line,d)

        assert 'x_0' in d
        assert 'y_0' in d
        if 'k' in d or 'k_0' in d:
            print('k/k_0 ignored for ' + line)
        assert 'lat_0' in d
        assert float(d['lat_0']) == -90, line
        assert 'lat_ts' in d
        assert 'lon_0' in d
        assert float(d['lon_0']) == 140, line
        assert 'units' in d and d['units'] == 'm'

        #sql = """INSERT INTO "coordinate_operation" VALUES('IGNF','CONV_%s','conversion');""" % (code)
        #all_sql.append(sql)

        sql = """INSERT INTO "conversion" VALUES('IGNF','CONV_%s','Conversion for %s',NULL,NULL,'EPSG','1262','EPSG','9829','Polar Stereographic (variant B)','EPSG','8832','Latitude of standard parallel',%s,'EPSG','9102','EPSG','8833','Longitude of origin',%s,'EPSG','9102','EPSG','8806','False easting',%s,'EPSG','9001','EPSG','8807','False northing',%s,'EPSG','9001',NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,0);""" % (code, escape_literal(d['title']), d['lat_ts'], d['lon_0'], d['x_0'], d['y_0'])
        all_sql.append(sql)

        #sql = """INSERT INTO "crs" VALUES('IGNF','%s','projected');""" % (code)
        #all_sql.append(sql)

        # Hardcoding 1025 for coordinate system since the only instance uses it
        sql = """INSERT INTO "projected_crs" VALUES('IGNF','%s','%s',NULL,NULL,'EPSG','1025','IGNF','%s','IGNF','CONV_%s','EPSG','1262',NULL,0);""" % (code, escape_literal(d['title']), geogcrs_code, code)
        all_sql.append(sql)

        all_sql.append('')

    # Miller Cylindrical: no EPSG method; recorded with the ad-hoc
    # 'PROJ mill' method name and no parameters.
    elif proj == 'mill':

        geogcrs_code = find_geogcrs_code(line,d)

        assert 'x_0' in d
        assert 'y_0' in d
        assert 'lon_0' in d
        assert 'units' in d and d['units'] == 'm'
        assert float(d['x_0']) == 0, d
        assert float(d['y_0']) == 0, d
        assert float(d['lon_0']) == 0, d

        #sql = """INSERT INTO "coordinate_operation" VALUES('IGNF','CONV_%s','conversion');""" % (code)
        #all_sql.append(sql)

        sql = """INSERT INTO "conversion" VALUES('IGNF','CONV_%s','Conversion for %s',NULL,NULL,'EPSG','1262',NULL,NULL,'PROJ mill',NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,0);""" % (code, escape_literal(d['title']))
        all_sql.append(sql)

        #sql = """INSERT INTO "crs" VALUES('IGNF','%s','projected');""" % (code)
        #all_sql.append(sql)

        sql = """INSERT INTO "projected_crs" VALUES('IGNF','%s','%s',NULL,NULL,'EPSG','4499','IGNF','%s','IGNF','CONV_%s','EPSG','1262',NULL,0);""" % (code, escape_literal(d['title']), geogcrs_code, code)
        all_sql.append(sql)

        all_sql.append('')

    # Any other projection type is unexpected: fail loudly.
    else:
        assert False, line
+
+
# Register the ntf_r93.gsb grid into grid_alternatives so that PROJ knows
# which proj-datumgrid file implements the NTv2 shift referenced above.
all_sql.append("""INSERT INTO grid_alternatives(original_grid_name,
                              proj_grid_name,
                              proj_grid_format,
                              proj_method,
                              inverse_direction,
                              package_name,
                              url, direct_download, open_license, directory)
                      VALUES ('ntf_r93.gsb', -- as referenced by the IGNF registry
                              'ntf_r93.gsb',
                              'NTv2',
                              'hgridshift',
                              0,
                              'proj-datumgrid',
                              NULL, NULL, NULL, NULL);
""")

# Debugging aid: dump the registry of ingested geographic CRS.
#for k in map_geog_crs:
#    print k, map_geog_crs[k]
+
# Resolve <repo>/data/sql relative to this script's own location.
script_dir_name = os.path.dirname(os.path.realpath(__file__))
sql_dir_name = os.path.join(os.path.dirname(script_dir_name), 'data', 'sql')

# Write all generated statements into data/sql/ignf.sql.  Use a context
# manager so the file is flushed and closed even if a write fails.
with open(os.path.join(sql_dir_name, 'ignf') + '.sql', 'wb') as f:
    f.write("--- This file has been generated by scripts/build_db_create_ignf.py from the PROJ4 IGNF definition file. DO NOT EDIT !\n\n".encode('UTF-8'))
    for sql in all_sql:
        f.write((sql + '\n').encode('UTF-8'))
+
diff --git a/scripts/build_db_create_ignf_from_xml.py b/scripts/build_db_create_ignf_from_xml.py
new file mode 100755
index 00000000..a0ab0168
--- /dev/null
+++ b/scripts/build_db_create_ignf_from_xml.py
@@ -0,0 +1,1071 @@
+#!/usr/bin/env python
+###############################################################################
+# $Id$
+#
+# Project: PROJ
+# Purpose: Build SRS and coordinate transform database from IGNF registry
+# Author: Even Rouault <even.rouault at spatialys.com>
+#
+###############################################################################
+# Copyright (c) 2018, Even Rouault <even.rouault at spatialys.com>
+#
+# Permission is hereby granted, free of charge, to any person obtaining a
+# copy of this software and associated documentation files (the "Software"),
+# to deal in the Software without restriction, including without limitation
+# the rights to use, copy, modify, merge, publish, distribute, sublicense,
+# and/or sell copies of the Software, and to permit persons to whom the
+# Software is furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
+# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+# DEALINGS IN THE SOFTWARE.
+###############################################################################
+
+from lxml import etree
+import os
+import requests
+import sys
+
# Canonical location of the IGNF registry; downloaded when no local file
# is given on the command line.
url = "http://librairies.ign.fr/geoportail/resources/IGNF.xml"

if len(sys.argv) not in (1, 2) or (len(sys.argv) == 2 and sys.argv[1].startswith('-')):
    print('Usage: build_db_create_ignf.py [path_to_IGNF.xml]')
    print('')
    print('If local filename is not used, then %s is downloaded.' % url)
    sys.exit(1)
+
def escape_literal(x):
    """Escape single quotes for SQL string literals (' becomes '')."""
    chars = []
    for ch in x:
        chars.append("''" if ch == "'" else ch)
    return ''.join(chars)
+
def strip_ns_prefix(tree):
    """Strip XML namespace prefixes from all tags and attribute names.

    Modifies *tree* in place and returns it for convenience, so that
    subsequent find()/iter() calls can use bare local names.
    """
    for element in tree.iter('*'):
        element.tag = etree.QName(element).localname
        # Iterate over a snapshot of the attribute names: the original code
        # deleted keys from element.attrib while iterating it, which is
        # mutation-during-iteration and therefore unsafe.
        for att in list(element.attrib):
            # '{namespace-uri}local' -> 'local'; names without a namespace
            # (find('}') == -1) are rewritten unchanged.
            newkey = att[att.find('}')+1:]
            val = element.attrib[att]
            del element.attrib[att]
            element.attrib[newkey] = val
    return tree
+
# All generated SQL statements, in emission order.
all_sql = []
all_sql_concat = [] # for concatenated_operation

# Get the registry content, either downloaded or from a local file.
if len(sys.argv) == 1:
    r = requests.get(url)
    root = etree.fromstring(r.content)
else:
    IGNF_file = sys.argv[1]
    tree = etree.parse(open(IGNF_file, 'rt'))
    root = tree.getroot()

# Drop XML namespaces so lookups below can use bare tag names.
root = strip_ns_prefix(root)

# Retrieve and insert metadata (registry source, version and date)
version = root.find('versionNumber').find('CharacterString').text
date = root.find('versionDate').find('Date').text

all_sql.append("""INSERT INTO "metadata" VALUES('IGNF.SOURCE', '%s');""" % escape_literal(url))
all_sql.append("""INSERT INTO "metadata" VALUES('IGNF.VERSION', '%s');""" % version)
all_sql.append("""INSERT INTO "metadata" VALUES('IGNF.DATE', '%s');""" % date)
+
def get_epsg_code(txt):
    """Extract the trailing EPSG code from a '...:EPSG:nnnn' identifier."""
    assert ':EPSG:' in txt
    _, _, epsg_code = txt.rpartition(':')
    return epsg_code
+
+
def ingest_ellipsoids(root, all_sql):
    """Register the ellipsoids of the IGNF registry.

    Ellipsoids carrying an EPSG URN as their second name are mapped to that
    EPSG code; otherwise (sphere definitions only) an IGNF ellipsoid row is
    appended to all_sql.  Returns a dict: registry id -> (authority, code).
    """
    id_map = {}
    for wrapper in root.iter('ellipsoid'):
        node = wrapper.find('Ellipsoid')
        ellps_id = node.attrib['id']
        names = [n.text for n in node.iter('name')]
        if len(names) == 2:
            # Second name is the EPSG URN: delegate to EPSG.
            id_map[ellps_id] = ('EPSG', get_epsg_code(names[1]))
            continue
        assert len(names) == 1
        # Single-name ellipsoids must be spheres in this registry.
        second = node.find('secondDefiningParameter').find('SecondDefiningParameter')
        assert second.find('isSphere').text == 'sphere'
        id_map[ellps_id] = ('IGNF', ellps_id)
        all_sql.append("""INSERT INTO "ellipsoid" VALUES('IGNF','%s','%s',NULL,'PROJ', 'EARTH', %s,'EPSG','9001',0,NULL,0);""" % (ellps_id, names[0], node.find('semiMajorAxis').text))

    return id_map
+
+
def ingest_prime_meridians(root, all_sql):
    """Map every IGNF prime meridian id to its ('EPSG', code) equivalent.

    Every prime meridian in the registry is expected to carry a second name
    holding the EPSG URN, so nothing is ever appended to all_sql (kept in
    the signature for symmetry with the other ingest_* helpers).
    """
    result = {}
    for wrapper in root.iter('primeMeridian'):
        meridian = wrapper.find('PrimeMeridian')
        names = [n.text for n in meridian.iter('name')]
        assert len(names) == 2
        result[meridian.attrib['id']] = ('EPSG', get_epsg_code(names[1]))

    return result
+
def extract_id_from_href(txt):
    """Return the fragment identifier following the last '#' in txt."""
    assert '#' in txt
    return txt.rsplit('#', 1)[1]
+
def ingest_datums(root, all_sql, mapEllpsId, mapPmId):
    """Register geodetic and vertical datums of the IGNF registry.

    Datums carrying an EPSG URN as second name are mapped to EPSG; others
    get an IGNF geodetic_datum row appended to all_sql (resolving their
    ellipsoid/prime meridian through mapEllpsId/mapPmId).  Returns
    (mapDatumId, mapVerticalDatumId, invalidDatumId) where the first two
    map registry ids to (authority, code) and the last is the set of
    geodetic datums that could not be ingested.
    """
    mapDatumId = {}
    invalidDatumId = set()
    mapVerticalDatumId = {}

    for datum in root.iter('datum'):
        node = datum.find('GeodeticDatum')
        if node is not None:
            id = node.attrib['id']
            names = [_name.text for _name in node.iter('name')]
            if len(names) == 2:
                # Workaround for a known registry error for NTF Paris.
                if id == 'REG0020002':
                    assert get_epsg_code(names[1]) == '6275'
                    print('Error in registry. it points to EPSG:6275 instead of EPSG:6807 for NTF Paris')
                    mapDatumId[id] = ('EPSG', '6807')
                else:
                    mapDatumId[id] = ('EPSG', get_epsg_code(names[1]))
            else:
                assert len(names) == 1, names
                pmNode = node.find('usesPrimeMeridian')
                if pmNode is None or 'href' not in pmNode.attrib:
                    print('Invalid GeodeticDatum: ' + id)
                    invalidDatumId.add(id)
                    continue
                pmCode = extract_id_from_href(pmNode.attrib['href'])
                assert pmCode in mapPmId
                ellpsCode = extract_id_from_href(node.find('usesEllipsoid').attrib['href'])
                assert ellpsCode in mapEllpsId

                # We cheat by using EPSG:1262 = World for area of use
                sql = """INSERT INTO "geodetic_datum" VALUES('IGNF','%s','%s',NULL,NULL,'%s','%s','%s','%s','EPSG','1262',0);""" % (id, names[0], mapEllpsId[ellpsCode][0], mapEllpsId[ellpsCode][1], mapPmId[pmCode][0], mapPmId[pmCode][1])
                all_sql.append(sql)

                mapDatumId[id] = ('IGNF', id)
        else:
            node = datum.find('VerticalDatum')
            if node is not None:
                id = node.attrib['id']
                names = [_name.text for _name in node.iter('name')]

                sql = """INSERT INTO "vertical_datum" VALUES('IGNF','%s','%s',NULL,NULL,'EPSG','1262',0);"""% (id, names[0])
                all_sql.append(sql)

                mapVerticalDatumId[id] = ('IGNF', id)
            else:
                # A <datum> must be either geodetic or vertical.
                assert False

    return mapDatumId, mapVerticalDatumId, invalidDatumId
+
+
# Ingest the registry building blocks in dependency order:
# ellipsoids and prime meridians first, then the datums that use them.
mapEllpsId = ingest_ellipsoids(root, all_sql)
mapPmId = ingest_prime_meridians(root, all_sql)
mapDatumId, mapVerticalDatumId, invalidDatumId = ingest_datums(root, all_sql, mapEllpsId, mapPmId)

# Area-of-use description -> [authority, code], filled by get_area_of_use().
areaOfUseMap = {}
+
def get_area_of_use(domainOfValidity):
    """Return (authority, code) of the area of use of a domainOfValidity node.

    Falls back to EPSG:1262 (World) when no description or bounding box is
    available; otherwise registers a new IGNF area (sequentially numbered)
    in the module-level areaOfUseMap and all_sql, and returns it.
    NOTE(review): cached entries are 2-element lists while the fallbacks are
    tuples; callers only index [0]/[1], so both shapes work.
    """
    extent = domainOfValidity.find('EX_Extent')
    desc = extent.find('description').find('CharacterString').text
    if desc is None:
        return 'EPSG', '1262'

    if desc in areaOfUseMap:
        return areaOfUseMap[desc]

    geographicElement = extent.find('geographicElement')
    if geographicElement is None:
        print('No geographicElement for area of use ' + desc)
        return 'EPSG', '1262'

    # Allocate the next sequential IGNF area code.
    code = str(len(areaOfUseMap) + 1)
    areaOfUseMap[desc] = ['IGNF', code ]
    EX_GeographicBoundingBox = geographicElement.find('EX_GeographicBoundingBox')
    south = EX_GeographicBoundingBox.find('southBoundLatitude').find('Decimal').text
    west = EX_GeographicBoundingBox.find('westBoundLongitude').find('Decimal').text
    north = EX_GeographicBoundingBox.find('northBoundLatitude').find('Decimal').text
    east = EX_GeographicBoundingBox.find('eastBoundLongitude').find('Decimal').text
    all_sql.append("""INSERT INTO "area" VALUES('IGNF','%s','%s','%s',%s,%s,%s,%s,0);""" % (code, escape_literal(desc), escape_literal(desc), south, north, west, east))
    return areaOfUseMap[desc]
+
# Registry CRS id -> (authority, code) for every ingested CRS / geocentric CRS.
mapCrsId = {}
mapGeocentricId = {}

# This is a trick to find a GeocentricCRS and its related GeographicCRS
# We could use the name, but if we use the datum code + area of use, it is
# more reliable
# We need this since the registry only exposes GeocentricCRS <--> GeocentricCRS
# transformations, and we need to port them to GeographicCRS as well
mapDatumAndAreaToGeocentricId = {}
mapGeocentricIdToDatumAndArea = {}

# CRS id -> list of (authority, code) aliases of that CRS.
aliasOfCRS = {}
+
# Ingest every GeocentricCRS of the registry.
for node in root.iterfind('.//GeocentricCRS'):
    id = node.attrib['id']
    names = [_name.text for _name in node.iter('name')]
    name = names[0]

    # Only the geocentric X/Y/Z cartesian CS type is expected.
    cartesianCS = extract_id_from_href(node.find('usesCartesianCS').attrib['href'])
    assert cartesianCS == 'TYP_CRG10'
    datumCode = extract_id_from_href(node.find('usesGeodeticDatum').attrib['href'])
    if datumCode in invalidDatumId:
        print('Skipping GeocentricCRS %s since its datum is unknown' % id)
        continue
    assert datumCode in mapDatumId, (id, name, datumCode)

    area_of_use = get_area_of_use(node.find('domainOfValidity'))

    #sql = """INSERT INTO "crs" VALUES('IGNF','%s','geocentric');""" % (id)
    #all_sql.append(sql)

    sql = """INSERT INTO "geodetic_crs" VALUES('IGNF','%s','%s',NULL,NULL,'geocentric','EPSG','6500','%s','%s','%s','%s',NULL,0);""" % (id, name, mapDatumId[datumCode][0], mapDatumId[datumCode][1], area_of_use[0], area_of_use[1])
    all_sql.append(sql)

    mapCrsId[id] = ('IGNF', id)
    mapGeocentricId[id] = ('IGNF', id)

    # A second name holding a registry URL declares an alias code; emit a
    # duplicate geodetic_crs row under the alias.
    if len(names) >= 2 and names[1].startswith('http://registre.ign.fr/ign/IGNF/crs/IGNF/'):
        alias = names[1][len('http://registre.ign.fr/ign/IGNF/crs/IGNF/'):]
        aliasOfCRS[id] = [('IGNF', alias)]

        if id == 'WGS84':
            aliasOfCRS[id].append(('EPSG', '4978'))

        #sql = """INSERT INTO "crs" VALUES('IGNF','%s','geocentric'); -- alias of %s""" % (alias, id)
        #all_sql.append(sql)

        sql = """INSERT INTO "geodetic_crs" VALUES('IGNF','%s','%s',NULL,NULL,'geocentric','EPSG','6500','%s','%s','%s','%s',NULL,0);""" % (alias, name, mapDatumId[datumCode][0], mapDatumId[datumCode][1], area_of_use[0], area_of_use[1])
        all_sql.append(sql)

    # Index this geocentric CRS by (datum, area of use); expected unique.
    key = str((mapDatumId[datumCode], area_of_use))
    assert key not in mapDatumAndAreaToGeocentricId, (id, name)
    mapDatumAndAreaToGeocentricId[key] = ('IGNF', id)
    mapGeocentricIdToDatumAndArea[id] = key
+
+mapGeographicId = {}               # geographic CRS id -> ('IGNF', code)
+mapDatumAndAreaToGeographicId = {} # str((datum, area)) -> list of geographic CRS ids
+
+# Emit one "geodetic_crs" row per GeographicCRS (2D or 3D depending on the
+# ellipsoidal CS), handle aliases like the geocentric loop above, and index
+# the CRS by datum + area of use for later transformation replication.
+for node in root.iterfind('.//GeographicCRS'):
+    id = node.attrib['id']
+    names = [_name.text for _name in node.iter('name')]
+    name = names[0]
+
+    ellipsoidalCS = extract_id_from_href(node.find('usesEllipsoidalCS').attrib['href'])
+    assert ellipsoidalCS in ('TYP_CRG24', 'TYP_CRG26', 'TYP_CRG22', 'TYP_CRG28', 'TYP_CRG29'), (id, name, ellipsoidalCS)
+    datumCode = extract_id_from_href(node.find('usesGeodeticDatum').attrib['href'])
+    if datumCode in invalidDatumId:
+        print('Skipping GeographicCRS %s since its datum is unknown' % id)
+        continue
+    assert datumCode in mapDatumId, (id, name, datumCode)
+
+    area_of_use = get_area_of_use(node.find('domainOfValidity'))
+
+    # Map the registry ellipsoidal CS onto the equivalent EPSG CS code
+    csCode = None
+    type = 'geographic 2D'
+    if ellipsoidalCS in ('TYP_CRG24', 'TYP_CRG28'): # Long, Lat deg
+        csCode = '6424'
+    if ellipsoidalCS in ('TYP_CRG26', 'TYP_CRG29'): # Long, Lat deg, h m
+        csCode = '6426'
+        type = 'geographic 3D'
+    if ellipsoidalCS == 'TYP_CRG22': # Long, Lat grad
+        csCode = '6425'
+
+    #sql = """INSERT INTO "crs" VALUES('IGNF','%s','%s');""" % (id, type)
+    #all_sql.append(sql)
+
+    sql = """INSERT INTO "geodetic_crs" VALUES('IGNF','%s','%s',NULL,NULL,'%s','EPSG','%s','%s','%s','%s','%s',NULL,0);""" % (id, name, type, csCode, mapDatumId[datumCode][0], mapDatumId[datumCode][1], area_of_use[0], area_of_use[1])
+    all_sql.append(sql)
+
+    # IGNF WGS84G is equivalent to EPSG:4326
+    if id == 'WGS84G':
+        aliasOfCRS[id] = [('EPSG','4326')]
+
+    if len(names) >= 2 and names[1].startswith('http://registre.ign.fr/ign/IGNF/crs/IGNF/'):
+        alias = names[1][len('http://registre.ign.fr/ign/IGNF/crs/IGNF/'):]
+        # WGS84G must not reach this branch, as that would overwrite its
+        # EPSG:4326 alias set just above.
+        assert id != 'WGS84G'
+        aliasOfCRS[id] = [('IGNF', alias)]
+
+        #sql = """INSERT INTO "crs" VALUES('IGNF','%s','%s'); -- alias of %s""" % (alias, type, id)
+        #all_sql.append(sql)
+
+        sql = """INSERT INTO "geodetic_crs" VALUES('IGNF','%s','%s',NULL,NULL,'%s','EPSG','%s','%s','%s','%s','%s',NULL,0);""" % (alias, name, type, csCode, mapDatumId[datumCode][0], mapDatumId[datumCode][1], area_of_use[0], area_of_use[1])
+        all_sql.append(sql)
+
+    mapCrsId[id] = ('IGNF', id)
+    mapGeographicId[id] = ('IGNF', id)
+    key = str((mapDatumId[datumCode], area_of_use))
+    if key in mapDatumAndAreaToGeographicId:
+        #print('Adding ' + id + ' to ' + str(mapDatumAndAreaToGeographicId[key]))
+        mapDatumAndAreaToGeographicId[key].append(id)
+    else:
+        mapDatumAndAreaToGeographicId[key] = [id]
+
+
+    # Create a 2D version to be able to create compoundCRS with it
+    if id == 'RGWF96GEO':
+
+        id = 'RGWF96G'
+        csCode = '6424'
+        type = 'geographic 2D'
+
+        #sql = """INSERT INTO "crs" VALUES('IGNF','%s','%s');""" % (id, type)
+        #all_sql.append(sql)
+
+        sql = """INSERT INTO "geodetic_crs" VALUES('IGNF','%s','%s',NULL,NULL,'%s','EPSG','%s','%s','%s','%s','%s',NULL,0);""" % (id, name, type, csCode, mapDatumId[datumCode][0], mapDatumId[datumCode][1], area_of_use[0], area_of_use[1])
+        all_sql.append(sql)
+
+        mapCrsId[id] = ('IGNF', id)
+        mapGeographicId[id] = ('IGNF', id)
+        key = str((mapDatumId[datumCode], area_of_use))
+        if key in mapDatumAndAreaToGeographicId:
+            #print('Adding ' + id + ' to ' + str(mapDatumAndAreaToGeographicId[key]))
+            mapDatumAndAreaToGeographicId[key].append(id)
+        else:
+            mapDatumAndAreaToGeographicId[key] = [id]
+
+
+# Emit one "vertical_crs" row per VerticalCRS of the registry.
+mapVerticalCrsId = {}  # vertical CRS id -> ('IGNF', possibly-disambiguated code)
+for node in root.iterfind('.//VerticalCRS'):
+    id = node.attrib['id']
+    names = [_name.text for _name in node.iter('name')]
+    name = names[0]
+
+    verticalCS = extract_id_from_href(node.find('usesVerticalCS').attrib['href'])
+    assert verticalCS in ('TYP_CRG92','TYP_CRG91'), verticalCS
+    datumCode = extract_id_from_href(node.find('usesVerticalDatum').attrib['href'])
+    assert datumCode in mapVerticalDatumId, (id, name, datumCode)
+
+    # VerticalCRS and GeocentricCRS can have same IDs ! like 'STPM50'
+    id_modified = id
+    if id in mapCrsId:
+        print('VerticalCRS %s conflicts with a Geodetic one of same name. Appending _V for disambiguation'% id)
+        id_modified += '_V'
+
+    #sql = """INSERT INTO "crs" VALUES('IGNF','%s','vertical');""" % (id_modified)
+    #all_sql.append(sql)
+
+    area_of_use = get_area_of_use(node.find('domainOfValidity'))
+
+    # EPSG:6499 = vertical CS, axis gravity-related height (up), metres
+    sql = """INSERT INTO "vertical_crs" VALUES('IGNF','%s','%s',NULL,NULL,'EPSG','6499','%s','%s','%s','%s',0);""" % (id_modified, name, mapVerticalDatumId[datumCode][0], mapVerticalDatumId[datumCode][1], area_of_use[0], area_of_use[1])
+    all_sql.append(sql)
+
+    # No vertical CRS of the registry is expected to declare an alias URL
+    if len(names) >= 2 and names[1].startswith('http://registre.ign.fr/ign/IGNF/crs/IGNF/'):
+        assert False
+
+    # Note: keyed by the ORIGINAL id, but pointing to the disambiguated code
+    mapCrsId[id] = ('IGNF', id_modified)
+    mapVerticalCrsId[id] = ('IGNF', id_modified)
+
+
+# Grid-file URLs (or bare filenames) found in the registry, mapped to their
+# current, downloadable location on geodesie.ign.fr. Several registry entries
+# point to moved or superseded files; the replacement is used instead.
+mapGridURLs = {
+    # Metropolitan France
+    'http://geodesie.ign.fr/contenu/fichiers/documentation/grilles/RAF09.mnt':
+    'http://geodesie.ign.fr/contenu/fichiers/documentation/grilles/metropole/RAF09.mnt',
+
+    # Corsica
+    'http://geodesie.ign.fr/contenu/fichiers/documentation/grilles/RAC09.mnt':
+    'http://geodesie.ign.fr/contenu/fichiers/documentation/grilles/metropole/RAC09.mnt',
+
+
+
+    # Guadeloupe RGAF09
+    'http://geodesie.ign.fr/contenu/fichiers/documentation/grilles/outremer/gg10_gtbt.mnt':
+    'http://geodesie.ign.fr/contenu/fichiers/documentation/grilles/outremer/RAGTBT2016.mnt',
+
+    'RAGTBT2016.mnt':
+    'http://geodesie.ign.fr/contenu/fichiers/documentation/grilles/outremer/RAGTBT2016.mnt',
+
+    # Les Saintes RGAF09
+    'http://geodesie.ign.fr/contenu/fichiers/documentation/grilles/outremer/gg10_ls.mnt':
+    'http://geodesie.ign.fr/contenu/fichiers/documentation/grilles/outremer/RALS2016.mnt',
+
+    'RALS2016.mnt':
+    'http://geodesie.ign.fr/contenu/fichiers/documentation/grilles/outremer/RALS2016.mnt',
+
+    # Martinique RGAF09
+    'http://geodesie.ign.fr/contenu/fichiers/documentation/grilles/outremer/gg10_mart.mnt':
+    'http://geodesie.ign.fr/contenu/fichiers/documentation/grilles/outremer/RAMART2016.mnt',
+
+    'RAMART2016.MNT':
+    'http://geodesie.ign.fr/contenu/fichiers/documentation/grilles/outremer/RAMART2016.mnt',
+
+    # Marie Galante RGAF09
+    'http://geodesie.ign.fr/contenu/fichiers/documentation/grilles/outremer/gg10_mg.mnt':
+    'http://geodesie.ign.fr/contenu/fichiers/documentation/grilles/outremer/RAMG2016.mnt',
+
+    'RAMG2016.mnt':
+    'http://geodesie.ign.fr/contenu/fichiers/documentation/grilles/outremer/RAMG2016.mnt',
+
+    # Saint Barthelemy RGAF09
+    'http://geodesie.ign.fr/contenu/fichiers/documentation/grilles/outremer/gg10_sb.mnt':
+    'http://geodesie.ign.fr/contenu/fichiers/documentation/grilles/outremer/gg10_sbv2.mnt',
+
+    'gg10_sbv2.mnt':
+    'http://geodesie.ign.fr/contenu/fichiers/documentation/grilles/outremer/gg10_sbv2.mnt',
+
+    # Saint Martin RGAF09
+    'http://geodesie.ign.fr/contenu/fichiers/documentation/grilles/outremer/gg10_sm.mnt':
+    'http://geodesie.ign.fr/contenu/fichiers/documentation/grilles/outremer/gg10_smv2.mnt',
+
+    'gg10_smv2.mnt':
+    'http://geodesie.ign.fr/contenu/fichiers/documentation/grilles/outremer/gg10_smv2.mnt',
+
+    # La Desirade RGAF09
+    'http://geodesie.ign.fr/contenu/fichiers/documentation/grilles/outremer/gg10_ld.mnt':
+    'http://geodesie.ign.fr/contenu/fichiers/documentation/grilles/outremer/RALD2016.mnt',
+
+    'RALD2016.mnt':
+    'http://geodesie.ign.fr/contenu/fichiers/documentation/grilles/outremer/RALD2016.mnt',
+
+
+    # Guadeloupe WGS84
+    'http://geodesie.ign.fr/contenu/fichiers/documentation/grilles/outremer/ggg00.txt':
+    'http://geodesie.ign.fr/contenu/fichiers/documentation/grilles/outremer/ggg00v2.mnt',
+
+    # Les Saintes WGS84
+    'http://geodesie.ign.fr/contenu/fichiers/documentation/grilles/outremer/ggg00_ls.txt':
+    'http://geodesie.ign.fr/contenu/fichiers/documentation/grilles/outremer/ggg00_lsv2.mnt',
+
+    'ggg00_lsv2.mnt':
+    'http://geodesie.ign.fr/contenu/fichiers/documentation/grilles/outremer/ggg00_lsv2.mnt',
+
+    # Martinique WGS84
+    'http://geodesie.ign.fr/contenu/fichiers/documentation/grilles/outremer/ggm00.txt':
+    'http://geodesie.ign.fr/contenu/fichiers/documentation/grilles/outremer/ggm00v2.mnt',
+
+    # Saint Barthelemy WGS84
+    'http://geodesie.ign.fr/contenu/fichiers/documentation/grilles/outremer/ggg00_sb.txt':
+    'http://geodesie.ign.fr/contenu/fichiers/documentation/grilles/outremer/ggg00_sbv2.mnt',
+
+    # Saint Martin WGS84
+    'http://geodesie.ign.fr/contenu/fichiers/documentation/grilles/outremer/ggg00_sm.txt':
+    'http://geodesie.ign.fr/contenu/fichiers/documentation/grilles/outremer/ggg00_smv2.mnt',
+
+    # La Desirade WGS84
+    'http://geodesie.ign.fr/contenu/fichiers/documentation/grilles/outremer/ggg00_ld.txt':
+    'http://geodesie.ign.fr/contenu/fichiers/documentation/grilles/outremer/RALDW842016.mnt',
+
+    'RALDW842016.mnt':
+    'http://geodesie.ign.fr/contenu/fichiers/documentation/grilles/outremer/RALDW842016.mnt',
+
+
+    # French Guiana RGF95
+    'http://geodesie.ign.fr/contenu/fichiers/documentation/grilles/outremer/ggguy00.txt':
+    'http://geodesie.ign.fr/contenu/fichiers/documentation/grilles/outremer/ggguy15.mnt',
+
+
+    # Reunion RAR grid
+    'http://geodesie.ign.fr/contenu/fichiers/documentation/grilles/RAR07_bl.gra':
+    'http://geodesie.ign.fr/contenu/fichiers/documentation/grilles/outremer/RAR07_bl.gra',
+}
+
+for node in root.iterfind('.//Transformation'):
+ id = node.attrib['id']
+ names = [_name.text for _name in node.iter('name')]
+ name = names[0]
+
+ sourceCRS = extract_id_from_href(node.find('sourceCRS').attrib['href'])
+ if not sourceCRS in mapCrsId:
+ print('Skipping ' + name + ', missing sourceCRS')
+ continue
+
+ targetCRS = node.find('targetCRS')
+ if targetCRS is None or 'href' not in targetCRS.attrib:
+ print('Skipping ' + name + ', missing targetCRS')
+ continue
+ targetCRS = extract_id_from_href(targetCRS.attrib['href'])
+ if not targetCRS in mapCrsId:
+ print('Skipping ' + name + ', missing targetCRS')
+ continue
+
+ area_of_use = get_area_of_use(node.find('domainOfValidity'))
+
+ usesMethod = extract_id_from_href(node.find('usesMethod').attrib['href'])
+ if usesMethod in ('Geographic3DtoGravityRelatedHeight_IGN'):
+
+ #sql = """INSERT INTO "coordinate_operation" VALUES('IGNF','%s','grid_transformation');""" % id
+ #all_sql.append(sql)
+
+ usesValue = node.find('usesValue')
+ paramValue = usesValue.find('ParameterValue')
+ filename = paramValue.find('valueFile').text
+
+ if filename in mapGridURLs:
+ print('Fixing URL of ' + filename + ' to ' + mapGridURLs[filename])
+ filename = mapGridURLs[filename]
+
+ r = requests.head(filename, allow_redirects = True )
+ if r.status_code not in (200, 302):
+ assert False, (r.status_code, id, name, filename)
+
+ assert sourceCRS in mapVerticalCrsId, (id, name, sourceCRS)
+ assert targetCRS in mapGeographicId, (id, name, targetCRS)
+
+ # Switching source and target to be consistent with the EPSG practice and the naming of the method
+ sql = """INSERT INTO "grid_transformation" VALUES('IGNF','%s','%s',NULL,NULL,'EPSG','9664','Geographic3D to GravityRelatedHeight (IGN1997)','%s','%s','%s','%s','%s','%s',NULL,'EPSG','8666','Geoid (height correction) model file','%s',NULL,NULL,NULL,NULL,NULL,NULL,0);""" % (id, name, mapCrsId[targetCRS][0], mapCrsId[targetCRS][1], mapCrsId[sourceCRS][0], mapCrsId[sourceCRS][1], area_of_use[0], area_of_use[1], filename)
+ all_sql.append(sql)
+
+ continue
+
+
+ def get_alias_of(code):
+ if code in aliasOfCRS:
+ return [ ('IGNF', code) ] + aliasOfCRS[code]
+ return [ ('IGNF', code) ]
+
+
+
+ if id == 'TSG1240': # 'NTF geographiques Paris (gr) vers NTF GEOGRAPHIQUES GREENWICH (DMS)', 'from1Dto1D')
+ #print('Skipping ' + str((id, name)))
+ assert usesMethod == 'from1Dto1D', usesMethod
+
+ for src in get_alias_of(sourceCRS):
+ for target in get_alias_of(targetCRS):
+
+ custom_id = id
+ if not ((src == ('IGNF', sourceCRS) and target == ('IGNF', targetCRS))):
+ custom_id = id + '_' + src[0] + '_' + src[1] + '_TO_' + target[0] + '_' + target[1]
+
+ #sql = """INSERT INTO "coordinate_operation" VALUES('IGNF','%s','other_transformation');""" % custom_id
+ #all_sql.append(sql)
+
+ sql = """INSERT INTO "other_transformation" VALUES('IGNF','%s','%s',NULL,NULL,'EPSG','9601','Longitude rotation','%s','%s','%s','%s','%s','%s',0.0,'EPSG','8602','Longitude offset',2.5969213,'EPSG','9105',NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,0);"""% (custom_id, name, src[0], src[1], target[0], target[1], area_of_use[0], area_of_use[1])
+ all_sql.append(sql)
+
+ continue
+
+ if usesMethod == 'TSGM510': # geocentric interpolation
+
+ id = 'NTFG_TO_RGF93G'
+
+ assert sourceCRS == 'NTF'
+ assert targetCRS == 'RGF93'
+
+ sourceCRS = 'NTFG'
+ targetCRS = 'RGF93G'
+
+ for src in get_alias_of(sourceCRS):
+ # As the transformation from RGF93G to WGS84 is a zero-translation helmert,
+ # we can also use the grid for NTF->WGS84. This makes the coordinate
+ # operation finder happier
+ for target in get_alias_of(targetCRS) + [('EPSG','4326')]:
+
+ custom_id = id
+ if not ((src == ('IGNF', sourceCRS) and target == ('IGNF', targetCRS))):
+ custom_id = src[0] + '_' + src[1] + '_TO_' + target[0] + '_' + target[1]
+
+ #sql = """INSERT INTO "coordinate_operation" VALUES('IGNF','%s','grid_transformation');""" % (custom_id)
+ #all_sql.append(sql)
+
+ sql = """INSERT INTO "grid_transformation" VALUES('IGNF','%s','%s',NULL,NULL,'EPSG','9615','NTv2','%s','%s','%s','%s','%s','%s',NULL,'EPSG','8656','Latitude and longitude difference file','ntf_r93.gsb',NULL,NULL,NULL,NULL,NULL,NULL,0);""" % (custom_id, name, src[0], src[1], target[0], target[1], area_of_use[0], area_of_use[1])
+ all_sql.append(sql)
+
+ continue
+
+ if usesMethod == 'Vfrom1Dto1D':
+
+ #sql = """INSERT INTO "coordinate_operation" VALUES('IGNF','%s','other_transformation');""" % id
+ #all_sql.append(sql)
+
+ usesValue = node.find('usesValue')
+ paramValue = usesValue.find('ParameterValue')
+ value = paramValue.find('value').text
+ uom = paramValue.find('value').attrib['uom']
+ assert uom == 'm'
+
+ sql = """INSERT INTO "other_transformation" VALUES('IGNF','%s','%s',NULL,NULL,'EPSG','9616','Vertical Offset','%s','%s','%s','%s','%s','%s',NULL,'EPSG','8603','Vertical Offset',%s,'EPSG','9001',NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,0);"""% (id, name, mapCrsId[sourceCRS][0], mapCrsId[sourceCRS][1], mapCrsId[targetCRS][0], mapCrsId[targetCRS][1], area_of_use[0], area_of_use[1], value)
+ all_sql.append(sql)
+
+ continue
+
+ assert usesMethod in ('TSGM120', 'TSGM110', 'TSGM112', 'TSGM111'), (id, name, usesMethod)
+
+ assert sourceCRS in mapGeocentricId
+ assert targetCRS in mapGeocentricId
+
+ vals =[val for val in node.iterfind('usesValue')]
+ assert len(vals) in (3,7)
+ x = vals[0].find('ParameterValue').find('value').text
+ assert vals[0].find('ParameterValue').find('value').attrib['uom'] == 'm'
+ y = vals[1].find('ParameterValue').find('value').text
+ assert vals[1].find('ParameterValue').find('value').attrib['uom'] == 'm'
+ z = vals[2].find('ParameterValue').find('value').text
+ assert vals[2].find('ParameterValue').find('value').attrib['uom'] == 'm'
+
+ if len(vals) == 3:
+ rx = 'NULL'
+ ry = 'NULL'
+ rz = 'NULL'
+ s = 'NULL'
+ r_uom_auth_name = 'NULL'
+ r_uom_code = 'NULL'
+ s_uom_auth_name = 'NULL'
+ s_uom_code = 'NULL'
+
+ method_code = "'1031'"
+ method_name = "'Geocentric translations (geocentric domain)'"
+
+ method_geog_code = "'9603'"
+ method_geog_name = "'Geocentric translations (geog2D domain)'"
+
+ else:
+ s = vals[3].find('ParameterValue').find('value').text
+ assert vals[3].find('ParameterValue').find('value').attrib['uom'] == 'UNITE'
+
+ rx = vals[4].find('ParameterValue').find('value').text
+ assert vals[4].find('ParameterValue').find('value').attrib['uom'] == 'sec'
+
+ ry = vals[5].find('ParameterValue').find('value').text
+ assert vals[5].find('ParameterValue').find('value').attrib['uom'] == 'sec'
+
+ rz = vals[6].find('ParameterValue').find('value').text
+ assert vals[6].find('ParameterValue').find('value').attrib['uom'] == 'sec'
+
+ r_uom_auth_name = "'EPSG'"
+ r_uom_code = "'9104'"
+ s_uom_auth_name = "'EPSG'"
+ s_uom_code = "'9202'"
+
+ method_code = "'1033'"
+ method_name = "'Position Vector transformation (geocentric domain)'"
+
+ method_geog_code = "'9606'"
+ method_geog_name = "'Position Vector transformation (geog2D domain)'"
+
+ for src in get_alias_of(sourceCRS):
+ for target in get_alias_of(targetCRS):
+
+ custom_id = id
+ if not ((src == ('IGNF', sourceCRS) and target == ('IGNF', targetCRS))):
+ custom_id += '_' + src[1] + '_' + target[1]
+
+ #sql = """INSERT INTO "coordinate_operation" VALUES('IGNF','%s','helmert_transformation');""" % (custom_id)
+ #all_sql.append(sql)
+
+ sql = """INSERT INTO "helmert_transformation" VALUES('IGNF','%s','%s',NULL,NULL,'EPSG',%s,%s,'%s','%s','%s','%s','%s','%s',NULL,%s,%s,%s,'EPSG','9001',%s,%s,%s,%s,%s,%s,%s, %s,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,0);""" % (custom_id, name, method_code, method_name, src[0], src[1], target[0], target[1], area_of_use[0], area_of_use[1], x, y, z, rx, ry, rz, r_uom_auth_name, r_uom_code, s, s_uom_auth_name, s_uom_code)
+ all_sql.append(sql)
+
+
+ key = mapGeocentricIdToDatumAndArea[sourceCRS]
+ assert key in mapDatumAndAreaToGeographicId
+ sourceGeogIdAr = mapDatumAndAreaToGeographicId[key]
+
+ key = mapGeocentricIdToDatumAndArea[targetCRS]
+ assert key in mapDatumAndAreaToGeographicId
+ targetGeogIdAr = mapDatumAndAreaToGeographicId[key]
+
+ for sourceGeogId in sourceGeogIdAr:
+ for targetGeogId in targetGeogIdAr:
+
+ for src in get_alias_of(sourceGeogId):
+ for target in get_alias_of(targetGeogId):
+
+ id_geog = id + '_' + src[1] + '_TO_' + target[1]
+ #sql = """INSERT INTO "coordinate_operation" VALUES('IGNF','%s','helmert_transformation');""" % (id_geog)
+ #all_sql.append(sql)
+
+ sql = """INSERT INTO "helmert_transformation" VALUES('IGNF','%s','%s',NULL,NULL,'EPSG',%s,%s,'%s','%s','%s','%s','%s','%s',NULL,%s,%s,%s,'EPSG','9001',%s,%s,%s,%s,%s,%s,%s, %s,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,0);""" % (id_geog, name, method_geog_code, method_geog_name, src[0], src[1], target[0], target[1], area_of_use[0], area_of_use[1], x, y, z, rx, ry, rz, r_uom_auth_name, r_uom_code, s, s_uom_auth_name, s_uom_code)
+ all_sql.append(sql)
+
+ if src[1] == 'NTFG':
+
+ for NTFPalias, idFirstOp in (('NTFPGRAD', 'TSG1240'), ('NTFP', 'TSG1240_IGNF_NTFP_TO_IGNF_NTFG')):
+
+ id_concat = id + '_' + NTFPalias + '_TO_' + target[1]
+
+ #sql = """INSERT INTO "coordinate_operation" VALUES('IGNF','%s','concatenated_operation');""" % (id_concat)
+ #all_sql_concat.append(sql)
+
+ sql = """INSERT INTO "concatenated_operation" VALUES('IGNF','%s','Nouvelle Triangulation Francaise Paris grades to %s',NULL,NULL,'IGNF','%s','%s','%s','%s','%s',NULL,'IGNF','%s','IGNF','%s',NULL,NULL,0);""" % (id_concat, target[1], NTFPalias, target[0], target[1], area_of_use[0], area_of_use[1], idFirstOp, id_geog)
+ all_sql_concat.append(sql)
+
+
+# Conversion id -> (authority, code) of the emitted conversion row
+mapConversionId = {}
+
+
+def getParameter(node, code, expected_uom):
+ for val in node.iterfind('usesValue'):
+ parameter_code = extract_id_from_href(val.find('ParameterValue').find('valueOfParameter').attrib['href'])
+ if parameter_code == code:
+
+ dms = val.find('ParameterValue').find('dmsAngleValue')
+ if expected_uom == 'deg' and dms is not None:
+ deg_val = float(dms.find('degrees').text)
+ direction_deg = dms.find('degrees').attrib['direction']
+ assert direction_deg in ('E', 'W', 'S', 'N')
+ min_val = float(dms.find('minutes').text)
+ if dms.find('secondes') is not None:
+ sec_val = float(dms.find('secondes').text)
+ else:
+ sec_val = 0
+
+ ret_val = deg_val + min_val / 60.0 + sec_val / 3600.0
+ if direction_deg in ('W', 'S'):
+ ret_val = -ret_val
+ return ret_val
+
+ assert val.find('ParameterValue').find('value').attrib['uom'] == expected_uom
+ return float(val.find('ParameterValue').find('value').text)
+ raise Exception('cannot find value for parameter ' + code)
+
+for node in root.iterfind('.//Conversion'):
+ id = node.attrib['id']
+ names = [_name.text for _name in node.iter('name')]
+ name = names[0]
+ #print(id, name)
+
+ reuse_epsg_conversion = False ###
+ if reuse_epsg_conversion and len(names) == 2:
+
+ if id == 'PRC9601581': # PSEUDO MERCATOR (POPULAR VISUALISATION)
+ assert get_epsg_code(names[1]) == '3857' # this is wrong, this is the projectedCRS code, note the conversion one
+ mapConversionId[id] = ('EPSG', '3856')
+ else:
+ mapConversionId[id] = ('EPSG', get_epsg_code(names[1]))
+ continue
+
+ usesMethod = extract_id_from_href(node.find('usesMethod').attrib['href'])
+
+ d = {}
+
+ if usesMethod == 'PVPM001From2Dto2D': # Popular Visualisation Pseudo-Mercator
+
+ assert len([1 for val in node.iterfind('usesValue')]) == 4
+ d['x_0'] = getParameter(node, 'PRCP100', 'm')
+ d['y_0'] = getParameter(node, 'PRCP200', 'm')
+ d['lon_0'] = getParameter(node, 'PRCP300', 'deg')
+ d['lat_0'] = getParameter(node, 'PRCP400', 'deg')
+ assert d['x_0'] == 0
+ assert d['y_0'] == 0
+ assert d['lon_0'] == 0
+ assert d['lat_0'] == 0
+
+ #sql = """INSERT INTO "coordinate_operation" VALUES('IGNF','%s','conversion');""" % (id)
+ #all_sql.append(sql)
+
+ sql = """INSERT INTO "conversion" VALUES('IGNF','%s','%s',NULL,NULL,'EPSG','1262','EPSG','1024','Popular Visualisation Pseudo Mercator','EPSG','8801','Latitude of natural origin',0.0,'EPSG','9102','EPSG','8802','Longitude of natural origin',0.0,'EPSG','9102','EPSG','8806','False easting',0.0,'EPSG','9001','EPSG','8807','False northing',0.0,'EPSG','9001',NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,0);""" % (id, name)
+ all_sql.append(sql)
+
+ mapConversionId[id] = ('IGNF', id)
+
+ elif usesMethod == 'EQRC001from2Dto2D': # Equirectangular
+
+ assert len([1 for val in node.iterfind('usesValue')]) == 5
+ d['x_0'] = getParameter(node, 'PRCP100', 'm')
+ d['y_0'] = getParameter(node, 'PRCP200', 'm')
+ d['lon_0'] = getParameter(node, 'PRCP300', 'deg')
+ d['lat_0'] = getParameter(node, 'PRCP400', 'deg')
+ d['lat_ts'] = getParameter(node, 'PRCP600', 'deg')
+ assert d['lat_0'] == 0, (id, name, d)
+
+ #sql = """INSERT INTO "coordinate_operation" VALUES('IGNF','%s','conversion');""" % (id)
+ #all_sql.append(sql)
+
+ sql = """INSERT INTO "conversion" VALUES('IGNF','%s','%s',NULL,NULL,'EPSG','1262','EPSG','1028','Equidistant Cylindrical','EPSG','8823','Latitude of 1st standard parallel',%s,'EPSG','9102','EPSG','8802','Longitude of natural origin',%s,'EPSG','9102','EPSG','8806','False easting',%s,'EPSG','9001','EPSG','8807','False northing',%s,'EPSG','9001',NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,0);""" % (id, name, d['lat_ts'], d['lon_0'], d['x_0'], d['y_0'])
+ all_sql.append(sql)
+
+ mapConversionId[id] = ('IGNF', id)
+
+ elif usesMethod in ('PRCM030from2Dto2D', 'PRCM020from2Dto2D', 'PRCM040from2Dto2D', 'PRCM030from3Dto2D'): # Transverse Mercator
+
+ assert len([1 for val in node.iterfind('usesValue')]) == 5
+ d['x_0'] = getParameter(node, 'PRCP100', 'm')
+ d['y_0'] = getParameter(node, 'PRCP200', 'm')
+ d['lon_0'] = getParameter(node, 'PRCP300', 'deg')
+ d['lat_0'] = getParameter(node, 'PRCP400', 'deg')
+ d['k_0'] = getParameter(node, 'PRCP500', 'UNITE')
+
+ #sql = """INSERT INTO "coordinate_operation" VALUES('IGNF','%s','conversion');""" % (id)
+ #all_sql.append(sql)
+
+ sql = """INSERT INTO "conversion" VALUES('IGNF','%s','%s',NULL,NULL,'EPSG','1886','EPSG','9807','Transverse Mercator','EPSG','8801','Latitude of natural origin',%s,'EPSG','9102','EPSG','8802','Longitude of natural origin',%s,'EPSG','9102','EPSG','8805','Scale factor at natural origin',%s,'EPSG','9201','EPSG','8806','False easting',%s,'EPSG','9001','EPSG','8807','False northing',%s,'EPSG','9001',NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,0);""" % (id, name, d['lat_0'], d['lon_0'], d['k_0'], d['x_0'], d['y_0'])
+ all_sql.append(sql)
+
+ mapConversionId[id] = ('IGNF', id)
+
+ elif usesMethod == 'PRCM060from2Dto2D': # Bonne
+
+ assert len([1 for val in node.iterfind('usesValue')]) == 6
+ d['x_0'] = getParameter(node, 'PRCP100', 'm')
+ d['y_0'] = getParameter(node, 'PRCP200', 'm')
+ d['lon_0'] = getParameter(node, 'PRCP300', 'gr')
+ d['lat_0'] = getParameter(node, 'PRCP400', 'gr')
+ d['k_0'] = getParameter(node, 'PRCP500', 'UNITE')
+ d['lat_1'] = getParameter(node, 'PRCP600', 'gr')
+ assert d['lat_0'] == d['lat_1']
+
+ #sql = """INSERT INTO "coordinate_operation" VALUES('IGNF','%s','conversion');""" % (id)
+ #all_sql.append(sql)
+
+ sql = """INSERT INTO "conversion" VALUES('IGNF','%s','%s',NULL,NULL,'EPSG','1262','EPSG','9827','Bonne','EPSG','8801','Latitude of natural origin',%s,'EPSG','9105','EPSG','8802','Longitude of natural origin',%s,'EPSG','9105','EPSG','8806','False easting',%s,'EPSG','9001','EPSG','8807','False northing',%s,'EPSG','9001',NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,0);""" % (id, name, d['lat_0'], d['lon_0'], d['x_0'], d['y_0'])
+ all_sql.append(sql)
+
+ mapConversionId[id] = ('IGNF', id)
+
+ elif usesMethod == 'PRCM015from2Dto2D': # LAEA
+
+ assert len([1 for val in node.iterfind('usesValue')]) == 5
+ d['x_0'] = getParameter(node, 'PRCP100', 'm')
+ d['y_0'] = getParameter(node, 'PRCP200', 'm')
+ d['lon_0'] = getParameter(node, 'PRCP300', 'deg')
+ d['lat_0'] = getParameter(node, 'PRCP400', 'deg')
+ d['k_0'] = getParameter(node, 'PRCP500', 'UNITE')
+ assert d['k_0'] == 1
+
+ #sql = """INSERT INTO "coordinate_operation" VALUES('IGNF','%s','conversion');""" % (id)
+ #all_sql.append(sql)
+
+ sql = """INSERT INTO "conversion" VALUES('IGNF','%s','%s',NULL,NULL,'EPSG','1262','EPSG','9820','Lambert Azimuthal Equal Area','EPSG','8801','Latitude of natural origin',%s,'EPSG','9102','EPSG','8802','Longitude of natural origin',%s,'EPSG','9102','EPSG','8806','False easting',%s,'EPSG','9001','EPSG','8807','False northing',%s,'EPSG','9001',NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,0);""" % (id, name, d['lat_0'], d['lon_0'], d['x_0'], d['y_0'])
+ all_sql.append(sql)
+
+ mapConversionId[id] = ('IGNF', id)
+
+ elif usesMethod == 'PRCM013from2Dto2D': # LCC_2SP
+
+ assert len([1 for val in node.iterfind('usesValue')]) == 6
+ d['x_0'] = getParameter(node, 'PRCP100', 'm')
+ d['y_0'] = getParameter(node, 'PRCP200', 'm')
+ d['lon_0'] = getParameter(node, 'PRCP300', 'deg')
+ d['lat_0'] = getParameter(node, 'PRCP400', 'deg')
+ d['lat_1'] = getParameter(node, 'PRCP600', 'deg')
+ d['lat_2'] = getParameter(node, 'PRCP700', 'deg')
+
+ #sql = """INSERT INTO "coordinate_operation" VALUES('IGNF','%s','conversion');""" % (id)
+ #all_sql.append(sql)
+
+ sql = """INSERT INTO "conversion" VALUES('IGNF','%s','%s',NULL,NULL,'EPSG','1262','EPSG','9802','Lambert Conic Conformal (2SP)','EPSG','8821','Latitude of false origin',%s,'EPSG','9102','EPSG','8822','Longitude of false origin',%s,'EPSG','9102','EPSG','8823','Latitude of 1st standard parallel',%s,'EPSG','9102','EPSG','8824','Latitude of 2nd standard parallel',%s,'EPSG','9102','EPSG','8826','Easting at false origin',%s,'EPSG','9001','EPSG','8827','Northing at false origin',%s,'EPSG','9001',NULL,NULL,NULL,NULL,NULL,NULL,0);""" % (id, name, d['lat_0'], d['lon_0'], d['lat_1'], d['lat_2'], d['x_0'], d['y_0'])
+ all_sql.append(sql)
+
+ mapConversionId[id] = ('IGNF', id)
+
+
+ elif usesMethod == 'PRCM070from2Dto2D': # Mercator (variant A)
+
+ assert len([1 for val in node.iterfind('usesValue')]) == 5
+ d['x_0'] = getParameter(node, 'PRCP100', 'm')
+ d['y_0'] = getParameter(node, 'PRCP200', 'm')
+ d['lon_0'] = getParameter(node, 'PRCP300', 'deg')
+ d['lat_0'] = getParameter(node, 'PRCP400', 'deg')
+ d['k_0'] = getParameter(node, 'PRCP500', 'UNITE')
+
+ #sql = """INSERT INTO "coordinate_operation" VALUES('IGNF','%s','conversion');""" % (id)
+ #all_sql.append(sql)
+
+ sql = """INSERT INTO "conversion" VALUES('IGNF','%s','%s',NULL,NULL,'EPSG','1262','EPSG','9804','Mercator (variant A)','EPSG','8801','Latitude of natural origin',%s,'EPSG','9102','EPSG','8802','Longitude of natural origin',%s,'EPSG','9102','EPSG','8805','Scale factor at natural origin',%s,'EPSG','9201','EPSG','8806','False easting',%s,'EPSG','9001','EPSG','8807','False northing',%s,'EPSG','9001',NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,0);""" % (id, name, d['lat_0'], d['lon_0'], d['k_0'], d['x_0'], d['y_0'])
+ all_sql.append(sql)
+
+ mapConversionId[id] = ('IGNF', id)
+
+
+ elif usesMethod in ('PRCM012from2Dto2D', 'PRCM012from3Dto2D'): # LCC_1SP
+
+ assert len([1 for val in node.iterfind('usesValue')]) == 5
+ d['x_0'] = getParameter(node, 'PRCP100', 'm')
+ d['y_0'] = getParameter(node, 'PRCP200', 'm')
+ d['lon_0'] = getParameter(node, 'PRCP300', 'gr')
+ d['lat_0'] = getParameter(node, 'PRCP400', 'gr')
+ d['k_0'] = getParameter(node, 'PRCP500', 'UNITE')
+
+ #sql = """INSERT INTO "coordinate_operation" VALUES('IGNF','%s','conversion');""" % (id)
+ #all_sql.append(sql)
+
+ sql = """INSERT INTO "conversion" VALUES('IGNF','%s','%s',NULL,NULL,'EPSG','1262','EPSG','9801','Lambert Conic Conformal (1SP)','EPSG','8801','Latitude of natural origin',%s,'EPSG','9105','EPSG','8802','Longitude of natural origin',%s,'EPSG','9105','EPSG','8805','Scale factor at natural origin',%s,'EPSG','9201','EPSG','8806','False easting',%s,'EPSG','9001','EPSG','8807','False northing',%s,'EPSG','9001',NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,0);""" % (id, name, d['lat_0'], d['lon_0'], d['k_0'], d['x_0'], d['y_0'])
+ all_sql.append(sql)
+
+ mapConversionId[id] = ('IGNF', id)
+
+
+ elif usesMethod in ('PRCM014from2Dto2D'): # LCC_1SP
+
+ assert len([1 for val in node.iterfind('usesValue')]) == 5
+ d['x_0'] = getParameter(node, 'PRCP110', 'm')
+ d['y_0'] = getParameter(node, 'PRCP210', 'm')
+ d['lon_0'] = getParameter(node, 'PRCP310', 'gr')
+ d['lat_0'] = getParameter(node, 'PRCP410', 'gr')
+ d['k_0'] = getParameter(node, 'PRCP510', 'UNITE')
+
+ #sql = """INSERT INTO "coordinate_operation" VALUES('IGNF','%s','conversion');""" % (id)
+ #all_sql.append(sql)
+
+ sql = """INSERT INTO "conversion" VALUES('IGNF','%s','%s',NULL,NULL,'EPSG','1262','EPSG','9801','Lambert Conic Conformal (1SP)','EPSG','8801','Latitude of natural origin',%s,'EPSG','9105','EPSG','8802','Longitude of natural origin',%s,'EPSG','9105','EPSG','8805','Scale factor at natural origin',%s,'EPSG','9201','EPSG','8806','False easting',%s,'EPSG','9001','EPSG','8807','False northing',%s,'EPSG','9001',NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,0);""" % (id, name, d['lat_0'], d['lon_0'], d['k_0'], d['x_0'], d['y_0'])
+ all_sql.append(sql)
+
+ mapConversionId[id] = ('IGNF', id)
+
+
+
+ elif usesMethod == 'PRCM053from2Dto2D': # Gauss Schreiber Transverse Mercator
+
+ assert len([1 for val in node.iterfind('usesValue')]) == 5
+ d['x_0'] = getParameter(node, 'PRCP100', 'm')
+ d['y_0'] = getParameter(node, 'PRCP200', 'm')
+ d['lon_0'] = getParameter(node, 'PRCP300', 'deg')
+ d['lat_0'] = getParameter(node, 'PRCP400', 'deg')
+ d['k_0'] = getParameter(node, 'PRCP500', 'UNITE')
+
+ #sql = """INSERT INTO "coordinate_operation" VALUES('IGNF','%s','conversion');""" % (id)
+ #all_sql.append(sql)
+
+ sql = """INSERT INTO "conversion" VALUES('IGNF','%s','%s',NULL,NULL,'EPSG','1262',NULL,NULL,'Gauss Schreiber Transverse Mercator','EPSG','8801','Latitude of natural origin',%s,'EPSG','9102','EPSG','8802','Longitude of natural origin',%s,'EPSG','9102','EPSG','8805','Scale factor at natural origin',%s,'EPSG','9201','EPSG','8806','False easting',%s,'EPSG','9001','EPSG','8807','False northing',%s,'EPSG','9001',NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,0);""" % (id, name, d['lat_0'], d['lon_0'], d['k_0'], d['x_0'], d['y_0'])
+ all_sql.append(sql)
+
+ mapConversionId[id] = ('IGNF', id)
+
+ elif usesMethod == 'PRCM094from2Dto2D': # Polar Stereographic
+
+ assert len([1 for val in node.iterfind('usesValue')]) == 5
+ d['x_0'] = getParameter(node, 'PRCP100', 'm')
+ d['y_0'] = getParameter(node, 'PRCP200', 'm')
+ d['lon_0'] = getParameter(node, 'PRCP300', 'deg')
+ d['lat_0'] = getParameter(node, 'PRCP400', 'deg')
+ d['k_0'] = getParameter(node, 'PRCP500', 'UNITE')
+
+ assert float(d['lat_0']) == -90
+ assert float(d['lon_0']) == 140
+
+ #sql = """INSERT INTO "coordinate_operation" VALUES('IGNF','%s','conversion');""" % (id)
+ #all_sql.append(sql)
+
+ sql = """INSERT INTO "conversion" VALUES('IGNF','%s','%s',NULL,NULL,'EPSG','1262','EPSG','9810','Polar Stereographic (variant A)','EPSG','8801','Latitude of natural origin',%s,'EPSG','9102','EPSG','8802','Longitude of natural origin',%s,'EPSG','9102','EPSG','8805','Scale factor at natural origin',%s,'EPSG','9201','EPSG','8806','False easting',%s,'EPSG','9001','EPSG','8807','False northing',%s,'EPSG','9001',NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,0);""" % (id, name, d['lat_0'], d['lon_0'], d['k_0'], d['x_0'], d['y_0'])
+ all_sql.append(sql)
+
+ mapConversionId[id] = ('IGNF', id)
+
+ elif usesMethod == 'MILL001from2Dto2D': # Miller
+
+ assert len([1 for val in node.iterfind('usesValue')]) == 4
+ d['x_0'] = getParameter(node, 'PRCP100', 'm')
+ d['y_0'] = getParameter(node, 'PRCP200', 'm')
+ d['lon_0'] = getParameter(node, 'PRCP300', 'deg')
+ d['lat_0'] = getParameter(node, 'PRCP400', 'deg')
+ assert d['x_0'] == 0
+ assert d['y_0'] == 0
+ assert d['lon_0'] == 0
+ assert d['lat_0'] == 0
+
+ #sql = """INSERT INTO "coordinate_operation" VALUES('IGNF','%s','conversion');""" % (id)
+ #all_sql.append(sql)
+
+ sql = """INSERT INTO "conversion" VALUES('IGNF','%s','%s',NULL,NULL,'EPSG','1262',NULL,NULL,'PROJ mill',NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,0);""" % (id,name)
+ all_sql.append(sql)
+
+ mapConversionId[id] = ('IGNF', id)
+
+ elif usesMethod == 'PRCM095from2Dto2D':
+ print('Unhandled conversion PRCM095from2Dto2D = Polar Sterographic (Variant C) %s' % (str((id, name, usesMethod))))
+ continue
+
+ else:
+ print('Unknown conversion %s' % (str((id, name, usesMethod))))
+ assert False
+
# IGNF ProjectedCRS id -> ('IGNF', id), consumed by the CompoundCRS pass below.
mapProjectedId = {}

# Emit "projected_crs" rows for every IGNF ProjectedCRS whose base geographic
# CRS and conversion have been registered by the previous passes.
for node in root.iterfind('.//ProjectedCRS'):
    id = node.attrib['id']
    names = [_name.text for _name in node.iter('name')]
    name = names[0]

    usesCartesianCS = extract_id_from_href(node.find('usesCartesianCS').attrib['href'])
    assert usesCartesianCS in ('TYP_CRG32', 'TYP_CRG70', 'TYP_CRG34'), (id, name, usesCartesianCS)
    baseGeographicCRS = extract_id_from_href(node.find('baseGeographicCRS').attrib['href'])
    if baseGeographicCRS not in mapGeographicId:
        print('Skipping ProjectedCRS %s since its baseGeographicCRS %s is unknown' % (id, baseGeographicCRS))
        continue

    definedByConversion = extract_id_from_href(node.find('definedByConversion').attrib['href'])
    # Fix: ('PRC0909577') is a plain string, not a tuple, so the original
    # "in" test was a substring match (any substring of 'PRC0909577' would
    # also be skipped).  Use a real one-element tuple for an exact match.
    if definedByConversion in ('PRC0909577',):
        print('Skipping ProjectedCRS %s since its definedByConversion %s is unhandled' % (id, definedByConversion))
        continue
    assert definedByConversion in mapConversionId, (id, name, definedByConversion)

    area_of_use = get_area_of_use(node.find('domainOfValidity'))

    # Map the IGNF Cartesian CS type onto the corresponding EPSG
    # coordinate system code.
    cs_code = 4499  # TYP_CRG32 (default)
    if usesCartesianCS == 'TYP_CRG70':
        cs_code = 4400
    if usesCartesianCS == 'TYP_CRG34':
        cs_code = 4530

    sql = """INSERT INTO "projected_crs" VALUES('IGNF','%s','%s',NULL,NULL,'EPSG','%s','%s','%s','%s','%s','%s','%s',NULL,0);""" % (id,name,cs_code,mapGeographicId[baseGeographicCRS][0], mapGeographicId[baseGeographicCRS][1],mapConversionId[definedByConversion][0], mapConversionId[definedByConversion][1], area_of_use[0], area_of_use[1])
    all_sql.append(sql)

    # A second name of the form http://registre.ign.fr/ign/IGNF/crs/IGNF/<alias>
    # denotes an alias CRS: emit a duplicate projected_crs row under that code.
    if len(names) >= 2 and names[1].startswith('http://registre.ign.fr/ign/IGNF/crs/IGNF/'):
        alias = names[1][len('http://registre.ign.fr/ign/IGNF/crs/IGNF/'):]
        aliasOfCRS[id] = [('IGNF', alias)]

        sql = """INSERT INTO "projected_crs" VALUES('IGNF','%s','%s',NULL,NULL,'EPSG','%s','%s','%s','%s','%s','%s','%s',NULL,0);""" % (alias,name,cs_code,mapGeographicId[baseGeographicCRS][0], mapGeographicId[baseGeographicCRS][1],mapConversionId[definedByConversion][0], mapConversionId[definedByConversion][1], area_of_use[0], area_of_use[1])
        all_sql.append(sql)

    mapProjectedId[id] = ('IGNF', id)
+
+
+
# Emit "compound_crs" rows for every IGNF CompoundCRS (horizontal + vertical).
for crs_node in root.iterfind('.//CompoundCRS'):
    id = crs_node.attrib['id']
    names = [n.text for n in crs_node.iter('name')]
    name = names[0]

    singleCRS = [extract_id_from_href(ref.attrib['href'])
                 for ref in crs_node.iter('includesSingleCRS')]
    assert len(singleCRS) == 2

    # The registry references RGWF96GEO where the geographic CRS was
    # registered as RGWF96G: remap it.
    if singleCRS[0] == 'RGWF96GEO':
        singleCRS[0] = 'RGWF96G'

    horiz_id, vert_id = singleCRS
    assert horiz_id in mapProjectedId or horiz_id in mapGeographicId, (id, name)
    assert vert_id in mapVerticalCrsId, (id, name, vert_id)

    # The horizontal component is either a projected or a geographic CRS.
    horiz = mapProjectedId[horiz_id] if horiz_id in mapProjectedId else mapGeographicId[horiz_id]
    vert = mapVerticalCrsId[vert_id]

    area_of_use = get_area_of_use(crs_node.find('domainOfValidity'))

    all_sql.append(
        """INSERT INTO "compound_crs" VALUES('IGNF','%s','%s',NULL,NULL,'%s','%s','%s','%s','%s','%s',0);""" % (
            id, name, horiz[0], horiz[1], vert[0], vert[1],
            area_of_use[0], area_of_use[1]))

    # No compound CRS is expected to carry a registry-URL alias.
    if len(names) >= 2 and names[1].startswith('http://registre.ign.fr/ign/IGNF/crs/IGNF/'):
        assert False
+
+
+all_sql.append("""INSERT INTO grid_alternatives(original_grid_name,
+ proj_grid_name,
+ proj_grid_format,
+ proj_method,
+ inverse_direction,
+ package_name,
+ url, direct_download, open_license, directory)
+ VALUES ('ntf_r93.gsb', -- as referenced by the IGNF registry
+ 'ntf_r93.gsb',
+ 'NTv2',
+ 'hgridshift',
+ 0,
+ 'proj-datumgrid',
+ NULL, NULL, NULL, NULL);
+""")
+
+
# Write the generated SQL to data/sql/ignf.sql (sibling of the scripts dir).
script_dir_name = os.path.dirname(os.path.realpath(__file__))
sql_dir_name = os.path.join(os.path.dirname(script_dir_name), 'data', 'sql')

# Use a context manager so the file is closed even if a write fails; the file
# is opened in binary mode, hence the explicit UTF-8 encoding of each line.
with open(os.path.join(sql_dir_name, 'ignf') + '.sql', 'wb') as f:
    f.write("--- This file has been generated by scripts/build_db_create_ignf_from_xml.py from the http://librairies.ign.fr/geoportail/resources/IGNF.xml definition file. DO NOT EDIT !\n\n".encode('UTF-8'))
    for sql in all_sql + all_sql_concat:
        f.write((sql + '\n').encode('UTF-8'))
+
diff --git a/scripts/build_db_from_esri.py b/scripts/build_db_from_esri.py
new file mode 100755
index 00000000..f642b473
--- /dev/null
+++ b/scripts/build_db_from_esri.py
@@ -0,0 +1,1339 @@
+#!/usr/bin/env python
+###############################################################################
+# $Id$
+#
+# Project: PROJ
+# Purpose: Build SRS and coordinate transform database from ESRI database
+# provided at https://github.com/Esri/projection-engine-db-doc
+# (from the csv/ directory content)
+# Author: Even Rouault <even.rouault at spatialys.com>
+#
+###############################################################################
+# Copyright (c) 2018, Even Rouault <even.rouault at spatialys.com>
+#
+# Permission is hereby granted, free of charge, to any person obtaining a
+# copy of this software and associated documentation files (the "Software"),
+# to deal in the Software without restriction, including without limitation
+# the rights to use, copy, modify, merge, publish, distribute, sublicense,
+# and/or sell copies of the Software, and to permit persons to whom the
+# Software is furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
+# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+# DEALINGS IN THE SOFTWARE.
+###############################################################################
+
+import csv
+import os
+import sqlite3
+import sys
+
# Command line: build_db_from_esri.py <path_to_esri_csv_dir> <proj.db>
if len(sys.argv) != 3:
    print('Usage: build_db_from_esri.py path_to_esri_csv_dir proj.db')
    print('')
    print('path_to_esri_csv_dir is typically the path to the "csv" directory ')
    print('of a "git clone https://github.com/Esri/projection-engine-db-doc"')
    sys.exit(1)

path_to_csv = sys.argv[1]  # directory holding the pe_list_*.csv files
proj_db = sys.argv[2]      # proj.db, queried for existing EPSG entries

conn = sqlite3.connect(proj_db)
cursor = conn.cursor()

# Generated INSERT statements are accumulated here and emitted at the end.
all_sql = []

# TODO: update this !
version = 'ArcMap 10.6.1 / ArcGISPro 2.2'
all_sql.append(
    """INSERT INTO "metadata" VALUES('ESRI.VERSION', '%s');""" % (version))
+
+
def escape_literal(x):
    """Double single quotes so *x* can be embedded in a SQL string literal."""
    return "''".join(x.split("'"))
+
+
+########################
+
# Memoized areaname -> [auth_name, code]; counter for newly allocated ESRI areas.
map_areaname_to_auth_code = {}
esri_area_counter = 1


def find_area(areaname, slat, nlat, llon, rlon):
    """Return [auth_name, code] of the area of use matching *areaname*.

    Lookup order: (1) memoized result; (2) a non-ESRI proj.db "area" row
    matching the name (with '~' translated to the degree sign); (3) a
    non-ESRI row with the exact same bounding box; (4) otherwise a new
    ESRI area is allocated under a sequential code and its INSERT is
    queued in all_sql.
    """

    global esri_area_counter

    if areaname in map_areaname_to_auth_code:
        return map_areaname_to_auth_code[areaname]

    # ESRI area names encode the degree sign as '~'.
    deg = b'\xC2\xB0'.decode('utf-8')
    cursor.execute("SELECT auth_name, code FROM area WHERE name = ? AND auth_name != 'ESRI'",
                   (areaname.replace('~', deg),))
    row = cursor.fetchone()
    if row is None:
        # Fallback: match on the exact bounding box.
        cursor.execute(
            "SELECT auth_name, code FROM area WHERE auth_name != 'ESRI' AND south_lat = ? AND north_lat = ? AND west_lon = ? AND east_lon = ?", (slat, nlat, llon, rlon))
        row = cursor.fetchone()

    if row is None:
        #print('unknown area inserted: ' + areaname)

        # Normalize east longitudes > 180 back into [-180, 180].
        if float(rlon) > 180:
            new_rlon = '%s' % (float(rlon) - 360)
            print('Correcting rlon from %s to %s for %s' %
                  (rlon, new_rlon, areaname))
            rlon = new_rlon

        # Sanity checks on the bounding box before inserting a new area.
        assert float(slat) >= -90 and float(slat) <= 90, (areaname,
                                                          slat, nlat, llon, rlon)
        assert float(nlat) >= -90 and float(nlat) <= 90, (areaname,
                                                          slat, nlat, llon, rlon)
        assert float(nlat) > float(slat), (areaname, slat, nlat, llon, rlon)
        assert float(llon) >= -180 and float(llon) <= 180, (areaname,
                                                            slat, nlat, llon, rlon)
        assert float(rlon) >= -180 and float(rlon) <= 180, (areaname,
                                                            slat, nlat, llon, rlon)

        sql = """INSERT INTO "area" VALUES('ESRI','%d','%s','%s',%s,%s,%s,%s,0);""" % (
            esri_area_counter, escape_literal(areaname), escape_literal(areaname), slat, nlat, llon, rlon)
        all_sql.append(sql)
        map_areaname_to_auth_code[areaname] = [
            'ESRI', '%d' % esri_area_counter]
        esri_area_counter += 1
    else:
        auth_name = row[0]
        code = row[1]
        map_areaname_to_auth_code[areaname] = [auth_name, code]

    return map_areaname_to_auth_code[areaname]
+
+#################
+
def import_linunit():
    """Process pe_list_linunit.csv: check ESRI linear units against EPSG.

    Every EPSG-authored unit must already exist in proj.db with a
    conversion factor agreeing with ESRI's to 1e-15 relative tolerance;
    when the ESRI name differs from the EPSG one, an alias_name INSERT is
    queued in all_sql.  Non-EPSG rows are only syntax-checked.
    """
    with open(os.path.join(path_to_csv, 'pe_list_linunit.csv'), 'rt') as csvfile:
        reader = csv.reader(csvfile)
        header = next(reader)
        nfields = len(header)

        # header.index() raises ValueError if a required column is missing.
        col = {key: header.index(key)
               for key in ('wkid', 'latestWkid', 'name', 'wkt', 'authority')}

        for row in reader:
            assert len(row) == nfields, row

            latestWkid = row[col['latestWkid']]
            authority = row[col['authority']]
            esri_name = row[col['name']]

            # The wkt column is of the form UNIT[name,conv_factor]
            wkt = row[col['wkt']]
            assert wkt.startswith('UNIT[') and wkt.endswith(']')
            tokens = wkt[len('UNIT['):len(wkt) - 1].split(',')
            assert len(tokens) == 2
            esri_conv_factor = float(tokens[1])

            if authority == 'EPSG':
                cursor.execute(
                    "SELECT name, conv_factor FROM unit_of_measure WHERE code = ? AND auth_name = 'EPSG'", (latestWkid,))
                src_row = cursor.fetchone()
                assert src_row, row
                src_name, epsg_conv_factor = src_row
                # ESRI and EPSG factors must agree to ~1e-15 relative error.
                assert abs(esri_conv_factor - epsg_conv_factor) <= 1e-15 * epsg_conv_factor, (
                    esri_name, esri_conv_factor, epsg_conv_factor)

                if src_name != esri_name:
                    all_sql.append(
                        """INSERT INTO alias_name VALUES('unit_of_measure','EPSG','%s','%s','ESRI');""" % (
                            latestWkid, escape_literal(esri_name)))
+
+
+#################
# ESRI spheroid name -> (auth_name, code), filled by import_spheroid().
map_spheroid_esri_name_to_auth_code = {}


def import_spheroid():
    """Process pe_list_spheroid.csv: register ESRI ellipsoids.

    EPSG-authored rows are only checked for existence in proj.db and,
    when the ESRI name differs from the EPSG one, an alias_name INSERT is
    queued.  ESRI-authored rows have SPHEROID[name,a,rf] parsed from the
    WKT and yield an "ellipsoid" INSERT; a semi-major axis more than 1%
    away from 6375000 m is taken to describe a non-Earth body, for which
    a "celestial_body" row is also created (its code derived from the
    spheroid name up to the _19xx/_20xx suffix).
    """
    with open(os.path.join(path_to_csv, 'pe_list_spheroid.csv'), 'rt') as csvfile:
        reader = csv.reader(csvfile)
        header = next(reader)
        nfields = len(header)

        idx_wkid = header.index('wkid')
        assert idx_wkid >= 0

        idx_latestWkid = header.index('latestWkid')
        assert idx_latestWkid >= 0

        idx_name = header.index('name')
        assert idx_name >= 0

        idx_description = header.index('description')
        assert idx_description >= 0

        idx_wkt = header.index('wkt')
        assert idx_wkt >= 0

        idx_authority = header.index('authority')
        assert idx_authority >= 0

        idx_deprecated = header.index('deprecated')
        assert idx_deprecated >= 0

        # celestial_body codes already emitted, to avoid duplicate rows.
        body_set = set()
        while True:
            try:
                row = next(reader)
            except StopIteration:
                break
            assert len(row) == nfields, row

            latestWkid = row[idx_latestWkid]
            authority = row[idx_authority]
            esri_name = row[idx_name]

            if authority == 'EPSG':

                cursor.execute(
                    "SELECT name FROM ellipsoid WHERE code = ? AND auth_name = 'EPSG'", (latestWkid,))
                src_row = cursor.fetchone()
                assert src_row, row
                src_name = src_row[0]

                map_spheroid_esri_name_to_auth_code[esri_name] = [
                    'EPSG', latestWkid]

                if src_name != esri_name:
                    sql = """INSERT INTO alias_name VALUES('ellipsoid','EPSG','%s','%s','ESRI');""" % (
                        latestWkid, escape_literal(esri_name))
                    all_sql.append(sql)

            else:
                assert authority.upper() == 'ESRI', row

                # WKT is of the form SPHEROID[name,semi_major_axis,inverse_flattening]
                wkt = row[idx_wkt]
                assert wkt.startswith('SPHEROID[') and wkt.endswith(']')
                tokens = wkt[len('SPHEROID['):len(wkt) - 1].split(',')
                assert len(tokens) == 3
                a = tokens[1]
                rf = tokens[2]

                description = row[idx_description]
                deprecated = 1 if row[idx_deprecated] == 'yes' else 0

                map_spheroid_esri_name_to_auth_code[esri_name] = [
                    'ESRI', latestWkid]

                # Semi-major axis far from Earth's: treat as another celestial body.
                if abs(float(a) - 6375000) > 0.01 * 6375000:
                    # Body name = spheroid name stripped of its year suffix.
                    pos = esri_name.find('_19')
                    if pos < 0:
                        pos = esri_name.find('_20')
                    assert pos > 0
                    body_code = esri_name[0:pos]

                    if body_code not in body_set:
                        body_set.add(body_code)
                        sql = """INSERT INTO celestial_body VALUES('ESRI', '%s', '%s', %s);""" % (
                            body_code, body_code, a)
                        all_sql.append(sql)
                    body_auth = 'ESRI'
                else:
                    body_auth = 'PROJ'
                    body_code = 'EARTH'

                sql = """INSERT INTO ellipsoid VALUES('ESRI','%s','%s','%s','%s','%s',%s,'EPSG','9001',%s,NULL,%d);""" % (
                    latestWkid, esri_name, description, body_auth, body_code, a, rf, deprecated)
                all_sql.append(sql)
+
+########################
+
+
# ESRI prime meridian name -> (auth_name, code), filled by import_prime_meridian().
map_pm_esri_name_to_auth_code = {}


def import_prime_meridian():
    """Process pe_list_primem.csv: register ESRI prime meridians.

    EPSG-authored rows only get an alias_name INSERT when the ESRI name
    differs from the EPSG one.  ESRI-authored rows have PRIMEM[name,value]
    parsed from the WKT and queued as a "prime_meridian" INSERT; the
    longitude value is stored with unit EPSG:9110 (presumably sexagesimal
    DMS as used by the ESRI registry -- confirm against the CSV data).
    """
    with open(os.path.join(path_to_csv, 'pe_list_primem.csv'), 'rt') as csvfile:
        reader = csv.reader(csvfile)
        header = next(reader)
        nfields = len(header)

        idx_wkid = header.index('wkid')
        assert idx_wkid >= 0

        idx_latestWkid = header.index('latestWkid')
        assert idx_latestWkid >= 0

        idx_name = header.index('name')
        assert idx_name >= 0

        idx_description = header.index('description')
        assert idx_description >= 0

        idx_wkt = header.index('wkt')
        assert idx_wkt >= 0

        idx_authority = header.index('authority')
        assert idx_authority >= 0

        idx_deprecated = header.index('deprecated')
        assert idx_deprecated >= 0

        while True:
            try:
                row = next(reader)
            except StopIteration:
                break
            assert len(row) == nfields, row

            latestWkid = row[idx_latestWkid]
            authority = row[idx_authority]
            esri_name = row[idx_name]

            if authority == 'EPSG':

                cursor.execute(
                    "SELECT name FROM prime_meridian WHERE code = ? AND auth_name = 'EPSG'", (latestWkid,))
                src_row = cursor.fetchone()
                assert src_row, row
                src_name = src_row[0]

                map_pm_esri_name_to_auth_code[esri_name] = ['EPSG', latestWkid]

                if src_name != esri_name:
                    sql = """INSERT INTO alias_name VALUES('prime_meridian','EPSG','%s','%s','ESRI');""" % (
                        latestWkid, escape_literal(esri_name))
                    all_sql.append(sql)

            else:
                assert authority.upper() == 'ESRI', row

                # WKT is of the form PRIMEM[name,longitude]
                wkt = row[idx_wkt]
                assert wkt.startswith('PRIMEM[') and wkt.endswith(']')
                tokens = wkt[len('PRIMEM['):len(wkt) - 1].split(',')
                assert len(tokens) == 2
                value = tokens[1]

                deprecated = 1 if row[idx_deprecated] == 'yes' else 0

                map_pm_esri_name_to_auth_code[esri_name] = ['ESRI', latestWkid]

                sql = """INSERT INTO "prime_meridian" VALUES('ESRI','%s','%s',%s,'EPSG','9110',%d);""" % (
                    latestWkid, esri_name, value, deprecated)
                all_sql.append(sql)
+
+
+########################
+
# ESRI datum name -> (auth_name, code), and ESRI datum code -> parameter dict.
# Filled by import_datum(); the parameters are consumed by import_geogcs().
map_datum_esri_name_to_auth_code = {}
map_datum_esri_to_parameters = {}


def import_datum():
    """Process pe_list_datum.csv: collect ESRI horizontal datums.

    EPSG-authored rows only get an alias_name INSERT when the ESRI name
    differs from the EPSG one.  ESRI-authored rows are NOT written to the
    database here: the CSV lacks the prime meridian and the area of use
    (both only known from the geogcs entries), so the parsed parameters
    are stashed in map_datum_esri_to_parameters for import_geogcs().
    """
    with open(os.path.join(path_to_csv, 'pe_list_datum.csv'), 'rt') as csvfile:
        reader = csv.reader(csvfile)
        header = next(reader)
        nfields = len(header)

        idx_wkid = header.index('wkid')
        assert idx_wkid >= 0

        idx_latestWkid = header.index('latestWkid')
        assert idx_latestWkid >= 0

        idx_name = header.index('name')
        assert idx_name >= 0

        idx_description = header.index('description')
        assert idx_description >= 0

        idx_wkt = header.index('wkt')
        assert idx_wkt >= 0

        idx_authority = header.index('authority')
        assert idx_authority >= 0

        idx_deprecated = header.index('deprecated')
        assert idx_deprecated >= 0

        while True:
            try:
                row = next(reader)
            except StopIteration:
                break
            assert len(row) == nfields, row

            latestWkid = row[idx_latestWkid]
            authority = row[idx_authority]
            esri_name = row[idx_name]

            if authority == 'EPSG':

                map_datum_esri_name_to_auth_code[esri_name] = [
                    'EPSG', latestWkid]

                cursor.execute(
                    "SELECT name FROM geodetic_datum WHERE auth_name = 'EPSG' AND code = ?", (latestWkid,))
                src_row = cursor.fetchone()
                assert src_row, row
                src_name = src_row[0]
                esri_name = row[idx_name]
                if src_name != esri_name:
                    sql = """INSERT INTO alias_name VALUES('geodetic_datum','EPSG','%s','%s','ESRI');""" % (
                        latestWkid, escape_literal(esri_name))
                    all_sql.append(sql)
            else:
                assert authority.upper() == 'ESRI', row

                map_datum_esri_name_to_auth_code[esri_name] = [
                    'ESRI', latestWkid]

                # Extract the ellipsoid name from SPHEROID["..."] in the WKT.
                wkt = row[idx_wkt]
                pos = wkt.find('SPHEROID["')
                assert pos >= 0
                pos += len('SPHEROID["')
                end_pos = wkt[pos:].find('"')
                assert end_pos >= 0
                end_pos += pos
                ellps_name = wkt[pos:end_pos]

                assert ellps_name in map_spheroid_esri_name_to_auth_code, (
                    ellps_name, row)
                ellps_auth_name, ellps_code = map_spheroid_esri_name_to_auth_code[ellps_name]

                description = row[idx_description]
                deprecated = 1 if row[idx_deprecated] == 'yes' else 0

                map_datum_esri_to_parameters[latestWkid] = {
                    'esri_name': esri_name,
                    'description': description,
                    'ellps_auth_name': ellps_auth_name,
                    'ellps_code': ellps_code,
                    'deprecated': deprecated
                }
                # We cannot write it since we lack the prime meridian !!! (and
                # the area of use)
+
+########################
+
+
# ESRI geographic CRS name -> (auth_name, code), filled by import_geogcs().
map_geogcs_esri_name_to_auth_code = {}


def import_geogcs():
    """Process pe_list_geogcs.csv: register ESRI geographic CRS.

    EPSG-authored rows only get an alias_name INSERT when the ESRI name
    differs.  ESRI-authored rows yield a "geodetic_crs" INSERT; the first
    geogcs referencing an ESRI datum also writes the "geodetic_datum" row
    (import_datum() could only stash its parameters, lacking the prime
    meridian and area of use).  A datum reused with a different prime
    meridian gets an artificial derived datum.  Deprecated codes whose
    latestWkid differs are also linked to their replacement.
    """
    with open(os.path.join(path_to_csv, 'pe_list_geogcs.csv'), 'rt') as csvfile:
        reader = csv.reader(csvfile)
        header = next(reader)
        nfields = len(header)

        idx_wkid = header.index('wkid')
        assert idx_wkid >= 0

        idx_latestWkid = header.index('latestWkid')
        assert idx_latestWkid >= 0

        idx_name = header.index('name')
        assert idx_name >= 0

        idx_description = header.index('description')
        assert idx_description >= 0

        idx_wkt = header.index('wkt')
        assert idx_wkt >= 0

        idx_authority = header.index('authority')
        assert idx_authority >= 0

        idx_deprecated = header.index('deprecated')
        assert idx_deprecated >= 0

        idx_areaname = header.index('areaname')
        assert idx_areaname >= 0

        idx_slat = header.index('slat')
        assert idx_slat >= 0

        idx_nlat = header.index('nlat')
        assert idx_nlat >= 0

        idx_llon = header.index('llon')
        assert idx_llon >= 0

        idx_rlon = header.index('rlon')
        assert idx_rlon >= 0

        # ESRI datum codes whose geodetic_datum row has been emitted.
        datum_written = set()

        while True:
            try:
                row = next(reader)
            except StopIteration:
                break
            assert len(row) == nfields, row

            latestWkid = row[idx_latestWkid]
            authority = row[idx_authority]
            esri_name = row[idx_name]

            if authority == 'EPSG':

                map_geogcs_esri_name_to_auth_code[esri_name] = [
                    'EPSG', latestWkid]

                cursor.execute(
                    "SELECT name FROM geodetic_crs WHERE auth_name = 'EPSG' AND code = ?", (latestWkid,))
                src_row = cursor.fetchone()
                assert src_row, row
                src_name = src_row[0]
                esri_name = row[idx_name]
                if src_name != esri_name:
                    sql = """INSERT INTO alias_name VALUES('geodetic_crs','EPSG','%s','%s','ESRI');""" % (
                        latestWkid, escape_literal(esri_name))
                    all_sql.append(sql)
            else:
                assert authority.upper() == 'ESRI', row

                code = row[idx_wkid]

                # Extract the DATUM["..."] name from the WKT.
                wkt = row[idx_wkt]
                pos = wkt.find('DATUM["')
                assert pos >= 0
                pos += len('DATUM["')
                end_pos = wkt[pos:].find('"')
                assert end_pos >= 0
                end_pos += pos
                datum_name = wkt[pos:end_pos]

                assert datum_name in map_datum_esri_name_to_auth_code, (
                    datum_name, row)
                datum_auth_name, datum_code = map_datum_esri_name_to_auth_code[datum_name]

                # Extract the PRIMEM["..."] name from the WKT.
                pos = wkt.find('PRIMEM["')
                assert pos >= 0
                pos += len('PRIMEM["')
                end_pos = wkt[pos:].find('"')
                assert end_pos >= 0
                end_pos += pos
                pm_name = wkt[pos:end_pos]

                assert pm_name in map_pm_esri_name_to_auth_code, (pm_name, row)
                pm_auth_name, pm_code = map_pm_esri_name_to_auth_code[pm_name]

                # Pick the EPSG ellipsoidal CS matching the angular unit.
                is_degree = 'UNIT["Degree' in wkt
                is_grad = 'UNIT["Grad' in wkt
                assert is_degree or is_grad, row
                cs_code = '6422' if is_degree else '6403'

                deprecated = 1 if row[idx_deprecated] == 'yes' else 0

                area_auth_name, area_code = find_area(
                    row[idx_areaname], row[idx_slat], row[idx_nlat], row[idx_llon], row[idx_rlon])

                if datum_auth_name == 'ESRI':
                    if datum_code not in datum_written:
                        datum_written.add(datum_code)

                        p = map_datum_esri_to_parameters[datum_code]

                        sql = """INSERT INTO "geodetic_datum" VALUES('ESRI','%s','%s','%s',NULL,'%s','%s','%s','%s','%s','%s',%d);""" % (
                            datum_code, p['esri_name'], p['description'], p['ellps_auth_name'], p['ellps_code'], pm_auth_name, pm_code, area_auth_name, area_code, p['deprecated'])
                        all_sql.append(sql)
                        p['pm_auth_name'] = pm_auth_name
                        p['pm_code'] = pm_code
                        map_datum_esri_to_parameters[datum_code] = p
                    else:
                        assert map_datum_esri_to_parameters[datum_code]['pm_auth_name'] == pm_auth_name, (
                            row, map_datum_esri_to_parameters[datum_code]['pm_auth_name'], pm_auth_name)
                        if map_datum_esri_to_parameters[datum_code]['pm_code'] != pm_code:

                            # Case of GCS_Voirol_Unifie_1960 and GCS_Voirol_Unifie_1960_Paris
                            # which use the same datum D_Voirol_Unifie_1960 but with
                            # different prime meridian.
                            # We create an artificial datum to avoid that issue.

                            # Fix: fetch the base datum parameters *before*
                            # datum_code gets suffixed (the original code read
                            # 'p' here while it was still undefined in this
                            # branch, raising NameError).
                            p = map_datum_esri_to_parameters[datum_code]

                            datum_name += '_' + pm_name
                            datum_code += '_' + pm_name

                            datum_written.add(datum_code)
                            # Fix: register the artificial datum under its own
                            # code (not the geogcs latestWkid), so later
                            # lookups reference the geodetic_datum row
                            # inserted just below.
                            map_datum_esri_name_to_auth_code[datum_name] = [
                                'ESRI', datum_code]
                            map_datum_esri_to_parameters[datum_code] = {
                                'esri_name': datum_name,
                                'description': p['description'] + ' with ' + pm_name + ' prime meridian',
                                'ellps_auth_name': p['ellps_auth_name'],
                                'ellps_code': p['ellps_code'],
                                'deprecated': p['deprecated']
                            }
                            p = map_datum_esri_to_parameters[datum_code]

                            # Fix: use the same column order as the INSERT
                            # above (name, description, NULL scope); the
                            # original statement shifted the description into
                            # the scope column.
                            sql = """INSERT INTO "geodetic_datum" VALUES('ESRI','%s','%s','%s',NULL,'%s','%s','%s','%s','%s','%s',%d);""" % (
                                datum_code, p['esri_name'], p['description'], p['ellps_auth_name'], p['ellps_code'], pm_auth_name, pm_code, area_auth_name, area_code, p['deprecated'])
                            all_sql.append(sql)
                            p['pm_auth_name'] = pm_auth_name
                            p['pm_code'] = pm_code
                            map_datum_esri_to_parameters[datum_code] = p

                # We may have already the EPSG entry, so use it preferably
                if esri_name not in map_geogcs_esri_name_to_auth_code:
                    map_geogcs_esri_name_to_auth_code[esri_name] = ['ESRI', code]

                sql = """INSERT INTO "geodetic_crs" VALUES('ESRI','%s','%s',NULL,NULL,'geographic 2D','EPSG','%s','%s','%s','%s','%s',NULL,%d);""" % (
                    code, esri_name, cs_code, datum_auth_name, datum_code, area_auth_name, area_code, deprecated)
                all_sql.append(sql)

                # Deprecated entries whose latestWkid differs point at their
                # EPSG replacement.
                if deprecated and code != latestWkid:
                    cursor.execute(
                        "SELECT name FROM geodetic_crs WHERE auth_name = 'EPSG' AND code = ?", (latestWkid,))
                    src_row = cursor.fetchone()
                    assert src_row

                    sql = """INSERT INTO "link_from_deprecated_to_non_deprecated" VALUES('geodetic_crs','ESRI','%s','EPSG','%s','ESRI');""" % (
                        code, latestWkid)
                    all_sql.append(sql)
+
+########################
+
+
# ESRI projected CRS name -> (auth_name, code), filled by import_projcs().
map_projcs_esri_name_to_auth_code = {}


def import_projcs():
    """Process pe_list_projcs.csv: register ESRI projected CRS.

    EPSG-authored rows only get an alias_name INSERT when the ESRI name
    differs.  ESRI-authored rows yield a "projected_crs" INSERT that
    stores the full ESRI WKT string verbatim (no decomposed conversion).
    Deprecated codes whose latestWkid differs are linked to their
    replacement, within ESRI when the target wkid was seen, else EPSG.
    """
    with open(os.path.join(path_to_csv, 'pe_list_projcs.csv'), 'rt') as csvfile:
        reader = csv.reader(csvfile)
        header = next(reader)
        nfields = len(header)

        idx_wkid = header.index('wkid')
        assert idx_wkid >= 0

        idx_latestWkid = header.index('latestWkid')
        assert idx_latestWkid >= 0

        idx_name = header.index('name')
        assert idx_name >= 0

        idx_description = header.index('description')
        assert idx_description >= 0

        idx_wkt = header.index('wkt')
        assert idx_wkt >= 0

        idx_authority = header.index('authority')
        assert idx_authority >= 0

        idx_deprecated = header.index('deprecated')
        assert idx_deprecated >= 0

        idx_areaname = header.index('areaname')
        assert idx_areaname >= 0

        idx_slat = header.index('slat')
        assert idx_slat >= 0

        idx_nlat = header.index('nlat')
        assert idx_nlat >= 0

        idx_llon = header.index('llon')
        assert idx_llon >= 0

        idx_rlon = header.index('rlon')
        assert idx_rlon >= 0

        # ESRI wkids seen, and deprecated code -> latestWkid replacement.
        wkid_set = set()
        mapDeprecatedToNonDeprecated = {}

        while True:
            try:
                row = next(reader)
            except StopIteration:
                break
            assert len(row) == nfields, row

            latestWkid = row[idx_latestWkid]
            authority = row[idx_authority]
            esri_name = row[idx_name]

            if authority == 'EPSG':

                map_projcs_esri_name_to_auth_code[esri_name] = [
                    'EPSG', latestWkid]

                cursor.execute(
                    "SELECT name FROM projected_crs WHERE auth_name = 'EPSG' AND code = ?", (latestWkid,))
                src_row = cursor.fetchone()
                assert src_row, row
                src_name = src_row[0]
                esri_name = row[idx_name]
                if src_name != esri_name:
                    sql = """INSERT INTO alias_name VALUES('projected_crs','EPSG','%s','%s','ESRI');""" % (
                        latestWkid, escape_literal(esri_name))
                    all_sql.append(sql)
            else:
                assert authority.upper() == 'ESRI', row

                code = row[idx_wkid]
                wkid_set.add(code)

                # Extract the GEOGCS["..."] name from the WKT.
                wkt = row[idx_wkt]
                pos = wkt.find('GEOGCS["')
                assert pos >= 0
                pos += len('GEOGCS["')
                end_pos = wkt[pos:].find('"')
                assert end_pos >= 0
                end_pos += pos
                geogcs_name = wkt[pos:end_pos]

                assert geogcs_name in map_geogcs_esri_name_to_auth_code, (
                    geogcs_name, row)
                geogcs_auth_name, geogcs_code = map_geogcs_esri_name_to_auth_code[geogcs_name]

                area_auth_name, area_code = find_area(
                    row[idx_areaname], row[idx_slat], row[idx_nlat], row[idx_llon], row[idx_rlon])

                map_projcs_esri_name_to_auth_code[esri_name] = ['ESRI', code]

                deprecated = 1 if row[idx_deprecated] == 'yes' else 0

                # The full ESRI WKT is stored as the CRS text definition.
                sql = """INSERT INTO "projected_crs" VALUES('ESRI','%s','%s',NULL,NULL,NULL,NULL,'%s','%s',NULL,NULL,'%s','%s','%s',%d);""" % (
                    code, esri_name, geogcs_auth_name, geogcs_code, area_auth_name, area_code, escape_literal(wkt), deprecated)
                all_sql.append(sql)

                if deprecated and code != latestWkid:
                    mapDeprecatedToNonDeprecated[code] = latestWkid

        for deprecated in mapDeprecatedToNonDeprecated:
            code = deprecated
            latestWkid = mapDeprecatedToNonDeprecated[deprecated]

            if latestWkid in wkid_set:
                sql = """INSERT INTO "link_from_deprecated_to_non_deprecated" VALUES('projected_crs','ESRI','%s','ESRI','%s','ESRI');""" % (
                    code, latestWkid)
                all_sql.append(sql)
            else:
                cursor.execute(
                    "SELECT name FROM projected_crs WHERE auth_name = 'EPSG' AND code = ?", (latestWkid,))
                src_row = cursor.fetchone()
                # NOTE(review): 'row' here is left over from the reading loop
                # above, so the assertion message may be misleading — confirm
                # whether 'latestWkid' was intended instead.
                assert src_row, row
                sql = """INSERT INTO "link_from_deprecated_to_non_deprecated" VALUES('projected_crs','ESRI','%s','EPSG','%s','ESRI');""" % (
                    code, latestWkid)
                all_sql.append(sql)
+
+
+########################
+
# ESRI vertical datum name -> (auth_name, code), and ESRI code -> parameters.
# Filled by import_vdatum(); the parameters are consumed by import_vertcs().
map_vdatum_esri_name_to_auth_code = {}
map_vdatum_esri_to_parameters = {}


def import_vdatum():
    """Process pe_list_vdatum.csv: collect ESRI vertical datums.

    EPSG-authored rows only get an alias_name INSERT when the ESRI name
    differs.  ESRI-authored rows are not written here since the CSV lacks
    the area of use: parameters are stashed in map_vdatum_esri_to_parameters
    for import_vertcs() to emit.
    NOTE(review): ESRI entries are keyed/stored by wkid, whereas
    import_datum() uses latestWkid — confirm this asymmetry is intended.
    """
    with open(os.path.join(path_to_csv, 'pe_list_vdatum.csv'), 'rt') as csvfile:
        reader = csv.reader(csvfile)
        header = next(reader)
        nfields = len(header)

        idx_wkid = header.index('wkid')
        assert idx_wkid >= 0

        idx_latestWkid = header.index('latestWkid')
        assert idx_latestWkid >= 0

        idx_name = header.index('name')
        assert idx_name >= 0

        idx_description = header.index('description')
        assert idx_description >= 0

        idx_wkt = header.index('wkt')
        assert idx_wkt >= 0

        idx_authority = header.index('authority')
        assert idx_authority >= 0

        idx_deprecated = header.index('deprecated')
        assert idx_deprecated >= 0

        while True:
            try:
                row = next(reader)
            except StopIteration:
                break
            assert len(row) == nfields, row

            wkid = row[idx_wkid]
            latestWkid = row[idx_latestWkid]
            authority = row[idx_authority]
            esri_name = row[idx_name]

            if authority == 'EPSG':

                map_vdatum_esri_name_to_auth_code[esri_name] = [
                    'EPSG', latestWkid]

                cursor.execute(
                    "SELECT name FROM vertical_datum WHERE auth_name = 'EPSG' AND code = ?", (latestWkid,))
                src_row = cursor.fetchone()
                assert src_row, row
                src_name = src_row[0]
                esri_name = row[idx_name]
                if src_name != esri_name:
                    sql = """INSERT INTO alias_name VALUES('vertical_datum','EPSG','%s','%s','ESRI');""" % (
                        latestWkid, escape_literal(esri_name))
                    all_sql.append(sql)
            else:
                assert authority.upper() == 'ESRI', row

                map_vdatum_esri_name_to_auth_code[esri_name] = ['ESRI', wkid]

                description = row[idx_description]
                deprecated = 1 if row[idx_deprecated] == 'yes' else 0

                map_vdatum_esri_to_parameters[wkid] = {
                    'esri_name': esri_name,
                    'description': description,
                    'deprecated': deprecated
                }
                # We cannot write it since we lack the area of use
+
+########################
+
+
# ESRI vertical CRS name -> (auth_name, code), filled by import_vertcs().
map_vertcs_esri_name_to_auth_code = {}
+
+
def import_vertcs():
    """Import ESRI vertical CRS definitions from 'pe_list_vertcs.csv'.

    For rows whose authority is EPSG, only an ESRI alias is registered (and
    only when the ESRI name differs from the EPSG one).  For ESRI-specific
    rows, emits INSERT statements for the vertical datum (once per datum,
    reusing the parameters collected earlier by import_vdatum(), which lacked
    the area of use) and for the vertical CRS, appending them to the
    module-level all_sql list.
    """
    with open(os.path.join(path_to_csv, 'pe_list_vertcs.csv'), 'rt') as csvfile:
        reader = csv.reader(csvfile)
        header = next(reader)
        nfields = len(header)

        # Locate all required columns (header.index() raises if one is missing).
        idx_wkid = header.index('wkid')
        assert idx_wkid >= 0

        idx_latestWkid = header.index('latestWkid')
        assert idx_latestWkid >= 0

        idx_name = header.index('name')
        assert idx_name >= 0

        idx_description = header.index('description')
        assert idx_description >= 0

        idx_wkt = header.index('wkt')
        assert idx_wkt >= 0

        idx_authority = header.index('authority')
        assert idx_authority >= 0

        idx_deprecated = header.index('deprecated')
        assert idx_deprecated >= 0

        idx_areaname = header.index('areaname')
        assert idx_areaname >= 0

        idx_slat = header.index('slat')
        assert idx_slat >= 0

        idx_nlat = header.index('nlat')
        assert idx_nlat >= 0

        idx_llon = header.index('llon')
        assert idx_llon >= 0

        idx_rlon = header.index('rlon')
        assert idx_rlon >= 0

        # ESRI datum codes for which a vertical_datum INSERT was already emitted.
        datum_written = set()

        for row in reader:
            assert len(row) == nfields, row

            latestWkid = row[idx_latestWkid]
            authority = row[idx_authority]
            esri_name = row[idx_name]

            if authority == 'EPSG':

                map_vertcs_esri_name_to_auth_code[esri_name] = [
                    'EPSG', latestWkid]

                cursor.execute(
                    "SELECT name FROM vertical_crs WHERE auth_name = 'EPSG' AND code = ?", (latestWkid,))
                src_row = cursor.fetchone()
                assert src_row, row
                src_name = src_row[0]
                # Register an alias only when ESRI's name differs from EPSG's.
                if src_name != esri_name:
                    sql = """INSERT INTO alias_name VALUES('vertical_crs','EPSG','%s','%s','ESRI');""" % (
                        latestWkid, escape_literal(esri_name))
                    all_sql.append(sql)
            else:
                assert authority.upper() == 'ESRI', row

                code = row[idx_wkid]

                wkt = row[idx_wkt]

                if ',DATUM[' in wkt:
                    # FIXME ??
                    print('Skipping %s. Should be a CompoundCRS' % (esri_name))
                    sql = """-- Skipping %s. Should be a CompoundCRS""" % (
                        esri_name)
                    all_sql.append(sql)
                    continue

                # Extract the datum name from the VDATUM["..."] node.
                pos = wkt.find('VDATUM["')
                assert pos >= 0
                pos += len('VDATUM["')
                end_pos = wkt[pos:].find('"')
                assert end_pos >= 0
                end_pos += pos
                datum_name = wkt[pos:end_pos]

                assert 'PARAMETER["Vertical_Shift",0.0]' in wkt, row

                if datum_name not in map_vdatum_esri_name_to_auth_code:
                    print('Skipping vertcs %s. it expresses an ellipsoidal height regarding a horizontal datum' % (
                        str(row)))
                    # Bug fix: the format string had a single %s for two
                    # arguments, which raised TypeError whenever this branch
                    # was taken.  Now both values are interpolated.
                    sql = """-- Skipping vertcs %s (datum %s). it expresses an ellipsoidal height regarding a horizontal datum""" % (
                        esri_name, datum_name)
                    all_sql.append(sql)
                    continue

                datum_auth_name, datum_code = map_vdatum_esri_name_to_auth_code[datum_name]

                deprecated = 1 if row[idx_deprecated] == 'yes' else 0

                area_auth_name, area_code = find_area(
                    row[idx_areaname], row[idx_slat], row[idx_nlat], row[idx_llon], row[idx_rlon])

                if datum_auth_name == 'ESRI':
                    # Emit the vertical datum record only once, now that an
                    # area of use is available.
                    if datum_code not in datum_written:
                        datum_written.add(datum_code)

                        p = map_vdatum_esri_to_parameters[datum_code]
                        sql = """INSERT INTO "vertical_datum" VALUES('ESRI','%s','%s',NULL,NULL,'%s','%s',%d);""" % (
                            datum_code, p['esri_name'], area_auth_name, area_code, p['deprecated'])
                        all_sql.append(sql)

                map_vertcs_esri_name_to_auth_code[esri_name] = ['ESRI', code]

                # Map the WKT unit to the matching EPSG vertical CS code.
                if 'PARAMETER["Direction",1.0]' in wkt and 'UNIT["Meter"' in wkt:
                    cs_code = 6499
                elif 'PARAMETER["Direction",1.0]' in wkt and 'UNIT["Foot"' in wkt:
                    cs_code = 1030
                elif 'PARAMETER["Direction",1.0]' in wkt and 'UNIT["Foot_US"' in wkt:
                    cs_code = 6497
                else:
                    assert False, ('unknown coordinate system for %s' %
                                   str(row))

                sql = """INSERT INTO "vertical_crs" VALUES('ESRI','%s','%s',NULL,NULL,'EPSG','%s','%s','%s','%s','%s',%d);""" % (
                    code, esri_name, cs_code, datum_auth_name, datum_code, area_auth_name, area_code, deprecated)
                all_sql.append(sql)

                if deprecated and code != latestWkid:
                    # Deprecated ESRI code superseded by an EPSG entry.
                    cursor.execute(
                        "SELECT name FROM vertical_crs WHERE auth_name = 'EPSG' AND code = ?", (latestWkid,))
                    src_row = cursor.fetchone()
                    assert src_row

                    sql = """INSERT INTO "link_from_deprecated_to_non_deprecated" VALUES('vertical_crs','ESRI','%s','EPSG','%s','ESRI');""" % (
                        code, latestWkid)
                    all_sql.append(sql)
+
+
+########################
+
# ESRI compound CRS name -> [authority, code]; populated by import_hvcoordsys().
map_compoundcrs_esri_name_to_auth_code = {}
+
+
def import_hvcoordsys():
    """Register ESRI names of compound (horizontal + vertical) CRS.

    Every entry of 'pe_list_hvcoordsys.csv' is expected to come from EPSG;
    the ESRI name is recorded in map_compoundcrs_esri_name_to_auth_code and,
    when it differs from the EPSG one, an alias INSERT is appended to all_sql.
    """
    with open(os.path.join(path_to_csv, 'pe_list_hvcoordsys.csv'), 'rt') as csvfile:
        rows = csv.reader(csvfile)
        column_names = next(rows)
        expected_field_count = len(column_names)

        # Resolve every expected column; .index() raises if one is absent.
        idx_wkid = column_names.index('wkid')
        assert idx_wkid >= 0

        idx_latestWkid = column_names.index('latestWkid')
        assert idx_latestWkid >= 0

        idx_name = column_names.index('name')
        assert idx_name >= 0

        idx_description = column_names.index('description')
        assert idx_description >= 0

        idx_wkt = column_names.index('wkt')
        assert idx_wkt >= 0

        idx_authority = column_names.index('authority')
        assert idx_authority >= 0

        idx_deprecated = column_names.index('deprecated')
        assert idx_deprecated >= 0

        idx_areaname = column_names.index('areaname')
        assert idx_areaname >= 0

        idx_slat = column_names.index('slat')
        assert idx_slat >= 0

        idx_nlat = column_names.index('nlat')
        assert idx_nlat >= 0

        idx_llon = column_names.index('llon')
        assert idx_llon >= 0

        idx_rlon = column_names.index('rlon')
        assert idx_rlon >= 0

        for record in rows:
            assert len(record) == expected_field_count, record

            latestWkid = record[idx_latestWkid]
            authority = record[idx_authority]
            esri_name = record[idx_name]

            # No ESRI specific entries at that time !
            assert authority == 'EPSG', record

            map_compoundcrs_esri_name_to_auth_code[esri_name] = [
                'EPSG', latestWkid]

            cursor.execute(
                "SELECT name FROM compound_crs WHERE auth_name = 'EPSG' AND code = ?", (latestWkid,))
            epsg_row = cursor.fetchone()
            assert epsg_row, record
            epsg_name = epsg_row[0]
            # Only emit an alias when the names actually differ.
            if epsg_name != esri_name:
                sql = """INSERT INTO alias_name VALUES('compound_crs','EPSG','%s','%s','ESRI');""" % (
                    latestWkid, escape_literal(esri_name))
                all_sql.append(sql)
+
+
+########################
+
+
def get_parameter(wkt, param_name):
    """Return the raw text of PARAMETER["param_name",<value>] in *wkt*.

    The value is returned as the substring between the comma following the
    parameter name and the closing bracket (no numeric conversion).
    Asserts if the parameter is absent or unterminated.
    """
    marker = ',PARAMETER["' + param_name + '",'
    start = wkt.find(marker)
    assert start >= 0, wkt
    start += len(marker)
    closing = wkt[start:].find(']')
    assert closing >= 0, (wkt, wkt[start:])
    return wkt[start:start + closing]
+
+
def import_geogtran():
    """Import ESRI geographic transformations from 'pe_list_geogtran.csv'.

    EPSG-authority rows are only sanity-checked against the existing EPSG
    tables.  ESRI-specific, non-deprecated rows are classified by their WKT
    METHOD[...] and translated into helmert_transformation,
    other_transformation or grid_transformation INSERT statements appended
    to the module-level all_sql list.
    """
    with open(os.path.join(path_to_csv, 'pe_list_geogtran.csv'), 'rt') as csvfile:
        reader = csv.reader(csvfile)
        header = next(reader)
        nfields = len(header)

        # Locate all required columns (header.index() raises if one is missing).
        idx_wkid = header.index('wkid')
        assert idx_wkid >= 0

        idx_latestWkid = header.index('latestWkid')
        assert idx_latestWkid >= 0

        idx_name = header.index('name')
        assert idx_name >= 0

        idx_description = header.index('description')
        assert idx_description >= 0

        idx_wkt = header.index('wkt')
        assert idx_wkt >= 0

        idx_authority = header.index('authority')
        assert idx_authority >= 0

        idx_deprecated = header.index('deprecated')
        assert idx_deprecated >= 0

        idx_areaname = header.index('areaname')
        assert idx_areaname >= 0

        idx_slat = header.index('slat')
        assert idx_slat >= 0

        idx_nlat = header.index('nlat')
        assert idx_nlat >= 0

        idx_llon = header.index('llon')
        assert idx_llon >= 0

        idx_rlon = header.index('rlon')
        assert idx_rlon >= 0

        idx_accuracy = header.index('accuracy')
        assert idx_accuracy >= 0

        while True:
            try:
                row = next(reader)
            except StopIteration:
                break
            assert len(row) == nfields, row

            wkid = row[idx_wkid]
            authority = row[idx_authority]
            esri_name = row[idx_name]
            wkt = row[idx_wkt]
            deprecated = 1 if row[idx_deprecated] == 'yes' else 0

            if authority == 'EPSG':

                # NOTE(review): this writes into the compound-CRS name map;
                # looks like a copy/paste from import_hvcoordsys() -- confirm
                # whether a dedicated geogtran map was intended.
                map_compoundcrs_esri_name_to_auth_code[esri_name] = [
                    'EPSG', wkid]

                cursor.execute(
                    "SELECT name FROM coordinate_operation_view WHERE auth_name = 'EPSG' AND code = ?", (wkid,))
                src_row = cursor.fetchone()

                if not src_row:
                    if 'Molodensky_Badekas' in wkt:
                        # print('Skipping GEOGTRAN %s (EPSG source) since it uses a non-supported yet suported method'% esri_name)
                        continue

                    # Don't do anything particular in part of checking we now
                    # it
                    assert src_row, row

            else:

                # We don't want to import ESRI deprecated transformations
                # (there are a lot), do we ?
                if deprecated:
                    #print('Skipping deprecated GEOGTRAN %s' % esri_name)
                    continue

                assert wkt.startswith('GEOGTRAN')

                # First GEOGCS["..."] occurrence: the source CRS name.
                pos = wkt.find(',GEOGCS["')
                assert pos >= 0
                pos += len(',GEOGCS["')
                end_pos = wkt[pos:].find('"')
                assert end_pos >= 0
                end_pos += pos
                src_crs_name = wkt[pos:end_pos]

                # Second GEOGCS["..."] occurrence: the target CRS name.
                pos = wkt[end_pos:].find(',GEOGCS["')
                assert pos >= 0
                pos += end_pos + len(',GEOGCS["')
                end_pos = wkt[pos:].find('"')
                assert end_pos >= 0
                end_pos += pos
                dst_crs_name = wkt[pos:end_pos]

                # Both CRS names must have been registered by import_geogcs().
                assert src_crs_name in map_geogcs_esri_name_to_auth_code, (
                    src_crs_name, row)
                src_crs_auth_name, src_crs_code = map_geogcs_esri_name_to_auth_code[src_crs_name]

                assert dst_crs_name in map_geogcs_esri_name_to_auth_code, (
                    dst_crs_name, row)
                dst_crs_auth_name, dst_crs_code = map_geogcs_esri_name_to_auth_code[dst_crs_name]

                is_longitude_rotation = 'METHOD["Longitude_Rotation"]' in wkt
                if is_longitude_rotation:
                    # skip it as it is automatically handled by PROJ
                    continue

                # Classify the transformation by its WKT METHOD[...] node.
                is_cf = 'METHOD["Coordinate_Frame"]' in wkt
                is_pv = 'METHOD["Position_Vector"]' in wkt
                is_geocentric_translation = 'METHOD["Geocentric_Translation"]' in wkt
                is_geog2d_offset = 'METHOD["Geographic_2D_Offset"]' in wkt
                is_null = 'METHOD["Null"]' in wkt
                is_unitchange = 'METHOD["Unit_Change"]' in wkt
                is_nadcon = 'METHOD["NADCON"]' in wkt
                is_ntv2 = 'METHOD["NTv2"]' in wkt
                is_geocon = 'METHOD["GEOCON"]' in wkt
                is_harn = 'METHOD["HARN"]' in wkt
                is_molodensky_badekas = 'METHOD["Molodensky_Badekas"]' in wkt
                assert is_cf or is_pv or is_geocentric_translation or is_molodensky_badekas or is_nadcon or is_geog2d_offset or is_ntv2 or is_geocon or is_null or is_harn or is_unitchange, (
                    row)

                area_auth_name, area_code = find_area(
                    row[idx_areaname], row[idx_slat], row[idx_nlat], row[idx_llon], row[idx_rlon])

                accuracy = row[idx_accuracy]

                if is_cf or is_pv:
                    # 7-parameter Helmert transformation.
                    x = get_parameter(wkt, 'X_Axis_Translation')
                    y = get_parameter(wkt, 'Y_Axis_Translation')
                    z = get_parameter(wkt, 'Z_Axis_Translation')
                    rx = get_parameter(wkt, 'X_Axis_Rotation') # in arc second
                    ry = get_parameter(wkt, 'Y_Axis_Rotation')
                    rz = get_parameter(wkt, 'Z_Axis_Rotation')
                    s = get_parameter(wkt, 'Scale_Difference') # in ppm
                    assert wkt.count('PARAMETER[') == 7

                    if is_cf:
                        method_code = '9607'
                        method_name = 'Coordinate Frame rotation (geog2D domain)'
                    else:
                        method_code = '9606'
                        method_name = 'Position Vector transformation (geog2D domain)'

                    sql = """INSERT INTO "helmert_transformation" VALUES('ESRI','%s','%s',NULL,NULL,'EPSG','%s','%s','%s','%s','%s','%s','%s','%s',%s,%s,%s,%s,'EPSG','9001',%s,%s,%s,'EPSG','9104',%s,'EPSG','9202',NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,%d);""" % (
                        wkid, esri_name, method_code, method_name, src_crs_auth_name, src_crs_code, dst_crs_auth_name, dst_crs_code, area_auth_name, area_code, accuracy, x, y, z, rx, ry, rz, s, deprecated)
                    all_sql.append(sql)

                elif is_molodensky_badekas:
                    # 10-parameter transformation: Helmert + rotation origin.
                    x = get_parameter(wkt, 'X_Axis_Translation')
                    y = get_parameter(wkt, 'Y_Axis_Translation')
                    z = get_parameter(wkt, 'Z_Axis_Translation')
                    rx = get_parameter(wkt, 'X_Axis_Rotation') # in arc second
                    ry = get_parameter(wkt, 'Y_Axis_Rotation')
                    rz = get_parameter(wkt, 'Z_Axis_Rotation')
                    s = get_parameter(wkt, 'Scale_Difference') # in ppm
                    px = get_parameter(wkt, 'X_Coordinate_of_Rotation_Origin')
                    py = get_parameter(wkt, 'Y_Coordinate_of_Rotation_Origin')
                    pz = get_parameter(wkt, 'Z_Coordinate_of_Rotation_Origin')
                    assert wkt.count('PARAMETER[') == 10

                    # The ESRI naming is not really clear about the convention
                    # but it looks like it is Coordinate Frame when comparing ESRI:1066 (Amersfoort_To_ETRS_1989_MB) with EPSG:1066
                    method_code = '9636'
                    method_name = 'Molodensky-Badekas (CF geog2D domain)'

                    sql = """INSERT INTO "helmert_transformation" VALUES('ESRI','%s','%s',NULL,NULL,'EPSG','%s','%s','%s','%s','%s','%s','%s','%s',%s,%s,%s,%s,'EPSG','9001',%s,%s,%s,'EPSG','9104',%s,'EPSG','9202',NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,%s,%s,%s,'EPSG','9001',%d);""" % (
                        wkid, esri_name, method_code, method_name, src_crs_auth_name, src_crs_code, dst_crs_auth_name, dst_crs_code, area_auth_name, area_code, accuracy, x, y, z, rx, ry, rz, s, px, py, pz, deprecated)
                    all_sql.append(sql)

                elif is_geocentric_translation:
                    # 3-parameter translation only.
                    x = get_parameter(wkt, 'X_Axis_Translation')
                    y = get_parameter(wkt, 'Y_Axis_Translation')
                    z = get_parameter(wkt, 'Z_Axis_Translation')
                    assert wkt.count('PARAMETER[') == 3

                    method_code = '9603'
                    method_name = 'Geocentric translations (geog2D domain)'

                    sql = """INSERT INTO "helmert_transformation" VALUES('ESRI','%s','%s',NULL,NULL,'EPSG','%s','%s','%s','%s','%s','%s','%s','%s',%s,%s,%s,%s,'EPSG','9001',NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,%d);""" % (
                        wkid, esri_name, method_code, method_name, src_crs_auth_name, src_crs_code, dst_crs_auth_name, dst_crs_code, area_auth_name, area_code, accuracy, x, y, z, deprecated)
                    all_sql.append(sql)

                elif is_geog2d_offset:

                    # The only occurence is quite boring: from NTF(Paris) to NTF.
                    # But interestingly the longitude offset value is not
                    # completely exactly the value of the Paris prime meridian

                    long_offset = get_parameter(
                        wkt, 'Longitude_Offset') # in arc second
                    lat_offset = get_parameter(wkt, 'Latitude_Offset')
                    assert wkt.count('PARAMETER[') == 2

                    sql = """INSERT INTO "other_transformation" VALUES('ESRI','%s','%s',NULL,NULL,'EPSG','9619','Geographic2D offsets','%s','%s','%s','%s','%s','%s',%s,'EPSG','8601','Latitude offset',%s,'EPSG','9104','EPSG','8602','Longitude offset',%s,'EPSG','9104',NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,%d);""" % (
                        wkid, esri_name, src_crs_auth_name, src_crs_code, dst_crs_auth_name, dst_crs_code, area_auth_name, area_code, accuracy, lat_offset, long_offset, deprecated)
                    all_sql.append(sql)

                elif is_null:
                    # Modelled as a Geographic2D offsets operation with
                    # zero offsets.
                    long_offset = '0'
                    lat_offset = '0'
                    assert wkt.count('PARAMETER[') == 0

                    sql = """INSERT INTO "other_transformation" VALUES('ESRI','%s','%s',NULL,NULL,'EPSG','9619','Geographic2D offsets','%s','%s','%s','%s','%s','%s',%s,'EPSG','8601','Latitude offset',%s,'EPSG','9104','EPSG','8602','Longitude offset',%s,'EPSG','9104',NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,%d);""" % (
                        wkid, esri_name, src_crs_auth_name, src_crs_code, dst_crs_auth_name, dst_crs_code, area_auth_name, area_code, accuracy, lat_offset, long_offset, deprecated)
                    all_sql.append(sql)

                elif is_unitchange:

                    # Automatically handled by PROJ. Not worth importing
                    continue

                else:
                    # Grid-based methods (NADCON/NTv2/GEOCON/HARN): the single
                    # PARAMETER holds the grid file name, prefixed 'Dataset_'.
                    assert wkt.count('PARAMETER[') == 1
                    needle = ',PARAMETER["'
                    pos = wkt.find(needle)
                    assert pos >= 0, wkt
                    pos += len(needle)
                    end_pos = wkt[pos:].find('"')
                    assert end_pos >= 0, (wkt, wkt[pos:])
                    end_pos += pos
                    filename = wkt[pos:end_pos]
                    assert filename.startswith('Dataset_')
                    filename = filename[len('Dataset_'):]


                    # Skip the entry if a non-ESRI transformation already
                    # covers the same CRS pair and area of use.
                    cursor.execute(
                        "SELECT name, grid_name FROM grid_transformation WHERE auth_name != 'ESRI' AND source_crs_auth_name = ? AND source_crs_code = ? AND target_crs_auth_name = ? AND target_crs_code = ? AND area_of_use_auth_name = ? AND area_of_use_code = ?", (src_crs_auth_name, src_crs_code, dst_crs_auth_name, dst_crs_code, area_auth_name, area_code))
                    src_row = cursor.fetchone()
                    if src_row:
                        print('A grid_transformation (%s, using grid %s) is already known for the equivalent of %s (%s:%s --> %s:%s) for area %s, which uses grid %s. Skipping it' % (src_row[0], src_row[1], esri_name, src_crs_auth_name, src_crs_code, dst_crs_auth_name, dst_crs_code, row[idx_areaname], filename))
                        continue

                    sql = """INSERT INTO "grid_transformation" VALUES('ESRI','%s','%s',NULL,NULL,'EPSG','9615','NTv2','%s','%s','%s','%s','%s','%s',%s,'EPSG','8656','Latitude and longitude difference file','%s',NULL,NULL,NULL,NULL,NULL,NULL,%d);""" % (
                        wkid, esri_name, src_crs_auth_name, src_crs_code, dst_crs_auth_name, dst_crs_code, area_auth_name, area_code, accuracy, filename, deprecated)
                    all_sql.append(sql)

                    # Register known grid files in grid_alternatives so PROJ
                    # can find the shipped datum-grid packages.
                    if filename in ('c1hpgn', 'c2hpgn'):
                        sql = """INSERT INTO grid_alternatives VALUES ('%s', '%s', 'NTv2', 'hgridshift', 0, 'proj-datumgrid-north-america', NULL, NULL, NULL, NULL);""" % (
                            filename, filename + '.gsb')
                        all_sql.append(sql)
                    elif filename == 'wohpgn':
                        sql = """INSERT INTO grid_alternatives VALUES ('%s', '%s', 'CTable2', 'hgridshift', 0, 'proj-datumgrid', NULL, NULL, NULL, NULL);""" % (
                            filename, 'WO')
                        all_sql.append(sql)
                    elif filename == 'prvi':
                        continue
                    else:
                        print('not handled grid: ' + filename)
+
+
# Driver: run every importer in dependency order, then dump the collected
# SQL statements to data/sql/esri.sql.
import_linunit()
import_spheroid()
import_prime_meridian()
import_datum()
import_geogcs()
import_projcs()
import_vdatum()
import_vertcs()
import_hvcoordsys()  # compoundcrs
import_geogtran()  # transformations between GeogCRS

script_dir_name = os.path.dirname(os.path.realpath(__file__))
sql_dir_name = os.path.join(os.path.dirname(script_dir_name), 'data', 'sql')

# Binary mode keeps '\n' line endings on every platform; the 'with' block
# guarantees the file is closed even if a write fails (the original leaked
# the handle on error).
with open(os.path.join(sql_dir_name, 'esri') + '.sql', 'wb') as f:
    f.write("--- This file has been generated by scripts/build_db_from_esri.py. DO NOT EDIT !\n\n".encode('UTF-8'))
    for sql in all_sql:
        f.write((sql + '\n').encode('UTF-8'))

print('')
print('Finished !')
print('NOTE: adding into metadata: ESRI.VERSION = %s. Update if needed !' % version)
diff --git a/scripts/build_esri_projection_mapping.py b/scripts/build_esri_projection_mapping.py
new file mode 100644
index 00000000..65e7d500
--- /dev/null
+++ b/scripts/build_esri_projection_mapping.py
@@ -0,0 +1,738 @@
+#!/usr/bin/env python
+###############################################################################
+# $Id$
+#
+# Project: PROJ
+# Purpose: Generate mappings between ESRI projection names and parameters and
+# their EPSG equivalents.
+# Author: Even Rouault <even.rouault at spatialys.com>
+#
+###############################################################################
+# Copyright (c) 2018, Even Rouault <even.rouault at spatialys.com>
+#
+# Permission is hereby granted, free of charge, to any person obtaining a
+# copy of this software and associated documentation files (the "Software"),
+# to deal in the Software without restriction, including without limitation
+# the rights to use, copy, modify, merge, publish, distribute, sublicense,
+# and/or sell copies of the Software, and to permit persons to whom the
+# Software is furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
+# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+# DEALINGS IN THE SOFTWARE.
+###############################################################################
+
+import yaml
+
+# Map methods from pe_list_projection.csv to WKT2 naming
+
+config_str = """
+- Plate_Carree:
+ WKT2_name:
+ - EPSG_NAME_METHOD_EQUIDISTANT_CYLINDRICAL
+ - EPSG_NAME_METHOD_EQUIDISTANT_CYLINDRICAL_SPHERICAL
+ Params:
+ - False_Easting: EPSG_NAME_PARAMETER_FALSE_EASTING
+ - False_Northing: EPSG_NAME_PARAMETER_FALSE_NORTHING
+ - Central_Meridian: EPSG_NAME_PARAMETER_LONGITUDE_OF_NATURAL_ORIGIN
+ Cond:
+ - EPSG_NAME_PARAMETER_LATITUDE_1ST_STD_PARALLEL = 0
+
+- Equidistant_Cylindrical:
+ WKT2_name: EPSG_NAME_METHOD_EQUIDISTANT_CYLINDRICAL
+ Params:
+ - False_Easting: EPSG_NAME_PARAMETER_FALSE_EASTING
+ - False_Northing: EPSG_NAME_PARAMETER_FALSE_NORTHING
+ - Central_Meridian: EPSG_NAME_PARAMETER_LONGITUDE_OF_NATURAL_ORIGIN
+ - Standard_Parallel_1: EPSG_NAME_PARAMETER_LATITUDE_1ST_STD_PARALLEL
+
+- Miller_Cylindrical:
+ WKT2_name: PROJ_WKT2_NAME_METHOD_MILLER_CYLINDRICAL
+ Params:
+ - False_Easting: EPSG_NAME_PARAMETER_FALSE_EASTING
+ - False_Northing: EPSG_NAME_PARAMETER_FALSE_NORTHING
+ - Central_Meridian: EPSG_NAME_PARAMETER_LONGITUDE_OF_NATURAL_ORIGIN
+
+- Mercator: # Mercator 2SP
+ WKT2_name: EPSG_NAME_METHOD_MERCATOR_VARIANT_B
+ Params:
+ - False_Easting: EPSG_NAME_PARAMETER_FALSE_EASTING
+ - False_Northing: EPSG_NAME_PARAMETER_FALSE_NORTHING
+ - Central_Meridian: EPSG_NAME_PARAMETER_LONGITUDE_OF_NATURAL_ORIGIN
+ - Standard_Parallel_1: EPSG_NAME_PARAMETER_LATITUDE_1ST_STD_PARALLEL
+
+- Gauss_Kruger:
+ WKT2_name: EPSG_NAME_METHOD_TRANSVERSE_MERCATOR
+ Params:
+ - False_Easting: EPSG_NAME_PARAMETER_FALSE_EASTING
+ - False_Northing: EPSG_NAME_PARAMETER_FALSE_NORTHING
+ - Central_Meridian: EPSG_NAME_PARAMETER_LONGITUDE_OF_NATURAL_ORIGIN
+ - Scale_Factor: EPSG_NAME_PARAMETER_SCALE_FACTOR_AT_NATURAL_ORIGIN
+ - Latitude_Of_Origin: EPSG_NAME_PARAMETER_LATITUDE_OF_NATURAL_ORIGIN
+
+- Transverse_Mercator:
+ WKT2_name: EPSG_NAME_METHOD_TRANSVERSE_MERCATOR
+ Params:
+ - False_Easting: EPSG_NAME_PARAMETER_FALSE_EASTING
+ - False_Northing: EPSG_NAME_PARAMETER_FALSE_NORTHING
+ - Central_Meridian: EPSG_NAME_PARAMETER_LONGITUDE_OF_NATURAL_ORIGIN
+ - Scale_Factor: EPSG_NAME_PARAMETER_SCALE_FACTOR_AT_NATURAL_ORIGIN
+ - Latitude_Of_Origin: EPSG_NAME_PARAMETER_LATITUDE_OF_NATURAL_ORIGIN
+
+- Albers:
+ WKT2_name: EPSG_NAME_METHOD_ALBERS_EQUAL_AREA
+ Params:
+ - False_Easting: EPSG_NAME_PARAMETER_EASTING_FALSE_ORIGIN
+ - False_Northing: EPSG_NAME_PARAMETER_NORTHING_FALSE_ORIGIN
+ - Central_Meridian: EPSG_NAME_PARAMETER_LONGITUDE_FALSE_ORIGIN
+ - Standard_Parallel_1: EPSG_NAME_PARAMETER_LATITUDE_1ST_STD_PARALLEL
+ - Standard_Parallel_2: EPSG_NAME_PARAMETER_LATITUDE_2ND_STD_PARALLEL
+ - Latitude_Of_Origin: EPSG_NAME_PARAMETER_LATITUDE_FALSE_ORIGIN
+
+- Sinusoidal:
+ WKT2_name: PROJ_WKT2_NAME_METHOD_SINUSOIDAL
+ Params:
+ - False_Easting: EPSG_NAME_PARAMETER_FALSE_EASTING
+ - False_Northing: EPSG_NAME_PARAMETER_FALSE_NORTHING
+ - Central_Meridian: EPSG_NAME_PARAMETER_LONGITUDE_OF_NATURAL_ORIGIN
+
+- Mollweide:
+ WKT2_name: PROJ_WKT2_NAME_METHOD_MOLLWEIDE
+ Params:
+ - False_Easting: EPSG_NAME_PARAMETER_FALSE_EASTING
+ - False_Northing: EPSG_NAME_PARAMETER_FALSE_NORTHING
+ - Central_Meridian: EPSG_NAME_PARAMETER_LONGITUDE_OF_NATURAL_ORIGIN
+
+- Eckert_I:
+ WKT2_name: PROJ_WKT2_NAME_METHOD_ECKERT_I
+ Params:
+ - False_Easting: EPSG_NAME_PARAMETER_FALSE_EASTING
+ - False_Northing: EPSG_NAME_PARAMETER_FALSE_NORTHING
+ - Central_Meridian: EPSG_NAME_PARAMETER_LONGITUDE_OF_NATURAL_ORIGIN
+
+- Eckert_II:
+ WKT2_name: PROJ_WKT2_NAME_METHOD_ECKERT_II
+ Params:
+ - False_Easting: EPSG_NAME_PARAMETER_FALSE_EASTING
+ - False_Northing: EPSG_NAME_PARAMETER_FALSE_NORTHING
+ - Central_Meridian: EPSG_NAME_PARAMETER_LONGITUDE_OF_NATURAL_ORIGIN
+
+- Eckert_III:
+ WKT2_name: PROJ_WKT2_NAME_METHOD_ECKERT_III
+ Params:
+ - False_Easting: EPSG_NAME_PARAMETER_FALSE_EASTING
+ - False_Northing: EPSG_NAME_PARAMETER_FALSE_NORTHING
+ - Central_Meridian: EPSG_NAME_PARAMETER_LONGITUDE_OF_NATURAL_ORIGIN
+
+- Eckert_IV:
+ WKT2_name: PROJ_WKT2_NAME_METHOD_ECKERT_IV
+ Params:
+ - False_Easting: EPSG_NAME_PARAMETER_FALSE_EASTING
+ - False_Northing: EPSG_NAME_PARAMETER_FALSE_NORTHING
+ - Central_Meridian: EPSG_NAME_PARAMETER_LONGITUDE_OF_NATURAL_ORIGIN
+
+- Eckert_V:
+ WKT2_name: PROJ_WKT2_NAME_METHOD_ECKERT_V
+ Params:
+ - False_Easting: EPSG_NAME_PARAMETER_FALSE_EASTING
+ - False_Northing: EPSG_NAME_PARAMETER_FALSE_NORTHING
+ - Central_Meridian: EPSG_NAME_PARAMETER_LONGITUDE_OF_NATURAL_ORIGIN
+
+- Eckert_VI:
+ WKT2_name: PROJ_WKT2_NAME_METHOD_ECKERT_VI
+ Params:
+ - False_Easting: EPSG_NAME_PARAMETER_FALSE_EASTING
+ - False_Northing: EPSG_NAME_PARAMETER_FALSE_NORTHING
+ - Central_Meridian: EPSG_NAME_PARAMETER_LONGITUDE_OF_NATURAL_ORIGIN
+
+- Gall_Stereographic:
+ WKT2_name: PROJ_WKT2_NAME_METHOD_GALL_STEREOGRAPHIC
+ Params:
+ - False_Easting: EPSG_NAME_PARAMETER_FALSE_EASTING
+ - False_Northing: EPSG_NAME_PARAMETER_FALSE_NORTHING
+ - Central_Meridian: EPSG_NAME_PARAMETER_LONGITUDE_OF_NATURAL_ORIGIN
+
+# Behrmann: not handled
+
+- Winkel_I:
+ WKT2_name: "Winkel I"
+ Params:
+ - False_Easting: EPSG_NAME_PARAMETER_FALSE_EASTING
+ - False_Northing: EPSG_NAME_PARAMETER_FALSE_NORTHING
+ - Central_Meridian: EPSG_NAME_PARAMETER_LONGITUDE_OF_NATURAL_ORIGIN
+ - Standard_Parallel_1: EPSG_NAME_PARAMETER_LATITUDE_1ST_STD_PARALLEL
+
+- Winkel_II:
+ WKT2_name: "Winkel II"
+ Params:
+ - False_Easting: EPSG_NAME_PARAMETER_FALSE_EASTING
+ - False_Northing: EPSG_NAME_PARAMETER_FALSE_NORTHING
+ - Central_Meridian: EPSG_NAME_PARAMETER_LONGITUDE_OF_NATURAL_ORIGIN
+ - Standard_Parallel_1: EPSG_NAME_PARAMETER_LATITUDE_1ST_STD_PARALLEL
+
+- Lambert_Conformal_Conic:
+ - WKT2_name: EPSG_NAME_METHOD_LAMBERT_CONIC_CONFORMAL_1SP
+ Params:
+ - False_Easting: EPSG_NAME_PARAMETER_FALSE_EASTING
+ - False_Northing: EPSG_NAME_PARAMETER_FALSE_NORTHING
+ - Central_Meridian: EPSG_NAME_PARAMETER_LONGITUDE_OF_NATURAL_ORIGIN
+ - Standard_Parallel_1: EPSG_NAME_PARAMETER_LATITUDE_OF_NATURAL_ORIGIN
+ - Scale_Factor: EPSG_NAME_PARAMETER_SCALE_FACTOR_AT_NATURAL_ORIGIN
+ - Latitude_Of_Origin: EPSG_NAME_PARAMETER_LATITUDE_OF_NATURAL_ORIGIN # should be the same as Standard_Parallel_1
+
+ - WKT2_name: EPSG_NAME_METHOD_LAMBERT_CONIC_CONFORMAL_2SP
+ Params:
+ - False_Easting: EPSG_NAME_PARAMETER_EASTING_FALSE_ORIGIN
+ - False_Northing: EPSG_NAME_PARAMETER_NORTHING_FALSE_ORIGIN
+ - Central_Meridian: EPSG_NAME_PARAMETER_LONGITUDE_FALSE_ORIGIN
+ - Standard_Parallel_1: EPSG_NAME_PARAMETER_LATITUDE_1ST_STD_PARALLEL
+ - Standard_Parallel_2: EPSG_NAME_PARAMETER_LATITUDE_2ND_STD_PARALLEL
+ - Latitude_Of_Origin: EPSG_NAME_PARAMETER_LATITUDE_FALSE_ORIGIN
+
+ # From GDAL autotest
+ - WKT2_name: EPSG_NAME_METHOD_LAMBERT_CONIC_CONFORMAL_2SP
+ Params:
+ - False_Easting: EPSG_NAME_PARAMETER_EASTING_FALSE_ORIGIN
+ - False_Northing: EPSG_NAME_PARAMETER_NORTHING_FALSE_ORIGIN
+ - Central_Meridian: EPSG_NAME_PARAMETER_LONGITUDE_FALSE_ORIGIN
+ - Standard_Parallel_1: EPSG_NAME_PARAMETER_LATITUDE_1ST_STD_PARALLEL
+ - Standard_Parallel_2: EPSG_NAME_PARAMETER_LATITUDE_2ND_STD_PARALLEL
+ - Scale_Factor: 1.0
+ - Latitude_Of_Origin: EPSG_NAME_PARAMETER_LATITUDE_FALSE_ORIGIN
+
+ # Tempative mapping. Did not find any example
+ - WKT2_name: EPSG_NAME_METHOD_LAMBERT_CONIC_CONFORMAL_2SP_MICHIGAN
+ Params:
+ - False_Easting: EPSG_NAME_PARAMETER_EASTING_FALSE_ORIGIN
+ - False_Northing: EPSG_NAME_PARAMETER_NORTHING_FALSE_ORIGIN
+ - Central_Meridian: EPSG_NAME_PARAMETER_LONGITUDE_FALSE_ORIGIN
+ - Standard_Parallel_1: EPSG_NAME_PARAMETER_LATITUDE_1ST_STD_PARALLEL
+ - Standard_Parallel_2: EPSG_NAME_PARAMETER_LATITUDE_2ND_STD_PARALLEL
+ - Scale_Factor: EPSG_NAME_PARAMETER_ELLIPSOID_SCALE_FACTOR
+ - Latitude_Of_Origin: EPSG_NAME_PARAMETER_LATITUDE_FALSE_ORIGIN
+
+- Polyconic:
+ WKT2_name: EPSG_NAME_METHOD_AMERICAN_POLYCONIC
+ Params:
+ - False_Easting: EPSG_NAME_PARAMETER_FALSE_EASTING
+ - False_Northing: EPSG_NAME_PARAMETER_FALSE_NORTHING
+ - Central_Meridian: EPSG_NAME_PARAMETER_LONGITUDE_OF_NATURAL_ORIGIN
+ - Latitude_Of_Origin: EPSG_NAME_PARAMETER_LATITUDE_OF_NATURAL_ORIGIN
+
+- Quartic_Authalic:
+ WKT2_name: "Quartic Authalic"
+ Params:
+ - False_Easting: EPSG_NAME_PARAMETER_FALSE_EASTING
+ - False_Northing: EPSG_NAME_PARAMETER_FALSE_NORTHING
+ - Central_Meridian: EPSG_NAME_PARAMETER_LONGITUDE_OF_NATURAL_ORIGIN
+
+- Loximuthal:
+ WKT2_name: "Loximuthal"
+ Params:
+ - False_Easting: EPSG_NAME_PARAMETER_FALSE_EASTING
+ - False_Northing: EPSG_NAME_PARAMETER_FALSE_NORTHING
+ - Central_Meridian: EPSG_NAME_PARAMETER_LONGITUDE_OF_NATURAL_ORIGIN
+ - Central_Parallel: EPSG_NAME_PARAMETER_LATITUDE_OF_NATURAL_ORIGIN
+
+- Bonne:
+ WKT2_name: EPSG_NAME_METHOD_BONNE
+ Params:
+ - False_Easting: EPSG_NAME_PARAMETER_FALSE_EASTING
+ - False_Northing: EPSG_NAME_PARAMETER_FALSE_NORTHING
+ - Central_Meridian: EPSG_NAME_PARAMETER_LONGITUDE_OF_NATURAL_ORIGIN
+ - Standard_Parallel_1: EPSG_NAME_PARAMETER_LATITUDE_OF_NATURAL_ORIGIN
+
+- Hotine_Oblique_Mercator_Two_Point_Natural_Origin:
+ WKT2_name: PROJ_WKT2_NAME_METHOD_HOTINE_OBLIQUE_MERCATOR_TWO_POINT_NATURAL_ORIGIN
+ Params:
+ - False_Easting: EPSG_NAME_PARAMETER_EASTING_PROJECTION_CENTRE
+ - False_Northing: EPSG_NAME_PARAMETER_NORTHING_PROJECTION_CENTRE
+ - Latitude_Of_1st_Point: "Latitude of 1st point"
+ - Latitude_Of_2nd_Point: "Latitude of 2nd point"
+ - Scale_Factor: EPSG_NAME_PARAMETER_SCALE_FACTOR_INITIAL_LINE
+ - Longitude_Of_1st_Point: "Longitude of 1st point"
+ - Longitude_Of_2nd_Point: "Longitude of 2nd point"
+ - Latitude_Of_Center: EPSG_NAME_PARAMETER_LATITUDE_PROJECTION_CENTRE
+
+- Stereographic:
+ WKT2_name: PROJ_WKT2_NAME_METHOD_STEREOGRAPHIC
+ Params:
+ - False_Easting: EPSG_NAME_PARAMETER_FALSE_EASTING
+ - False_Northing: EPSG_NAME_PARAMETER_FALSE_NORTHING
+ - Central_Meridian: EPSG_NAME_PARAMETER_LONGITUDE_OF_NATURAL_ORIGIN
+ - Scale_Factor: EPSG_NAME_PARAMETER_SCALE_FACTOR_AT_NATURAL_ORIGIN
+ - Latitude_Of_Origin: EPSG_NAME_PARAMETER_LATITUDE_OF_NATURAL_ORIGIN
+
+- Equidistant_Conic:
+ WKT2_name: PROJ_WKT2_NAME_METHOD_EQUIDISTANT_CONIC
+ Params:
+ - False_Easting: EPSG_NAME_PARAMETER_FALSE_EASTING
+ - False_Northing: EPSG_NAME_PARAMETER_FALSE_NORTHING
+ - Central_Meridian: EPSG_NAME_PARAMETER_LONGITUDE_OF_NATURAL_ORIGIN
+ - Standard_Parallel_1: EPSG_NAME_PARAMETER_LATITUDE_1ST_STD_PARALLEL
+ - Standard_Parallel_2: EPSG_NAME_PARAMETER_LATITUDE_2ND_STD_PARALLEL
+ - Latitude_Of_Origin: EPSG_NAME_PARAMETER_LATITUDE_OF_NATURAL_ORIGIN
+
+- Cassini:
+ WKT2_name: EPSG_NAME_METHOD_CASSINI_SOLDNER
+ Params:
+ - False_Easting: EPSG_NAME_PARAMETER_FALSE_EASTING
+ - False_Northing: EPSG_NAME_PARAMETER_FALSE_NORTHING
+ - Central_Meridian: EPSG_NAME_PARAMETER_LONGITUDE_OF_NATURAL_ORIGIN
+ - Scale_Factor: 1.0 # fixed
+ - Latitude_Of_Origin: EPSG_NAME_PARAMETER_LATITUDE_OF_NATURAL_ORIGIN
+
+- Van_der_Grinten_I:
+ WKT2_name: PROJ_WKT2_NAME_METHOD_VAN_DER_GRINTEN
+ Params:
+ - False_Easting: EPSG_NAME_PARAMETER_FALSE_EASTING
+ - False_Northing: EPSG_NAME_PARAMETER_FALSE_NORTHING
+ - Central_Meridian: EPSG_NAME_PARAMETER_LONGITUDE_OF_NATURAL_ORIGIN
+
+- Robinson:
+ WKT2_name: PROJ_WKT2_NAME_METHOD_ROBINSON
+ Params:
+ - False_Easting: EPSG_NAME_PARAMETER_FALSE_EASTING
+ - False_Northing: EPSG_NAME_PARAMETER_FALSE_NORTHING
+ - Central_Meridian: EPSG_NAME_PARAMETER_LONGITUDE_OF_NATURAL_ORIGIN
+
+- Two_Point_Equidistant:
+ WKT2_name: PROJ_WKT2_NAME_METHOD_TWO_POINT_EQUIDISTANT
+ Params:
+ - False_Easting: EPSG_NAME_PARAMETER_FALSE_EASTING
+ - False_Northing: EPSG_NAME_PARAMETER_FALSE_NORTHING
+ - Latitude_Of_1st_Point: "Latitude of 1st point"
+ - Latitude_Of_2nd_Point: "Latitude of 2nd point"
+ - Longitude_Of_1st_Point: "Longitude of 1st point"
+ - Longitude_Of_2nd_Point: "Longitude of 2nd point"
+
+- Azimuthal_Equidistant:
+ WKT2_name: EPSG_NAME_METHOD_MODIFIED_AZIMUTHAL_EQUIDISTANT
+ Params:
+ - False_Easting: EPSG_NAME_PARAMETER_FALSE_EASTING
+ - False_Northing: EPSG_NAME_PARAMETER_FALSE_NORTHING
+ - Central_Meridian: EPSG_NAME_PARAMETER_LONGITUDE_OF_NATURAL_ORIGIN
+ - Latitude_Of_Origin: EPSG_NAME_PARAMETER_LATITUDE_OF_NATURAL_ORIGIN
+
+- Lambert_Azimuthal_Equal_Area:
+ WKT2_name: EPSG_NAME_METHOD_LAMBERT_AZIMUTHAL_EQUAL_AREA
+ Params:
+ - False_Easting: EPSG_NAME_PARAMETER_FALSE_EASTING
+ - False_Northing: EPSG_NAME_PARAMETER_FALSE_NORTHING
+ - Central_Meridian: EPSG_NAME_PARAMETER_LONGITUDE_OF_NATURAL_ORIGIN
+ - Latitude_Of_Origin: EPSG_NAME_PARAMETER_LATITUDE_OF_NATURAL_ORIGIN
+
+- Cylindrical_Equal_Area:
+ WKT2_name: EPSG_NAME_METHOD_LAMBERT_CYLINDRICAL_EQUAL_AREA_SPHERICAL
+ Params:
+ - False_Easting: EPSG_NAME_PARAMETER_FALSE_EASTING
+ - False_Northing: EPSG_NAME_PARAMETER_FALSE_NORTHING
+ - Central_Meridian: EPSG_NAME_PARAMETER_LONGITUDE_OF_NATURAL_ORIGIN
+ - Standard_Parallel_1: EPSG_NAME_PARAMETER_LATITUDE_1ST_STD_PARALLEL
+
+# No example in pe_list_projection.csv: temptative mapping !
+- Hotine_Oblique_Mercator_Two_Point_Center:
+ WKT2_name: PROJ_WKT2_NAME_METHOD_HOTINE_OBLIQUE_MERCATOR_TWO_POINT_NATURAL_ORIGIN
+ Params:
+ - False_Easting: EPSG_NAME_PARAMETER_EASTING_PROJECTION_CENTRE
+ - False_Northing: EPSG_NAME_PARAMETER_NORTHING_PROJECTION_CENTRE
+ - Latitude_Of_1st_Point: "Latitude of 1st point"
+ - Latitude_Of_2nd_Point: "Latitude of 2nd point"
+ - Scale_Factor: EPSG_NAME_PARAMETER_SCALE_FACTOR_INITIAL_LINE
+ - Longitude_Of_1st_Point: "Longitude of 1st point"
+ - Longitude_Of_2nd_Point: "Longitude of 2nd point"
+ - Latitude_Of_Origin: EPSG_NAME_PARAMETER_LATITUDE_OF_NATURAL_ORIGIN
+
+- Hotine_Oblique_Mercator_Azimuth_Natural_Origin:
+ WKT2_name: EPSG_NAME_METHOD_HOTINE_OBLIQUE_MERCATOR_VARIANT_A
+ Params:
+ - False_Easting: EPSG_NAME_PARAMETER_FALSE_EASTING
+ - False_Northing: EPSG_NAME_PARAMETER_FALSE_NORTHING
+ - Scale_Factor: EPSG_NAME_PARAMETER_SCALE_FACTOR_INITIAL_LINE
+ - Azimuth: EPSG_NAME_PARAMETER_AZIMUTH_INITIAL_LINE
+ # No EPSG_NAME_PARAMETER_ANGLE_RECTIFIED_TO_SKEW_GRID
+ - Longitude_Of_Center: EPSG_NAME_PARAMETER_LONGITUDE_PROJECTION_CENTRE
+ - Latitude_Of_Center: EPSG_NAME_PARAMETER_LATITUDE_PROJECTION_CENTRE
+
+
+- Hotine_Oblique_Mercator_Azimuth_Center:
+ WKT2_name: EPSG_NAME_METHOD_HOTINE_OBLIQUE_MERCATOR_VARIANT_B
+ Params:
+ - False_Easting: EPSG_NAME_PARAMETER_EASTING_PROJECTION_CENTRE
+ - False_Northing: EPSG_NAME_PARAMETER_NORTHING_PROJECTION_CENTRE
+ - Scale_Factor: EPSG_NAME_PARAMETER_SCALE_FACTOR_INITIAL_LINE
+ - Azimuth: EPSG_NAME_PARAMETER_AZIMUTH_INITIAL_LINE
+ # No EPSG_NAME_PARAMETER_ANGLE_RECTIFIED_TO_SKEW_GRID
+ - Longitude_Of_Center: EPSG_NAME_PARAMETER_LONGITUDE_PROJECTION_CENTRE
+ - Latitude_Of_Center: EPSG_NAME_PARAMETER_LATITUDE_PROJECTION_CENTRE
+
+- Double_Stereographic:
+ WKT2_name: EPSG_NAME_METHOD_OBLIQUE_STEREOGRAPHIC
+ Params:
+ - False_Easting: EPSG_NAME_PARAMETER_FALSE_EASTING
+ - False_Northing: EPSG_NAME_PARAMETER_FALSE_NORTHING
+ - Central_Meridian: EPSG_NAME_PARAMETER_LONGITUDE_OF_NATURAL_ORIGIN
+ - Scale_Factor: EPSG_NAME_PARAMETER_SCALE_FACTOR_AT_NATURAL_ORIGIN
+ - Latitude_Of_Origin: EPSG_NAME_PARAMETER_LATITUDE_OF_NATURAL_ORIGIN
+
+- Krovak:
+ - WKT2_name: EPSG_NAME_METHOD_KROVAK
+ Params:
+ - False_Easting: EPSG_NAME_PARAMETER_FALSE_EASTING
+ - False_Northing: EPSG_NAME_PARAMETER_FALSE_NORTHING
+ - Pseudo_Standard_Parallel_1: EPSG_NAME_PARAMETER_LATITUDE_PSEUDO_STANDARD_PARALLEL
+ - Scale_Factor: EPSG_NAME_PARAMETER_SCALE_FACTOR_PSEUDO_STANDARD_PARALLEL
+ - Azimuth: EPSG_NAME_PARAMETER_COLATITUDE_CONE_AXIS
+ - Longitude_Of_Center: EPSG_NAME_PARAMETER_LONGITUDE_OF_ORIGIN
+ - Latitude_Of_Center: EPSG_NAME_PARAMETER_LATITUDE_PROJECTION_CENTRE
+ - X_Scale: 1.0
+ - Y_Scale: 1.0
+ - XY_Plane_Rotation: 0.0
+
+ - WKT2_name: EPSG_NAME_METHOD_KROVAK_NORTH_ORIENTED
+ Params:
+ - False_Easting: EPSG_NAME_PARAMETER_FALSE_EASTING
+ - False_Northing: EPSG_NAME_PARAMETER_FALSE_NORTHING
+ - Pseudo_Standard_Parallel_1: EPSG_NAME_PARAMETER_LATITUDE_PSEUDO_STANDARD_PARALLEL
+ - Scale_Factor: EPSG_NAME_PARAMETER_SCALE_FACTOR_PSEUDO_STANDARD_PARALLEL
+ - Azimuth: EPSG_NAME_PARAMETER_COLATITUDE_CONE_AXIS
+ - Longitude_Of_Center: EPSG_NAME_PARAMETER_LONGITUDE_OF_ORIGIN
+ - Latitude_Of_Center: EPSG_NAME_PARAMETER_LATITUDE_PROJECTION_CENTRE
+ - X_Scale: -1.0
+ - Y_Scale: 1.0
+ - XY_Plane_Rotation: 90.0
+
+- New_Zealand_Map_Grid:
+ WKT2_name: EPSG_NAME_METHOD_NZMG
+ Params:
+ - False_Easting: EPSG_NAME_PARAMETER_FALSE_EASTING
+ - False_Northing: EPSG_NAME_PARAMETER_FALSE_NORTHING
+ - Longitude_Of_Origin: EPSG_NAME_PARAMETER_LONGITUDE_OF_NATURAL_ORIGIN
+ - Latitude_Of_Origin: EPSG_NAME_PARAMETER_LATITUDE_OF_NATURAL_ORIGIN
+
+- Orthographic:
+ WKT2_name: EPSG_NAME_METHOD_ORTHOGRAPHIC
+ Params:
+ - False_Easting: EPSG_NAME_PARAMETER_FALSE_EASTING
+ - False_Northing: EPSG_NAME_PARAMETER_FALSE_NORTHING
+ - Longitude_Of_Center: EPSG_NAME_PARAMETER_LONGITUDE_OF_NATURAL_ORIGIN
+ - Latitude_Of_Center: EPSG_NAME_PARAMETER_LATITUDE_OF_NATURAL_ORIGIN
+
+- Winkel_Tripel:
+ WKT2_name: "Winkel Tripel"
+ Params:
+ - False_Easting: EPSG_NAME_PARAMETER_FALSE_EASTING
+ - False_Northing: EPSG_NAME_PARAMETER_FALSE_NORTHING
+ - Central_Meridian: EPSG_NAME_PARAMETER_LONGITUDE_OF_NATURAL_ORIGIN
+ - Standard_Parallel_1: EPSG_NAME_PARAMETER_LATITUDE_1ST_STD_PARALLEL
+
+- Aitoff:
+ WKT2_name: "Aitoff"
+ Params:
+ - False_Easting: EPSG_NAME_PARAMETER_FALSE_EASTING
+ - False_Northing: EPSG_NAME_PARAMETER_FALSE_NORTHING
+ - Central_Meridian: EPSG_NAME_PARAMETER_LONGITUDE_OF_NATURAL_ORIGIN
+
+# Hammer_Aitoff: not handled
+
+# Flat_Polar_Quartic: not handled
+
+- Craster_Parabolic:
+ WKT2_name: "Craster Parabolic"
+ Params:
+ - False_Easting: EPSG_NAME_PARAMETER_FALSE_EASTING
+ - False_Northing: EPSG_NAME_PARAMETER_FALSE_NORTHING
+ - Central_Meridian: EPSG_NAME_PARAMETER_LONGITUDE_OF_NATURAL_ORIGIN
+
+- Gnomonic:
+ WKT2_name: PROJ_WKT2_NAME_METHOD_GNOMONIC
+ Params:
+ - False_Easting: EPSG_NAME_PARAMETER_FALSE_EASTING
+ - False_Northing: EPSG_NAME_PARAMETER_FALSE_NORTHING
+ - Longitude_Of_Center: EPSG_NAME_PARAMETER_LONGITUDE_OF_NATURAL_ORIGIN
+ - Latitude_Of_Center: EPSG_NAME_PARAMETER_LATITUDE_OF_NATURAL_ORIGIN
+
+# Times: not handled, but present in PROJ
+
+# Vertical_Near_Side_Perspective: not handled, but present in PROJ
+
+- Stereographic_North_Pole:
+ WKT2_name: EPSG_NAME_METHOD_POLAR_STEREOGRAPHIC_VARIANT_B
+ Params:
+ - False_Easting: EPSG_NAME_PARAMETER_FALSE_EASTING
+ - False_Northing: EPSG_NAME_PARAMETER_FALSE_NORTHING
+ - Central_Meridian: EPSG_NAME_PARAMETER_LONGITUDE_OF_ORIGIN
+ - Standard_Parallel_1: EPSG_NAME_PARAMETER_LATITUDE_STD_PARALLEL
+ Cond:
+ - EPSG_NAME_PARAMETER_LATITUDE_STD_PARALLEL > 0
+
+- Stereographic_South_Pole:
+ WKT2_name: EPSG_NAME_METHOD_POLAR_STEREOGRAPHIC_VARIANT_B
+ Params:
+ - False_Easting: EPSG_NAME_PARAMETER_FALSE_EASTING
+ - False_Northing: EPSG_NAME_PARAMETER_FALSE_NORTHING
+ - Central_Meridian: EPSG_NAME_PARAMETER_LONGITUDE_OF_ORIGIN
+ - Standard_Parallel_1: EPSG_NAME_PARAMETER_LATITUDE_STD_PARALLEL
+ Cond:
+ - EPSG_NAME_PARAMETER_LATITUDE_STD_PARALLEL < 0
+
+# Fuller: not handled
+
+- Rectified_Skew_Orthomorphic_Natural_Origin:
+ WKT2_name: EPSG_NAME_METHOD_HOTINE_OBLIQUE_MERCATOR_VARIANT_A
+ Params:
+ - False_Easting: EPSG_NAME_PARAMETER_FALSE_EASTING
+ - False_Northing: EPSG_NAME_PARAMETER_FALSE_NORTHING
+ - Scale_Factor: EPSG_NAME_PARAMETER_SCALE_FACTOR_INITIAL_LINE
+ - Azimuth: EPSG_NAME_PARAMETER_AZIMUTH_INITIAL_LINE
+ - Longitude_Of_Center: EPSG_NAME_PARAMETER_LONGITUDE_PROJECTION_CENTRE
+ - Latitude_Of_Center: EPSG_NAME_PARAMETER_LATITUDE_PROJECTION_CENTRE
+ - XY_Plane_Rotation: EPSG_NAME_PARAMETER_ANGLE_RECTIFIED_TO_SKEW_GRID
+
+# tentative mapping: no example
+- Rectified_Skew_Orthomorphic_Center:
+ WKT2_name: EPSG_NAME_METHOD_HOTINE_OBLIQUE_MERCATOR_VARIANT_B
+ Params:
+ - False_Easting: EPSG_NAME_PARAMETER_EASTING_PROJECTION_CENTRE
+ - False_Northing: EPSG_NAME_PARAMETER_NORTHING_PROJECTION_CENTRE
+ - Scale_Factor: EPSG_NAME_PARAMETER_SCALE_FACTOR_INITIAL_LINE
+ - Azimuth: EPSG_NAME_PARAMETER_AZIMUTH_INITIAL_LINE
+ - Longitude_Of_Center: EPSG_NAME_PARAMETER_LONGITUDE_PROJECTION_CENTRE
+ - Latitude_Of_Center: EPSG_NAME_PARAMETER_LATITUDE_PROJECTION_CENTRE
+ - XY_Plane_Rotation: EPSG_NAME_PARAMETER_ANGLE_RECTIFIED_TO_SKEW_GRID
+
+# Cube: not handled
+
+# Transverse_Mercator_Complex: not handled
+
+# Robinson_ARC_INFO: not handled
+
+# Local: not handled
+
+- Goode_Homolosine:
+ WKT2_name: "Goode Homolosine"
+ Params:
+ - False_Easting: EPSG_NAME_PARAMETER_FALSE_EASTING
+ - False_Northing: EPSG_NAME_PARAMETER_FALSE_NORTHING
+ - Central_Meridian: EPSG_NAME_PARAMETER_LONGITUDE_OF_NATURAL_ORIGIN
+
+# Berghaus_Star: not handled
+
+- Equidistant_Cylindrical_Ellipsoidal:
+ WKT2_name: EPSG_NAME_METHOD_EQUIDISTANT_CYLINDRICAL
+ Params:
+ - False_Easting: EPSG_NAME_PARAMETER_FALSE_EASTING
+ - False_Northing: EPSG_NAME_PARAMETER_FALSE_NORTHING
+ - Central_Meridian: EPSG_NAME_PARAMETER_LONGITUDE_OF_NATURAL_ORIGIN
+ - Standard_Parallel_1: EPSG_NAME_PARAMETER_LATITUDE_1ST_STD_PARALLEL
+
+# Ney_Modified_Conic: not handled
+
+- Laborde_Oblique_Mercator:
+ WKT2_name: EPSG_NAME_METHOD_LABORDE_OBLIQUE_MERCATOR
+ Params:
+ - False_Easting: EPSG_NAME_PARAMETER_FALSE_EASTING
+ - False_Northing: EPSG_NAME_PARAMETER_FALSE_NORTHING
+ - Scale_Factor: EPSG_NAME_PARAMETER_SCALE_FACTOR_INITIAL_LINE
+ - Azimuth: EPSG_NAME_PARAMETER_AZIMUTH_INITIAL_LINE
+ - Longitude_Of_Center: EPSG_NAME_PARAMETER_LONGITUDE_PROJECTION_CENTRE
+ - Latitude_Of_Center: EPSG_NAME_PARAMETER_LATITUDE_PROJECTION_CENTRE
+
+# IGAC_Plano_Cartesiano: not handled
+
+- Gnomonic_Ellipsoidal:
+ WKT2_name: PROJ_WKT2_NAME_METHOD_GNOMONIC
+ Params:
+ - False_Easting: EPSG_NAME_PARAMETER_FALSE_EASTING
+ - False_Northing: EPSG_NAME_PARAMETER_FALSE_NORTHING
+ - Longitude_Of_Center: EPSG_NAME_PARAMETER_LONGITUDE_OF_NATURAL_ORIGIN
+ - Latitude_Of_Center: EPSG_NAME_PARAMETER_LATITUDE_OF_NATURAL_ORIGIN
+
+# Polar_Stereographic_Variant_A: not handled, no example in .csv
+
+# Polar_Stereographic_Variant_B: not handled, no example in .csv
+
+# Polar_Stereographic_Variant_C: not handled
+
+# Mercator_Variant_A: not handled, no example in .csv
+
+# Mercator_Variant_C: not handled, no example in .csv
+
+# Hammer_Ellipsoidal: not handled
+
+# Quartic_Authalic_Ellipsoidal: not handled, no example in .csv
+
+# Eckert_Greifendorff: not handled
+
+- Wagner_IV:
+ WKT2_name: PROJ_WKT2_NAME_METHOD_WAGNER_IV
+ Params:
+ - False_Easting: EPSG_NAME_PARAMETER_FALSE_EASTING
+ - False_Northing: EPSG_NAME_PARAMETER_FALSE_NORTHING
+ - Central_Meridian: EPSG_NAME_PARAMETER_LONGITUDE_OF_NATURAL_ORIGIN
+ - Latitude_Of_Origin: 0
+
+- Wagner_V:
+ WKT2_name: PROJ_WKT2_NAME_METHOD_WAGNER_V
+ Params:
+ - False_Easting: EPSG_NAME_PARAMETER_FALSE_EASTING
+ - False_Northing: EPSG_NAME_PARAMETER_FALSE_NORTHING
+ - Central_Meridian: EPSG_NAME_PARAMETER_LONGITUDE_OF_NATURAL_ORIGIN
+
+- Wagner_VII:
+ WKT2_name: PROJ_WKT2_NAME_METHOD_WAGNER_VII
+ Params:
+ - False_Easting: EPSG_NAME_PARAMETER_FALSE_EASTING
+ - False_Northing: EPSG_NAME_PARAMETER_FALSE_NORTHING
+ - Central_Meridian: EPSG_NAME_PARAMETER_LONGITUDE_OF_NATURAL_ORIGIN
+
+# Natural_Earth: not handled
+
+# Natural_Earth_II: not handled
+
+# Patterson: not handled
+
+# Compact_Miller: not handled
+
+# Transverse_Mercator_NGA_2014: not handled
+
+# Transverse_Cylindrical_Equal_Area: not handled
+
+# Aspect_Adaptive_Cylindrical: not handled
+
+- Geostationary_Satellite:
+ WKT2_name: PROJ_WKT2_NAME_METHOD_GEOSTATIONARY_SATELLITE_SWEEP_Y
+ Params:
+ - False_Easting: EPSG_NAME_PARAMETER_FALSE_EASTING
+ - False_Northing: EPSG_NAME_PARAMETER_FALSE_NORTHING
+ - Longitude_Of_Center: EPSG_NAME_PARAMETER_LONGITUDE_OF_NATURAL_ORIGIN
+ - Height: "Satellite Height"
+ - Option: 0.0
+
+# Equidistant_Cylindrical_Auxiliary_Sphere: not handled
+
+- Mercator_Auxiliary_Sphere:
+ WKT2_name: EPSG_NAME_METHOD_POPULAR_VISUALISATION_PSEUDO_MERCATOR
+ Params:
+ - False_Easting: EPSG_NAME_PARAMETER_FALSE_EASTING
+ - False_Northing: EPSG_NAME_PARAMETER_FALSE_NORTHING
+ - Central_Meridian: EPSG_NAME_PARAMETER_LONGITUDE_OF_NATURAL_ORIGIN
+ - Standard_Parallel_1: EPSG_NAME_PARAMETER_LATITUDE_OF_NATURAL_ORIGIN
+ - Auxiliary_Sphere_Type: 0.0
+
+# Mollweide_Auxiliary_Sphere: not handled
+
+# Eckert_VI_Auxiliary_Sphere: not handled
+
+# Eckert_IV_Auxiliary_Sphere: not handled
+
+# Stereographic_Auxiliary_Sphere: not handled
+
+# Van_der_Grinten_I_Auxiliary_Sphere: not handled
+
+# Azimuthal_Equidistant_Auxiliary_Sphere: not handled
+
+# Lambert_Azimuthal_Equal_Area_Auxiliary_Sphere: not handled
+
+# Orthographic_Auxiliary_Sphere: not handled
+
+# Gnomonic_Auxiliary_Sphere: not handled
+
+"""
+
+config = yaml.load(config_str)
+
+all_projs = []
+
+
+def generate_mapping(WKT2_name, esri_proj_name, Params, suffix=''):
+
+ c_name = 'paramsESRI_%s%s' % (esri_proj_name, suffix)
+ if isinstance(WKT2_name, list):
+ for WKT2_name_s in WKT2_name:
+ all_projs.append([esri_proj_name, WKT2_name_s, c_name])
+ else:
+ all_projs.append([esri_proj_name, WKT2_name, c_name])
+ print('static const ESRIParamMapping %s[] = { ' % c_name)
+ for param in Params:
+ for param_name in param:
+ param_value = param[param_name]
+ if isinstance(param_value, str):
+ if param_value.startswith('EPSG_'):
+ print(' { "%s", %s, %s, 0.0 },' % (param_name, param_value, param_value.replace('_NAME_', '_CODE_')))
+ else:
+ print(' { "%s", "%s", 0, 0.0 },' % (param_name, param_value))
+ else:
+ print(' { "%s", nullptr, 0, %.1f },' % (param_name, param_value))
+ print(' { nullptr, nullptr, 0, 0.0 }')
+ print('};')
+
+
+print('// This file was generated by scripts/build_esri_projection_mapping.py. DO NOT EDIT !')
+print('')
+print("""
+#ifndef FROM_COORDINATE_OPERATION_CPP
+#error This file should only be included from coordinateoperation.cpp
+#endif
+
+#ifndef ESRI_PROJECTION_MAPPINGS_HH_INCLUDED
+#define ESRI_PROJECTION_MAPPINGS_HH_INCLUDED
+
+#include "coordinateoperation_internal.hpp"
+
+//! @cond Doxygen_Suppress
+
+// ---------------------------------------------------------------------------
+
+// anonymous namespace
+namespace {
+
+using namespace ::NS_PROJ;
+using namespace ::NS_PROJ::operation;
+
+""")
+
+for item in config:
+ for esri_proj_name in item:
+ proj_config = item[esri_proj_name]
+ if isinstance(proj_config, dict):
+ WKT2_name = proj_config['WKT2_name']
+ Params = proj_config['Params']
+ generate_mapping(WKT2_name, esri_proj_name, Params)
+ else:
+ count = 1
+ for subconfig in proj_config:
+ WKT2_name = subconfig['WKT2_name']
+ Params = subconfig['Params']
+ generate_mapping(WKT2_name, esri_proj_name, Params,
+ suffix='_alt%d' % count)
+ count += 1
+ print('')
+
+print('static const ESRIMethodMapping esriMappings[] = {')
+for esri_proj_name, WKT2_name, c_name in all_projs:
+ if WKT2_name.startswith('EPSG_'):
+ print(' { "%s", %s, %s, %s },' % (esri_proj_name, WKT2_name, WKT2_name.replace('_NAME_', '_CODE_'), c_name))
+ elif WKT2_name.startswith('PROJ_'):
+ print(' { "%s", %s, 0, %s },' % (esri_proj_name, WKT2_name, c_name))
+ else:
+ print(' { "%s", "%s", 0, %s },' % (esri_proj_name, WKT2_name, c_name))
+print('};')
+
+print("""
+// ---------------------------------------------------------------------------
+
+// end of anonymous namespace
+}
+
+//! @endcond
+
+#endif // ESRI_PROJECTION_MAPPINGS_HH_INCLUDED
+""")
diff --git a/scripts/build_grid_alternatives_generated.py b/scripts/build_grid_alternatives_generated.py
new file mode 100755
index 00000000..6570a771
--- /dev/null
+++ b/scripts/build_grid_alternatives_generated.py
@@ -0,0 +1,157 @@
+#!/usr/bin/env python
+###############################################################################
+# $Id$
+#
+# Project: PROJ
+# Purpose: Populate grid_alternatives with NAD83 -> NAD83(HPGN/HARN) grids
+# Author: Even Rouault <even.rouault at spatialys.com>
+#
+###############################################################################
+# Copyright (c) 2018, Even Rouault <even.rouault at spatialys.com>
+#
+# Permission is hereby granted, free of charge, to any person obtaining a
+# copy of this software and associated documentation files (the "Software"),
+# to deal in the Software without restriction, including without limitation
+# the rights to use, copy, modify, merge, publish, distribute, sublicense,
+# and/or sell copies of the Software, and to permit persons to whom the
+# Software is furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
+# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+# DEALINGS IN THE SOFTWARE.
+###############################################################################
+
+import os
+
+nadcon_grids = ['conus', 'alaska', 'hawaii',
+ 'prvi', 'stgeorge', 'stlrnc', 'stpaul']
+
+hpgn_grids = [
+ 'alhpgn.gsb',
+ 'arhpgn.gsb',
+ 'azhpgn.gsb',
+ #'c1hpgn.gsb', -- Not used in EPSG
+ #'c2hpgn.gsb', -- Not used in EPSG
+ 'cnhpgn.gsb',
+ 'cohpgn.gsb',
+ 'cshpgn.gsb',
+ 'emhpgn.gsb',
+ 'eshpgn.gsb',
+ 'ethpgn.gsb',
+ ('flhpgn.gsb', 'FL'),
+ 'gahpgn.gsb',
+ 'guhpgn.gsb',
+ 'hihpgn.gsb',
+ 'iahpgn.gsb',
+ 'ilhpgn.gsb',
+ 'inhpgn.gsb',
+ 'kshpgn.gsb',
+ 'kyhpgn.gsb',
+ 'lahpgn.gsb',
+ ('mdhpgn.gsb', 'MD'),
+ 'mehpgn.gsb',
+ 'mihpgn.gsb',
+ 'mnhpgn.gsb',
+ 'mohpgn.gsb',
+ 'mshpgn.gsb',
+ 'nbhpgn.gsb',
+ 'nchpgn.gsb',
+ 'ndhpgn.gsb',
+ 'nehpgn.gsb',
+ 'njhpgn.gsb',
+ 'nmhpgn.gsb',
+ 'nvhpgn.gsb',
+ 'nyhpgn.gsb',
+ 'ohhpgn.gsb',
+ 'okhpgn.gsb',
+ 'pahpgn.gsb',
+ 'pvhpgn.gsb',
+ 'schpgn.gsb',
+ 'sdhpgn.gsb',
+ ('tnhpgn.gsb', 'TN'),
+ 'uthpgn.gsb',
+ 'vahpgn.gsb',
+ ('wihpgn.gsb', 'WI'),
+ 'wmhpgn.gsb',
+ ('wohpgn.gsb', 'WO'),
+ 'wshpgn.gsb',
+ 'wthpgn.gsb',
+ 'wvhpgn.gsb',
+ 'wyhpgn.gsb',
+]
+
+script_dir_name = os.path.dirname(os.path.realpath(__file__))
+sql_dir_name = os.path.join(os.path.dirname(script_dir_name), 'data', 'sql')
+
+f = open(os.path.join(sql_dir_name, 'grid_alternatives_generated') + '.sql', 'wb')
+f.write("--- This file has been generated by scripts/build_grid_alternatives_generated.py. DO NOT EDIT !\n\n".encode('UTF-8'))
+
+f.write("-- NADCON (NAD27 -> NAD83) entries\n\n".encode('UTF-8'))
+
+for grid in nadcon_grids:
+ sql = """INSERT INTO grid_alternatives(original_grid_name,
+ proj_grid_name,
+ proj_grid_format,
+ proj_method,
+ inverse_direction,
+ package_name,
+ url, direct_download, open_license, directory)
+ VALUES ('%s',
+ '%s',
+ 'CTable2',
+ 'hgridshift',
+ 0,
+ 'proj-datumgrid',
+ NULL, NULL, NULL, NULL);""" % (grid + '.las', grid)
+ f.write((sql + '\n').encode('UTF-8'))
+
+
+f.write("-- NAD83 -> NAD83(HPGN) entries\n\n".encode('UTF-8'))
+
+for row in hpgn_grids:
+ try:
+ ntv2_name, ctable2_name = row
+ except:
+ ntv2_name = row
+ ctable2_name = None
+ las_filename = ntv2_name[0:-4] + ".las"
+ if ctable2_name:
+ sql = """INSERT INTO grid_alternatives(original_grid_name,
+ proj_grid_name,
+ proj_grid_format,
+ proj_method,
+ inverse_direction,
+ package_name,
+ url, direct_download, open_license, directory)
+ VALUES ('%s',
+ '%s',
+ 'CTable2',
+ 'hgridshift',
+ 0,
+ 'proj-datumgrid',
+ NULL, NULL, NULL, NULL);""" % (las_filename, ctable2_name)
+ else:
+ sql = """INSERT INTO grid_alternatives(original_grid_name,
+ proj_grid_name,
+ proj_grid_format,
+ proj_method,
+ inverse_direction,
+ package_name,
+ url, direct_download, open_license, directory)
+ VALUES ('%s',
+ '%s',
+ 'NTv2',
+ 'hgridshift',
+ 0,
+ 'proj-datumgrid-north-america',
+ NULL, NULL, NULL, NULL);""" % (las_filename, ntv2_name)
+ f.write((sql + '\n').encode('UTF-8'))
+
+f.close()
diff --git a/scripts/cppcheck.sh b/scripts/cppcheck.sh
index bac95847..bd677e3d 100755
--- a/scripts/cppcheck.sh
+++ b/scripts/cppcheck.sh
@@ -23,7 +23,8 @@ for dirname in ${TOPDIR}/src; do
echo "Running cppcheck on $dirname... (can be long)"
if ! cppcheck --inline-suppr --template='{file}:{line},{severity},{id},{message}' \
--enable=all --inconclusive --std=posix \
- -DCPPCHECK -DACCEPT_USE_OF_DEPRECATED_PROJ_API_H\
+ -DCPPCHECK -D__cplusplus=201103L -DACCEPT_USE_OF_DEPRECATED_PROJ_API_H \
+ -I${TOPDIR}/src -I${TOPDIR}/include \
"$dirname" \
-j 8 >>${LOG_FILE} 2>&1 ; then
echo "cppcheck failed"
@@ -33,7 +34,7 @@ done
ret_code=0
-grep -v "unmatchedSuppression" ${LOG_FILE} > ${LOG_FILE}.tmp
+grep -v "unmatchedSuppression" ${LOG_FILE} | grep -v "nn.hpp" > ${LOG_FILE}.tmp
mv ${LOG_FILE}.tmp ${LOG_FILE}
if grep "null pointer" ${LOG_FILE} ; then
diff --git a/scripts/create_c_api_projections.py b/scripts/create_c_api_projections.py
new file mode 100755
index 00000000..5d10a16b
--- /dev/null
+++ b/scripts/create_c_api_projections.py
@@ -0,0 +1,159 @@
+#!/usr/bin/env python
+###############################################################################
+# $Id$
+#
+# Project: PROJ
+# Purpose:  Parse XML output of Doxygen on coordinateoperation.hpp to create
+#           C API for projections.
+# Author: Even Rouault <even.rouault at spatialys.com>
+#
+###############################################################################
+# Copyright (c) 2018, Even Rouault <even.rouault at spatialys.com>
+#
+# Permission is hereby granted, free of charge, to any person obtaining a
+# copy of this software and associated documentation files (the "Software"),
+# to deal in the Software without restriction, including without limitation
+# the rights to use, copy, modify, merge, publish, distribute, sublicense,
+# and/or sell copies of the Software, and to permit persons to whom the
+# Software is furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
+# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+# DEALINGS IN THE SOFTWARE.
+###############################################################################
+
+from lxml import etree
+import os
+
+script_dir_name = os.path.dirname(os.path.realpath(__file__))
+
+# Make sure to run doxygen
+if not 'SKIP_DOXYGEN' in os.environ:
+ os.system("bash " + os.path.join(script_dir_name, "doxygen.sh"))
+
+xmlfilename = os.path.join(os.path.dirname(script_dir_name),
+ 'docs/build/xml/classosgeo_1_1proj_1_1operation_1_1Conversion.xml')
+
+tree = etree.parse(open(xmlfilename, 'rt'))
+root = tree.getroot()
+compounddef = root.find('compounddef')
+
+header = open('projections.h', 'wt')
+cppfile = open('projections.cpp', 'wt')
+test_cppfile = open('test_projections.cpp', 'wt')
+
+header.write("/* BEGIN: Generated by scripts/create_c_api_projections.py*/\n")
+
+cppfile.write("/* BEGIN: Generated by scripts/create_c_api_projections.py*/\n")
+cppfile.write("\n");
+
+test_cppfile.write("/* BEGIN: Generated by scripts/create_c_api_projections.py*/\n")
+
+for sectiondef in compounddef.iter('sectiondef'):
+ if sectiondef.attrib['kind'] == 'public-static-func':
+ for func in sectiondef.iter('memberdef'):
+ name = func.find('name').text
+ assert name.startswith('create')
+ if name in ('create', 'createChangeVerticalUnit',
+ 'createAxisOrderReversal', 'createGeographicGeocentric'):
+ continue
+ params = []
+ has_angle = False
+ has_linear = False
+ for param in func.iter('param'):
+ type = param.find('type').xpath("normalize-space()")
+ if type.find('Angle') >= 0:
+ has_angle = True
+ if type.find('Length') >= 0:
+ has_linear = True
+ paramname = param.find('declname').text
+ if paramname == 'properties':
+ continue
+ params.append((type, paramname))
+
+ shortName = name[len('create'):]
+
+ decl = "proj_obj_create_projected_crs_"
+ decl += shortName
+ decl += "(\n"
+ decl += " PJ_OBJ* geodetic_crs, const char* crs_name,\n"
+ for param in params:
+ if param[0] in ('int', 'bool'):
+ decl += " int " + param[1] + ",\n"
+ else:
+ decl += " double " + param[1] + ",\n"
+ if has_angle:
+ decl += " const char* angUnitName, double angUnitConvFactor"
+ if has_linear:
+ decl += ","
+ decl += "\n"
+ if has_linear:
+ decl += " const char* linearUnitName, double linearUnitConvFactor"
+ decl += ")"
+
+ header.write("PJ_OBJ PROJ_DLL *" + decl + ";\n\n")
+
+ briefdescription = func.find('briefdescription/para').xpath("normalize-space()")
+ briefdescription = briefdescription.replace("Instanciate ", "Instanciate a ProjectedCRS with ")
+
+ cppfile.write("// ---------------------------------------------------------------------------\n\n")
+ cppfile.write("/** \\brief " + briefdescription + "\n")
+ cppfile.write(" *\n")
+ cppfile.write(" * See osgeo::proj::operation::Conversion::create" + shortName + "().\n")
+ cppfile.write(" *\n")
+ cppfile.write(" * Linear parameters are expressed in (linearUnitName, linearUnitConvFactor).\n")
+ if has_angle:
+ cppfile.write(" * Angular parameters are expressed in (angUnitName, angUnitConvFactor).\n")
+ cppfile.write(" */\n")
+ cppfile.write("PJ_OBJ* " + decl + "{\n");
+ if not has_linear:
+ cppfile.write(" const auto& linearUnit = UnitOfMeasure::METRE;\n")
+ else:
+ cppfile.write(" UnitOfMeasure linearUnit(createLinearUnit(linearUnitName, linearUnitConvFactor));\n")
+ if has_angle:
+ cppfile.write(" UnitOfMeasure angUnit(createAngularUnit(angUnitName, angUnitConvFactor));\n")
+ cppfile.write(" auto conv = Conversion::create" + shortName + "(PropertyMap()")
+ for param in params:
+ if param[0] in 'int':
+ cppfile.write(", " + param[1])
+ elif param[0] in 'bool':
+ cppfile.write(", " + param[1] + " != 0")
+ elif param[0].find('Angle') >= 0:
+ cppfile.write(", Angle(" + param[1] + ", angUnit)")
+ elif param[0].find('Length') >= 0:
+ cppfile.write(", Length(" + param[1] + ", linearUnit)")
+ elif param[0].find('Scale') >= 0:
+ cppfile.write(", Scale(" + param[1] + ")")
+
+ cppfile.write(");\n")
+ cppfile.write(" return proj_obj_create_projected_crs(geodetic_crs, crs_name, conv, linearUnit);\n")
+ cppfile.write("}\n")
+
+ test_cppfile.write("{\n")
+ test_cppfile.write(" auto projCRS = proj_obj_create_projected_crs_" + shortName + "(\n")
+ test_cppfile.write(" geogCRS, nullptr")
+ for param in params:
+ test_cppfile.write(", 0")
+ if has_angle:
+ test_cppfile.write(", \"Degree\", 0.0174532925199433")
+ if has_angle:
+ test_cppfile.write(", \"Metre\", 1.0")
+ test_cppfile.write(");\n")
+ test_cppfile.write(" ObjectKeeper keeper_projCRS(projCRS);\n")
+ test_cppfile.write(" ASSERT_NE(projCRS, nullptr);\n")
+ test_cppfile.write("}\n")
+
+
+header.write("/* END: Generated by scripts/create_c_api_projections.py*/\n")
+cppfile.write("/* END: Generated by scripts/create_c_api_projections.py*/\n")
+
+test_cppfile.write("/* END: Generated by scripts/create_c_api_projections.py*/\n")
+
+print('projections.h and .cpp, and test_projections.cpp have been generated. Manually merge them now') \ No newline at end of file
diff --git a/scripts/create_proj_symbol_rename.sh b/scripts/create_proj_symbol_rename.sh
new file mode 100755
index 00000000..7ede0df2
--- /dev/null
+++ b/scripts/create_proj_symbol_rename.sh
@@ -0,0 +1,91 @@
+#!/bin/bash
+
+# Script to extract exported symbols and rename them
+# to avoid clashing with other older libproj
+#
+# This is done in provision for the integration of PROJ master with GDAL. GDAL is
+# a complex library that links with many libraries, a number of them linking with
+# PROJ. However in the continuous integration setups used by GDAL, it would be
+# impractical to recompile all those libraries that use the system libproj (4.X or 5.X)
+# that comes with the distribution. Linking GDAL directly against PROJ master
+# and this system libproj with potential different ABI is prone to clash/crash
+# (we experimented that painfully in GDAL with its internal libtiff 4.X copy whereas
+# systems shipped with libtiff 3.X)
+# Hence this solution to rename the symbols of PROJ master so that PROJ master
+# can be used by GDAL without conflicting with the system PROJ.
+# The renaming only happens if -DPROJ_RENAME_SYMBOLS is passed in CFLAGS and CXXFLAGS.
+#
+# Note: we potentially should do the same for C++ symbols, but that is not needed
+# for now, and we would just set the NS_PROJ existing macro to change the C++ base
+# namespace which defaults to osgeo::proj currently.
+
+# To be run from a 'build' subdir after running configure
+
+PROJ_ROOT=$(cd $(dirname ${BASH_SOURCE[0]})/..; pwd)
+
+rm -rf tmp_alias
+mkdir tmp_alias
+for i in ${PROJ_ROOT}/src/*.c; do
+ if ! echo "$i" | grep -q "test228.c"; then
+ if ! echo "$i" | grep -q "proj.c" ; then
+ if ! echo "$i" | grep -q "geod.c" ; then
+ if ! echo "$i" | grep -q "nad2bin.c" ; then
+ if ! echo "$i" | grep -q "geodtest.c" ; then
+ if ! echo "$i" | grep -q "multistresstest.c" ; then
+ if ! echo "$i" | grep -q "gie.c" ; then
+ if ! echo "$i" | grep -q "cs2cs.c" ; then
+ if ! echo "$i" | grep -q "cct.c" ; then
+ echo $i
+ (cd tmp_alias; gcc -fvisibility=hidden ${i} -I../src -I${PROJ_ROOT}/src -I${PROJ_ROOT}/include -fPIC -c)
+ fi
+ fi
+ fi
+ fi
+ fi
+ fi
+ fi
+ fi
+ fi
+done
+for i in ${PROJ_ROOT}/src/*.cpp; do
+ if ! echo "$i" | grep -q "projinfo.cpp"; then
+ echo $i
+ (cd tmp_alias; g++ -fvisibility=hidden -std=c++11 ${i} -I../src -I${PROJ_ROOT}/src -I${PROJ_ROOT}/include -fPIC -c)
+ fi
+done
+
+(cd tmp_alias; g++ -fvisibility=hidden -std=c++11 *.o -shared -o libproj.so)
+
+OUT_FILE=proj_symbol_rename.h
+
+rm $OUT_FILE 2>/dev/null
+
+echo "/* This is a generated file by create_proj_symbol_rename.sh. *DO NOT EDIT MANUALLY !* */" >> $OUT_FILE
+echo "#ifndef PROJ_SYMBOL_RENAME_H" >> $OUT_FILE
+echo "#define PROJ_SYMBOL_RENAME_H" >> $OUT_FILE
+
+symbol_list=$(objdump -t tmp_alias/libproj.so | grep " g " | grep .text | awk '{print $6}' | grep -v -e _Z | sort)
+for symbol in $symbol_list
+do
+ echo "#define $symbol internal_$symbol" >> $OUT_FILE
+done
+
+rodata_symbol_list=$(objdump -t tmp_alias/libproj.so | grep " g O \\.rodata" | awk '{print $6}')
+for symbol in $rodata_symbol_list
+do
+ echo "#define $symbol internal_$symbol" >> $OUT_FILE
+done
+
+#data_symbol_list=$(objdump -t tmp_alias/libproj.so | grep "\\.data" | grep -v -e __dso_handle -e __TMC_END__ | awk '{print $6}' | grep -v "\\."| grep -v _Z)
+#for symbol in $data_symbol_list
+#do
+# echo "#define $symbol internal_$symbol" >> $OUT_FILE
+#done
+
+bss_symbol_list=$(objdump -t tmp_alias/libproj.so | grep "g O \\.bss" | awk '{print $6}' | grep -v "\\."| grep -v _Z)
+for symbol in $bss_symbol_list
+do
+ echo "#define $symbol internal_$symbol" >> $OUT_FILE
+done
+
+echo "#endif /* PROJ_SYMBOL_RENAME_H */" >> $OUT_FILE
diff --git a/scripts/doxygen.sh b/scripts/doxygen.sh
new file mode 100755
index 00000000..2f44dca8
--- /dev/null
+++ b/scripts/doxygen.sh
@@ -0,0 +1,60 @@
+#!/bin/bash
+
+set -eu
+
+SCRIPT_DIR=$(dirname "$0")
+case $SCRIPT_DIR in
+ "/"*)
+ ;;
+ ".")
+ SCRIPT_DIR=$(pwd)
+ ;;
+ *)
+ SCRIPT_DIR=$(pwd)/$(dirname "$0")
+ ;;
+esac
+
+TOPDIR="$SCRIPT_DIR/.."
+
+pushd "${TOPDIR}" > /dev/null || exit
+
+# HTML generation
+# Check that doxygen runs warning free
+rm -rf docs/build/html/
+mkdir -p docs/build/
+doxygen > /tmp/docs_log.txt 2>&1
+if grep -i warning /tmp/docs_log.txt; then
+ echo "Doxygen warnings found" && cat /tmp/docs_log.txt && /bin/false;
+else
+ echo "No Doxygen warnings found";
+fi
+
+
+# XML generation (for Breathe)
+mkdir -p docs/build/tmp_breathe
+python scripts/generate_breathe_friendly_general_doc.py
+rm -rf docs/build/xml/
+(cat Doxyfile; printf "GENERATE_HTML=NO\nGENERATE_XML=YES\nINPUT= src include/proj src/proj.h docs/build/tmp_breathe/general_doc.dox.reworked.h") | doxygen - > /tmp/docs_log.txt 2>&1
+if grep -i warning /tmp/docs_log.txt; then
+ echo "Doxygen warnings found" && cat /tmp/docs_log.txt && /bin/false;
+else
+ echo "No Doxygen warnings found";
+fi
+
+for i in ${TOPDIR}/docs/build/xml/*; do
+
+# Fix Breathe error on Doxygen XML
+# Type must be either just a name or a typedef-like declaration.
+# If just a name:
+# Invalid definition: Expected end of definition. [error at 32]
+# osgeo::proj::common::MeasurePtr = typedef std::shared_ptr<Measure>
+ sed "s/ = typedef /=/g" < $i > $i.tmp;
+ mv $i.tmp $i
+done
+
+# There is a confusion for Breathe between PROJStringFormatter::Convention and WKTFormatter:Convention
+sed "s/Convention/Convention_/g" < ${TOPDIR}/docs/build/xml/classosgeo_1_1proj_1_1io_1_1WKTFormatter.xml | sed "s/WKT2_2018/_WKT2_2018/g" | sed "s/WKT2_2015/_WKT2_2015/g" | sed "s/WKT1_GDAL/_WKT1_GDAL/g" | sed "s/WKT1_ESRI/_WKT1_ESRI/g" > ${TOPDIR}/docs/build/xml/classosgeo_1_1proj_1_1io_1_1WKTFormatter.xml.tmp
+mv ${TOPDIR}/docs/build/xml/classosgeo_1_1proj_1_1io_1_1WKTFormatter.xml.tmp ${TOPDIR}/docs/build/xml/classosgeo_1_1proj_1_1io_1_1WKTFormatter.xml
+
+popd > /dev/null || exit
+
diff --git a/scripts/filter_lcov_info.py b/scripts/filter_lcov_info.py
new file mode 100755
index 00000000..98d630a3
--- /dev/null
+++ b/scripts/filter_lcov_info.py
@@ -0,0 +1,26 @@
+#!/usr/bin/env python
+
+# Remove coverage from system include files in lcov output
+
+import sys
+import fnmatch
+
+extension_filter = None
+if len(sys.argv) == 2:
+ extension_filter = sys.argv[1].split(',')
+
+output_lines = True
+for line in sys.stdin.readlines():
+ if line.startswith('SF:/usr/include'):
+ output_lines = False
+ elif line.startswith('SF:'):
+ if extension_filter:
+ output_lines = False
+ for filter in extension_filter:
+ if fnmatch.fnmatch(line.strip('\n'), filter):
+ output_lines = True
+ break
+ else:
+ output_lines = True
+ if output_lines:
+ sys.stdout.write(line)
diff --git a/scripts/fix_typos.sh b/scripts/fix_typos.sh
index 06dfa821..fa3168f9 100755
--- a/scripts/fix_typos.sh
+++ b/scripts/fix_typos.sh
@@ -51,4 +51,4 @@ WORDS_WHITE_LIST="metres,als,lsat,twon,ang,PJD_ERR_UNKNOW_UNIT_ID,PJD_ERR_LSAT_N
python3 fix_typos/codespell/codespell.py -w -i 3 -q 2 -S $EXCLUDED_FILES \
-x scripts/typos_whitelist.txt --words-white-list=$WORDS_WHITE_LIST \
- -D fix_typos/gdal_dict.txt ./src ./docs ./cmake ./examples
+ -D fix_typos/gdal_dict.txt ./include ./src ./docs ./cmake ./examples
diff --git a/scripts/gen_html_coverage.sh b/scripts/gen_html_coverage.sh
index 95dfb1af..bb239fbf 100755
--- a/scripts/gen_html_coverage.sh
+++ b/scripts/gen_html_coverage.sh
@@ -1,5 +1,43 @@
#!/bin/sh
set -eu

# Generate an HTML coverage report (coverage_html/) from lcov data.
# To filter only on c++ stuff:
# scripts/gen_html_coverage.sh -ext "*.cpp,*.hh"

# Resolve the absolute directory holding this script, so that
# filter_lcov_info.py can be found regardless of the caller's cwd.
SCRIPT_DIR=$(dirname "$0")
case $SCRIPT_DIR in
    "/"*)
        ;;
    ".")
        SCRIPT_DIR=$(pwd)
        ;;
    *)
        SCRIPT_DIR=$(pwd)/$(dirname "$0")
        ;;
esac

FILTER=""
if test $# -ge 1; then
    if test "$1" = "--help"; then
        echo "Usage: gen_html_coverage.sh [--help] [-ext \"ext1,...\"]"
        exit
    fi

    if test "$1" = "-ext"; then
        FILTER="$2"
        shift
        shift
    fi

    if test $# -ge 1; then
        # BUGFIX: an unrecognized option used to print usage and exit 0,
        # so 'set -e' callers never noticed the misuse.  Report to stderr
        # and exit non-zero.
        echo "Invalid option: $1" >&2
        echo "Usage: gen_html_coverage.sh [--help] [-ext \"ext1,...\"]" >&2
        exit 1
    fi
fi

rm -rf coverage_html
lcov --directory src --directory include --capture --output-file proj.info
# Drop /usr/include records and, when -ext was given, keep only matching files.
"$SCRIPT_DIR"/filter_lcov_info.py "$FILTER" < proj.info > proj.info.filtered
mv proj.info.filtered proj.info
genhtml -o ./coverage_html --ignore-errors source --num-spaces 2 proj.info
diff --git a/scripts/generate_breathe_friendly_general_doc.py b/scripts/generate_breathe_friendly_general_doc.py
new file mode 100644
index 00000000..746bdd04
--- /dev/null
+++ b/scripts/generate_breathe_friendly_general_doc.py
@@ -0,0 +1,130 @@
+#!/usr/bin/env python
+###############################################################################
+# $Id$
+#
+# Project: PROJ
+# Purpose: Rework general_doc.dox in a Breathe friendly (hacky) way
+# Author: Even Rouault <even.rouault at spatialys.com>
+#
+###############################################################################
+# Copyright (c) 2018, Even Rouault <even.rouault at spatialys.com>
+#
+# Permission is hereby granted, free of charge, to any person obtaining a
+# copy of this software and associated documentation files (the "Software"),
+# to deal in the Software without restriction, including without limitation
+# the rights to use, copy, modify, merge, publish, distribute, sublicense,
+# and/or sell copies of the Software, and to permit persons to whom the
+# Software is furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
+# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+# DEALINGS IN THE SOFTWARE.
+###############################################################################
+
import os

# HACK HACK HACK
# Rewrite general_doc.dox's \page/\section/\subsection hierarchy as fake
# namespaces, because Breathe cannot handle Doxygen sections and
# references to them.

# Locate the repository root relative to this script, then derive the
# input .dox path and the reworked output path from it.
script_dir_name = os.path.dirname(os.path.realpath(__file__))
topdir = os.path.dirname(script_dir_name)
general_doc_dox = os.path.join(topdir, 'src', 'general_doc.dox')
general_doc_dox_reworked_h = os.path.join(
    topdir, 'docs/build/tmp_breathe', 'general_doc.dox.reworked.h')

out = open(general_doc_dox_reworked_h, 'wt')
+
+
class Section:
    """One node of the Doxygen section tree (\\page, \\section, ...).

    Holds the Doxygen anchor name, the human-readable title, the raw text
    content, and the child sections nested one level below.
    """

    def __init__(self):
        # Anchor/name token following the sectioning keyword.
        self.name = None
        # Remainder of the sectioning line, i.e. the displayed title.
        self.title = None
        # Accumulated body text, one '\n'-terminated line at a time.
        self.content = ''
        # Direct child Section objects, in document order.
        self.children = []

    def serialize(self, out):
        """Write this section and, recursively, its children to *out*.

        The section is emitted as a documented empty C++ namespace, which
        is the "Breathe friendly" stand-in for a Doxygen section.
        """
        out.write("/** \\brief %s\n" % self.title)
        out.write("\n")
        out.write("%s\n" % self.content)
        out.write("*/\n")
        out.write("namespace %s {\n" % self.name)
        out.write("}\n")
        for child in self.children:
            child.serialize(out)
+
+
stack = []
current = None

# Map each Doxygen sectioning keyword to its depth in the section tree.
# Raw strings: '\page' etc. contain invalid escape sequences (\p, \s)
# that raise SyntaxWarning on modern Python.
SECTION_DEPTH = {
    r'\page': 0,
    r'\section': 1,
    r'\subsection': 2,
    r'\subsubsection': 3,
}

with open(general_doc_dox, 'rt') as dox:
    for line in dox:
        line = line.strip()
        if line == '/*!' or line == '*/':
            # Doxygen comment delimiters carry no content.
            continue

        tokens = line.split(' ')
        depth = SECTION_DEPTH.get(tokens[0])
        if depth is not None:
            # One table-driven branch replaces the four copy-pasted
            # \page/\section/\subsection/\subsubsection blocks.
            new = Section()
            new.name = tokens[1]
            new.title = ' '.join(tokens[2:])
            if depth == 0:
                # A \page starts a new top-level entry; it is not
                # attached to any parent.
                current = new
            else:
                # Pop back to the parent level, then attach the new
                # section under it.
                stack = stack[0:depth]
                current = stack[-1]
                current.children.append(new)
                current = new
            stack.append(new)
            continue

        if current:
            current.content += line + '\n'

# Serialize everything below the first \page.  Guard against an input
# file with no \page at all, which previously raised IndexError.
if stack:
    for child in stack[0].children:
        child.serialize(out)
diff --git a/scripts/reformat.sh b/scripts/reformat.sh
new file mode 100755
index 00000000..c3826322
--- /dev/null
+++ b/scripts/reformat.sh
@@ -0,0 +1,17 @@
#!/bin/sh
set -eu

# Reformat one C++ file in place with clang-format (LLVM style, 4-space
# indent), keeping a .before_reformat backup when anything changed.
# Argument 1: path of the file to reformat.

# Refuse to reformat nn.hpp: this is third-party code
# Quote "$1" and the command substitution: an unquoted path with spaces
# would be word-split and the test would misbehave.
if test "$(basename "$1")" = "nn.hpp"; then
    exit 0
fi

clang-format -style="{BasedOnStyle: llvm, IndentWidth: 4}" "$1" > "$1.reformatted"
if diff -u "$1.reformatted" "$1"; then
    # No reformatting: remove temporary file
    rm "$1.reformatted"
else
    # Differences. Backup original file, and use reformatted file
    cp "$1" "$1.before_reformat"
    mv "$1.reformatted" "$1"
fi
diff --git a/scripts/reformat_cpp.sh b/scripts/reformat_cpp.sh
new file mode 100755
index 00000000..51127529
--- /dev/null
+++ b/scripts/reformat_cpp.sh
@@ -0,0 +1,22 @@
#!/bin/sh
set -eu

# Run reformat.sh over every first-party C++ header and source file.

# Turn the script's directory into an absolute path so reformat.sh can
# be invoked from any working directory.
SCRIPT_DIR=$(dirname "$0")
if [ "$SCRIPT_DIR" = "." ]; then
    SCRIPT_DIR=$(pwd)
elif [ "${SCRIPT_DIR#/}" = "$SCRIPT_DIR" ]; then
    # Relative path: anchor it to the current directory.
    SCRIPT_DIR=$(pwd)/$(dirname "$0")
fi

TOPDIR="$SCRIPT_DIR/.."

for src in "$TOPDIR"/include/proj/*.hpp "$TOPDIR"/include/proj/internal/*.hpp "$TOPDIR"/src/*.cpp "$TOPDIR"/test/unit/test*.cpp; do
    case "$src" in
        # lru_cache.hpp is third-party code: leave its formatting alone.
        *lru_cache.hpp*) ;;
        *) "$SCRIPT_DIR"/reformat.sh "$src" ;;
    esac
done