path: root/scripts
author     Even Rouault <even.rouault@spatialys.com>   2020-01-29 12:51:49 +0100
committer  GitHub <noreply@github.com>                 2020-01-29 12:51:49 +0100
commit     bf6b1a889b12d7ecdcb190cd14155e09e132095f (patch)
tree       f742983990ca914ff3044352efd4e01eef5bd01d /scripts
parent     74a10a8de03deb823690f143e191087bf7c4821f (diff)
parent     b113d0825cf82f66f738746db0f21745017552e9 (diff)
Merge pull request #1891 from rouault/rfc5
Implement RFC5: Adopt GeoTIFF-based grids for grids delivered with PROJ
Diffstat (limited to 'scripts')
-rwxr-xr-x  scripts/build_db_create_ignf_from_xml.py                                                          |  19
-rwxr-xr-x  scripts/build_db_from_esri.py                                                                     |  10
-rwxr-xr-x  scripts/build_grid_alternatives_generated_noaa.py (renamed from scripts/build_grid_alternatives_generated.py) |  29
-rwxr-xr-x  scripts/grid_checks.py                                                                            | 162
4 files changed, 41 insertions, 179 deletions
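Across all four scripts the pattern is the same: each legacy grid (NTv2, CTable2 or GTX) is re-registered as a cloud-hosted GeoTIFF, the previous PROJ file name moves into the new old_proj_grid_name column, and the package_name column is set to NULL in favour of an https://cdn.proj.org URL. A rough sketch of one migrated row, with the values copied from the IGNF hunk below (the dict form is only illustrative, not how the scripts store it):

# Column names as used by the INSERT statements in the hunks below.
migrated_row = {
    'original_grid_name': 'ntf_r93.gsb',        # name used by the source registry
    'proj_grid_name': 'fr_ign_ntf_r93.tif',     # new GeoTIFF delivered via the CDN
    'old_proj_grid_name': 'ntf_r93.gsb',        # legacy PROJ name kept for compatibility
    'proj_grid_format': 'GTiff',                # was 'NTv2' / 'CTable2' / 'GTX'
    'proj_method': 'hgridshift',
    'inverse_direction': 0,
    'package_name': None,                       # was e.g. 'proj-datumgrid'
    'url': 'https://cdn.proj.org/fr_ign_ntf_r93.tif',
    'direct_download': 1,
    'open_license': 1,
    'directory': None,
}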
diff --git a/scripts/build_db_create_ignf_from_xml.py b/scripts/build_db_create_ignf_from_xml.py
index f39f8d81..63e22770 100755
--- a/scripts/build_db_create_ignf_from_xml.py
+++ b/scripts/build_db_create_ignf_from_xml.py
@@ -1067,27 +1067,31 @@ all_sql.append("""--- Grid alternatives""")
all_sql.append('')
all_sql.append("""INSERT INTO grid_alternatives(original_grid_name,
proj_grid_name,
+ old_proj_grid_name,
proj_grid_format,
proj_method,
inverse_direction,
package_name,
url, direct_download, open_license, directory)
VALUES ('ntf_r93.gsb', -- as referenced by the IGNF registry
+ 'fr_ign_ntf_r93.tif',
'ntf_r93.gsb',
- 'NTv2',
+ 'GTiff',
'hgridshift',
0,
- 'proj-datumgrid',
- NULL, NULL, NULL, NULL);
+ NULL,
+ 'https://cdn.proj.org/fr_ign_ntf_r93.tif', 1, 1, NULL);
""")
for grid in setVerticalGrids:
original_grid_name = grid
- proj_grid_name = grid[grid.rfind('/')+1:].replace('.txt', '.gtx').replace('.mnt', '.gtx').replace('.gra', '.gtx')
+ old_proj_grid_name = grid[grid.rfind('/')+1:].replace('.txt', '.gtx').replace('.mnt', '.gtx').replace('.gra', '.gtx')
+ gtiff_grid_name = 'fr_ign_' + old_proj_grid_name[0:-4] + '.tif'
all_sql.append("""INSERT INTO grid_alternatives(original_grid_name,
proj_grid_name,
+ old_proj_grid_name,
proj_grid_format,
proj_method,
inverse_direction,
@@ -1095,11 +1099,12 @@ for grid in setVerticalGrids:
url, direct_download, open_license, directory)
VALUES ('%s', -- as referenced by the IGNF registry
'%s',
- 'GTX',
+ '%s',
+ 'GTiff',
'geoid_like',
0,
- 'proj-datumgrid-europe',
- NULL, NULL, NULL, NULL);""" % (original_grid_name, proj_grid_name))
+ NULL,
+ '%s', 1, 1, NULL);""" % (original_grid_name, gtiff_grid_name, old_proj_grid_name, 'https://cdn.proj.org/' + gtiff_grid_name))
all_sql.append('')
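For build_db_create_ignf_from_xml.py, the vertical-grid loop now derives both the legacy GTX name and the new CDN GeoTIFF name from the registry path. A minimal sketch of that mapping for a single entry, using a hypothetical registry path (only the string manipulation from the loop above is reproduced):

# Hypothetical IGNF registry path; the suffix rewriting mirrors the loop above.
grid = 'registry/path/RAF18.mnt'
old_proj_grid_name = grid[grid.rfind('/')+1:].replace('.txt', '.gtx') \
                                             .replace('.mnt', '.gtx') \
                                             .replace('.gra', '.gtx')
gtiff_grid_name = 'fr_ign_' + old_proj_grid_name[0:-4] + '.tif'
url = 'https://cdn.proj.org/' + gtiff_grid_name
# old_proj_grid_name == 'RAF18.gtx', gtiff_grid_name == 'fr_ign_RAF18.tif'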
diff --git a/scripts/build_db_from_esri.py b/scripts/build_db_from_esri.py
index f4237b9b..500dc5d0 100755
--- a/scripts/build_db_from_esri.py
+++ b/scripts/build_db_from_esri.py
@@ -1568,12 +1568,14 @@ def import_geogtran():
all_sql.append(sql)
if filename in ('c1hpgn', 'c2hpgn'):
- sql = """INSERT INTO grid_alternatives VALUES ('%s', '%s', 'NTv2', 'hgridshift', 0, 'proj-datumgrid-north-america', NULL, NULL, NULL, NULL);""" % (
- filename, filename + '.gsb')
+ tiff_filename = 'us_noaa_' + filename + '.tif'
+ sql = """INSERT INTO grid_alternatives VALUES ('%s', '%s', '%s', 'GTiff', 'hgridshift', 0, NULL, '%s', 1, 1, NULL);""" % (
+ filename, tiff_filename, filename + '.gsb', 'https://cdn.proj.org/' + tiff_filename)
all_sql.append(sql)
elif filename == 'wohpgn':
- sql = """INSERT INTO grid_alternatives VALUES ('%s', '%s', 'CTable2', 'hgridshift', 0, 'proj-datumgrid', NULL, NULL, NULL, NULL);""" % (
- filename, 'WO')
+ tiff_filename = 'us_noaa_' + filename + '.tif'
+ sql = """INSERT INTO grid_alternatives VALUES ('%s', '%s', '%s', 'GTiff', 'hgridshift', , NULL, '%s', 1, 1, NULL);""" % (
+ filename, tiff_filename, 'WO', 'https://cdn.proj.org/' + tiff_filename)
all_sql.append(sql)
elif filename == 'prvi':
continue
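In build_db_from_esri.py, the c1hpgn/c2hpgn and wohpgn entries switch from NTv2/CTable2 package references to CDN-hosted GeoTIFFs. Expanding the c1hpgn branch by hand (values taken from the hunk above; the commented result is what the substitution yields, not captured script output):

filename = 'c1hpgn'
tiff_filename = 'us_noaa_' + filename + '.tif'
sql = """INSERT INTO grid_alternatives VALUES ('%s', '%s', '%s', 'GTiff', 'hgridshift', 0, NULL, '%s', 1, 1, NULL);""" % (
    filename, tiff_filename, filename + '.gsb', 'https://cdn.proj.org/' + tiff_filename)
# -> INSERT INTO grid_alternatives VALUES ('c1hpgn', 'us_noaa_c1hpgn.tif',
#    'c1hpgn.gsb', 'GTiff', 'hgridshift', 0, NULL,
#    'https://cdn.proj.org/us_noaa_c1hpgn.tif', 1, 1, NULL);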
diff --git a/scripts/build_grid_alternatives_generated.py b/scripts/build_grid_alternatives_generated_noaa.py
index 6570a771..96e12842 100755
--- a/scripts/build_grid_alternatives_generated.py
+++ b/scripts/build_grid_alternatives_generated_noaa.py
@@ -91,13 +91,15 @@ script_dir_name = os.path.dirname(os.path.realpath(__file__))
sql_dir_name = os.path.join(os.path.dirname(script_dir_name), 'data', 'sql')
f = open(os.path.join(sql_dir_name, 'grid_alternatives_generated') + '.sql', 'wb')
-f.write("--- This file has been generated by scripts/build_grid_alternatives_generated.py. DO NOT EDIT !\n\n".encode('UTF-8'))
+f.write("--- This file has been generated by scripts/build_grid_alternatives_generated_noaa.py. DO NOT EDIT !\n\n".encode('UTF-8'))
f.write("-- NADCON (NAD27 -> NAD83) entries\n\n".encode('UTF-8'))
for grid in nadcon_grids:
+ tiff_name = 'us_noaa_' + grid + '.tif'
sql = """INSERT INTO grid_alternatives(original_grid_name,
proj_grid_name,
+ old_proj_grid_name,
proj_grid_format,
proj_method,
inverse_direction,
@@ -105,11 +107,12 @@ for grid in nadcon_grids:
url, direct_download, open_license, directory)
VALUES ('%s',
'%s',
- 'CTable2',
+ '%s',
+ 'GTiff',
'hgridshift',
0,
- 'proj-datumgrid',
- NULL, NULL, NULL, NULL);""" % (grid + '.las', grid)
+ NULL,
+ '%s', 1, 1, NULL);""" % (grid + '.las', tiff_name, grid, 'https://cdn.proj.org/' + tiff_name)
f.write((sql + '\n').encode('UTF-8'))
@@ -123,8 +126,10 @@ for row in hpgn_grids:
ctable2_name = None
las_filename = ntv2_name[0:-4] + ".las"
if ctable2_name:
+ tiff_name = 'us_noaa_' + ctable2_name+'.tif'
sql = """INSERT INTO grid_alternatives(original_grid_name,
proj_grid_name,
+ old_proj_grid_name,
proj_grid_format,
proj_method,
inverse_direction,
@@ -132,14 +137,17 @@ for row in hpgn_grids:
url, direct_download, open_license, directory)
VALUES ('%s',
'%s',
- 'CTable2',
+ '%s',
+ 'GTiff',
'hgridshift',
0,
- 'proj-datumgrid',
- NULL, NULL, NULL, NULL);""" % (las_filename, ctable2_name)
+ NULL,
+ '%s', 1, 1, NULL);""" % (las_filename, tiff_name, ctable2_name, 'https://cdn.proj.org/' + tiff_name)
else:
+ tiff_name = 'us_noaa_' + ntv2_name[0:-4]+'.tif'
sql = """INSERT INTO grid_alternatives(original_grid_name,
proj_grid_name,
+ old_proj_grid_name,
proj_grid_format,
proj_method,
inverse_direction,
@@ -147,11 +155,12 @@ for row in hpgn_grids:
url, direct_download, open_license, directory)
VALUES ('%s',
'%s',
- 'NTv2',
+ '%s',
+ 'GTiff',
'hgridshift',
0,
- 'proj-datumgrid-north-america',
- NULL, NULL, NULL, NULL);""" % (las_filename, ntv2_name)
+ NULL,
+ '%s', 1, 1, NULL);""" % (las_filename, tiff_name, ntv2_name, 'https://cdn.proj.org/' + tiff_name)
f.write((sql + '\n').encode('UTF-8'))
f.close()
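The renamed build_grid_alternatives_generated_noaa.py applies the same 'us_noaa_' prefix to the NADCON and HARN grids it generates. For one NADCON grid (assuming 'conus' is among nadcon_grids), the substitution tuple built in the loop above works out to:

# Illustrative: one NADCON entry as mapped by the loop above.
grid = 'conus'
tiff_name = 'us_noaa_' + grid + '.tif'
row = (grid + '.las', tiff_name, grid, 'https://cdn.proj.org/' + tiff_name)
# -> ('conus.las', 'us_noaa_conus.tif', 'conus',
#     'https://cdn.proj.org/us_noaa_conus.tif')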
diff --git a/scripts/grid_checks.py b/scripts/grid_checks.py
index 3b748d2c..6177fd4a 100755
--- a/scripts/grid_checks.py
+++ b/scripts/grid_checks.py
@@ -35,21 +35,19 @@ import fnmatch
import os
import sqlite3
-parser = argparse.ArgumentParser(description='Check database and proj-datumgrid consistency.')
+parser = argparse.ArgumentParser(description='Check database and proj-datumgrid-geotiff consistency.')
parser.add_argument('path_to_proj_db',
help='Full pathname to proj.db')
parser.add_argument('path_to_proj_datumgrid',
- help='Full pathname to the root of the proj_datumgrid git repository')
+ help='Full pathname to the root of the proj_datumgrid_geotiff git repository')
group = parser.add_mutually_exclusive_group(required=True)
group.add_argument('--not-in-grid-alternatives', dest='not_in_grid_alternatives', action='store_true',
help='list grids mentionned in grid_transformation but missing in grid_alternatives')
-group.add_argument('--not-in-proj-datumgrid', dest='not_in_proj_datum_grid', action='store_true',
+group.add_argument('--not-in-proj-datumgrid-geotiff', dest='not_in_proj_datum_grid_geotiff', action='store_true',
help='list grids registered in grid_alternatives, but missing in proj-datumgrid')
group.add_argument('--not-in-db', dest='not_in_db', action='store_true',
help='list grids in proj-datumgrid, but not registered in grid_alternatives')
-group.add_argument('--check-filelist', dest='check_filelist', action='store_true',
- help='check consistency of proj-datumgrid filelist.csv')
args = parser.parse_args()
@@ -66,7 +64,7 @@ if args.not_in_grid_alternatives:
for row in res:
print(row)
-elif args.not_in_proj_datum_grid:
+elif args.not_in_proj_datum_grid_geotiff:
set_grids = set()
for root, dirnames, filenames in os.walk(proj_datumgrid):
@@ -97,157 +95,5 @@ elif args.not_in_db:
res = conn.execute("SELECT 1 FROM grid_alternatives WHERE proj_grid_name = ?", (filename,))
if not res.fetchone():
print('WARNING: grid ' + filename + ' in proj-datumgrid but missing in grid_alternatives')
-
-elif args.check_filelist:
-
- from osgeo import gdal
-
- set_grids = set()
- non_gsb_hgrids = ('ntv1_can.dat',
- 'alaska',
- 'conus',
- 'hawaii',
- 'prvi',
- 'stgeorge',
- 'stlrnc',
- 'stpaul',
- 'FL'.lower(),
- 'MD'.lower(),
- 'TN'.lower(),
- 'WI'.lower(),
- 'WO'.lower(),)
- for root, dirnames, filenames in os.walk(proj_datumgrid):
- if '.git' in root:
- continue
- for filename in fnmatch.filter(filenames, '*'):
- filename_lower = filename.lower()
- if '.aux.xml' in filename_lower:
- continue
- if '.gsb' in filename_lower or '.gtx' in filename_lower:
- set_grids.add(filename)
- elif filename_lower in non_gsb_hgrids:
- set_grids.add(filename)
-
- conn = sqlite3.connect(dbname)
-
- set_filenames_from_csv = set()
- with open(os.path.join(proj_datumgrid,'filelist.csv')) as f:
- reader = csv.reader(f)
- first_line = True
- for row in reader:
- if first_line:
- assert row == ['filename', 'type', 'area', 'unit', 'source_crs', 'target_crs', 'interpolation_crs', 'agency_name', 'source', 'licence']
- first_line = False
- continue
- filename, type, _, unit, source_crs, target_crs, interpolation_crs, _, _, _ = row
- if type in ('DEFORMATION_MODEL', 'VELOCITY_MODEL'):
- continue
- assert type in ('HORIZONTAL_OFFSET',
- 'VERTICAL_OFFSET_GEOGRAPHIC_TO_VERTICAL',
- 'VERTICAL_OFFSET_VERTICAL_TO_VERTICAL'), type
- set_filenames_from_csv.add(filename)
-
- assert filename in set_grids, filename
- if filename.lower().endswith('.gsb') or filename.lower() in non_gsb_hgrids:
- assert type == 'HORIZONTAL_OFFSET', (filename, type)
- else:
- assert type in ('VERTICAL_OFFSET_GEOGRAPHIC_TO_VERTICAL',
- 'VERTICAL_OFFSET_VERTICAL_TO_VERTICAL'), (filename, type)
-
- for dirname in ('.', 'europe', 'north-america', 'oceania', 'world'):
- filename_with_path_tmp = os.path.join(proj_datumgrid, dirname, filename)
- if os.path.exists(filename_with_path_tmp):
- filename_with_path = filename_with_path_tmp
- break
- assert filename_with_path
-
- ds = gdal.Open(filename_with_path)
- assert ds, filename
- gt = ds.GetGeoTransform()
- grid_w = gt[0]
- grid_n = gt[3]
- grid_e = gt[0] + gt[1] * ds.RasterXSize
- grid_s = gt[3] + gt[5] * ds.RasterYSize
- if grid_w > 180:
- grid_w -= 360
- grid_e -= 360
-
- source_crs_name = None
- target_crs_name = None
-
- if source_crs.startswith('EPSG:') or source_crs.startswith('IGNF:'):
- auth_name = source_crs[0:4]
- code = source_crs[len('EPSG:'):]
- res = conn.execute("SELECT name, table_name FROM crs_view WHERE auth_name = ? AND code = ?", (auth_name, code))
- source_crs_name, table_name = res.fetchone()
- if type == 'HORIZONTAL_OFFSET':
- assert table_name == 'geodetic_crs', (filename, table_name, code)
- res = conn.execute("SELECT type FROM geodetic_crs WHERE auth_name = ? AND code = ?", (auth_name, code))
- geodetic_crs_type, = res.fetchone()
- assert geodetic_crs_type == 'geographic 2D', (filename, geodetic_crs_type, code)
- elif type == 'VERTICAL_OFFSET_GEOGRAPHIC_TO_VERTICAL':
- assert table_name == 'geodetic_crs', (filename, table_name, code)
- res = conn.execute("SELECT type FROM geodetic_crs WHERE auth_name = ? AND code = ?", (auth_name, code))
- geodetic_crs_type, = res.fetchone()
- if code == '4269': # NAD 83
- assert geodetic_crs_type == 'geographic 2D', (filename, geodetic_crs_type, code)
- else:
- assert geodetic_crs_type == 'geographic 3D', (filename, geodetic_crs_type, code)
- elif type == 'VERTICAL_OFFSET_VERTICAL_TO_VERTICAL':
- assert table_name == 'vertical_crs', (filename, table_name, code)
-
- res = conn.execute("SELECT south_lat, north_lat, west_lon, east_lon FROM crs_view c, area a WHERE c.area_of_use_auth_name = a.auth_name AND c.area_of_use_code = a.code AND c.auth_name = ? AND c.code = ?", (auth_name, code))
- s, n, w, e = res.fetchone()
- if w > e:
- if grid_w > 0:
- e += 360
- else:
- w -= 360
- if filename not in ('c1hpgn.gsb', 'c2hpgn.gsb', 'guhpgn.gsb', 'g2009g01.gtx','g2009s01.gtx','g2012bg0.gtx', 'MAY76V20.gsb', ):
- assert grid_w < e, (filename, source_crs, grid_w, e)
- assert grid_e > w, (filename, source_crs, grid_e, w)
- assert grid_s < n, (filename, source_crs, grid_s, n)
- assert grid_n > s, (filename, source_crs, grid_n, s)
-
- else:
- assert False, (filename, source_crs)
-
- if target_crs.startswith('EPSG:') or target_crs.startswith('IGNF:'):
- auth_name = target_crs[0:4]
- code = target_crs[len('EPSG:'):]
- res = conn.execute("SELECT name, table_name FROM crs_view WHERE auth_name = ? AND code = ?", (auth_name, code))
- target_crs_name, table_name = res.fetchone()
- if type == 'HORIZONTAL_OFFSET':
- assert table_name == 'geodetic_crs', (filename, table_name, code)
- res = conn.execute("SELECT type FROM geodetic_crs WHERE auth_name = ? AND code = ?", (auth_name, code))
- geodetic_crs_type, = res.fetchone()
- assert geodetic_crs_type == 'geographic 2D', (filename, geodetic_crs_type, code)
- elif type in ('VERTICAL_OFFSET_GEOGRAPHIC_TO_VERTICAL', 'VERTICAL_OFFSET_VERTICAL_TO_VERTICAL'):
- assert table_name == 'vertical_crs', (filename, table_name, code)
-
- res = conn.execute("SELECT south_lat, north_lat, west_lon, east_lon FROM crs_view c, area a WHERE c.area_of_use_auth_name = a.auth_name AND c.area_of_use_code = a.code AND c.auth_name = ? AND c.code = ?", (auth_name, code))
- s, n, w, e = res.fetchone()
- if w > e:
- if grid_w > 0:
- e += 360
- else:
- w -= 360
- if filename not in ('c1hpgn.gsb', 'c2hpgn.gsb', 'guhpgn.gsb', 'ggpf08-Fakarava.gtx'):
- assert grid_w < e, (filename, target_crs, grid_w, e)
- assert grid_e > w, (filename, target_crs, grid_e, w)
- assert grid_s < n, (filename, target_crs, grid_s, n)
- assert grid_n > s, (filename, target_crs, grid_n, s)
-
- elif target_crs.startswith('VERTCRS['):
- assert type == 'VERTICAL_OFFSET_GEOGRAPHIC_TO_VERTICAL', (filename, target_crs)
- else:
- assert False, (filename, target_crs)
-
- #print(filename, source_crs_name, target_crs_name)
-
- for f in set_grids:
- if f not in set_filenames_from_csv:
- print(f + ' is missing in filelist.csv')
-
else:
raise Exception('unknown mode')
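grid_checks.py keeps three modes after dropping --check-filelist: --not-in-grid-alternatives, --not-in-proj-datumgrid-geotiff, and --not-in-db. The surviving --not-in-db branch walks the proj-datumgrid-geotiff checkout and looks each file up by proj_grid_name. A minimal sketch of that check, assuming placeholder paths and a '*.tif' filter (the hunk above does not show the script's actual filter or loop body):

import fnmatch
import os
import sqlite3

conn = sqlite3.connect('/path/to/proj.db')                        # placeholder path
for root, dirnames, filenames in os.walk('/path/to/proj-datumgrid-geotiff'):  # placeholder path
    if '.git' in root:
        continue
    for filename in fnmatch.filter(filenames, '*.tif'):           # filter is an assumption
        res = conn.execute("SELECT 1 FROM grid_alternatives WHERE proj_grid_name = ?",
                           (filename,))
        if not res.fetchone():
            print('WARNING: grid ' + filename +
                  ' in proj-datumgrid-geotiff but missing in grid_alternatives')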