# SPDX-License-Identifier: GPL-2.0-only
#
# This file is part of Nominatim. (https://nominatim.org)
# Copyright (C) 2022 by the Nominatim developer community.
# For a full list of authors see the git log.
"""
Functions for database migration to newer software versions.
"""
import logging

from psycopg2 import sql as pysql

from nominatim.db import properties
from nominatim.db.connection import connect
from nominatim.version import NOMINATIM_VERSION, version_str
from nominatim.tools import refresh
from nominatim.tokenizer import factory as tokenizer_factory
from nominatim.errors import UsageError
21 LOG = logging.getLogger()
23 _MIGRATION_FUNCTIONS = []
def migrate(config, paths):
    """ Check for the current database version and execute migrations,
        if necessary.

        Parameters:
            config: project configuration (provides the libpq DSN and
                    tokenizer settings).
            paths: object with the project directory paths, passed through
                   to the individual migration functions.

        Returns 0 on success. Raises UsageError when the database is too
        old to be migrated automatically.
    """
    with connect(config.get_libpq_dsn()) as conn:
        if conn.table_exists('nominatim_properties'):
            db_version_str = properties.get_property(conn, 'database_version')
        else:
            db_version_str = None

        if db_version_str is not None:
            parts = db_version_str.split('.')
            db_version = tuple(int(x) for x in parts[:2] + parts[2].split('-'))

            if db_version == NOMINATIM_VERSION:
                LOG.warning("Database already at latest version (%s)", db_version_str)
                return 0

            LOG.info("Detected database version: %s", db_version_str)
        else:
            # No property table yet - derive the version from the schema.
            db_version = _guess_version(conn)

        has_run_migration = False
        for version, func in _MIGRATION_FUNCTIONS:
            if db_version <= version:
                # First docstring line serves as the human-readable title.
                LOG.warning("Running: %s (%s)", func.__doc__.split('\n', 1)[0],
                            version_str(version))
                kwargs = dict(conn=conn, config=config, paths=paths)
                func(**kwargs)
                conn.commit()
                has_run_migration = True

        if has_run_migration:
            # Reimport all SQL functions so they match the new schema.
            LOG.warning('Updating SQL functions.')
            refresh.create_functions(conn, config)
            tokenizer = tokenizer_factory.get_tokenizer_for_db(config)
            tokenizer.update_sql_functions(config)

        properties.set_property(conn, 'database_version', version_str())

        conn.commit()

    return 0
def _guess_version(conn):
    """ Guess a database version when there is no property table yet.
        Only migrations for 3.6 and later are supported, so bail out
        when the version seems older.

        Raises UsageError when the database predates 3.6.0.
    """
    with conn.cursor() as cur:
        # In version 3.6, the country_name table was updated. Check for that.
        cnt = cur.scalar("""SELECT count(*) FROM
                            (SELECT svals(name) FROM country_name
                             WHERE country_code = 'gb')x;
                         """)
        # NOTE(review): threshold reconstructed from upstream - a pre-3.6
        # country_name table has far fewer name variants for 'gb'.
        if cnt < 100:
            LOG.fatal('It looks like your database was imported with a version '
                      'prior to 3.6.0. Automatic migration not possible.')
            raise UsageError('Migration not possible.')

    # Oldest supported baseline; every registered migration will be run.
    return (3, 5, 0, 99)
def _migration(major, minor, patch=0, dbpatch=0):
    """ Decorator for a single migration step. The parameters describe the
        version after which the migration is applicable, i.e before changing
        from the given version to the next, the migration is required.

        All migrations are run in the order in which they are defined in this
        file. Do not run global SQL scripts for migrations as you cannot be sure
        that these scripts do the same in later versions.

        Functions will always be reimported in full at the end of the migration
        process, so the migration functions may leave a temporary state behind
        there.
    """
    def decorator(func):
        # Register the step under its applicable version; migrate() picks
        # the steps up from this list in definition order.
        _MIGRATION_FUNCTIONS.append(((major, minor, patch, dbpatch), func))
        return func

    return decorator
@_migration(3, 5, 0, 99)
def import_status_timestamp_change(conn, **_):
    """ Add timezone to timestamp in status table.

        The import_status table has been changed to include timezone information
        in the time stamp.
    """
    with conn.cursor() as cur:
        cur.execute("""ALTER TABLE import_status ALTER COLUMN lastimportdate
                       TYPE timestamp with time zone;""")
@_migration(3, 5, 0, 99)
def add_nominatim_property_table(conn, config, **_):
    """ Add nominatim_property table.
    """
    # Idempotent: skip when an earlier run already created the table.
    if not conn.table_exists('nominatim_properties'):
        with conn.cursor() as cur:
            # Table name is fixed; only the grantee comes from configuration,
            # injected safely via pysql.Identifier.
            cur.execute(pysql.SQL("""CREATE TABLE nominatim_properties (
                                        property TEXT,
                                        value TEXT);
                                     GRANT SELECT ON TABLE nominatim_properties TO {};
                                  """).format(pysql.Identifier(config.DATABASE_WEBUSER)))
@_migration(3, 6, 0, 0)
def change_housenumber_transliteration(conn, **_):
    """ Transliterate housenumbers.

        The database schema switched from saving raw housenumbers in
        placex.housenumber to saving transliterated ones.

        Note: the function create_housenumber_id() has been dropped in later
              versions.
    """
    with conn.cursor() as cur:
        # Helper that normalises each housenumber part and joins the
        # transliterated forms with ';'.
        cur.execute("""CREATE OR REPLACE FUNCTION create_housenumber_id(housenumber TEXT)
                       RETURNS TEXT AS $$
                       DECLARE
                         normtext TEXT;
                       BEGIN
                         SELECT array_to_string(array_agg(trans), ';')
                           INTO normtext
                           FROM (SELECT lookup_word as trans,
                                        getorcreate_housenumber_id(lookup_word)
                                 FROM (SELECT make_standard_name(h) as lookup_word
                                       FROM regexp_split_to_table(housenumber, '[,;]') h) x) y;
                         return normtext;
                       END;
                       $$ LANGUAGE plpgsql STABLE STRICT;""")
        # Drop stale housenumber tokens; they are recreated on the fly above.
        cur.execute("DELETE FROM word WHERE class = 'place' and type = 'house'")
        cur.execute("""UPDATE placex
                       SET housenumber = create_housenumber_id(housenumber)
                       WHERE housenumber is not null""")
@_migration(3, 7, 0, 0)
def switch_placenode_geometry_index(conn, **_):
    """ Replace idx_placex_geometry_reverse_placeNode index.

        Make the index slightly more permissive, so that it can also be used
        when matching up boundaries and place nodes. It makes the index
        idx_placex_adminname index unnecessary.
    """
    with conn.cursor() as cur:
        cur.execute(""" CREATE INDEX IF NOT EXISTS idx_placex_geometry_placenode ON placex
                        USING GIST (geometry)
                        WHERE osm_type = 'N' and rank_search < 26
                              and class = 'place' and type != 'postcode'
                              and linked_place_id is null""")
        # The new partial index covers the lookups the old one served.
        cur.execute(""" DROP INDEX IF EXISTS idx_placex_adminname """)
@_migration(3, 7, 0, 1)
def install_legacy_tokenizer(conn, config, **_):
    """ Setup legacy tokenizer.

        If no other tokenizer has been configured yet, then create the
        configuration for the backwards-compatible legacy tokenizer
    """
    # Only act when the database has no tokenizer recorded yet.
    if properties.get_property(conn, 'tokenizer') is None:
        with conn.cursor() as cur:
            for table in ('placex', 'location_property_osmline'):
                has_column = cur.scalar("""SELECT count(*) FROM information_schema.columns
                                           WHERE table_name = %s
                                           and column_name = 'token_info'""",
                                        (table, ))
                if has_column == 0:
                    # Table name injected safely via pysql.Identifier.
                    cur.execute(pysql.SQL('ALTER TABLE {} ADD COLUMN token_info JSONB')
                                .format(pysql.Identifier(table)))
        tokenizer = tokenizer_factory.create_tokenizer(config, init_db=False,
                                                       module_name='legacy')

        tokenizer.migrate_database(config)
@_migration(4, 0, 99, 0)
def create_tiger_housenumber_index(conn, **_):
    """ Create idx_location_property_tiger_parent_place_id with included
        house number.

        The inclusion is needed for efficient lookup of housenumbers in
        full address searches.
    """
    # INCLUDE columns in indexes require PostgreSQL 11 or later.
    if conn.server_version_tuple() >= (11, 0, 0):
        with conn.cursor() as cur:
            cur.execute(""" CREATE INDEX IF NOT EXISTS
                                idx_location_property_tiger_housenumber_migrated
                            ON location_property_tiger
                            USING btree(parent_place_id)
                            INCLUDE (startnumber, endnumber) """)
@_migration(4, 0, 99, 1)
def create_interpolation_index_on_place(conn, **_):
    """ Create idx_place_interpolations for lookup of interpolation lines
        on updates.
    """
    with conn.cursor() as cur:
        cur.execute("""CREATE INDEX IF NOT EXISTS idx_place_interpolations
                       ON place USING gist(geometry)
                       WHERE osm_type = 'W' and address ? 'interpolation'""")
@_migration(4, 0, 99, 2)
def add_step_column_for_interpolation(conn, **_):
    """ Add a new column 'step' to the interpolations table.

        Also converts the data into the stricter format which requires that
        startnumbers comply with the odd/even requirements.
    """
    # Idempotent: skip when the column already exists.
    if conn.table_has_column('location_property_osmline', 'step'):
        return

    with conn.cursor() as cur:
        # Mark invalid all interpolations with no intermediate numbers.
        cur.execute("""UPDATE location_property_osmline SET startnumber = null
                       WHERE endnumber - startnumber <= 1 """)
        # Align the start numbers where odd/even does not match.
        cur.execute("""UPDATE location_property_osmline
                       SET startnumber = startnumber + 1,
                           linegeo = ST_LineSubString(linegeo,
                                                      1.0 / (endnumber - startnumber)::float,
                                                      1)
                       WHERE (interpolationtype = 'odd' and startnumber % 2 = 0)
                              or (interpolationtype = 'even' and startnumber % 2 = 1)
                    """)
        # Mark invalid odd/even interpolations with no intermediate numbers.
        cur.execute("""UPDATE location_property_osmline SET startnumber = null
                       WHERE interpolationtype in ('odd', 'even')
                             and endnumber - startnumber = 2""")
        # Finally add the new column and populate it.
        cur.execute("ALTER TABLE location_property_osmline ADD COLUMN step SMALLINT")
        cur.execute("""UPDATE location_property_osmline
                         SET step = CASE WHEN interpolationtype = 'all'
                                         THEN 1 ELSE 2 END""")
@_migration(4, 0, 99, 3)
def add_step_column_for_tiger(conn, **_):
    """ Add a new column 'step' to the tiger data table.
    """
    # Idempotent: skip when the column already exists.
    if conn.table_has_column('location_property_tiger', 'step'):
        return

    with conn.cursor() as cur:
        cur.execute("ALTER TABLE location_property_tiger ADD COLUMN step SMALLINT")
        cur.execute("""UPDATE location_property_tiger
                         SET step = CASE WHEN interpolationtype = 'all'
                                         THEN 1 ELSE 2 END""")
@_migration(4, 0, 99, 4)
def add_derived_name_column_for_country_names(conn, **_):
    """ Add a new column 'derived_name' which in the future takes the
        country names as imported from OSM data.
    """
    # Idempotent: only add the column when it does not exist yet.
    if not conn.table_has_column('country_name', 'derived_name'):
        with conn.cursor() as cur:
            cur.execute("ALTER TABLE country_name ADD COLUMN derived_name public.HSTORE")
@_migration(4, 0, 99, 5)
def mark_internal_country_names(conn, config, **_):
    """ Names from the country table should be marked as internal to prevent
        them from being deleted. Only necessary for ICU tokenizer.
    """
    import psycopg2.extras # pylint: disable=import-outside-toplevel

    tokenizer = tokenizer_factory.get_tokenizer_for_db(config)
    with tokenizer.name_analyzer() as analyzer:
        with conn.cursor() as cur:
            # Needed so the hstore 'name' column is returned as a dict.
            psycopg2.extras.register_hstore(cur)
            cur.execute("SELECT country_code, name FROM country_name")

            for country_code, names in cur:
                # Guard against rows without any names.
                if names is None:
                    names = {}
                names['countrycode'] = country_code
                analyzer.add_country_names(country_code, names)