# SPDX-License-Identifier: GPL-3.0-or-later
#
# This file is part of Nominatim. (https://nominatim.org)
#
# Copyright (C) 2026 by the Nominatim developer community.
# For a full list of authors see the git log.
"""
Tests for functions to import a new database.
"""
from pathlib import Path

import pytest
import pytest_asyncio  # noqa
import psycopg
from psycopg import sql as pysql

from nominatim_db.tools import database_import
from nominatim_db.errors import UsageError
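

# Tests in this class talk to a real PostgreSQL server: the autouse fixture
# connects to the 'postgres' maintenance database and makes sure the test
# database does not exist before and after each test.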
class TestDatabaseSetup:
    DBNAME = 'test_nominatim_python_unittest'

    @pytest.fixture(autouse=True)
    def setup_nonexistant_db(self):
        with psycopg.connect(dbname='postgres', autocommit=True) as conn:
            with conn.cursor() as cur:
                cur.execute(pysql.SQL('DROP DATABASE IF EXISTS ')
                            + pysql.Identifier(self.DBNAME))

            yield True

            with conn.cursor() as cur:
                cur.execute(pysql.SQL('DROP DATABASE IF EXISTS ')
                            + pysql.Identifier(self.DBNAME))

    @pytest.fixture
    def cursor(self):
        with psycopg.connect(dbname=self.DBNAME) as conn:
            with conn.cursor() as cur:
                yield cur

    def conn(self):
        return psycopg.connect(dbname=self.DBNAME)

    def test_setup_skeleton(self):
        database_import.setup_database_skeleton(f'dbname={self.DBNAME}')

        # Check that all extensions are set up.
        with self.conn() as conn:
            with conn.cursor() as cur:
                cur.execute('CREATE TABLE t (h HSTORE, geom GEOMETRY(Geometry, 4326))')

    def test_unsupported_pg_version(self, monkeypatch):
        monkeypatch.setattr(database_import, 'POSTGRESQL_REQUIRED_VERSION', (100, 4))

        with pytest.raises(UsageError, match='PostgreSQL server is too old.'):
            database_import.setup_database_skeleton(f'dbname={self.DBNAME}')

    def test_create_db_explicit_ro_user(self):
        database_import.setup_database_skeleton(f'dbname={self.DBNAME}',
                                                rouser='postgres')

    def test_create_db_missing_ro_user(self):
        with pytest.raises(UsageError, match='Missing read-only user.'):
            database_import.setup_database_skeleton(f'dbname={self.DBNAME}',
                                                    rouser='sdfwkjkjgdugu2jgsafkljas')

    def test_setup_extensions_old_postgis(self, monkeypatch):
        monkeypatch.setattr(database_import, 'POSTGIS_REQUIRED_VERSION', (50, 50))

        with pytest.raises(UsageError, match='PostGIS is too old.'):
            database_import.setup_database_skeleton(f'dbname={self.DBNAME}')


def test_setup_skeleton_already_exists(temp_db):
    with pytest.raises(UsageError):
        database_import.setup_database_skeleton(f'dbname={temp_db}')
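

# The import_osm_data() tests below check the generated osm2pgsql command line
# on captured stdout; this assumes the osm2pgsql_options fixture substitutes a
# stub binary that simply echoes its arguments.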
def test_import_osm_data_simple(table_factory, osm2pgsql_options, capfd):
    table_factory('place', content=((1, ), ))

    database_import.import_osm_data(Path('file.pbf'), osm2pgsql_options)
    captured = capfd.readouterr()

    assert '--create' in captured.out
    assert '--output flex' in captured.out
    assert f'--style {osm2pgsql_options["osm2pgsql_style"]}' in captured.out
    assert f'--number-processes {osm2pgsql_options["threads"]}' in captured.out
    assert f'--cache {osm2pgsql_options["osm2pgsql_cache"]}' in captured.out
    assert 'file.pbf' in captured.out


def test_import_osm_data_multifile(table_factory, tmp_path, osm2pgsql_options, capfd):
    table_factory('place', content=((1, ), ))
    osm2pgsql_options['osm2pgsql_cache'] = 0

    files = [tmp_path / 'file1.osm', tmp_path / 'file2.osm']
    for f in files:
        f.write_text('test', encoding='utf-8')

    database_import.import_osm_data(files, osm2pgsql_options)
    captured = capfd.readouterr()

    assert 'file1.osm' in captured.out
    assert 'file2.osm' in captured.out


def test_import_osm_data_simple_no_data(table_factory, osm2pgsql_options):
    table_factory('place')

    with pytest.raises(UsageError, match='No data imported'):
        database_import.import_osm_data(Path('file.pbf'), osm2pgsql_options)


def test_import_osm_data_simple_ignore_no_data(table_factory, osm2pgsql_options):
    table_factory('place')

    database_import.import_osm_data(Path('file.pbf'), osm2pgsql_options,
                                    ignore_errors=True)
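

# With drop=True the import is expected to remove the osm2pgsql flatnode file
# and the intermediate planet_osm_nodes table after the import has finished.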
def test_import_osm_data_drop(table_factory, temp_db_cursor, tmp_path, osm2pgsql_options):
    table_factory('place', content=((1, ), ))
    table_factory('planet_osm_nodes')

    flatfile = tmp_path / 'flatfile'
    flatfile.write_text('touch', encoding='utf-8')

    osm2pgsql_options['flatnode_file'] = str(flatfile.resolve())

    database_import.import_osm_data(Path('file.pbf'), osm2pgsql_options, drop=True)

    assert not flatfile.exists()
    assert not temp_db_cursor.table_exists('planet_osm_nodes')


def test_import_osm_data_default_cache(table_factory, osm2pgsql_options, capfd):
    table_factory('place', content=((1, ), ))

    osm2pgsql_options['osm2pgsql_cache'] = 0

    database_import.import_osm_data(Path(__file__), osm2pgsql_options)
    captured = capfd.readouterr()

    assert f'--cache {osm2pgsql_options["osm2pgsql_cache"]}' in captured.out
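

# truncate_data_tables() must leave every data table empty. search_name is
# only present in some setups, hence the with_search parameter.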
@pytest.mark.parametrize("with_search", (True, False))
def test_truncate_database_tables(temp_db_conn, temp_db_cursor, table_factory, with_search):
    tables = ['placex', 'place_addressline', 'location_area',
              'location_area_country',
              'location_property_tiger', 'location_property_osmline',
              'location_postcode', 'location_road_23']
    if with_search:
        tables.append('search_name')

    for table in tables:
        table_factory(table, content=((1, ), (2, ), (3, )))
        assert temp_db_cursor.table_rows(table) == 3

    database_import.truncate_data_tables(temp_db_conn)

    for table in tables:
        assert temp_db_cursor.table_rows(table) == 0
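

# load_data() copies rows from the place table into placex and
# location_property_osmline. The trigger functions below are minimal stand-ins
# for the production insert triggers: they fill only the derived columns
# needed for the copied rows to be accepted.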
@pytest.mark.parametrize("threads", (1, 5))
@pytest.mark.asyncio
async def test_load_data(dsn, place_row, placex_table, osmline_table,
                         temp_db_cursor, threads):
    for oid in range(100, 130):
        place_row(osm_id=oid)
    place_row(osm_type='W', osm_id=342, cls='place', typ='houses',
              geom='LINESTRING(0 0, 10 10)')

    temp_db_cursor.execute("""
        CREATE OR REPLACE FUNCTION placex_insert() RETURNS TRIGGER AS $$
        BEGIN
          NEW.place_id := nextval('seq_place');
          NEW.indexed_status := 1;
          NEW.centroid := ST_Centroid(NEW.geometry);
          NEW.geometry_sector := 2424;
          NEW.rank_address := 30;
          NEW.rank_search := 30;
          RETURN NEW;
        END; $$ LANGUAGE plpgsql STABLE PARALLEL SAFE;

        CREATE OR REPLACE FUNCTION osmline_insert() RETURNS TRIGGER AS $$
        BEGIN
          NEW.place_id := nextval('seq_place');
          IF NEW.indexed_status IS NULL THEN
              NEW.indexed_status := 1;
          END IF;
          NEW.geometry_sector := 2424;
          RETURN NEW;
        END; $$ LANGUAGE plpgsql STABLE PARALLEL SAFE;

        CREATE TRIGGER placex_before_insert BEFORE INSERT ON placex
            FOR EACH ROW EXECUTE PROCEDURE placex_insert();

        CREATE TRIGGER osmline_before_insert BEFORE INSERT ON location_property_osmline
            FOR EACH ROW EXECUTE PROCEDURE osmline_insert();
        """)

    await database_import.load_data(dsn, threads)

    assert temp_db_cursor.table_rows('placex') == 30
    assert temp_db_cursor.table_rows('location_property_osmline') == 1
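

# The SQL creation helpers only need a directory containing the expected SQL
# files. These tests point config.lib_dir.sql at a temporary directory and
# write minimal SQL files so each helper can be exercised in isolation.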
class TestSetupSQL:

    @pytest.fixture(autouse=True)
    def init_env(self, temp_db, tmp_path, def_config, sql_preprocessor_cfg):
        def_config.lib_dir.sql = tmp_path / 'sql'
        def_config.lib_dir.sql.mkdir()

        self.config = def_config

    def write_sql(self, fname, content):
        (self.config.lib_dir.sql / fname).write_text(content, encoding='utf-8')

    @pytest.mark.parametrize("reverse", [True, False])
    def test_create_tables(self, temp_db_conn, temp_db_cursor, reverse):
        self.write_sql('tables.sql',
                       """CREATE FUNCTION test() RETURNS bool
                          AS $$ SELECT {{db.reverse_only}} $$ LANGUAGE SQL""")

        self.write_sql('grants.sql', "-- Mock grants file for testing\n")

        database_import.create_tables(temp_db_conn, self.config, reverse)

        assert temp_db_cursor.scalar('SELECT test()') == reverse

    def test_create_table_triggers(self, temp_db_conn, temp_db_cursor):
        self.write_sql('table-triggers.sql',
                       """CREATE FUNCTION test() RETURNS TEXT
                          AS $$ SELECT 'a'::text $$ LANGUAGE SQL""")

        database_import.create_table_triggers(temp_db_conn, self.config)

        assert temp_db_cursor.scalar('SELECT test()') == 'a'

    def test_create_partition_tables(self, temp_db_conn, temp_db_cursor):
        self.write_sql('partition-tables.src.sql',
                       """CREATE FUNCTION test() RETURNS TEXT
                          AS $$ SELECT 'b'::text $$ LANGUAGE SQL""")

        database_import.create_partition_tables(temp_db_conn, self.config)

        assert temp_db_cursor.scalar('SELECT test()') == 'b'

    @pytest.mark.parametrize("drop", [True, False])
    @pytest.mark.asyncio
    async def test_create_search_indices(self, temp_db_conn, temp_db_cursor, drop):
        self.write_sql('indices.sql',
                       """CREATE FUNCTION test() RETURNS bool
                          AS $$ SELECT {{drop}} $$ LANGUAGE SQL""")

        await database_import.create_search_indices(temp_db_conn, self.config, drop)

        assert temp_db_cursor.scalar('SELECT test()') == drop