ignored-classes=NominatimArgs,closing
disable=too-few-public-methods,duplicate-code
-good-names=i,x,y,fd
+good-names=i,x,y,fd,db
from nominatim.config import Configuration
from nominatim.db import connection
from nominatim.db.sql_preprocessor import SQLPreprocessor
-from nominatim.db import properties
import nominatim.tokenizer.factory
import nominatim.cli
@pytest.fixture
def cli_call():
def _call_nominatim(*args):
- return nominatim.cli.nominatim(
- module_dir='MODULE NOT AVAILABLE',
- osm2pgsql_path='OSM2PGSQL NOT AVAILABLE',
- phplib_dir=str(SRC_DIR / 'lib-php'),
- data_dir=str(SRC_DIR / 'data'),
- phpcgi_path='/usr/bin/php-cgi',
- sqllib_dir=str(SRC_DIR / 'lib-sql'),
- config_dir=str(SRC_DIR / 'settings'),
- cli_args=args)
+ return nominatim.cli.nominatim(module_dir='MODULE NOT AVAILABLE',
+ osm2pgsql_path='OSM2PGSQL NOT AVAILABLE',
+ phplib_dir=str(SRC_DIR / 'lib-php'),
+ data_dir=str(SRC_DIR / 'data'),
+ phpcgi_path='/usr/bin/php-cgi',
+ sqllib_dir=str(SRC_DIR / 'lib-sql'),
+ config_dir=str(SRC_DIR / 'settings'),
+ cli_args=args)
return _call_nominatim
@pytest.fixture
-def property_table(table_factory):
+def property_table(table_factory, temp_db_conn):
table_factory('nominatim_properties', 'property TEXT, value TEXT')
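+ # Return a small helper so tests can write entries into nominatim_properties directly.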
+ return mocks.MockPropertyTable(temp_db_conn)
+
@pytest.fixture
def status_table(table_factory):
main_data='', main_index=''))
@pytest.fixture
-def sql_preprocessor(temp_db_conn, tmp_path, monkeypatch, table_factory):
+def sql_preprocessor(temp_db_conn, tmp_path, table_factory):
table_factory('country_name', 'partition INT', ((0, ), (1, ), (2, )))
cfg = Configuration(None, SRC_DIR.resolve() / 'settings')
cfg.set_libdirs(module='.', osm2pgsql='.', php=SRC_DIR / 'lib-php',
@pytest.fixture
-def tokenizer_mock(monkeypatch, property_table, temp_db_conn, tmp_path):
+def tokenizer_mock(monkeypatch, property_table):
""" Sets up the configuration so that the test dummy tokenizer will be
loaded when the tokenizer factory is used. Also returns a factory
with which a new dummy tokenizer may be created.
"""
monkeypatch.setenv('NOMINATIM_TOKENIZER', 'dummy')
- def _import_dummy(module, *args, **kwargs):
+ def _import_dummy(*args, **kwargs):
return dummy_tokenizer
monkeypatch.setattr(nominatim.tokenizer.factory, "_import_tokenizer", _import_dummy)
- properties.set_property(temp_db_conn, 'tokenizer', 'dummy')
+ property_table.set('tokenizer', 'dummy')
def _create_tokenizer():
return dummy_tokenizer.DummyTokenizer(None, None)
def init_new_db(self, *args, **kwargs):
- assert self.init_state == None
+ assert self.init_state is None
self.init_state = "new"
def init_from_project(self):
- assert self.init_state == None
+ assert self.init_state is None
self.init_state = "loaded"
- def finalize_import(self, _):
+ @staticmethod
+ def finalize_import(_):
pass
def close(self):
pass
- def normalize_postcode(self, postcode):
+ @staticmethod
+ def normalize_postcode(postcode):
return postcode
- def update_postcodes_from_db(self):
+ @staticmethod
+ def update_postcodes_from_db():
pass
def update_special_phrases(self, phrases, should_replace):
def add_country_names(self, code, names):
self.analyser_cache['countries'].append((code, names))
- def process_place(self, place):
+ @staticmethod
+ def process_place(place):
return {}
import psycopg2.extras
+from nominatim.db import properties
+
class MockParamCapture:
""" Mock that records the parameters with which a function was called
as well as the number of calls.
def __init__(self, retval=0):
self.called = 0
self.return_value = retval
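+ # Remember the arguments of the most recent call for inspection by the tests.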
+ self.last_args = None
+ self.last_kwargs = None
def __call__(self, *args, **kwargs):
self.called += 1
conn.commit()
- def add_special(self, word_token, word, cls, typ, op):
+ def add_special(self, word_token, word, cls, typ, oper):
with self.conn.cursor() as cur:
cur.execute("""INSERT INTO word (word_token, word, class, type, operator)
VALUES (%s, %s, %s, %s, %s)
- """, (word_token, word, cls, typ, op))
+ """, (word_token, word, cls, typ, oper))
self.conn.commit()
admin_level, address, extratags, 'SRID=4326;' + geom,
country))
self.conn.commit()
+
+
+class MockPropertyTable:
+ """ A property table for testing.
+ """
+ def __init__(self, conn):
+ self.conn = conn
+
+
+ def set(self, name, value):
+ """ Set a property in the table to the given value.
+ """
+ properties.set_property(self.conn, name, value)
correct functionality. They use a lot of monkeypatching to avoid executing
the actual functions.
"""
-from pathlib import Path
-
import pytest
import nominatim.db.properties
def test_cli_help(self, capsys):
""" Running nominatim tool without arguments prints help.
"""
- assert 1 == self.call_nominatim()
+ assert self.call_nominatim() == 1
captured = capsys.readouterr()
assert captured.out.startswith('usage:')
(('export',), 'export')
])
def test_legacy_commands_simple(self, mock_run_legacy, command, script):
- assert 0 == self.call_nominatim(*command)
+ assert self.call_nominatim(*command) == 0
assert mock_run_legacy.called == 1
assert mock_run_legacy.last_args[0] == script + '.php'
def test_admin_command_legacy(self, mock_func_factory, params):
mock_run_legacy = mock_func_factory(nominatim.clicmd.admin, 'run_legacy_script')
- assert 0 == self.call_nominatim('admin', *params)
+ assert self.call_nominatim('admin', *params) == 0
assert mock_run_legacy.called == 1
def test_admin_command_check_database(self, mock_func_factory):
mock = mock_func_factory(nominatim.tools.check_database, 'check_database')
- assert 0 == self.call_nominatim('admin', '--check-database')
+ assert self.call_nominatim('admin', '--check-database') == 0
assert mock.called == 1
@pytest.mark.parametrize("name,oid", [('file', 'foo.osm'), ('diff', 'foo.osc'),
('node', 12), ('way', 8), ('relation', 32)])
def test_add_data_command(self, mock_run_legacy, name, oid):
- assert 0 == self.call_nominatim('add-data', '--' + name, str(oid))
+ assert self.call_nominatim('add-data', '--' + name, str(oid)) == 0
assert mock_run_legacy.called == 1
assert mock_run_legacy.last_args == ('update.php', '--import-' + name, oid)
assert func.called == 1
- @pytest.mark.parametrize("params", [
- ('search', '--query', 'new'),
- ('reverse', '--lat', '0', '--lon', '0'),
- ('lookup', '--id', 'N1'),
- ('details', '--node', '1'),
- ('details', '--way', '1'),
- ('details', '--relation', '1'),
- ('details', '--place_id', '10001'),
- ('status',)
- ])
+ @pytest.mark.parametrize("params", [('search', '--query', 'new'),
+ ('reverse', '--lat', '0', '--lon', '0'),
+ ('lookup', '--id', 'N1'),
+ ('details', '--node', '1'),
+ ('details', '--way', '1'),
+ ('details', '--relation', '1'),
+ ('details', '--place_id', '10001'),
+ ('status',)])
def test_api_commands_simple(self, mock_func_factory, params):
mock_run_api = mock_func_factory(nominatim.clicmd.api, 'run_api_script')
- assert 0 == self.call_nominatim(*params)
+ assert self.call_nominatim(*params) == 0
assert mock_run_api.called == 1
assert mock_run_api.last_args[0] == params[0]
self.finalize_import_called = True
tok = DummyTokenizer()
- monkeypatch.setattr(nominatim.tokenizer.factory, 'get_tokenizer_for_db' ,
+ monkeypatch.setattr(nominatim.tokenizer.factory, 'get_tokenizer_for_db',
lambda *args: tok)
- monkeypatch.setattr(nominatim.tokenizer.factory, 'create_tokenizer' ,
+ monkeypatch.setattr(nominatim.tokenizer.factory, 'create_tokenizer',
lambda *args: tok)
self.tokenizer_mock = tok
def test_import_missing_file(self):
- assert 1 == self.call_nominatim('import', '--osm-file', 'sfsafegweweggdgw.reh.erh')
+ assert self.call_nominatim('import', '--osm-file', 'sfsafegwedgw.reh.erh') == 1
def test_import_bad_file(self):
- assert 1 == self.call_nominatim('import', '--osm-file', '.')
+ assert self.call_nominatim('import', '--osm-file', '.') == 1
def test_import_full(self, mock_func_factory):
cf_mock = mock_func_factory(nominatim.tools.refresh, 'create_functions')
- assert 0 == self.call_nominatim('import', '--osm-file', __file__)
+ assert self.call_nominatim('import', '--osm-file', __file__) == 0
assert self.tokenizer_mock.finalize_import_called
assert cf_mock.called > 1
mock_func_factory(nominatim.db.properties, 'set_property')
]
- assert 0 == self.call_nominatim('import', '--continue', 'load-data')
+ assert self.call_nominatim('import', '--continue', 'load-data') == 0
assert self.tokenizer_mock.finalize_import_called
for mock in mocks:
mock_func_factory(nominatim.db.properties, 'set_property')
]
- assert 0 == self.call_nominatim('import', '--continue', 'indexing')
+ assert self.call_nominatim('import', '--continue', 'indexing') == 0
for mock in mocks:
assert mock.called == 1, "Mock '{}' not called".format(mock.func_name)
assert temp_db_conn.index_exists('idx_placex_pendingsector')
# Calling it again still works for the index
- assert 0 == self.call_nominatim('import', '--continue', 'indexing')
+ assert self.call_nominatim('import', '--continue', 'indexing') == 0
assert temp_db_conn.index_exists('idx_placex_pendingsector')
mock_func_factory(nominatim.db.properties, 'set_property')
]
- assert 0 == self.call_nominatim('import', '--continue', 'db-postprocess')
+ assert self.call_nominatim('import', '--continue', 'db-postprocess') == 0
assert self.tokenizer_mock.finalize_import_called
mock_drop = mock_func_factory(nominatim.tools.freeze, 'drop_update_tables')
mock_flatnode = mock_func_factory(nominatim.tools.freeze, 'drop_flatnode_file')
- assert 0 == self.call_nominatim('freeze')
+ assert self.call_nominatim('freeze') == 0
assert mock_drop.called == 1
assert mock_flatnode.called == 1
def test_admin_command_tool(self, mock_func_factory, func, params):
mock = mock_func_factory(nominatim.tools.admin, func)
- assert 0 == self.call_nominatim('admin', *params)
+ assert self.call_nominatim('admin', *params) == 0
assert mock.called == 1
bnd_mock = mock_func_factory(nominatim.indexer.indexer.Indexer, 'index_boundaries')
rank_mock = mock_func_factory(nominatim.indexer.indexer.Indexer, 'index_by_rank')
- assert 0 == self.call_nominatim('index', *params)
+ assert self.call_nominatim('index', *params) == 0
assert bnd_mock.called == do_bnds
assert rank_mock.called == do_ranks
def test_refresh_command(self, mock_func_factory, command, func):
func_mock = mock_func_factory(nominatim.tools.refresh, func)
- assert 0 == self.call_nominatim('refresh', '--' + command)
+ assert self.call_nominatim('refresh', '--' + command) == 0
assert func_mock.called == 1
func_mock = mock_func_factory(nominatim.tools.postcodes, 'update_postcodes')
idx_mock = mock_func_factory(nominatim.indexer.indexer.Indexer, 'index_postcodes')
- assert 0 == self.call_nominatim('refresh', '--postcodes')
+ assert self.call_nominatim('refresh', '--postcodes') == 0
assert func_mock.called == 1
+ assert idx_mock.called == 1
def test_refresh_create_functions(self, mock_func_factory):
func_mock = mock_func_factory(nominatim.tools.refresh, 'create_functions')
- assert 0 == self.call_nominatim('refresh', '--functions')
+ assert self.call_nominatim('refresh', '--functions') == 0
assert func_mock.called == 1
assert self.tokenizer_mock.update_sql_functions_called
monkeypatch.setattr(nominatim.tools.refresh, 'recompute_importance',
lambda *args, **kwargs: calls.append('update'))
- assert 0 == self.call_nominatim('refresh', '--importance', '--wiki-data')
+ assert self.call_nominatim('refresh', '--importance', '--wiki-data') == 0
assert calls == ['import', 'update']
-
-
-
"""
import datetime as dt
import time
-from pathlib import Path
import pytest
self.finalize_import_called = True
tok = DummyTokenizer()
- monkeypatch.setattr(nominatim.tokenizer.factory, 'get_tokenizer_for_db' ,
+ monkeypatch.setattr(nominatim.tokenizer.factory, 'get_tokenizer_for_db',
lambda *args: tok)
- monkeypatch.setattr(nominatim.tokenizer.factory, 'create_tokenizer' ,
+ monkeypatch.setattr(nominatim.tokenizer.factory, 'create_tokenizer',
lambda *args: tok)
return tok
-@pytest.fixture
-def index_mock(monkeypatch, tokenizer_mock):
- mock = MockParamCapture()
- monkeypatch.setattr(nominatim.indexer.indexer.Indexer, 'index_boundaries', mock)
- monkeypatch.setattr(nominatim.indexer.indexer.Indexer, 'index_by_rank', mock)
-
- return mock
-
-
@pytest.fixture
def mock_func_factory(monkeypatch):
def get_mock(module, func):
@pytest.fixture
def init_status(temp_db_conn, status_table):
status.set_status(temp_db_conn, date=dt.datetime.now(dt.timezone.utc), seq=1)
- return 1
+
+
+@pytest.fixture
+def index_mock(monkeypatch, tokenizer_mock, init_status):
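+ # Patch both indexer entry points with a single capture mock; depending on
+ # init_status ensures an import status row exists before indexing runs.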
+ mock = MockParamCapture()
+ monkeypatch.setattr(nominatim.indexer.indexer.Indexer, 'index_boundaries', mock)
+ monkeypatch.setattr(nominatim.indexer.indexer.Indexer, 'index_by_rank', mock)
+
+ return mock
@pytest.fixture
def test_replication_command(self, mock_func_factory, params, func):
func_mock = mock_func_factory(nominatim.tools.replication, func)
- assert 0 == self.call_nominatim(*params)
+ assert self.call_nominatim(*params) == 0
assert func_mock.called == 1
def test_replication_update_bad_interval_for_geofabrik(self, monkeypatch):
monkeypatch.setenv('NOMINATIM_REPLICATION_URL',
- 'https://download.geofabrik.de/europe/ireland-and-northern-ireland-updates')
+ 'https://download.geofabrik.de/europe/italy-updates')
assert self.call_nominatim() == 1
def test_replication_update_once_no_index(self, update_mock):
- assert 0 == self.call_nominatim('--once', '--no-index')
+ assert self.call_nominatim('--once', '--no-index') == 0
assert str(update_mock.last_args[1]['osm2pgsql']) == 'OSM2PGSQL NOT AVAILABLE'
def test_replication_update_custom_osm2pgsql(self, monkeypatch, update_mock):
monkeypatch.setenv('NOMINATIM_OSM2PGSQL_BINARY', '/secret/osm2pgsql')
- assert 0 == self.call_nominatim('--once', '--no-index')
+ assert self.call_nominatim('--once', '--no-index') == 0
assert str(update_mock.last_args[1]['osm2pgsql']) == '/secret/osm2pgsql'
def test_replication_update_custom_threads(self, update_mock):
- assert 0 == self.call_nominatim('--once', '--no-index', '--threads', '4')
+ assert self.call_nominatim('--once', '--no-index', '--threads', '4') == 0
assert update_mock.last_args[1]['threads'] == 4
- def test_replication_update_continuous(self, monkeypatch, init_status, index_mock):
+ def test_replication_update_continuous(self, monkeypatch, index_mock):
states = [nominatim.tools.replication.UpdateState.UP_TO_DATE,
nominatim.tools.replication.UpdateState.UP_TO_DATE]
monkeypatch.setattr(nominatim.tools.replication, 'update',
assert index_mock.called == 4
- def test_replication_update_continuous_no_change(self, monkeypatch, init_status, index_mock):
+ def test_replication_update_continuous_no_change(self, monkeypatch, index_mock):
states = [nominatim.tools.replication.UpdateState.NO_CHANGES,
nominatim.tools.replication.UpdateState.UP_TO_DATE]
monkeypatch.setattr(nominatim.tools.replication, 'update',
"""
Test for loading dotenv configuration.
"""
-from pathlib import Path
-
import pytest
from nominatim.config import Configuration
def test_get_libpq_dsn_convert_libpq(make_config, monkeypatch):
config = make_config()
- monkeypatch.setenv('NOMINATIM_DATABASE_DSN',
+ monkeypatch.setenv('NOMINATIM_DATABASE_DSN',
'host=localhost dbname=gis password=foo')
assert config.get_libpq_dsn() == 'host=localhost dbname=gis password=foo'
config = make_config()
assert config.DATABASE_MODULE_PATH == ''
- assert config.get_bool('DATABASE_MODULE_PATH') == False
+ assert not config.get_bool('DATABASE_MODULE_PATH')
@pytest.mark.parametrize("value,result", [('0', 0), ('1', 1),
config.get_int('DATABASE_MODULE_PATH')
-def test_get_import_style_intern(make_config, monkeypatch):
+def test_get_import_style_intern(make_config, src_dir, monkeypatch):
config = make_config()
monkeypatch.setenv('NOMINATIM_IMPORT_STYLE', 'street')
- expected = DEFCFG_DIR / 'import-street.style'
+ expected = src_dir / 'settings' / 'import-street.style'
assert config.get_import_style_file() == expected
@pytest.mark.parametrize("value", ['custom', '/foo/bar.stye'])
-def test_get_import_style_intern(make_config, monkeypatch, value):
+def test_get_import_style_extern(make_config, monkeypatch, value):
config = make_config()
monkeypatch.setenv('NOMINATIM_IMPORT_STYLE', value)
import pytest
import psycopg2
-from psycopg2.extras import wait_select
from nominatim.db.async_connection import DBConnection, DeadlockHandler
@pytest.fixture
def conn(temp_db):
- with closing(DBConnection('dbname=' + temp_db)) as c:
- yield c
+ with closing(DBConnection('dbname=' + temp_db)) as connection:
+ yield connection
@pytest.fixture
future.result()
assert len(deadlock_check) == 1
-
-
from nominatim.db.connection import connect, get_pg_env
@pytest.fixture
-def db(temp_db):
- with connect('dbname=' + temp_db) as conn:
+def db(dsn):
+ with connect(dsn) as conn:
yield conn
def test_connection_table_exists(db, table_factory):
- assert db.table_exists('foobar') == False
+ assert not db.table_exists('foobar')
table_factory('foobar')
- assert db.table_exists('foobar') == True
+ assert db.table_exists('foobar')
def test_connection_index_exists(db, table_factory, temp_db_cursor):
- assert db.index_exists('some_index') == False
+ assert not db.index_exists('some_index')
table_factory('foobar')
temp_db_cursor.execute('CREATE INDEX some_index ON foobar(id)')
- assert db.index_exists('some_index') == True
- assert db.index_exists('some_index', table='foobar') == True
- assert db.index_exists('some_index', table='bar') == False
+ assert db.index_exists('some_index')
+ assert db.index_exists('some_index', table='foobar')
+ assert not db.index_exists('some_index', table='bar')
def test_drop_table_existing(db, table_factory):
"""
Tests for SQL preprocessing.
"""
-from pathlib import Path
-
import pytest
@pytest.fixture
import nominatim.db.status
from nominatim.errors import UsageError
-def test_compute_database_date_place_empty(status_table, place_table, temp_db_conn):
- with pytest.raises(UsageError):
- nominatim.db.status.compute_database_date(temp_db_conn)
-
OSM_NODE_DATA = """\
<osm version="0.6" generator="OpenStreetMap server" copyright="OpenStreetMap and contributors" attribution="http://www.openstreetmap.org/copyright" license="http://opendatacommons.org/licenses/odbl/1-0/">
<node id="45673" visible="true" version="1" changeset="2047" timestamp="2006-01-27T22:09:10Z" user="Foo" uid="111" lat="48.7586670" lon="8.1343060">
.replace(tzinfo=dt.timezone.utc)
-def test_compute_database_date_valid(monkeypatch, status_table, place_row, temp_db_conn):
+@pytest.fixture(autouse=True)
+def setup_status_table(status_table):
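+ # Make sure the import status table exists for every test in this module.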
+ pass
+
+
+def test_compute_database_date_place_empty(place_table, temp_db_conn):
+ with pytest.raises(UsageError):
+ nominatim.db.status.compute_database_date(temp_db_conn)
+
+
+def test_compute_database_date_valid(monkeypatch, place_row, temp_db_conn):
place_row(osm_type='N', osm_id=45673)
requested_url = []
assert date == iso_date('2006-01-27T22:09:10')
-def test_compute_database_broken_api(monkeypatch, status_table, place_row, temp_db_conn):
+def test_compute_database_broken_api(monkeypatch, place_row, temp_db_conn):
place_row(osm_type='N', osm_id=45673)
requested_url = []
monkeypatch.setattr(nominatim.db.status, "get_url", mock_url)
with pytest.raises(UsageError):
- date = nominatim.db.status.compute_database_date(temp_db_conn)
+ nominatim.db.status.compute_database_date(temp_db_conn)
-def test_set_status_empty_table(status_table, temp_db_conn, temp_db_cursor):
+def test_set_status_empty_table(temp_db_conn, temp_db_cursor):
date = dt.datetime.fromordinal(1000000).replace(tzinfo=dt.timezone.utc)
nominatim.db.status.set_status(temp_db_conn, date=date)
{(date, None, True)}
-def test_set_status_filled_table(status_table, temp_db_conn, temp_db_cursor):
+def test_set_status_filled_table(temp_db_conn, temp_db_cursor):
date = dt.datetime.fromordinal(1000000).replace(tzinfo=dt.timezone.utc)
nominatim.db.status.set_status(temp_db_conn, date=date)
{(date, 456, False)}
-def test_set_status_missing_date(status_table, temp_db_conn, temp_db_cursor):
+def test_set_status_missing_date(temp_db_conn, temp_db_cursor):
date = dt.datetime.fromordinal(1000000).replace(tzinfo=dt.timezone.utc)
nominatim.db.status.set_status(temp_db_conn, date=date)
{(date, 456, False)}
-def test_get_status_empty_table(status_table, temp_db_conn):
+def test_get_status_empty_table(temp_db_conn):
assert nominatim.db.status.get_status(temp_db_conn) == (None, None, None)
-def test_get_status_success(status_table, temp_db_conn):
+def test_get_status_success(temp_db_conn):
date = dt.datetime.fromordinal(1000000).replace(tzinfo=dt.timezone.utc)
nominatim.db.status.set_status(temp_db_conn, date=date, seq=667, indexed=False)
@pytest.mark.parametrize("old_state", [True, False])
@pytest.mark.parametrize("new_state", [True, False])
-def test_set_indexed(status_table, temp_db_conn, temp_db_cursor, old_state, new_state):
+def test_set_indexed(temp_db_conn, temp_db_cursor, old_state, new_state):
date = dt.datetime.fromordinal(1000000).replace(tzinfo=dt.timezone.utc)
nominatim.db.status.set_status(temp_db_conn, date=date, indexed=old_state)
nominatim.db.status.set_indexed(temp_db_conn, new_state)
assert temp_db_cursor.scalar("SELECT indexed FROM import_status") == new_state
-def test_set_indexed_empty_status(status_table, temp_db_conn, temp_db_cursor):
+def test_set_indexed_empty_status(temp_db_conn, temp_db_cursor):
nominatim.db.status.set_indexed(temp_db_conn, True)
assert temp_db_cursor.table_rows("import_status") == 0
-def text_log_status(status_table, temp_db_conn):
+def test_log_status(temp_db_conn, temp_db_cursor):
date = dt.datetime.fromordinal(1000000).replace(tzinfo=dt.timezone.utc)
start = dt.datetime.now() - dt.timedelta(hours=1)
+
nominatim.db.status.set_status(temp_db_conn, date=date, seq=56)
nominatim.db.status.log_status(temp_db_conn, start, 'index')
+ temp_db_conn.commit()
+
assert temp_db_cursor.table_rows("import_osmosis_log") == 1
- assert temp_db_cursor.scalar("SELECT seq FROM import_osmosis_log") == 56
- assert temp_db_cursor.scalar("SELECT date FROM import_osmosis_log") == date
+ assert temp_db_cursor.scalar("SELECT batchseq FROM import_osmosis_log") == 56
+ assert temp_db_cursor.scalar("SELECT event FROM import_osmosis_log") == 'index'
"""
Tests for DB utility functions in db.utils
"""
-import psycopg2
import pytest
import nominatim.db.utils as db_utils
Tests for running the indexing.
"""
import itertools
-import psycopg2
import pytest
from nominatim.indexer import indexer
END;
$$ LANGUAGE plpgsql STABLE;
""")
- cur.execute("""CREATE OR REPLACE FUNCTION get_interpolation_address(in_address HSTORE, wayid BIGINT)
+ cur.execute("""CREATE OR REPLACE FUNCTION
+ get_interpolation_address(in_address HSTORE, wayid BIGINT)
RETURNS HSTORE AS $$
BEGIN
RETURN in_address;
return self.scalar('SELECT count(*) from placex where indexed_status > 0')
def osmline_unindexed(self):
- return self.scalar('SELECT count(*) from location_property_osmline where indexed_status > 0')
+ return self.scalar("""SELECT count(*) from location_property_osmline
+ WHERE indexed_status > 0""")
@pytest.fixture
test_db.add_place(rank_address=rank, rank_search=rank)
test_db.add_osmline()
- assert 31 == test_db.placex_unindexed()
- assert 1 == test_db.osmline_unindexed()
+ assert test_db.placex_unindexed() == 31
+ assert test_db.osmline_unindexed() == 1
idx = indexer.Indexer('dbname=test_nominatim_python_unittest', test_tokenizer, threads)
idx.index_by_rank(0, 30)
- assert 0 == test_db.placex_unindexed()
- assert 0 == test_db.osmline_unindexed()
+ assert test_db.placex_unindexed() == 0
+ assert test_db.osmline_unindexed() == 0
- assert 0 == test_db.scalar("""SELECT count(*) from placex
- WHERE indexed_status = 0 and indexed_date is null""")
+ assert test_db.scalar("""SELECT count(*) from placex
+ WHERE indexed_status = 0 and indexed_date is null""") == 0
# ranks come in order of rank address
- assert 0 == test_db.scalar("""
+ assert test_db.scalar("""
SELECT count(*) FROM placex p WHERE rank_address > 0
AND indexed_date >= (SELECT min(indexed_date) FROM placex o
- WHERE p.rank_address < o.rank_address)""")
+ WHERE p.rank_address < o.rank_address)""") == 0
# placex rank < 30 objects come before interpolations
- assert 0 == test_db.scalar(
+ assert test_db.scalar(
"""SELECT count(*) FROM placex WHERE rank_address < 30
- AND indexed_date > (SELECT min(indexed_date) FROM location_property_osmline)""")
+ AND indexed_date >
+ (SELECT min(indexed_date) FROM location_property_osmline)""") == 0
# placex rank = 30 objects come after interpolations
- assert 0 == test_db.scalar(
+ assert test_db.scalar(
"""SELECT count(*) FROM placex WHERE rank_address = 30
- AND indexed_date < (SELECT max(indexed_date) FROM location_property_osmline)""")
+ AND indexed_date <
+ (SELECT max(indexed_date) FROM location_property_osmline)""") == 0
# rank 0 comes after rank 29 and before rank 30
- assert 0 == test_db.scalar(
+ assert test_db.scalar(
"""SELECT count(*) FROM placex WHERE rank_address < 30
- AND indexed_date > (SELECT min(indexed_date) FROM placex WHERE rank_address = 0)""")
- assert 0 == test_db.scalar(
+ AND indexed_date >
+ (SELECT min(indexed_date) FROM placex WHERE rank_address = 0)""") == 0
+ assert test_db.scalar(
"""SELECT count(*) FROM placex WHERE rank_address = 30
- AND indexed_date < (SELECT max(indexed_date) FROM placex WHERE rank_address = 0)""")
+ AND indexed_date <
+ (SELECT max(indexed_date) FROM placex WHERE rank_address = 0)""") == 0
@pytest.mark.parametrize("threads", [1, 15])
test_db.add_place(rank_address=rank, rank_search=rank)
test_db.add_osmline()
- assert 31 == test_db.placex_unindexed()
- assert 1 == test_db.osmline_unindexed()
+ assert test_db.placex_unindexed() == 31
+ assert test_db.osmline_unindexed() == 1
idx = indexer.Indexer('dbname=test_nominatim_python_unittest',
test_tokenizer, threads)
idx.index_by_rank(4, 15)
- assert 19 == test_db.placex_unindexed()
- assert 1 == test_db.osmline_unindexed()
+ assert test_db.placex_unindexed() == 19
+ assert test_db.osmline_unindexed() == 1
- assert 0 == test_db.scalar("""
+ assert test_db.scalar("""
SELECT count(*) FROM placex
- WHERE indexed_status = 0 AND not rank_address between 4 and 15""")
+ WHERE indexed_status = 0 AND not rank_address between 4 and 15""") == 0
@pytest.mark.parametrize("threads", [1, 15])
test_db.add_place(rank_address=rank, rank_search=rank)
test_db.add_osmline()
- assert 31 == test_db.placex_unindexed()
- assert 1 == test_db.osmline_unindexed()
+ assert test_db.placex_unindexed() == 31
+ assert test_db.osmline_unindexed() == 1
idx = indexer.Indexer('dbname=test_nominatim_python_unittest', test_tokenizer, threads)
idx.index_by_rank(28, 30)
- assert 27 == test_db.placex_unindexed()
- assert 0 == test_db.osmline_unindexed()
+ assert test_db.placex_unindexed() == 27
+ assert test_db.osmline_unindexed() == 0
- assert 0 == test_db.scalar("""
+ assert test_db.scalar("""
SELECT count(*) FROM placex
- WHERE indexed_status = 0 AND rank_address between 1 and 27""")
+ WHERE indexed_status = 0 AND rank_address between 1 and 27""") == 0
@pytest.mark.parametrize("threads", [1, 15])
def test_index_boundaries(test_db, threads, test_tokenizer):
test_db.add_place(rank_address=rank, rank_search=rank)
test_db.add_osmline()
- assert 37 == test_db.placex_unindexed()
- assert 1 == test_db.osmline_unindexed()
+ assert test_db.placex_unindexed() == 37
+ assert test_db.osmline_unindexed() == 1
idx = indexer.Indexer('dbname=test_nominatim_python_unittest', test_tokenizer, threads)
idx.index_boundaries(0, 30)
- assert 31 == test_db.placex_unindexed()
- assert 1 == test_db.osmline_unindexed()
+ assert test_db.placex_unindexed() == 31
+ assert test_db.osmline_unindexed() == 1
- assert 0 == test_db.scalar("""
+ assert test_db.scalar("""
SELECT count(*) FROM placex
- WHERE indexed_status = 0 AND class != 'boundary'""")
+ WHERE indexed_status = 0 AND class != 'boundary'""") == 0
@pytest.mark.parametrize("threads", [1, 15])
idx = indexer.Indexer('dbname=test_nominatim_python_unittest', test_tokenizer, threads)
idx.index_postcodes()
- assert 0 == test_db.scalar("""SELECT count(*) FROM location_postcode
- WHERE indexed_status != 0""")
+ assert test_db.scalar("""SELECT count(*) FROM location_postcode
+ WHERE indexed_status != 0""") == 0
@pytest.mark.parametrize("analyse", [True, False])
idx = indexer.Indexer('dbname=test_nominatim_python_unittest', test_tokenizer, 4)
idx.index_full(analyse=analyse)
- assert 0 == test_db.placex_unindexed()
- assert 0 == test_db.osmline_unindexed()
- assert 0 == test_db.scalar("""SELECT count(*) FROM location_postcode
- WHERE indexed_status != 0""")
+ assert test_db.placex_unindexed() == 0
+ assert test_db.osmline_unindexed() == 0
+ assert test_db.scalar("""SELECT count(*) FROM location_postcode
+ WHERE indexed_status != 0""") == 0
@pytest.mark.parametrize("threads", [1, 15])
idx = indexer.Indexer('dbname=test_nominatim_python_unittest', test_tokenizer, threads)
idx.index_by_rank(28, 30)
- assert 0 == test_db.placex_unindexed()
+ assert test_db.placex_unindexed() == 0
"""
Tests for creating new tokenizers.
"""
-import importlib
import pytest
from nominatim.db import properties
from dummy_tokenizer import DummyTokenizer
@pytest.fixture
-def test_config(def_config, tmp_path):
+def test_config(def_config, tmp_path, property_table, tokenizer_mock):
def_config.project_dir = tmp_path
return def_config
-def test_setup_dummy_tokenizer(temp_db_conn, test_config,
- tokenizer_mock, property_table):
+def test_setup_dummy_tokenizer(temp_db_conn, test_config):
tokenizer = factory.create_tokenizer(test_config)
assert isinstance(tokenizer, DummyTokenizer)
assert properties.get_property(temp_db_conn, 'tokenizer') == 'dummy'
-def test_setup_tokenizer_dir_exists(test_config, tokenizer_mock, property_table):
+def test_setup_tokenizer_dir_exists(test_config):
(test_config.project_dir / 'tokenizer').mkdir()
tokenizer = factory.create_tokenizer(test_config)
assert tokenizer.init_state == "new"
-def test_setup_tokenizer_dir_failure(test_config, tokenizer_mock, property_table):
+def test_setup_tokenizer_dir_failure(test_config):
(test_config.project_dir / 'tokenizer').write_text("foo")
with pytest.raises(UsageError):
factory.create_tokenizer(test_config)
-def test_setup_bad_tokenizer_name(test_config, monkeypatch):
+def test_setup_bad_tokenizer_name(def_config, tmp_path, monkeypatch):
+ def_config.project_dir = tmp_path
monkeypatch.setenv('NOMINATIM_TOKENIZER', 'dummy')
with pytest.raises(UsageError):
- factory.create_tokenizer(test_config)
+ factory.create_tokenizer(def_config)
-def test_load_tokenizer(temp_db_conn, test_config,
- tokenizer_mock, property_table):
+
+def test_load_tokenizer(test_config):
factory.create_tokenizer(test_config)
tokenizer = factory.get_tokenizer_for_db(test_config)
assert tokenizer.init_state == "loaded"
-def test_load_no_tokenizer_dir(test_config, tokenizer_mock, property_table):
+def test_load_no_tokenizer_dir(test_config):
factory.create_tokenizer(test_config)
test_config.project_dir = test_config.project_dir / 'foo'
factory.get_tokenizer_for_db(test_config)
-def test_load_missing_propoerty(temp_db_cursor, test_config, tokenizer_mock, property_table):
+def test_load_missing_property(temp_db_cursor, test_config):
factory.create_tokenizer(test_config)
temp_db_cursor.execute("TRUNCATE TABLE nominatim_properties")
with pytest.raises(UsageError):
factory.get_tokenizer_for_db(test_config)
-
@pytest.fixture
def tokenizer_setup(tokenizer_factory, test_config, monkeypatch, sql_preprocessor):
- monkeypatch.setattr(legacy_tokenizer, '_check_module' , lambda m, c: None)
+ monkeypatch.setattr(legacy_tokenizer, '_check_module', lambda m, c: None)
tok = tokenizer_factory()
tok.init_new_db(test_config)
RETURNS INTEGER AS $$ SELECT 342; $$ LANGUAGE SQL;
""")
- monkeypatch.setattr(legacy_tokenizer, '_check_module' , lambda m, c: None)
+ monkeypatch.setattr(legacy_tokenizer, '_check_module', lambda m, c: None)
monkeypatch.setenv('NOMINATIM_TERM_NORMALIZATION', ':: lower();')
tok = tokenizer_factory()
tok.init_new_db(test_config)
$$ LANGUAGE SQL""")
-@pytest.fixture
-def create_housenumbers(temp_db_cursor):
- temp_db_cursor.execute("""CREATE OR REPLACE FUNCTION create_housenumbers(
- housenumbers TEXT[],
- OUT tokens TEXT, OUT normtext TEXT)
- AS $$
- SELECT housenumbers::TEXT, array_to_string(housenumbers, ';')
- $$ LANGUAGE SQL""")
-
-
@pytest.fixture
def make_keywords(temp_db_cursor, temp_db_with_extensions):
temp_db_cursor.execute(
def test_init_new(tokenizer_factory, test_config, monkeypatch,
temp_db_conn, sql_preprocessor):
monkeypatch.setenv('NOMINATIM_TERM_NORMALIZATION', 'xxvv')
- monkeypatch.setattr(legacy_tokenizer, '_check_module' , lambda m, c: None)
+ monkeypatch.setattr(legacy_tokenizer, '_check_module', lambda m, c: None)
tok = tokenizer_factory()
tok.init_new_db(test_config)
assert outfile.stat().st_mode == 33261
-def test_init_module_load_failed(tokenizer_factory, test_config,
- monkeypatch, temp_db_conn):
+def test_init_module_load_failed(tokenizer_factory, test_config):
tok = tokenizer_factory()
with pytest.raises(UsageError):
(module_dir/ 'nominatim.so').write_text('CUSTOM nomiantim.so')
monkeypatch.setenv('NOMINATIM_DATABASE_MODULE_PATH', str(module_dir))
- monkeypatch.setattr(legacy_tokenizer, '_check_module' , lambda m, c: None)
+ monkeypatch.setattr(legacy_tokenizer, '_check_module', lambda m, c: None)
tok = tokenizer_factory()
tok.init_new_db(test_config)
tokenizer_factory, test_config, table_factory,
monkeypatch, temp_db_cursor):
monkeypatch.setenv('NOMINATIM_MAX_WORD_FREQUENCY', '1133')
- monkeypatch.setattr(legacy_tokenizer, '_check_module' , lambda m, c: None)
+ monkeypatch.setattr(legacy_tokenizer, '_check_module', lambda m, c: None)
tok = tokenizer_factory()
tok.init_new_db(test_config)
monkeypatch.undo()
def test_migrate_database(tokenizer_factory, test_config, temp_db_conn, monkeypatch):
- monkeypatch.setattr(legacy_tokenizer, '_check_module' , lambda m, c: None)
+ monkeypatch.setattr(legacy_tokenizer, '_check_module', lambda m, c: None)
tok = tokenizer_factory()
tok.migrate_database(test_config)
(' strasse', 'strasse', 'highway', 'primary', 'in')))
-def test_update_special_phrase_delete_all(analyzer, word_table, temp_db_cursor,
- make_standard_name):
+def test_update_special_phrase_delete_all(analyzer, word_table, make_standard_name):
word_table.add_special(' foo', 'foo', 'amenity', 'prison', 'in')
word_table.add_special(' bar', 'bar', 'highway', 'road', None)
assert word_table.count_special() == 0
-def test_update_special_phrases_no_replace(analyzer, word_table, temp_db_cursor,
- make_standard_name):
+def test_update_special_phrases_no_replace(analyzer, word_table, make_standard_name):
word_table.add_special(' foo', 'foo', 'amenity', 'prison', 'in')
word_table.add_special(' bar', 'bar', 'highway', 'road', None)
assert word_table.count_special() == 2
analyzer.update_special_phrases([
- ('prison', 'amenity', 'prison', 'in'),
- ('bar', 'highway', 'road', '-'),
- ('garden', 'leisure', 'garden', 'near')
+ ('prison', 'amenity', 'prison', 'in'),
+ ('bar', 'highway', 'road', '-'),
+ ('garden', 'leisure', 'garden', 'near')
], True)
assert word_table.get_special() \
def test_process_place_names(analyzer, make_keywords):
-
info = analyzer.process_place({'name' : {'name' : 'Soft bAr', 'ref': '34'}})
assert info['names'] == '{1,2,3}'
-@pytest.mark.parametrize('pc', ['12345', 'AB 123', '34-345'])
-def test_process_place_postcode(analyzer, create_postcode_id, word_table, pc):
- info = analyzer.process_place({'address': {'postcode' : pc}})
+@pytest.mark.parametrize('pcode', ['12345', 'AB 123', '34-345'])
+def test_process_place_postcode(analyzer, create_postcode_id, word_table, pcode):
+ analyzer.process_place({'address': {'postcode' : pcode}})
- assert word_table.get_postcodes() == {pc, }
+ assert word_table.get_postcodes() == {pcode, }
-@pytest.mark.parametrize('pc', ['12:23', 'ab;cd;f', '123;836'])
-def test_process_place_bad_postcode(analyzer, create_postcode_id, word_table, pc):
- info = analyzer.process_place({'address': {'postcode' : pc}})
+@pytest.mark.parametrize('pcode', ['12:23', 'ab;cd;f', '123;836'])
+def test_process_place_bad_postcode(analyzer, create_postcode_id, word_table, pcode):
+ analyzer.process_place({'address': {'postcode' : pcode}})
assert not word_table.get_postcodes()
-@pytest.mark.parametrize('hnr', ['123a', '1', '101'])
-def test_process_place_housenumbers_simple(analyzer, create_housenumbers, hnr):
- info = analyzer.process_place({'address': {'housenumber' : hnr}})
+class TestHousenumberName:
+
+ @staticmethod
+ @pytest.fixture(autouse=True)
+ def setup_create_housenumbers(temp_db_cursor):
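+        # Provide a stub SQL function that simply echoes the given house numbers
+        # as the token list and a ';'-separated normalised string.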
+ temp_db_cursor.execute("""CREATE OR REPLACE FUNCTION create_housenumbers(
+ housenumbers TEXT[],
+ OUT tokens TEXT, OUT normtext TEXT)
+ AS $$
+ SELECT housenumbers::TEXT, array_to_string(housenumbers, ';')
+ $$ LANGUAGE SQL""")
+
+
+ @staticmethod
+ @pytest.mark.parametrize('hnr', ['123a', '1', '101'])
+ def test_process_place_housenumbers_simple(analyzer, hnr):
+ info = analyzer.process_place({'address': {'housenumber' : hnr}})
- assert info['hnr'] == hnr
- assert info['hnr_tokens'].startswith("{")
+ assert info['hnr'] == hnr
+ assert info['hnr_tokens'].startswith("{")
-def test_process_place_housenumbers_lists(analyzer, create_housenumbers):
- info = analyzer.process_place({'address': {'conscriptionnumber' : '1; 2;3'}})
+ @staticmethod
+ def test_process_place_housenumbers_lists(analyzer):
+ info = analyzer.process_place({'address': {'conscriptionnumber' : '1; 2;3'}})
- assert set(info['hnr'].split(';')) == set(('1', '2', '3'))
+ assert set(info['hnr'].split(';')) == set(('1', '2', '3'))
-def test_process_place_housenumbers_duplicates(analyzer, create_housenumbers):
- info = analyzer.process_place({'address': {'housenumber' : '134',
- 'conscriptionnumber' : '134',
- 'streetnumber' : '99a'}})
+ @staticmethod
+ def test_process_place_housenumbers_duplicates(analyzer):
+ info = analyzer.process_place({'address': {'housenumber' : '134',
+ 'conscriptionnumber' : '134',
+ 'streetnumber' : '99a'}})
- assert set(info['hnr'].split(';')) == set(('134', '99a'))
+ assert set(info['hnr'].split(';')) == set(('134', '99a'))
return _get_db_property
@pytest.fixture
-def tokenizer_setup(tokenizer_factory, test_config, monkeypatch, sql_preprocessor):
+def tokenizer_setup(tokenizer_factory, test_config):
tok = tokenizer_factory()
tok.init_new_db(test_config)
@pytest.fixture
-def analyzer(tokenizer_factory, test_config, monkeypatch, sql_preprocessor,
- word_table, temp_db_with_extensions, tmp_path):
+def analyzer(tokenizer_factory, test_config, monkeypatch,
+ temp_db_with_extensions, tmp_path):
sql = tmp_path / 'sql' / 'tokenizer' / 'legacy_icu_tokenizer.sql'
sql.write_text("SELECT 'a';")
@pytest.fixture
def getorcreate_term_id(temp_db_cursor):
temp_db_cursor.execute("""CREATE OR REPLACE FUNCTION getorcreate_term_id(lookup_term TEXT)
- RETURNS INTEGER AS $$ SELECT nextval('seq_word')::INTEGER; $$ LANGUAGE SQL""")
+ RETURNS INTEGER AS $$
+ SELECT nextval('seq_word')::INTEGER; $$ LANGUAGE SQL""")
@pytest.fixture
def getorcreate_hnr_id(temp_db_cursor):
temp_db_cursor.execute("""CREATE OR REPLACE FUNCTION getorcreate_hnr_id(lookup_term TEXT)
- RETURNS INTEGER AS $$ SELECT -nextval('seq_word')::INTEGER; $$ LANGUAGE SQL""")
+ RETURNS INTEGER AS $$
+ SELECT -nextval('seq_word')::INTEGER; $$ LANGUAGE SQL""")
-def test_init_new(tokenizer_factory, test_config, monkeypatch, db_prop,
- sql_preprocessor, place_table, word_table):
+def test_init_new(tokenizer_factory, test_config, monkeypatch, db_prop):
monkeypatch.setenv('NOMINATIM_TERM_NORMALIZATION', ':: lower();')
tok = tokenizer_factory()
assert tok.abbreviations is not None
-def test_update_sql_functions(temp_db_conn, db_prop, temp_db_cursor,
+def test_update_sql_functions(db_prop, temp_db_cursor,
tokenizer_factory, test_config, table_factory,
- monkeypatch,
- sql_preprocessor, place_table, word_table):
+ monkeypatch):
monkeypatch.setenv('NOMINATIM_MAX_WORD_FREQUENCY', '1133')
tok = tokenizer_factory()
tok.init_new_db(test_config)
def test_make_standard_word(analyzer):
- with analyzer(abbr=(('STREET', 'ST'), ('tiny', 't'))) as a:
- assert a.make_standard_word('tiny street') == 'TINY ST'
+ with analyzer(abbr=(('STREET', 'ST'), ('tiny', 't'))) as anl:
+ assert anl.make_standard_word('tiny street') == 'TINY ST'
- with analyzer(abbr=(('STRASSE', 'STR'), ('STR', 'ST'))) as a:
- assert a.make_standard_word('Hauptstrasse') == 'HAUPTST'
+ with analyzer(abbr=(('STRASSE', 'STR'), ('STR', 'ST'))) as anl:
+ assert anl.make_standard_word('Hauptstrasse') == 'HAUPTST'
def test_make_standard_hnr(analyzer):
- with analyzer(abbr=(('IV', '4'),)) as a:
- assert a._make_standard_hnr('345') == '345'
- assert a._make_standard_hnr('iv') == 'IV'
+ with analyzer(abbr=(('IV', '4'),)) as anl:
+ assert anl._make_standard_hnr('345') == '345'
+ assert anl._make_standard_hnr('iv') == 'IV'
def test_update_postcodes_from_db_empty(analyzer, table_factory, word_table):
table_factory('location_postcode', 'postcode TEXT',
content=(('1234',), ('12 34',), ('AB23',), ('1234',)))
- with analyzer() as a:
- a.update_postcodes_from_db()
+ with analyzer() as anl:
+ anl.update_postcodes_from_db()
assert word_table.count() == 3
assert word_table.get_postcodes() == {'1234', '12 34', 'AB23'}
word_table.add_postcode(' 1234', '1234')
word_table.add_postcode(' 5678', '5678')
- with analyzer() as a:
- a.update_postcodes_from_db()
+ with analyzer() as anl:
+ anl.update_postcodes_from_db()
assert word_table.count() == 3
assert word_table.get_postcodes() == {'1234', '45BC', 'XX45'}
-def test_update_special_phrase_empty_table(analyzer, word_table, temp_db_cursor):
- with analyzer() as a:
- a.update_special_phrases([
+def test_update_special_phrase_empty_table(analyzer, word_table):
+ with analyzer() as anl:
+ anl.update_special_phrases([
("König bei", "amenity", "royal", "near"),
("Könige", "amenity", "royal", "-"),
("street", "highway", "primary", "in")
], True)
- assert temp_db_cursor.row_set("""SELECT word_token, word, class, type, operator
- FROM word WHERE class != 'place'""") \
- == set(((' KÖNIG BEI', 'könig bei', 'amenity', 'royal', 'near'),
- (' KÖNIGE', 'könige', 'amenity', 'royal', None),
- (' ST', 'street', 'highway', 'primary', 'in')))
+ assert word_table.get_special() \
+ == {(' KÖNIG BEI', 'könig bei', 'amenity', 'royal', 'near'),
+ (' KÖNIGE', 'könige', 'amenity', 'royal', None),
+ (' ST', 'street', 'highway', 'primary', 'in')}
def test_update_special_phrase_delete_all(analyzer, word_table):
assert word_table.count_special() == 2
- with analyzer() as a:
- a.update_special_phrases([], True)
+ with analyzer() as anl:
+ anl.update_special_phrases([], True)
assert word_table.count_special() == 0
assert word_table.count_special() == 2
- with analyzer() as a:
- a.update_special_phrases([], False)
+ with analyzer() as anl:
+ anl.update_special_phrases([], False)
assert word_table.count_special() == 2
assert word_table.count_special() == 2
- with analyzer() as a:
- a.update_special_phrases([
- ('prison', 'amenity', 'prison', 'in'),
- ('bar', 'highway', 'road', '-'),
- ('garden', 'leisure', 'garden', 'near')
+ with analyzer() as anl:
+ anl.update_special_phrases([
+ ('prison', 'amenity', 'prison', 'in'),
+ ('bar', 'highway', 'road', '-'),
+ ('garden', 'leisure', 'garden', 'near')
], True)
assert word_table.get_special() \
def test_process_place_names(analyzer, getorcreate_term_id):
- with analyzer() as a:
- info = a.process_place({'name' : {'name' : 'Soft bAr', 'ref': '34'}})
+ with analyzer() as anl:
+ info = anl.process_place({'name' : {'name' : 'Soft bAr', 'ref': '34'}})
assert info['names'] == '{1,2,3,4,5,6}'
-@pytest.mark.parametrize('pc', ['12345', 'AB 123', '34-345'])
-def test_process_place_postcode(analyzer, word_table, pc):
- with analyzer() as a:
- info = a.process_place({'address': {'postcode' : pc}})
+@pytest.mark.parametrize('pcode', ['12345', 'AB 123', '34-345'])
+def test_process_place_postcode(analyzer, word_table, pcode):
+ with analyzer() as anl:
+ anl.process_place({'address': {'postcode' : pcode}})
- assert word_table.get_postcodes() == {pc, }
+ assert word_table.get_postcodes() == {pcode, }
-@pytest.mark.parametrize('pc', ['12:23', 'ab;cd;f', '123;836'])
-def test_process_place_bad_postcode(analyzer, word_table, pc):
- with analyzer() as a:
- info = a.process_place({'address': {'postcode' : pc}})
+@pytest.mark.parametrize('pcode', ['12:23', 'ab;cd;f', '123;836'])
+def test_process_place_bad_postcode(analyzer, word_table, pcode):
+ with analyzer() as anl:
+ anl.process_place({'address': {'postcode' : pcode}})
assert not word_table.get_postcodes()
@pytest.mark.parametrize('hnr', ['123a', '1', '101'])
def test_process_place_housenumbers_simple(analyzer, hnr, getorcreate_hnr_id):
- with analyzer() as a:
- info = a.process_place({'address': {'housenumber' : hnr}})
+ with analyzer() as anl:
+ info = anl.process_place({'address': {'housenumber' : hnr}})
assert info['hnr'] == hnr.upper()
assert info['hnr_tokens'] == "{-1}"
def test_process_place_housenumbers_lists(analyzer, getorcreate_hnr_id):
- with analyzer() as a:
- info = a.process_place({'address': {'conscriptionnumber' : '1; 2;3'}})
+ with analyzer() as anl:
+ info = anl.process_place({'address': {'conscriptionnumber' : '1; 2;3'}})
assert set(info['hnr'].split(';')) == set(('1', '2', '3'))
assert info['hnr_tokens'] == "{-1,-2,-3}"
def test_process_place_housenumbers_duplicates(analyzer, getorcreate_hnr_id):
- with analyzer() as a:
- info = a.process_place({'address': {'housenumber' : '134',
- 'conscriptionnumber' : '134',
- 'streetnumber' : '99a'}})
+ with analyzer() as anl:
+ info = anl.process_place({'address': {'housenumber' : '134',
+ 'conscriptionnumber' : '134',
+ 'streetnumber' : '99a'}})
assert set(info['hnr'].split(';')) == set(('134', '99A'))
assert info['hnr_tokens'] == "{-1,-2}"
def create_placex_table(placex_table):
""" All tests in this module require the placex table to be set up.
"""
- pass
def test_analyse_indexing_no_objects(temp_db_conn):
def test_check_database_unknown_db(def_config, monkeypatch):
monkeypatch.setenv('NOMINATIM_DATABASE_DSN', 'pgsql:dbname=fjgkhughwgh2423gsags')
- assert 1 == chkdb.check_database(def_config)
+ assert chkdb.check_database(def_config) == 1
def test_check_database_fatal_test(def_config, temp_db):
- assert 1 == chkdb.check_database(def_config)
+ assert chkdb.check_database(def_config) == 1
def test_check_conection_good(temp_db_conn, def_config):
@pytest.mark.parametrize("check_result,state", [(None, chkdb.CheckState.OK),
("Something wrong", chkdb.CheckState.FAIL)])
-def test_check_tokenizer(tokenizer_mock, temp_db_conn, def_config, monkeypatch,
+def test_check_tokenizer(temp_db_conn, def_config, monkeypatch,
check_result, state):
class _TestTokenizer:
- def check_database(self):
+ @staticmethod
+ def check_database():
return check_result
monkeypatch.setattr(chkdb.tokenizer_factory, 'get_tokenizer_for_db',
- lambda *a, **k: _TestTokenizer())
+ lambda *a, **k: _TestTokenizer())
assert chkdb.check_tokenizer(temp_db_conn, def_config) == state
def test_check_tiger_table_disabled(temp_db_conn, def_config, monkeypatch):
- monkeypatch.setenv('NOMINATIM_USE_US_TIGER_DATA' , 'no')
+ monkeypatch.setenv('NOMINATIM_USE_US_TIGER_DATA', 'no')
assert chkdb.check_tiger_table(temp_db_conn, def_config) == chkdb.CheckState.NOT_APPLICABLE
def test_check_tiger_table_enabled(temp_db_cursor, temp_db_conn, def_config, monkeypatch):
- monkeypatch.setenv('NOMINATIM_USE_US_TIGER_DATA' , 'yes')
+ monkeypatch.setenv('NOMINATIM_USE_US_TIGER_DATA', 'yes')
assert chkdb.check_tiger_table(temp_db_conn, def_config) == chkdb.CheckState.FAIL
temp_db_cursor.execute('CREATE TABLE location_property_tiger (place_id int)')
temp_db_cursor.execute('INSERT INTO location_property_tiger VALUES (1), (2)')
assert chkdb.check_tiger_table(temp_db_conn, def_config) == chkdb.CheckState.OK
-
"""
Tests for functions to import a new database.
"""
+from pathlib import Path
+
import pytest
import psycopg2
-import sys
-from pathlib import Path
from nominatim.tools import database_import
from nominatim.errors import UsageError
try:
with conn.cursor() as cur:
cur.execute("SELECT distinct partition FROM country_name")
- partitions = set([r[0] for r in list(cur)])
+            partitions = {r[0] for r in cur}
if no_partitions:
- assert partitions == set([0])
+                assert partitions == {0}
else:
assert len(partitions) > 10
finally:
@pytest.mark.parametrize("threads", (1, 5))
-def test_load_data(dsn, src_dir, place_row, placex_table, osmline_table,
+def test_load_data(dsn, place_row, placex_table, osmline_table,
word_table, temp_db_cursor, threads):
for func in ('precompute_words', 'getorcreate_housenumber_id', 'make_standard_name'):
temp_db_cursor.execute("""CREATE FUNCTION {} (src TEXT)
def test_run_legacy_return_dont_throw_on_success(self):
fname = self.mk_script('exit(0);')
- assert 0 == exec_utils.run_legacy_script(fname, nominatim_env=self.testenv,
- throw_on_fail=True)
+ assert exec_utils.run_legacy_script(fname, nominatim_env=self.testenv,
+ throw_on_fail=True) == 0
def test_run_legacy_use_given_module_path(self):
fname = self.mk_script("exit($_SERVER['NOMINATIM_DATABASE_MODULE_PATH'] == '' ? 0 : 23);")
- assert 0 == exec_utils.run_legacy_script(fname, nominatim_env=self.testenv)
+ assert exec_utils.run_legacy_script(fname, nominatim_env=self.testenv) == 0
def test_run_legacy_do_not_overwrite_module_path(self, monkeypatch):
monkeypatch.setenv('NOMINATIM_DATABASE_MODULE_PATH', 'other')
- fname = self.mk_script("exit($_SERVER['NOMINATIM_DATABASE_MODULE_PATH'] == 'other' ? 0 : 1);")
+ fname = self.mk_script(
+ "exit($_SERVER['NOMINATIM_DATABASE_MODULE_PATH'] == 'other' ? 0 : 1);")
- assert 0 == exec_utils.run_legacy_script(fname, nominatim_env=self.testenv)
+ assert exec_utils.run_legacy_script(fname, nominatim_env=self.testenv) == 0
class TestRunApiScript:
+ @staticmethod
@pytest.fixture(autouse=True)
- def setup_project_dir(self, tmp_path):
+ def setup_project_dir(tmp_path):
webdir = tmp_path / 'website'
webdir.mkdir()
(webdir / 'test.php').write_text("<?php\necho 'OK\n';")
- def test_run_api(self, tmp_path):
- assert 0 == exec_utils.run_api_script('test', tmp_path)
+ @staticmethod
+ def test_run_api(tmp_path):
+ assert exec_utils.run_api_script('test', tmp_path) == 0
- def test_run_api_execution_error(self, tmp_path):
- assert 0 != exec_utils.run_api_script('badname', tmp_path)
+ @staticmethod
+ def test_run_api_execution_error(tmp_path):
+ assert exec_utils.run_api_script('badname', tmp_path) != 0
- def test_run_api_with_extra_env(self, tmp_path):
+ @staticmethod
+ def test_run_api_with_extra_env(tmp_path):
extra_env = dict(SCRIPT_FILENAME=str(tmp_path / 'website' / 'test.php'))
- assert 0 == exec_utils.run_api_script('badname', tmp_path,
- extra_env=extra_env)
+ assert exec_utils.run_api_script('badname', tmp_path, extra_env=extra_env) == 0
### run_osm2pgsql
"""
Tests for freeze functions (removing unused database parts).
"""
-import pytest
-
from nominatim.tools import freeze
NOMINATIM_RUNTIME_TABLES = [
import pytest
from nominatim.tools.special_phrases.sp_importer import SPImporter
from nominatim.tools.special_phrases.sp_wiki_loader import SPWikiLoader
-from nominatim.tools.special_phrases.sp_csv_loader import SPCsvLoader
from nominatim.tools.special_phrases.special_phrase import SpecialPhrase
from nominatim.errors import UsageError
"""
return the content of the static xml test file.
"""
- xml_test_content_path = (src_dir / 'test' / 'testdata' / 'special_phrases_test_content.txt').resolve()
- return xml_test_content_path.read_text()
+ xml_test_content = src_dir / 'test' / 'testdata' / 'special_phrases_test_content.txt'
+ return xml_test_content.read_text()
@pytest.fixture
return MockPostcodeTable(temp_db_conn)
-def test_import_postcodes_empty(dsn, postcode_table, tmp_path, tokenizer):
+def test_postcodes_empty(dsn, postcode_table, tmp_path, tokenizer):
postcodes.update_postcodes(dsn, tmp_path, tokenizer)
assert not postcode_table.row_set
-def test_import_postcodes_add_new(dsn, placex_table, postcode_table, tmp_path, tokenizer):
+def test_postcodes_add_new(dsn, placex_table, postcode_table, tmp_path, tokenizer):
placex_table.add(country='xx', geom='POINT(10 12)',
address=dict(postcode='9486'))
postcode_table.add('yy', '9486', 99, 34)
assert postcode_table.row_set == {('xx', '9486', 10, 12), }
-def test_import_postcodes_replace_coordinates(dsn, placex_table, postcode_table, tmp_path, tokenizer):
+def test_postcodes_replace_coordinates(dsn, placex_table, postcode_table,
+ tmp_path, tokenizer):
placex_table.add(country='xx', geom='POINT(10 12)',
address=dict(postcode='AB 4511'))
postcode_table.add('xx', 'AB 4511', 99, 34)
assert postcode_table.row_set == {('xx', 'AB 4511', 10, 12)}
-def test_import_postcodes_replace_coordinates_close(dsn, placex_table, postcode_table, tmp_path, tokenizer):
+def test_postcodes_replace_coordinates_close(dsn, placex_table, postcode_table,
+ tmp_path, tokenizer):
placex_table.add(country='xx', geom='POINT(10 12)',
address=dict(postcode='AB 4511'))
postcode_table.add('xx', 'AB 4511', 10, 11.99999)
assert postcode_table.row_set == {('xx', 'AB 4511', 10, 11.99999)}
-def test_import_postcodes_remove(dsn, placex_table, postcode_table, tmp_path, tokenizer):
+def test_postcodes_remove(dsn, placex_table, postcode_table, tmp_path, tokenizer):
placex_table.add(country='xx', geom='POINT(10 12)',
address=dict(postcode='AB 4511'))
postcode_table.add('xx', 'badname', 10, 12)
assert postcode_table.row_set == {('xx', 'AB 4511', 10, 12)}
-def test_import_postcodes_ignore_empty_country(dsn, placex_table, postcode_table, tmp_path, tokenizer):
+def test_postcodes_ignore_empty_country(dsn, placex_table, postcode_table, tmp_path, tokenizer):
placex_table.add(country=None, geom='POINT(10 12)',
address=dict(postcode='AB 4511'))
assert not postcode_table.row_set
-def test_import_postcodes_remove_all(dsn, placex_table, postcode_table, tmp_path, tokenizer):
+def test_postcodes_remove_all(dsn, postcode_table, tmp_path, tokenizer):
postcode_table.add('ch', '5613', 10, 12)
postcodes.update_postcodes(dsn, tmp_path, tokenizer)
assert not postcode_table.row_set
-def test_import_postcodes_multi_country(dsn, placex_table, postcode_table, tmp_path, tokenizer):
+def test_postcodes_multi_country(dsn, placex_table, postcode_table, tmp_path, tokenizer):
placex_table.add(country='de', geom='POINT(10 12)',
address=dict(postcode='54451'))
placex_table.add(country='cc', geom='POINT(100 56)',
@pytest.mark.parametrize("gzipped", [True, False])
-def test_import_postcodes_extern(dsn, placex_table, postcode_table, tmp_path,
- tokenizer, gzipped):
+def test_postcodes_extern(dsn, placex_table, postcode_table, tmp_path,
+ tokenizer, gzipped):
placex_table.add(country='xx', geom='POINT(10 12)',
address=dict(postcode='AB 4511'))
('xx', 'CD 4511', -10, -5)}
-def test_import_postcodes_extern_bad_column(dsn, placex_table, postcode_table,
- tmp_path, tokenizer):
+def test_postcodes_extern_bad_column(dsn, placex_table, postcode_table,
+ tmp_path, tokenizer):
placex_table.add(country='xx', geom='POINT(10 12)',
address=dict(postcode='AB 4511'))
assert postcode_table.row_set == {('xx', 'AB 4511', 10, 12)}
-def test_import_postcodes_extern_bad_number(dsn, placex_table, postcode_table,
- tmp_path, tokenizer):
+def test_postcodes_extern_bad_number(dsn, placex_table, postcode_table,
+ tmp_path, tokenizer):
placex_table.add(country='xx', geom='POINT(10 12)',
address=dict(postcode='AB 4511'))
from nominatim.tools import refresh
def test_refresh_import_wikipedia_not_existing(dsn):
- assert 1 == refresh.import_wikipedia_articles(dsn, Path('.'))
+ assert refresh.import_wikipedia_articles(dsn, Path('.')) == 1
@pytest.mark.parametrize("replace", (True, False))
table_factory('wikipedia_redirect')
# use the small wikipedia file for the API testdb
- assert 0 == refresh.import_wikipedia_articles(dsn, src_dir / 'test' / 'testdb')
+ assert refresh.import_wikipedia_articles(dsn, src_dir / 'test' / 'testdb') == 0
assert temp_db_cursor.table_rows('wikipedia_article') > 0
assert temp_db_cursor.table_rows('wikipedia_redirect') > 0
[{"tags": {"place": {"village": 14}}},
{"countries": ['de'],
"tags": {"place": {"village": 15}}},
- {"countries": ['uk', 'us' ],
+ {"countries": ['uk', 'us'],
"tags": {"place": {"village": 16}}}
])
def test_load_ranks_multiple_keys(temp_db_conn, temp_db_cursor):
load_address_levels(temp_db_conn, 'levels',
- [{"tags":
- {"place": {"city": 14},
- "boundary": {"administrative2" : 4}}
+ [{"tags": {"place": {"city": 14},
+ "boundary": {"administrative2" : 4}}
}])
assert temp_db_cursor.row_set('SELECT * FROM levels') == \
def test_load_ranks_address(temp_db_conn, temp_db_cursor):
load_address_levels(temp_db_conn, 'levels',
- [{"tags":
- {"place": {"city": 14,
- "town" : [14, 13]}}
+ [{"tags": {"place": {"city": 14,
+ "town" : [14, 13]}}
}])
assert temp_db_cursor.row_set('SELECT * FROM levels') == \
@pytest.mark.parametrize("dbg,ret", ((True, 43), (False, 22)))
-def test_create_functions_with_template(temp_db_cursor, conn, def_config, sql_tmp_path, dbg, ret):
+def test_create_functions_with_template(temp_db_cursor, conn, def_config, sql_tmp_path,
+ dbg, ret):
sqlfile = sql_tmp_path / 'functions.sql'
sqlfile.write_text("""CREATE OR REPLACE FUNCTION test() RETURNS INTEGER
AS $$
"""
Tests for setting up the website scripts.
"""
-from pathlib import Path
import subprocess
import pytest
from nominatim.tools import refresh
@pytest.fixture
-def envdir(tmpdir):
- (tmpdir / 'php').mkdir()
- (tmpdir / 'php' / 'website').mkdir()
- return tmpdir
+def test_script(tmp_path):
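+    """ Create a php/website directory tree and return a helper that writes
+        a small PHP test script into it.
+    """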
+ (tmp_path / 'php').mkdir()
+    website_dir = tmp_path / 'php' / 'website'
+ website_dir.mkdir()
-@pytest.fixture
-def test_script(envdir):
def _create_file(code):
- outfile = envdir / 'php' / 'website' / 'reverse-only-search.php'
+ outfile = website_dir / 'reverse-only-search.php'
outfile.write_text('<?php\n{}\n'.format(code), 'utf-8')
return _create_file
-def run_website_script(envdir, config, conn):
- config.lib_dir.php = envdir / 'php'
- config.project_dir = envdir
- refresh.setup_website(envdir, config, conn)
+@pytest.fixture
+def run_website_script(tmp_path, def_config, temp_db_conn):
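+    """ Set up the website scripts in the project directory and run the
+        resulting search.php, returning the exit code of the PHP process.
+    """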
+ def_config.lib_dir.php = tmp_path / 'php'
+ def_config.project_dir = tmp_path
+
+ def _runner():
+ refresh.setup_website(tmp_path, def_config, temp_db_conn)
+
-    proc = subprocess.run(['/usr/bin/env', 'php', '-Cq',
-                           envdir / 'search.php'], check=False)
-    return proc.returncode
+        proc = subprocess.run(['/usr/bin/env', 'php', '-Cq',
+                               tmp_path / 'search.php'], check=False)
+        return proc.returncode
+
+    return _runner
@pytest.mark.parametrize("setting,retval", (('yes', 10), ('no', 20)))
-def test_setup_website_check_bool(def_config, monkeypatch, envdir, test_script,
- setting, retval, temp_db_conn):
+def test_setup_website_check_bool(monkeypatch, test_script, run_website_script,
+ setting, retval):
monkeypatch.setenv('NOMINATIM_CORS_NOACCESSCONTROL', setting)
test_script('exit(CONST_NoAccessControl ? 10 : 20);')
- assert run_website_script(envdir, def_config, temp_db_conn) == retval
+ assert run_website_script() == retval
@pytest.mark.parametrize("setting", (0, 10, 99067))
-def test_setup_website_check_int(def_config, monkeypatch, envdir, test_script, setting,
- temp_db_conn):
+def test_setup_website_check_int(monkeypatch, test_script, run_website_script, setting):
monkeypatch.setenv('NOMINATIM_LOOKUP_MAX_COUNT', str(setting))
test_script('exit(CONST_Places_Max_ID_count == {} ? 10 : 20);'.format(setting))
- assert run_website_script(envdir, def_config, temp_db_conn) == 10
+ assert run_website_script() == 10
-def test_setup_website_check_empty_str(def_config, monkeypatch, envdir, test_script,
- temp_db_conn):
+def test_setup_website_check_empty_str(monkeypatch, test_script, run_website_script):
monkeypatch.setenv('NOMINATIM_DEFAULT_LANGUAGE', '')
test_script('exit(CONST_Default_Language === false ? 10 : 20);')
- assert run_website_script(envdir, def_config, temp_db_conn) == 10
+ assert run_website_script() == 10
-def test_setup_website_check_str(def_config, monkeypatch, envdir, test_script,
- temp_db_conn):
+def test_setup_website_check_str(monkeypatch, test_script, run_website_script):
monkeypatch.setenv('NOMINATIM_DEFAULT_LANGUAGE', 'ffde 2')
test_script('exit(CONST_Default_Language === "ffde 2" ? 10 : 20);')
- assert run_website_script(envdir, def_config, temp_db_conn) == 10
-
-
+ assert run_website_script() == 10
</osm>
"""
+@pytest.fixture(autouse=True)
+def setup_status_table(status_table):
+ pass
+
### init replication
-def test_init_replication_bad_base_url(monkeypatch, status_table, place_row, temp_db_conn):
+def test_init_replication_bad_base_url(monkeypatch, place_row, temp_db_conn):
place_row(osm_type='N', osm_id=100)
- monkeypatch.setattr(nominatim.db.status, "get_url", lambda u : OSM_NODE_DATA)
+ monkeypatch.setattr(nominatim.db.status, "get_url", lambda u: OSM_NODE_DATA)
with pytest.raises(UsageError, match="Failed to reach replication service"):
nominatim.tools.replication.init_replication(temp_db_conn, 'https://test.io')
-def test_init_replication_success(monkeypatch, status_table, place_row, temp_db_conn, temp_db_cursor):
+def test_init_replication_success(monkeypatch, place_row, temp_db_conn, temp_db_cursor):
place_row(osm_type='N', osm_id=100)
- monkeypatch.setattr(nominatim.db.status, "get_url", lambda u : OSM_NODE_DATA)
+ monkeypatch.setattr(nominatim.db.status, "get_url", lambda u: OSM_NODE_DATA)
monkeypatch.setattr(nominatim.tools.replication.ReplicationServer,
"timestamp_to_sequence",
lambda self, date: 234)
### checking for updates
-def test_check_for_updates_empty_status_table(status_table, temp_db_conn):
+def test_check_for_updates_empty_status_table(temp_db_conn):
assert nominatim.tools.replication.check_for_updates(temp_db_conn, 'https://test.io') == 254
-def test_check_for_updates_seq_not_set(status_table, temp_db_conn):
+def test_check_for_updates_seq_not_set(temp_db_conn):
status.set_status(temp_db_conn, dt.datetime.now(dt.timezone.utc))
assert nominatim.tools.replication.check_for_updates(temp_db_conn, 'https://test.io') == 254
-def test_check_for_updates_no_state(monkeypatch, status_table, temp_db_conn):
+def test_check_for_updates_no_state(monkeypatch, temp_db_conn):
status.set_status(temp_db_conn, dt.datetime.now(dt.timezone.utc), seq=345)
monkeypatch.setattr(nominatim.tools.replication.ReplicationServer,
@pytest.mark.parametrize("server_sequence,result", [(344, 2), (345, 2), (346, 0)])
-def test_check_for_updates_no_new_data(monkeypatch, status_table, temp_db_conn,
+def test_check_for_updates_no_new_data(monkeypatch, temp_db_conn,
server_sequence, result):
date = dt.datetime.now(dt.timezone.utc)
status.set_status(temp_db_conn, date, seq=345)
@pytest.fixture
def update_options(tmpdir):
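+    """ Default options for nominatim.tools.replication.update() as used by
+        the tests below.
+    """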
return dict(base_url='https://test.io',
- indexed_only=False,
- update_interval=3600,
- import_file=tmpdir / 'foo.osm',
- max_diff_size=1)
+ indexed_only=False,
+ update_interval=3600,
+ import_file=tmpdir / 'foo.osm',
+ max_diff_size=1)
-def test_update_empty_status_table(status_table, temp_db_conn):
+def test_update_empty_status_table(temp_db_conn):
with pytest.raises(UsageError):
nominatim.tools.replication.update(temp_db_conn, {})
-def test_update_already_indexed(status_table, temp_db_conn):
+def test_update_already_indexed(temp_db_conn):
status.set_status(temp_db_conn, dt.datetime.now(dt.timezone.utc), seq=34, indexed=False)
assert nominatim.tools.replication.update(temp_db_conn, dict(indexed_only=True)) \
== nominatim.tools.replication.UpdateState.MORE_PENDING
-def test_update_no_data_no_sleep(monkeypatch, status_table, temp_db_conn, update_options):
+def test_update_no_data_no_sleep(monkeypatch, temp_db_conn, update_options):
date = dt.datetime.now(dt.timezone.utc) - dt.timedelta(days=1)
status.set_status(temp_db_conn, date, seq=34)
lambda *args, **kwargs: None)
sleeptime = []
- monkeypatch.setattr(time, 'sleep', lambda s: sleeptime.append(s))
+ monkeypatch.setattr(time, 'sleep', sleeptime.append)
assert nominatim.tools.replication.update(temp_db_conn, update_options) \
== nominatim.tools.replication.UpdateState.NO_CHANGES
assert not sleeptime
-def test_update_no_data_sleep(monkeypatch, status_table, temp_db_conn, update_options):
+def test_update_no_data_sleep(monkeypatch, temp_db_conn, update_options):
date = dt.datetime.now(dt.timezone.utc) - dt.timedelta(minutes=30)
status.set_status(temp_db_conn, date, seq=34)
lambda *args, **kwargs: None)
sleeptime = []
- monkeypatch.setattr(time, 'sleep', lambda s: sleeptime.append(s))
+ monkeypatch.setattr(time, 'sleep', sleeptime.append)
assert nominatim.tools.replication.update(temp_db_conn, update_options) \
== nominatim.tools.replication.UpdateState.NO_CHANGES
the right phrases of the sp_csv_test.csv special phrases.
"""
return len(phrases) > 1 \
- and any(p.p_label == 'Billboard' and p.p_class == 'advertising' and p.p_type == 'billboard'
+ and any(p.p_label == 'Billboard'
+ and p.p_class == 'advertising'
+ and p.p_type == 'billboard'
and p.p_operator == '-' for p in phrases) \
- and any(p.p_label == 'Zip Lines' and p.p_class == 'aerialway' and p.p_type == 'zip_line'
+ and any(p.p_label == 'Zip Lines'
+ and p.p_class == 'aerialway'
+ and p.p_type == 'zip_line'
and p.p_operator == '-' for p in phrases)
@pytest.fixture
"""
return the content of the static xml test file.
"""
- xml_test_content_path = (src_dir / 'test' / 'testdata' / 'special_phrases_test_content.txt').resolve()
- with open(xml_test_content_path) as xml_content_reader:
- return xml_content_reader.read()
+ xml_test_content = src_dir / 'test' / 'testdata' / 'special_phrases_test_content.txt'
+ return xml_test_content.read_text()
@pytest.fixture
and p.p_operator == '-' for p in phrases) \
and any(p.p_label == 'Zip Line' and p.p_class == 'aerialway' and p.p_type == 'zip_line'
and p.p_operator == '-' for p in phrases)
-
"""
Test for tiger data function
"""
-from pathlib import Path
+import tarfile
from textwrap import dedent
import pytest
-import tarfile
-from nominatim.tools import tiger_data, database_import
+from nominatim.tools import tiger_data
from nominatim.errors import UsageError
class MockTigerTable:
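+    """ Mock for the Tiger data table and the tiger_line_import() SQL
+        function; imported rows are collected in a plain 'tiger' table.
+    """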
stop INTEGER, interpol TEXT,
token_info JSONB, postcode TEXT)
RETURNS INTEGER AS $$
- INSERT INTO tiger VALUES(linegeo, start, stop, interpol, token_info, postcode) RETURNING 1
+ INSERT INTO tiger VALUES(linegeo, start, stop, interpol, token_info, postcode)
+ RETURNING 1
$$ LANGUAGE SQL;""")
(def_config.lib_dir.sql / 'tiger_import_finish.sql').write_text(
"""DROP FUNCTION tiger_line_import (linegeo GEOMETRY, in_startnumber INTEGER,
tar.add(str(src_dir / 'test' / 'testdb' / 'tiger' / '01001.csv'))
tar.close()
- tiger_data.add_tiger_data(str(tmp_path / 'sample.tar.gz'), def_config, 1,
+ tiger_data.add_tiger_data(str(tmp_path / 'sample.tar.gz'), def_config, threads,
tokenizer_mock())
assert tiger_table.count() == 6213
def test_add_tiger_data_empty_tarfile(def_config, tiger_table, tokenizer_mock,
- tmp_path, src_dir):
+ tmp_path):
tar = tarfile.open(str(tmp_path / 'sample.tar.gz'), "w:gz")
tar.add(__file__)
tar.close()
tokenizer_mock())
assert tiger_table.count() == 0
-