unify ICUNameProcessorRules and ICURuleLoader
[nominatim.git] / test/python/test_tokenizer_icu_name_processor.py
index ae05988ae42ce4a69ab9942ef8ca39305b151924..d0ed21ecd8b6d8625a967b5b4a7a158360afcec7 100644
@@ -4,15 +4,17 @@ Tests for import name normalisation and variant generation.
 from textwrap import dedent
 
 import pytest
-import yaml
 
 from nominatim.tokenizer.icu_rule_loader import ICURuleLoader
-from nominatim.tokenizer.icu_name_processor import ICUNameProcessor, ICUNameProcessorRules
 
 from nominatim.errors import UsageError
 
 @pytest.fixture
-def cfgfile():
+def cfgfile(def_config, tmp_path):
+    project_dir = tmp_path / 'project_dir'
+    project_dir.mkdir()
+    def_config.project_dir = project_dir
+
     def _create_config(*variants, **kwargs):
         content = dedent("""\
         normalization:
@@ -30,7 +32,9 @@ def cfgfile():
         content += '\n'.join(("      - " + s for s in variants)) + '\n'
         for k, v in kwargs.items():
             content += "    {}: {}\n".format(k, v)
-        return yaml.safe_load(content)
+        (project_dir / 'icu_tokenizer.yaml').write_text(content)
+
+        return def_config
 
     return _create_config
 
@@ -40,10 +44,9 @@ def get_normalized_variants(proc, name):
 
 
 def test_variants_empty(cfgfile):
-    fpath = cfgfile('saint -> 🜵', 'street -> st')
+    config = cfgfile('saint -> 🜵', 'street -> st')
 
-    rules = ICUNameProcessorRules(loader=ICURuleLoader(fpath))
-    proc = ICUNameProcessor(rules)
+    proc = ICURuleLoader(config).make_token_analysis()
 
     assert get_normalized_variants(proc, '🜵') == []
     assert get_normalized_variants(proc, '🜳') == []
@@ -83,8 +86,8 @@ VARIANT_TESTS = [
 
 @pytest.mark.parametrize("rules,name,variants", VARIANT_TESTS)
 def test_variants(cfgfile, rules, name, variants):
-    fpath = cfgfile(*rules)
-    proc = ICUNameProcessor(ICUNameProcessorRules(loader=ICURuleLoader(fpath)))
+    config = cfgfile(*rules)
+    proc = ICURuleLoader(config).make_token_analysis()
 
     result = get_normalized_variants(proc, name)
 
@@ -93,10 +96,8 @@ def test_variants(cfgfile, rules, name, variants):
 
 
 def test_search_normalized(cfgfile):
-    fpath = cfgfile('~street => s,st', 'master => mstr')
-
-    rules = ICUNameProcessorRules(loader=ICURuleLoader(fpath))
-    proc = ICUNameProcessor(rules)
+    config = cfgfile('~street => s,st', 'master => mstr')
+    proc = ICURuleLoader(config).make_token_analysis()
 
     assert proc.get_search_normalized('Master Street') == 'master street'
     assert proc.get_search_normalized('Earnes St') == 'earnes st'
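
The refactoring captured here replaces the two-step ICUNameProcessorRules/ICUNameProcessor construction with a single call on the rule loader. A minimal sketch of the new test-side pattern, mirroring the calls and expected values in the hunks above (the cfgfile fixture writes icu_tokenizer.yaml into a temporary project directory and returns the config that ICURuleLoader reads):

    from nominatim.tokenizer.icu_rule_loader import ICURuleLoader

    def test_example(cfgfile):
        # The fixture generates icu_tokenizer.yaml with these variant rules
        # and returns the config pointing at the temporary project dir.
        config = cfgfile('~street => s,st', 'master => mstr')

        # Previously: ICUNameProcessor(ICUNameProcessorRules(loader=ICURuleLoader(fpath)))
        # Now the loader builds the token analyser directly.
        proc = ICURuleLoader(config).make_token_analysis()

        # Search normalisation lower-cases the name but does not apply variants.
        assert proc.get_search_normalized('Master Street') == 'master street'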