]> git.openstreetmap.org Git - nominatim.git/blobdiff - test/python/dummy_tokenizer.py
Merge pull request #3692 from lonvia/word-lookup-variants
[nominatim.git] / test / python / dummy_tokenizer.py
index 013016c8d0ba7c2e84bddab61bfb3428944a205a..08554129257b74fbc43e526d4621deafac42f27f 100644 (file)
@@ -1,32 +1,45 @@
+# SPDX-License-Identifier: GPL-3.0-or-later
+#
+# This file is part of Nominatim. (https://nominatim.org)
+#
+# Copyright (C) 2025 by the Nominatim developer community.
+# For a full list of authors see the git log.
 """
 Tokenizer for testing.
 """
+from nominatim_db.data.place_info import PlaceInfo
+from nominatim_db.config import Configuration
+
 
def create(dsn, data_dir):
    """ Factory entry point used by the tokenizer loader: build the
        dummy tokenizer for the given database DSN and data directory.
    """
    tokenizer = DummyTokenizer(dsn, data_dir)
    return tokenizer
 
+
class DummyTokenizer:
    """ Minimal stand-in for a real tokenizer that merely records how
        it was initialised, for inspection by the tests.
    """

    def __init__(self, dsn, data_dir):
        self.dsn, self.data_dir = dsn, data_dir
        # Which initialisation path was taken: None (not yet), "new" or "loaded".
        self.init_state = None
        # Shared with every analyzer handed out by name_analyzer().
        self.analyser_cache = {}

    def init_new_db(self, *args, **kwargs):
        # Accepts and ignores any arguments; may only be called once.
        assert self.init_state is None
        self.init_state = "new"

    def init_from_project(self, config):
        # Must receive a real Configuration object and a fresh tokenizer.
        assert isinstance(config, Configuration)
        assert self.init_state is None
        self.init_state = "loaded"

    @staticmethod
    def finalize_import(_):
        # Nothing to finalise for the dummy implementation.
        pass

    def name_analyzer(self):
        # Every analyzer shares the tokenizer-wide cache.
        return DummyNameAnalyzer(self.analyser_cache)
 
 
 class DummyNameAnalyzer:
@@ -37,16 +50,28 @@ class DummyNameAnalyzer:
     def __exit__(self, exc_type, exc_value, traceback):
         self.close()
 
+    def __init__(self, cache):
+        self.analyser_cache = cache
+        cache['countries'] = []
 
     def close(self):
-        """ Free all resources used by the analyzer.
-        """
         pass
 
-    def process_place(self, place):
-        """ Determine tokenizer information about the given place.
+    @staticmethod
+    def normalize_postcode(postcode):
+        return postcode
+
+    @staticmethod
+    def update_postcodes_from_db():
+        pass
+
+    def update_special_phrases(self, phrases, should_replace):
+        self.analyser_cache['special_phrases'] = phrases
+
+    def add_country_names(self, code, names):
+        self.analyser_cache['countries'].append((code, names))
 
-            Returns a JSON-serialisable structure that will be handed into
-            the database via the token_info field.
-        """
+    @staticmethod
+    def process_place(place):
+        assert isinstance(place, PlaceInfo)
         return {}