]> git.openstreetmap.org Git - nominatim.git/commitdiff
Merge remote-tracking branch 'upstream/master'
author Sarah Hoffmann <lonvia@denofr.de>
Mon, 24 May 2021 15:54:27 +0000 (17:54 +0200)
committer Sarah Hoffmann <lonvia@denofr.de>
Mon, 24 May 2021 15:54:27 +0000 (17:54 +0200)
54 files changed:
.github/workflows/ci-tests.yml
.pylintrc
docs/admin/Customization.md
docs/admin/Import.md
lib-php/SearchDescription.php
lib-php/website/search.php
lib-sql/tokenizer/legacy_tokenizer.sql
nominatim/clicmd/special_phrases.py
nominatim/tokenizer/factory.py
nominatim/tokenizer/legacy_icu_tokenizer.py
nominatim/tokenizer/legacy_tokenizer.py
nominatim/tools/__init__.py
nominatim/tools/special_phrases/importer_statistics.py
nominatim/tools/special_phrases/sp_csv_loader.py [new file with mode: 0644]
nominatim/tools/special_phrases/sp_importer.py [moved from nominatim/tools/special_phrases/special_phrases_importer.py with 66% similarity]
nominatim/tools/special_phrases/sp_wiki_loader.py [new file with mode: 0644]
nominatim/tools/special_phrases/special_phrase.py [new file with mode: 0644]
test/Makefile
test/bdd/db/import/search_name.feature
test/bdd/steps/nominatim_environment.py
test/python/conftest.py
test/python/cursor.py [new file with mode: 0644]
test/python/dummy_tokenizer.py
test/python/mocks.py
test/python/test_cli.py
test/python/test_cli_replication.py
test/python/test_config.py
test/python/test_db_async_connection.py
test/python/test_db_connection.py
test/python/test_db_properties.py
test/python/test_db_sql_preprocessor.py
test/python/test_db_status.py
test/python/test_db_utils.py
test/python/test_indexing.py
test/python/test_tokenizer_factory.py
test/python/test_tokenizer_legacy.py
test/python/test_tokenizer_legacy_icu.py
test/python/test_tools_admin.py
test/python/test_tools_check_database.py
test/python/test_tools_database_import.py
test/python/test_tools_exec_utils.py
test/python/test_tools_freeze.py
test/python/test_tools_import_special_phrases.py
test/python/test_tools_postcodes.py
test/python/test_tools_refresh.py
test/python/test_tools_refresh_address_levels.py
test/python/test_tools_refresh_create_functions.py
test/python/test_tools_refresh_setup_website.py
test/python/test_tools_replication.py
test/python/test_tools_sp_csv_loader.py [new file with mode: 0644]
test/python/test_tools_sp_wiki_loader.py [new file with mode: 0644]
test/python/test_tools_tiger_data.py
test/testdata/sp_csv_test.csv [new file with mode: 0644]
test/testdb/full_en_phrases_test.csv [new file with mode: 0644]

index 3d473751c01614594b8bec09ea0a8d201d2c84d5..b60507bc4511599922d981dbbae2b4135c8ae222 100644 (file)
@@ -71,6 +71,11 @@ jobs:
                   vendor/bin/phpcov merge --clover ../../coverage-bdd.xml ./cov
               working-directory: Nominatim/test/bdd
 
+            - name: BDD tests (legacy_icu tokenizer)
+              run: |
+                  behave -DREMOVE_TEMPLATE=1 -DBUILDDIR=$GITHUB_WORKSPACE/build -DTOKENIZER=legacy_icu --format=progress3
+              working-directory: Nominatim/test/bdd
+
             - name: Upload coverage to Codecov
               uses: codecov/codecov-action@v1
               with:
index 022243ad5ac1231c8ea8d9502142f230f8994934..28ce1ff42588ddae75268815bdb344aac6761d01 100644 (file)
--- a/.pylintrc
+++ b/.pylintrc
@@ -12,4 +12,4 @@ ignored-modules=icu
 ignored-classes=NominatimArgs,closing
 disable=too-few-public-methods,duplicate-code
 
-good-names=i,x,y,fd
+good-names=i,x,y,fd,db
index 76f0f85a10f3305638f4bdbb7a167682eca15b0b..646465afd1efed1a80e3b32ff38aa62c968b86ab 100644 (file)
@@ -69,3 +69,33 @@ entire US adds about 10GB to your database.
 See the [developer's guide](../develop/data-sources.md#us-census-tiger) for more
 information on how the data got preprocessed.
 
+## Special phrases import
+
+As described in the [Importation chapter](Import.md), it is possible to
+import special phrases from the wiki with the following command:
+
+```sh
+nominatim special-phrases --import-from-wiki
+```
+
+But, it is also possible to import some phrases from a CSV file.
+To do so, you can use the following command:
+
+```sh
+nominatim special-phrases --import-from-csv <csv file>
+```
+
+Note that the two previous import commands will update the phrases in your database.
+This means that if you import some phrases from a CSV file, only the phrases
+present in the CSV file will be kept in the database. All other phrases will
+be removed.
+
+If you want to only add new phrases and not update the existing ones, you can add
+the argument `--no-replace` to the import command. For example:
+
+```sh
+nominatim special-phrases --import-from-csv <csv file> --no-replace
+```
+
+This will add the phrases present in the CSV file to the database without
+removing the existing ones.
index 2686942ebcfd8cd67b723d35eabbcb1455c3f90b..3701df9285112d2ca2cad3bbc193495d97d4a5b9 100644 (file)
@@ -277,7 +277,12 @@ If you want to be able to search for places by their type through
 [special key phrases](https://wiki.openstreetmap.org/wiki/Nominatim/Special_Phrases)
 you also need to import these key phrases like this:
 
-    nominatim special-phrases --import-from-wiki
+```sh
+nominatim special-phrases --import-from-wiki
+```
 
 Note that this command downloads the phrases from the wiki link above. You
 need internet access for the step.
+
+You can also import special phrases from a CSV file. For more
+information, please read the [Customization chapter](Customization.md).
index 189ffa74e99fbca744efc659f98f62426ca7afc2..8da49a9ff0bfc7d2d0343d21391462e8baa95b2b 100644 (file)
@@ -333,7 +333,9 @@ class SearchDescription
     public function extendWithPartialTerm($sToken, $oSearchTerm, $bStructuredPhrases, $iPhrase, $aFullTokens)
     {
         // Only allow name terms.
-        if (!(is_a($oSearchTerm, '\Nominatim\Token\Word'))) {
+        if (!(is_a($oSearchTerm, '\Nominatim\Token\Word'))
+            || strpos($sToken, ' ') !== false
+        ) {
             return array();
         }
 
@@ -361,7 +363,6 @@ class SearchDescription
 
         if ((!$this->sPostcode && !$this->aAddress && !$this->aAddressNonSearch)
             && ((empty($this->aName) && empty($this->aNameNonSearch)) || $this->iNamePhrase == $iPhrase)
-            && strpos($sToken, ' ') === false
         ) {
             $oSearch = clone $this;
             $oSearch->iSearchRank++;
index 72ddebe1e3d32b265775596dc1538d21dad0f612..56064e7a9efdbdb63e607316155d640852cb1834 100644 (file)
@@ -79,7 +79,7 @@ if (isset($_SERVER['REQUEST_SCHEME'])
                 .$_SERVER['HTTP_HOST'].$_SERVER['DOCUMENT_URI'].'/?'
                 .http_build_query($aMoreParams);
 } else {
-    $sMoreURL = '/search.php'.http_build_query($aMoreParams);
+    $sMoreURL = '/search.php?'.http_build_query($aMoreParams);
 }
 
 if (CONST_Debug) exit;
index fe82762e47d53867281e0ec68d5a4e9b74c6a046..a2c6b52073ec007e052b7775a148f6159fa1239d 100644 (file)
@@ -287,26 +287,21 @@ DECLARE
   s TEXT;
   w INTEGER;
   words TEXT[];
-  item RECORD;
+  value TEXT;
   j INTEGER;
 BEGIN
   result := '{}'::INTEGER[];
 
-  FOR item IN SELECT (each(src)).* LOOP
-
-    s := make_standard_name(item.value);
-    w := getorcreate_name_id(s, item.value);
+  FOR value IN SELECT unnest(regexp_split_to_array(svals(src), E'[,;]')) LOOP
+    -- full name
+    s := make_standard_name(value);
+    w := getorcreate_name_id(s, value);
 
     IF not(ARRAY[w] <@ result) THEN
       result := result || w;
     END IF;
 
-    w := getorcreate_word_id(s);
-
-    IF w IS NOT NULL AND NOT (ARRAY[w] <@ result) THEN
-      result := result || w;
-    END IF;
-
+    -- partial single-word terms
     words := string_to_array(s, ' ');
     IF array_upper(words, 1) IS NOT NULL THEN
       FOR j IN 1..array_upper(words, 1) LOOP
@@ -319,24 +314,23 @@ BEGIN
       END LOOP;
     END IF;
 
-    words := regexp_split_to_array(item.value, E'[,;()]');
-    IF array_upper(words, 1) != 1 THEN
-      FOR j IN 1..array_upper(words, 1) LOOP
-        s := make_standard_name(words[j]);
-        IF s != '' THEN
-          w := getorcreate_word_id(s);
-          IF w IS NOT NULL AND NOT (ARRAY[w] <@ result) THEN
-            result := result || w;
-          END IF;
+    -- consider parts before an opening bracket as a full word as well
+    words := regexp_split_to_array(value, E'[(]');
+    IF array_upper(words, 1) > 1 THEN
+      s := make_standard_name(words[1]);
+      IF s != '' THEN
+        w := getorcreate_name_id(s, words[1]);
+        IF w IS NOT NULL AND NOT (ARRAY[w] <@ result) THEN
+          result := result || w;
         END IF;
-      END LOOP;
+      END IF;
     END IF;
 
-    s := regexp_replace(item.value, '市$', '');
-    IF s != item.value THEN
+    s := regexp_replace(value, '市$', '');
+    IF s != value THEN
       s := make_standard_name(s);
       IF s != '' THEN
-        w := getorcreate_name_id(s, item.value);
+        w := getorcreate_name_id(s, value);
         IF NOT (ARRAY[w] <@ result) THEN
           result := result || w;
         END IF;
index 002960feb2049b9d850ea54aeebc7b1644ba4dd1..b20a41010680f020ffe6c4bfa9b63928be175c4f 100644 (file)
@@ -2,8 +2,12 @@
     Implementation of the 'special-phrases' command.
 """
 import logging
-from nominatim.tools import SpecialPhrasesImporter
+from pathlib import Path
+from nominatim.errors import UsageError
 from nominatim.db.connection import connect
+from nominatim.tools.special_phrases.sp_importer import SPImporter
+from nominatim.tools.special_phrases.sp_wiki_loader import SPWikiLoader
+from nominatim.tools.special_phrases.sp_csv_loader import SPCsvLoader
 
 LOG = logging.getLogger()
 
@@ -21,16 +25,36 @@ class ImportSpecialPhrases:
         group = parser.add_argument_group('Input arguments')
         group.add_argument('--import-from-wiki', action='store_true',
                            help='Import special phrases from the OSM wiki to the database.')
+        group.add_argument('--import-from-csv', metavar='FILE',
+                           help='Import special phrases from a CSV file.')
+        group.add_argument('--no-replace', action='store_true',
+                           help='Keep the old phrases and only add the new ones.')
 
     @staticmethod
     def run(args):
-        from ..tokenizer import factory as tokenizer_factory
-
         if args.import_from_wiki:
-            LOG.warning('Special phrases importation starting')
-            tokenizer = tokenizer_factory.get_tokenizer_for_db(args.config)
-            with connect(args.config.get_libpq_dsn()) as db_connection:
-                SpecialPhrasesImporter(
-                    args.config, args.phplib_dir, db_connection
-                ).import_from_wiki(tokenizer)
+            ImportSpecialPhrases.start_import(args, SPWikiLoader(args.config))
+
+        if args.import_from_csv:
+            if not Path(args.import_from_csv).is_file():
+                LOG.fatal("CSV file '%s' does not exist.", args.import_from_csv)
+                raise UsageError('Cannot access file.')
+
+            ImportSpecialPhrases.start_import(args, SPCsvLoader(args.import_from_csv))
+
         return 0
+
+    @staticmethod
+    def start_import(args, loader):
+        """
+            Create the SPImporter object containing the right
+            sp loader and then start the import of special phrases.
+        """
+        from ..tokenizer import factory as tokenizer_factory
+
+        tokenizer = tokenizer_factory.get_tokenizer_for_db(args.config)
+        should_replace = not args.no_replace
+        with connect(args.config.get_libpq_dsn()) as db_connection:
+            SPImporter(
+                args.config, args.phplib_dir, db_connection, loader
+            ).import_phrases(tokenizer, should_replace)
index e0c06293ff56e22b9d90938bb5af923b0e279bdb..069672d4a1fd4d9b874943b5d44a367d4f2ef9e8 100644 (file)
@@ -15,6 +15,7 @@ normalizer module is installed, when the tokenizer is created.
 """
 import logging
 import importlib
+from pathlib import Path
 
 from ..errors import UsageError
 from ..db import properties
@@ -25,12 +26,13 @@ LOG = logging.getLogger()
 def _import_tokenizer(name):
     """ Load the tokenizer.py module from project directory.
     """
-    try:
-        return importlib.import_module('nominatim.tokenizer.' + name + '_tokenizer')
-    except ModuleNotFoundError as exp:
+    src_file = Path(__file__).parent / (name + '_tokenizer.py')
+    if not src_file.is_file():
         LOG.fatal("No tokenizer named '%s' available. "
                   "Check the setting of NOMINATIM_TOKENIZER.", name)
-        raise UsageError('Tokenizer not found') from exp
+        raise UsageError('Tokenizer not found')
+
+    return importlib.import_module('nominatim.tokenizer.' + name + '_tokenizer')
 
 
 def create_tokenizer(config, init_db=True, module_name=None):
index 7205ddefab0c449ec33da6610fe98edb8cfb48ba..2bd22c7207cb3f3cbf2de920a0a5887d67afd04d 100644 (file)
@@ -335,7 +335,7 @@ class LegacyICUNameAnalyzer:
                                        'search_name_count'])
 
 
-    def update_special_phrases(self, phrases):
+    def update_special_phrases(self, phrases, should_replace):
         """ Replace the search index for special phrases with the new phrases.
         """
         norm_phrases = set(((self.normalize(p[0]), p[1], p[2], p[3])
@@ -374,7 +374,7 @@ class LegacyICUNameAnalyzer:
                               columns=['word', 'word_token', 'class', 'type',
                                        'operator', 'search_name_count'])
 
-            if to_delete:
+            if to_delete and should_replace:
                 psycopg2.extras.execute_values(
                     cur,
                     """ DELETE FROM word USING (VALUES %s) as v(name, in_class, in_type, op)
@@ -423,8 +423,7 @@ class LegacyICUNameAnalyzer:
         names = place.get('name')
 
         if names:
-            full_names = set((self.make_standard_word(name) for name in names.values()))
-            full_names.discard('')
+            full_names = self._compute_full_names(names)
 
             token_info.add_names(self.conn, full_names)
 
@@ -461,6 +460,25 @@ class LegacyICUNameAnalyzer:
         return token_info.data
 
 
+    def _compute_full_names(self, names):
+        """ Return the set of all full name word ids to be used with the
+            given dictionary of names.
+        """
+        full_names = set()
+        for name in (n for ns in names.values() for n in re.split('[;,]', ns)):
+            word = self.make_standard_word(name)
+            if word:
+                full_names.add(word)
+
+                brace_split = name.split('(', 2)
+                if len(brace_split) > 1:
+                    word = self.make_standard_word(brace_split[0])
+                    if word:
+                        full_names.add(word)
+
+        return full_names
+
+
     def _add_postcode(self, postcode):
         """ Make sure the normalized postcode is present in the word table.
         """
@@ -519,8 +537,6 @@ class _TokenInfo:
         """
         # Start with all partial names
         terms = set((part for ns in names for part in ns.split()))
-        # Add partials for the full terms (TO BE REMOVED)
-        terms.update((n for n in names))
         # Add the full names
         terms.update((' ' + n for n in names))
 
index 3808c68e069f3f00b7f76bb84847d7b43ccb4ba0..ec9100e9ba860fee8e5c8b9d9c6d114b90738e85 100644 (file)
@@ -352,7 +352,7 @@ class LegacyNameAnalyzer:
 
 
 
-    def update_special_phrases(self, phrases):
+    def update_special_phrases(self, phrases, should_replace):
         """ Replace the search index for special phrases with the new phrases.
         """
         norm_phrases = set(((self.normalize(p[0]), p[1], p[2], p[3])
@@ -381,7 +381,7 @@ class LegacyNameAnalyzer:
                            FROM (VALUES %s) as v(name, class, type, op))""",
                     to_add)
 
-            if to_delete:
+            if to_delete and should_replace:
                 psycopg2.extras.execute_values(
                     cur,
                     """ DELETE FROM word USING (VALUES %s) as v(name, in_class, in_type, op)
@@ -513,10 +513,9 @@ class _TokenInfo:
         """
         def _get_place(name):
             with conn.cursor() as cur:
-                cur.execute("""SELECT (addr_ids_from_name(%s)
-                                       || getorcreate_name_id(make_standard_name(%s), ''))::text,
+                cur.execute("""SELECT make_keywords(hstore('name' , %s))::text,
                                       word_ids_from_name(%s)::text""",
-                            (name, name, name))
+                            (name, name))
                 return cur.fetchone()
 
         self.data['place_search'], self.data['place_match'] = \
index cc5d3e9ba2890ac52993bac07cf57472e128a5d3..cab6fb8b89ce541f1054ad7887cd5aafcdd0d5c1 100644 (file)
@@ -2,5 +2,3 @@
 Module with functions for importing, updating Nominatim databases
 as well as general maintenance helpers.
 """
-
-from nominatim.tools.special_phrases.special_phrases_importer import SpecialPhrasesImporter
index 9b97bca628dc8bb729d6645568722b369e91d6ef..d0b3bbf81fc3300ae59a10000db981427a212833 100644 (file)
@@ -12,10 +12,9 @@ class SpecialPhrasesImporterStatistics():
         process of special phrases.
     """
     def __init__(self):
-        self._set_lang_values_to_0()
-        self._set_global_values_to_0()
+        self._intialize_values()
 
-    def _set_global_values_to_0(self):
+    def _intialize_values(self):
         """
             Set all counts for the global
             import to 0.
@@ -23,22 +22,14 @@ class SpecialPhrasesImporterStatistics():
         self.tables_created = 0
         self.tables_deleted = 0
         self.tables_ignored = 0
-        self.global_phrases_invalid = 0
-
-    def _set_lang_values_to_0(self):
-        """
-            Set all counts for the current
-            lang to 0.
-        """
-        self.lang_phrases_invalid = 0
+        self.invalids = 0
 
     def notify_one_phrase_invalid(self):
         """
             Add +1 to the count of invalid entries
             fetched from the wiki.
         """
-        self.lang_phrases_invalid += 1
-        self.global_phrases_invalid += 1
+        self.invalids += 1
 
     def notify_one_table_created(self):
         """
@@ -58,7 +49,6 @@ class SpecialPhrasesImporterStatistics():
         """
         self.tables_ignored += 1
 
-
     def notify_import_done(self):
         """
             Print stats for the whole import process
@@ -66,8 +56,8 @@ class SpecialPhrasesImporterStatistics():
         """
         LOG.info('====================================================================')
         LOG.info('Final statistics of the import:')
-        LOG.info('- %s phrases were invalid.', self.global_phrases_invalid)
-        if self.global_phrases_invalid > 0:
+        LOG.info('- %s phrases were invalid.', self.invalids)
+        if self.invalids > 0:
             LOG.info('  Those invalid phrases have been skipped.')
         LOG.info('- %s tables were ignored as they already exist on the database',
                  self.tables_ignored)
@@ -76,26 +66,8 @@ class SpecialPhrasesImporterStatistics():
         if self.tables_deleted > 0:
             LOG.info('  They were deleted as they are not valid anymore.')
 
-        if self.global_phrases_invalid > 0:
+        if self.invalids > 0:
             LOG.warning('%s phrases were invalid and have been skipped during the whole process.',
-                        self.global_phrases_invalid)
-
-        self._set_global_values_to_0()
-
-    def notify_current_lang_done(self, lang):
-        """
-            Print stats for the current lang
-            and then reset lang values.
-        """
-        LOG.info('====================================================================')
-        LOG.info('Statistics for the import of %s:', lang)
-        LOG.info('- %s phrases were invalid.', self.lang_phrases_invalid)
-        if self.lang_phrases_invalid > 0:
-            LOG.info('  Those invalid phrases have been skipped.')
-        LOG.info('====================================================================')
-
-        if self.lang_phrases_invalid > 0:
-            LOG.warning('%s phrases were invalid and have been skipped for the import of lang %s.',
-                        self.lang_phrases_invalid, lang)
+                        self.invalids)
 
-        self._set_lang_values_to_0()
+        self._intialize_values()
diff --git a/nominatim/tools/special_phrases/sp_csv_loader.py b/nominatim/tools/special_phrases/sp_csv_loader.py
new file mode 100644 (file)
index 0000000..b7b24a7
--- /dev/null
@@ -0,0 +1,51 @@
+"""
+    Module containing the SPCsvLoader class.
+
+    The class allows to load phrases from a csv file.
+"""
+import csv
+import os
+from collections.abc import Iterator
+from nominatim.tools.special_phrases.special_phrase import SpecialPhrase
+from nominatim.errors import UsageError
+
+class SPCsvLoader(Iterator):
+    """
+        Handles loading of special phrases from external csv file.
+    """
+    def __init__(self, csv_path):
+        super().__init__()
+        self.csv_path = csv_path
+        self.has_been_read = False
+
+    def __next__(self):
+        if self.has_been_read:
+            raise StopIteration()
+
+        self.has_been_read = True
+        self.check_csv_validity()
+        return self.parse_csv()
+
+    def parse_csv(self):
+        """
+            Open and parse the given csv file.
+            Create the corresponding SpecialPhrases.
+        """
+        phrases = set()
+
+        with open(self.csv_path) as file:
+            reader = csv.DictReader(file, delimiter=',')
+            for row in reader:
+                phrases.add(
+                    SpecialPhrase(row['phrase'], row['class'], row['type'], row['operator'])
+                )
+        return phrases
+
+    def check_csv_validity(self):
+        """
+            Check that the csv file has the right extension.
+        """
+        _, extension = os.path.splitext(self.csv_path)
+
+        if extension != '.csv':
+            raise UsageError('The file {} is not a csv file.'.format(self.csv_path))
similarity index 66%
rename from nominatim/tools/special_phrases/special_phrases_importer.py
rename to nominatim/tools/special_phrases/sp_importer.py
index 9649f94b1a736b0d561d489d1b9e67ef8546d28b..48764518e6e267088834ff46f68b1222cfbc739d 100644 (file)
@@ -1,5 +1,11 @@
 """
-    Functions to import special phrases into the database.
+    Module containing the class handling the import
+    of the special phrases.
+
+    Phrases are analyzed and imported into the database.
+
+    The phrases already present in the database which are no
+    longer valid are removed.
 """
 import logging
 import os
@@ -10,27 +16,24 @@ import subprocess
 import json
 
 from psycopg2.sql import Identifier, Literal, SQL
-
-from nominatim.tools.exec_utils import get_url
 from nominatim.errors import UsageError
 from nominatim.tools.special_phrases.importer_statistics import SpecialPhrasesImporterStatistics
 
 LOG = logging.getLogger()
-class SpecialPhrasesImporter():
+class SPImporter():
     # pylint: disable-msg=too-many-instance-attributes
     """
-        Class handling the process of special phrases importations.
+        Class handling the process of special phrases importation into the database.
+
+        Takes an SP loader which loads the phrases from an external source.
     """
-    def __init__(self, config, phplib_dir, db_connection) -> None:
-        self.statistics_handler = SpecialPhrasesImporterStatistics()
-        self.db_connection = db_connection
+    def __init__(self, config, phplib_dir, db_connection, sp_loader) -> None:
         self.config = config
         self.phplib_dir = phplib_dir
+        self.db_connection = db_connection
+        self.sp_loader = sp_loader
+        self.statistics_handler = SpecialPhrasesImporterStatistics()
         self.black_list, self.white_list = self._load_white_and_black_lists()
-        #Compile the regex here to increase performances.
-        self.occurence_pattern = re.compile(
-            r'\| *([^\|]+) *\|\| *([^\|]+) *\|\| *([^\|]+) *\|\| *([^\|]+) *\|\| *([\-YN])'
-        )
         self.sanity_check_pattern = re.compile(r'^\w+$')
         # This set will contain all existing phrases to be added.
         # It contains tuples with the following format: (lable, class, type, operator)
@@ -39,34 +42,34 @@ class SpecialPhrasesImporter():
         #special phrases class/type on the wiki.
         self.table_phrases_to_delete = set()
 
-    def import_from_wiki(self, tokenizer, languages=None):
-        """
-            Iterate through all specified languages and
-            extract corresponding special phrases from the wiki.
+    def import_phrases(self, tokenizer, should_replace):
         """
-        if languages is not None and not isinstance(languages, list):
-            raise TypeError('The \'languages\' argument should be of type list.')
+            Iterate through all SpecialPhrases extracted from the
+            loader and import them into the database.
 
+            If should_replace is set to True only the loaded phrases
+            will be kept into the database. All other phrases already
+            in the database will be removed.
+        """
+        LOG.warning('Special phrases importation starting')
         self._fetch_existing_place_classtype_tables()
 
-        #Get all languages to process.
-        languages = self._load_languages() if not languages else languages
-
         #Store pairs of class/type for further processing
         class_type_pairs = set()
 
-        for lang in languages:
-            LOG.warning('Importing phrases for lang: %s...', lang)
-            wiki_page_xml_content = SpecialPhrasesImporter._get_wiki_content(lang)
-            class_type_pairs.update(self._process_xml_content(wiki_page_xml_content, lang))
-            self.statistics_handler.notify_current_lang_done(lang)
+        for loaded_phrases in self.sp_loader:
+            for phrase in loaded_phrases:
+                result = self._process_phrase(phrase)
+                if result:
+                    class_type_pairs.add(result)
 
         self._create_place_classtype_table_and_indexes(class_type_pairs)
-        self._remove_non_existent_tables_from_db()
+        if should_replace:
+            self._remove_non_existent_tables_from_db()
         self.db_connection.commit()
 
         with tokenizer.name_analyzer() as analyzer:
-            analyzer.update_special_phrases(self.word_phrases)
+            analyzer.update_special_phrases(self.word_phrases, should_replace)
 
         LOG.warning('Import done.')
         self.statistics_handler.notify_import_done()
@@ -101,89 +104,46 @@ class SpecialPhrasesImporter():
             settings = json.load(json_settings)
         return settings['blackList'], settings['whiteList']
 
-    def _load_languages(self):
-        """
-            Get list of all languages from env config file
-            or default if there is no languages configured.
-            The system will extract special phrases only from all specified languages.
-        """
-        default_languages = [
-            'af', 'ar', 'br', 'ca', 'cs', 'de', 'en', 'es',
-            'et', 'eu', 'fa', 'fi', 'fr', 'gl', 'hr', 'hu',
-            'ia', 'is', 'it', 'ja', 'mk', 'nl', 'no', 'pl',
-            'ps', 'pt', 'ru', 'sk', 'sl', 'sv', 'uk', 'vi']
-        return self.config.LANGUAGES.split(',') if self.config.LANGUAGES else default_languages
-
-    @staticmethod
-    def _get_wiki_content(lang):
-        """
-            Request and return the wiki page's content
-            corresponding to special phrases for a given lang.
-            Requested URL Example :
-                https://wiki.openstreetmap.org/wiki/Special:Export/Nominatim/Special_Phrases/EN
-        """
-        url = 'https://wiki.openstreetmap.org/wiki/Special:Export/Nominatim/Special_Phrases/' + lang.upper() # pylint: disable=line-too-long
-        return get_url(url)
-
-    def _check_sanity(self, lang, phrase_class, phrase_type):
+    def _check_sanity(self, phrase):
         """
             Check sanity of given inputs in case somebody added garbage in the wiki.
             If a bad class/type is detected the system will exit with an error.
         """
-        type_matchs = self.sanity_check_pattern.findall(phrase_type)
-        class_matchs = self.sanity_check_pattern.findall(phrase_class)
+        class_matchs = self.sanity_check_pattern.findall(phrase.p_class)
+        type_matchs = self.sanity_check_pattern.findall(phrase.p_type)
 
         if not class_matchs or not type_matchs:
-            LOG.warning("Bad class/type for language %s: %s=%s. It will not be imported",
-                        lang, phrase_class, phrase_type)
+            LOG.warning("Bad class/type: %s=%s. It will not be imported",
+                        phrase.p_class, phrase.p_type)
             return False
         return True
 
-    def _process_xml_content(self, xml_content, lang):
+    def _process_phrase(self, phrase):
         """
-            Process given xml content by extracting matching patterns.
-            Matching patterns are processed there and returned in a
-            set of class/type pairs.
+            Processes the given phrase by checking black and white list
+            and sanity.
+            Return the class/type pair corresponding to the phrase.
         """
-        #One match will be of format [label, class, type, operator, plural]
-        matches = self.occurence_pattern.findall(xml_content)
-        #Store pairs of class/type for further processing
-        class_type_pairs = set()
 
-        for match in matches:
-            phrase_label = match[0].strip()
-            phrase_class = match[1].strip()
-            phrase_type = match[2].strip()
-            phrase_operator = match[3].strip()
-            #Needed if some operator in the wiki are not written in english
-            phrase_operator = '-' if phrase_operator not in ('near', 'in') else phrase_operator
-            #hack around a bug where building=yes was imported with quotes into the wiki
-            phrase_type = re.sub(r'\"|&quot;', '', phrase_type)
-
-            #blacklisting: disallow certain class/type combinations
-            if (
-                    phrase_class in self.black_list.keys() and
-                    phrase_type in self.black_list[phrase_class]
-            ):
-                continue
-            #whitelisting: if class is in whitelist, allow only tags in the list
-            if (
-                    phrase_class in self.white_list.keys() and
-                    phrase_type not in self.white_list[phrase_class]
-            ):
-                continue
+        #blacklisting: disallow certain class/type combinations
+        if phrase.p_class in self.black_list.keys() \
+           and phrase.p_type in self.black_list[phrase.p_class]:
+            return None
 
-            #sanity check, in case somebody added garbage in the wiki
-            if not self._check_sanity(lang, phrase_class, phrase_type):
-                self.statistics_handler.notify_one_phrase_invalid()
-                continue
+        #whitelisting: if class is in whitelist, allow only tags in the list
+        if phrase.p_class in self.white_list.keys() \
+           and phrase.p_type not in self.white_list[phrase.p_class]:
+            return None
 
-            class_type_pairs.add((phrase_class, phrase_type))
+        #sanity check, in case somebody added garbage in the wiki
+        if not self._check_sanity(phrase):
+            self.statistics_handler.notify_one_phrase_invalid()
+            return None
 
-            self.word_phrases.add((phrase_label, phrase_class,
-                                   phrase_type, phrase_operator))
+        self.word_phrases.add((phrase.p_label, phrase.p_class,
+                               phrase.p_type, phrase.p_operator))
 
-        return class_type_pairs
+        return (phrase.p_class, phrase.p_type)
 
 
     def _create_place_classtype_table_and_indexes(self, class_type_pairs):
diff --git a/nominatim/tools/special_phrases/sp_wiki_loader.py b/nominatim/tools/special_phrases/sp_wiki_loader.py
new file mode 100644 (file)
index 0000000..914e153
--- /dev/null
@@ -0,0 +1,69 @@
+"""
+    Module containing the SPWikiLoader class.
+"""
+import re
+import logging
+from collections.abc import Iterator
+from nominatim.tools.special_phrases.special_phrase import SpecialPhrase
+from nominatim.tools.exec_utils import get_url
+
+LOG = logging.getLogger()
+class SPWikiLoader(Iterator):
+    """
+        Handles loading of special phrases from the wiki.
+    """
+    def __init__(self, config, languages=None):
+        super().__init__()
+        self.config = config
+        #Compile the regex here once to improve performance.
+        self.occurence_pattern = re.compile(
+            r'\| *([^\|]+) *\|\| *([^\|]+) *\|\| *([^\|]+) *\|\| *([^\|]+) *\|\| *([\-YN])'
+        )
+        self.languages = self._load_languages() if not languages else list(languages)
+
+    def __next__(self):
+        if not self.languages:
+            raise StopIteration
+
+        lang = self.languages.pop(0)
+        loaded_xml = self._get_wiki_content(lang)
+        LOG.warning('Importing phrases for lang: %s...', lang)
+        return self.parse_xml(loaded_xml)
+
+    def parse_xml(self, xml):
+        """
+            Parses XML content and extracts special phrases from it.
+            Returns a set of SpecialPhrase.
+        """
+        #One match will be of format [label, class, type, operator, plural]
+        matches = self.occurence_pattern.findall(xml)
+        returned_phrases = set()
+        for match in matches:
+            returned_phrases.add(
+                SpecialPhrase(match[0], match[1], match[2], match[3])
+            )
+        return returned_phrases
+
+    def _load_languages(self):
+        """
+            Get the list of all languages from the env config file,
+            or the default list if no languages are configured.
+            Special phrases will only be extracted for the specified languages.
+        """
+        default_languages = [
+            'af', 'ar', 'br', 'ca', 'cs', 'de', 'en', 'es',
+            'et', 'eu', 'fa', 'fi', 'fr', 'gl', 'hr', 'hu',
+            'ia', 'is', 'it', 'ja', 'mk', 'nl', 'no', 'pl',
+            'ps', 'pt', 'ru', 'sk', 'sl', 'sv', 'uk', 'vi']
+        return self.config.LANGUAGES.split(',') if self.config.LANGUAGES else default_languages
+
+    @staticmethod
+    def _get_wiki_content(lang):
+        """
+            Request and return the wiki page's content
+            corresponding to special phrases for a given lang.
+            Requested URL example:
+                https://wiki.openstreetmap.org/wiki/Special:Export/Nominatim/Special_Phrases/EN
+        """
+        url = 'https://wiki.openstreetmap.org/wiki/Special:Export/Nominatim/Special_Phrases/' + lang.upper() # pylint: disable=line-too-long
+        return get_url(url)
diff --git a/nominatim/tools/special_phrases/special_phrase.py b/nominatim/tools/special_phrases/special_phrase.py
new file mode 100644 (file)
index 0000000..448fbee
--- /dev/null
@@ -0,0 +1,19 @@
+"""
+    Module containing the class SpecialPhrase.
+
+    This class is a model used to transfer a special phrase through
+    the loading and import process.
+"""
+import re
+
+class SpecialPhrase():
+    """
+        Model representing a special phrase.
+    """
+    def __init__(self, p_label, p_class, p_type, p_operator):
+        self.p_label = p_label.strip()
+        self.p_class = p_class.strip()
+        #Hack around a bug where building=yes was imported with quotes into the wiki
+        self.p_type = re.sub(r'\"|&quot;', '', p_type.strip())
+        #Needed because some operators in the wiki are not written in English.
+        self.p_operator = '-' if p_operator not in ('near', 'in') else p_operator
index 613b974d9fecbafd872aad1c2f756f7be99683ae..b8afdf9b2b9eb3f240f68468a1efc8e6a3d63dca 100644 (file)
@@ -4,8 +4,8 @@ no-test-db: bdd-no-test-db php
 bdd:
        cd bdd && behave -DREMOVE_TEMPLATE=1
 
-bdd-no-test-db:
-       cd bdd && behave -DREMOVE_TEMPLATE=1 db osm2pgsql
+icu:
+       cd bdd && behave -DREMOVE_TEMPLATE=1 -DTOKENIZER=legacy_icu
 
 php:
        cd php && phpunit ./
index fd207059408fc96092b537149c0f83e807a5260c..9e899053180b6cf025bd33940a493d846ffbbfa5 100644 (file)
@@ -2,6 +2,29 @@
 Feature: Creation of search terms
     Tests that search_name table is filled correctly
 
+    Scenario Outline: Comma- and semicolon separated names appear as full names
+        Given the places
+         | osm | class   | type | name+alt_name |
+         | N1  | place   | city | New York<sep>Big Apple |
+        When importing
+        Then search_name contains
+         | object | name_vector |
+         | N1     | #New York, #Big Apple |
+
+    Examples:
+         | sep |
+         | ,   |
+         | ;   |
+
+    Scenario Outline: Name parts before brackets appear as full names
+        Given the places
+         | osm | class   | type | name+name |
+         | N1  | place   | city | Halle (Saale) |
+        When importing
+        Then search_name contains
+         | object | name_vector |
+         | N1     | #Halle Saale, #Halle |
+
     Scenario: Unnamed POIs have no search entry
         Given the scene roads-with-pois
         And the places
@@ -49,7 +72,7 @@ Feature: Creation of search terms
         When importing
         Then search_name contains
          | object | nameaddress_vector |
-         | N1     | Rose Street, Little, Big, Town |
+         | N1     | #Rose Street, rose, Little, Big, Town |
         When searching for "23 Rose Street, Little Big Town"
         Then results contain
          | osm_type | osm_id | name |
index 7eb6f3dd474232d127defa82aa1993fa7c4f6a81..1deb43f38a2f4fae6fb8820226903701b0ea55bd 100644 (file)
@@ -198,19 +198,15 @@ class NominatimEnvironment:
 
                 try:
                     self.run_nominatim('import', '--osm-file', str(self.api_test_file))
-                    if self.tokenizer != 'legacy_icu':
-                        self.run_nominatim('add-data', '--tiger-data', str((testdata / 'tiger').resolve()))
+                    self.run_nominatim('add-data', '--tiger-data', str((testdata / 'tiger').resolve()))
                     self.run_nominatim('freeze')
 
                     if self.tokenizer != 'legacy_icu':
                         phrase_file = str((testdata / 'specialphrases_testdb.sql').resolve())
                         run_script(['psql', '-d', self.api_test_db, '-f', phrase_file])
                     else:
-                        # XXX Temporary use the wiki while there is no CSV import
-                        # available.
-                        self.test_env['NOMINATIM_LANGUAGES'] = 'en'
-                        self.run_nominatim('special-phrases', '--import-from-wiki')
-                        del self.test_env['NOMINATIM_LANGUAGES']
+                        csv_path = str((testdata / 'full_en_phrases_test.csv').resolve())
+                        self.run_nominatim('special-phrases', '--import-from-csv', csv_path)
                 except:
                     self.db_drop_database(self.api_test_db)
                     raise
index 923e6876a9beaced058600e303c8fa5e874ecafb..9a43a67e041393f9389d3918d5b0d2106dca05ab 100644 (file)
@@ -1,12 +1,9 @@
-import importlib
 import itertools
 import sys
 from pathlib import Path
 
 import psycopg2
-import psycopg2.extras
 import pytest
-import tempfile
 
 SRC_DIR = Path(__file__) / '..' / '..' / '..'
 
@@ -16,42 +13,12 @@ sys.path.insert(0, str(SRC_DIR.resolve()))
 from nominatim.config import Configuration
 from nominatim.db import connection
 from nominatim.db.sql_preprocessor import SQLPreprocessor
-from nominatim.db import properties
+import nominatim.tokenizer.factory
+import nominatim.cli
 
 import dummy_tokenizer
 import mocks
-
-class _TestingCursor(psycopg2.extras.DictCursor):
-    """ Extension to the DictCursor class that provides execution
-        short-cuts that simplify writing assertions.
-    """
-
-    def scalar(self, sql, params=None):
-        """ Execute a query with a single return value and return this value.
-            Raises an assertion when not exactly one row is returned.
-        """
-        self.execute(sql, params)
-        assert self.rowcount == 1
-        return self.fetchone()[0]
-
-    def row_set(self, sql, params=None):
-        """ Execute a query and return the result as a set of tuples.
-        """
-        self.execute(sql, params)
-
-        return set((tuple(row) for row in self))
-
-    def table_exists(self, table):
-        """ Check that a table with the given name exists in the database.
-        """
-        num = self.scalar("""SELECT count(*) FROM pg_tables
-                             WHERE tablename = %s""", (table, ))
-        return num == 1
-
-    def table_rows(self, table):
-        """ Return the number of rows in the given table.
-        """
-        return self.scalar('SELECT count(*) FROM ' + table)
+from cursor import CursorForTesting
 
 
 @pytest.fixture
@@ -69,7 +36,7 @@ def temp_db(monkeypatch):
 
     conn.close()
 
-    monkeypatch.setenv('NOMINATIM_DATABASE_DSN' , 'dbname=' + name)
+    monkeypatch.setenv('NOMINATIM_DATABASE_DSN', 'dbname=' + name)
 
     yield name
 
@@ -112,18 +79,20 @@ def temp_db_cursor(temp_db):
     """
     conn = psycopg2.connect('dbname=' + temp_db)
     conn.set_isolation_level(0)
-    with conn.cursor(cursor_factory=_TestingCursor) as cur:
+    with conn.cursor(cursor_factory=CursorForTesting) as cur:
         yield cur
     conn.close()
 
 
 @pytest.fixture
 def table_factory(temp_db_cursor):
+    """ A fixture that creates new SQL tables, potentially filled with
+        content.
+    """
     def mk_table(name, definition='id INT', content=None):
         temp_db_cursor.execute('CREATE TABLE {} ({})'.format(name, definition))
         if content is not None:
-            psycopg2.extras.execute_values(
-                temp_db_cursor, "INSERT INTO {} VALUES %s".format(name), content)
+            temp_db_cursor.execute_values("INSERT INTO {} VALUES %s".format(name), content)
 
     return mk_table
 
@@ -137,60 +106,66 @@ def def_config():
                     data=SRC_DIR / 'data')
     return cfg
 
+
 @pytest.fixture
 def src_dir():
     return SRC_DIR.resolve()
 
+
 @pytest.fixture
-def tmp_phplib_dir():
-    with tempfile.TemporaryDirectory() as phpdir:
-        (Path(phpdir) / 'admin').mkdir()
+def cli_call():
+    def _call_nominatim(*args):
+        return nominatim.cli.nominatim(module_dir='MODULE NOT AVAILABLE',
+                                       osm2pgsql_path='OSM2PGSQL NOT AVAILABLE',
+                                       phplib_dir=str(SRC_DIR / 'lib-php'),
+                                       data_dir=str(SRC_DIR / 'data'),
+                                       phpcgi_path='/usr/bin/php-cgi',
+                                       sqllib_dir=str(SRC_DIR / 'lib-sql'),
+                                       config_dir=str(SRC_DIR / 'settings'),
+                                       cli_args=args)
 
-        yield Path(phpdir)
+    return _call_nominatim
 
 
 @pytest.fixture
-def property_table(table_factory):
+def property_table(table_factory, temp_db_conn):
     table_factory('nominatim_properties', 'property TEXT, value TEXT')
 
+    return mocks.MockPropertyTable(temp_db_conn)
+
+
 @pytest.fixture
-def status_table(temp_db_conn):
+def status_table(table_factory):
     """ Create an empty version of the status table and
         the status logging table.
     """
-    with temp_db_conn.cursor() as cur:
-        cur.execute("""CREATE TABLE import_status (
-                           lastimportdate timestamp with time zone NOT NULL,
-                           sequence_id integer,
-                           indexed boolean
-                       )""")
-        cur.execute("""CREATE TABLE import_osmosis_log (
-                           batchend timestamp,
-                           batchseq integer,
-                           batchsize bigint,
-                           starttime timestamp,
-                           endtime timestamp,
-                           event text
-                           )""")
-    temp_db_conn.commit()
+    table_factory('import_status',
+                  """lastimportdate timestamp with time zone NOT NULL,
+                     sequence_id integer,
+                     indexed boolean""")
+    table_factory('import_osmosis_log',
+                  """batchend timestamp,
+                     batchseq integer,
+                     batchsize bigint,
+                     starttime timestamp,
+                     endtime timestamp,
+                     event text""")
 
 
 @pytest.fixture
-def place_table(temp_db_with_extensions, temp_db_conn):
+def place_table(temp_db_with_extensions, table_factory):
     """ Create an empty version of the place table.
     """
-    with temp_db_conn.cursor() as cur:
-        cur.execute("""CREATE TABLE place (
-                           osm_id int8 NOT NULL,
-                           osm_type char(1) NOT NULL,
-                           class text NOT NULL,
-                           type text NOT NULL,
-                           name hstore,
-                           admin_level smallint,
-                           address hstore,
-                           extratags hstore,
-                           geometry Geometry(Geometry,4326) NOT NULL)""")
-    temp_db_conn.commit()
+    table_factory('place',
+                  """osm_id int8 NOT NULL,
+                     osm_type char(1) NOT NULL,
+                     class text NOT NULL,
+                     type text NOT NULL,
+                     name hstore,
+                     admin_level smallint,
+                     address hstore,
+                     extratags hstore,
+                     geometry Geometry(Geometry,4326) NOT NULL""")
 
 
 @pytest.fixture
@@ -216,24 +191,22 @@ def placex_table(temp_db_with_extensions, temp_db_conn):
 
 
 @pytest.fixture
-def osmline_table(temp_db_with_extensions, temp_db_conn):
-    with temp_db_conn.cursor() as cur:
-        cur.execute("""CREATE TABLE location_property_osmline (
-                           place_id BIGINT,
-                           osm_id BIGINT,
-                           parent_place_id BIGINT,
-                           geometry_sector INTEGER,
-                           indexed_date TIMESTAMP,
-                           startnumber INTEGER,
-                           endnumber INTEGER,
-                           partition SMALLINT,
-                           indexed_status SMALLINT,
-                           linegeo GEOMETRY,
-                           interpolationtype TEXT,
-                           address HSTORE,
-                           postcode TEXT,
-                           country_code VARCHAR(2))""")
-    temp_db_conn.commit()
+def osmline_table(temp_db_with_extensions, table_factory):
+    table_factory('location_property_osmline',
+                  """place_id BIGINT,
+                     osm_id BIGINT,
+                     parent_place_id BIGINT,
+                     geometry_sector INTEGER,
+                     indexed_date TIMESTAMP,
+                     startnumber INTEGER,
+                     endnumber INTEGER,
+                     partition SMALLINT,
+                     indexed_status SMALLINT,
+                     linegeo GEOMETRY,
+                     interpolationtype TEXT,
+                     address HSTORE,
+                     postcode TEXT,
+                     country_code VARCHAR(2)""")
 
 
 @pytest.fixture
@@ -253,7 +226,7 @@ def osm2pgsql_options(temp_db):
                                  main_data='', main_index=''))
 
 @pytest.fixture
-def sql_preprocessor(temp_db_conn, tmp_path, monkeypatch, table_factory):
+def sql_preprocessor(temp_db_conn, tmp_path, table_factory):
     table_factory('country_name', 'partition INT', ((0, ), (1, ), (2, )))
     cfg = Configuration(None, SRC_DIR.resolve() / 'settings')
     cfg.set_libdirs(module='.', osm2pgsql='.', php=SRC_DIR / 'lib-php',
@@ -263,18 +236,18 @@ def sql_preprocessor(temp_db_conn, tmp_path, monkeypatch, table_factory):
 
 
 @pytest.fixture
-def tokenizer_mock(monkeypatch, property_table, temp_db_conn, tmp_path):
+def tokenizer_mock(monkeypatch, property_table):
     """ Sets up the configuration so that the test dummy tokenizer will be
         loaded when the tokenizer factory is used. Also returns a factory
         with which a new dummy tokenizer may be created.
     """
     monkeypatch.setenv('NOMINATIM_TOKENIZER', 'dummy')
 
-    def _import_dummy(module, *args, **kwargs):
+    def _import_dummy(*args, **kwargs):
         return dummy_tokenizer
 
-    monkeypatch.setattr(importlib, "import_module", _import_dummy)
-    properties.set_property(temp_db_conn, 'tokenizer', 'dummy')
+    monkeypatch.setattr(nominatim.tokenizer.factory, "_import_tokenizer", _import_dummy)
+    property_table.set('tokenizer', 'dummy')
 
     def _create_tokenizer():
         return dummy_tokenizer.DummyTokenizer(None, None)
diff --git a/test/python/cursor.py b/test/python/cursor.py
new file mode 100644 (file)
index 0000000..4606902
--- /dev/null
@@ -0,0 +1,52 @@
+"""
+Specialised psycopg2 cursor with shortcut functions useful for testing.
+"""
+import psycopg2.extras
+
+class CursorForTesting(psycopg2.extras.DictCursor):
+    """ Extension to the DictCursor class that provides execution
+        short-cuts that simplify writing assertions.
+    """
+
+    def scalar(self, sql, params=None):
+        """ Execute a query with a single return value and return this value.
+            Raises an assertion when not exactly one row is returned.
+        """
+        self.execute(sql, params)
+        assert self.rowcount == 1
+        return self.fetchone()[0]
+
+
+    def row_set(self, sql, params=None):
+        """ Execute a query and return the result as a set of tuples.
+            Fails when the SQL command returns duplicate rows.
+        """
+        self.execute(sql, params)
+
+        result = set((tuple(row) for row in self))
+        assert len(result) == self.rowcount
+
+        return result
+
+
+    def table_exists(self, table):
+        """ Check that a table with the given name exists in the database.
+        """
+        num = self.scalar("""SELECT count(*) FROM pg_tables
+                             WHERE tablename = %s""", (table, ))
+        return num == 1
+
+
+    def table_rows(self, table, where=None):
+        """ Return the number of rows in the given table.
+        """
+        if where is None:
+            return self.scalar('SELECT count(*) FROM ' + table)
+
+        return self.scalar('SELECT count(*) FROM {} WHERE {}'.format(table, where))
+
+
+    def execute_values(self, *args, **kwargs):
+        """ Execute the execute_values() function on the cursor.
+        """
+        psycopg2.extras.execute_values(self, *args, **kwargs)
index 0a86ba8d1598752a00af622e3466f4deede31c0e..69202bc322ffd88e103f60f8ced809bfa8e82fd3 100644 (file)
@@ -17,16 +17,17 @@ class DummyTokenizer:
 
 
     def init_new_db(self, *args, **kwargs):
-        assert self.init_state == None
+        assert self.init_state is None
         self.init_state = "new"
 
 
     def init_from_project(self):
-        assert self.init_state == None
+        assert self.init_state is None
         self.init_state = "loaded"
 
 
-    def finalize_import(self, _):
+    @staticmethod
+    def finalize_import(_):
         pass
 
 
@@ -51,17 +52,20 @@ class DummyNameAnalyzer:
     def close(self):
         pass
 
-    def normalize_postcode(self, postcode):
+    @staticmethod
+    def normalize_postcode(postcode):
         return postcode
 
-    def update_postcodes_from_db(self):
+    @staticmethod
+    def update_postcodes_from_db():
         pass
 
-    def update_special_phrases(self, phrases):
+    def update_special_phrases(self, phrases, should_replace):
         self.analyser_cache['special_phrases'] = phrases
 
     def add_country_names(self, code, names):
         self.analyser_cache['countries'].append((code, names))
 
-    def process_place(self, place):
+    @staticmethod
+    def process_place(place):
         return {}
index 53d84aace63b2783ac87c63f2be2bda389324696..d86f0196810ceb662670ac80cccce327f885e394 100644 (file)
@@ -5,6 +5,8 @@ import itertools
 
 import psycopg2.extras
 
+from nominatim.db import properties
+
 class MockParamCapture:
     """ Mock that records the parameters with which a function was called
         as well as the number of calls.
@@ -12,6 +14,8 @@ class MockParamCapture:
     def __init__(self, retval=0):
         self.called = 0
         self.return_value = retval
+        self.last_args = None
+        self.last_kwargs = None
 
     def __call__(self, *args, **kwargs):
         self.called += 1
@@ -37,11 +41,11 @@ class MockWordTable:
 
         conn.commit()
 
-    def add_special(self, word_token, word, cls, typ, op):
+    def add_special(self, word_token, word, cls, typ, oper):
         with self.conn.cursor() as cur:
             cur.execute("""INSERT INTO word (word_token, word, class, type, operator)
                               VALUES (%s, %s, %s, %s, %s)
-                        """, (word_token, word, cls, typ, op))
+                        """, (word_token, word, cls, typ, oper))
         self.conn.commit()
 
 
@@ -125,3 +129,16 @@ class MockPlacexTable:
                          admin_level, address, extratags, 'SRID=4326;' + geom,
                          country))
         self.conn.commit()
+
+
+class MockPropertyTable:
+    """ A property table for testing.
+    """
+    def __init__(self, conn):
+        self.conn = conn
+
+
+    def set(self, name, value):
+        """ Set a property in the table to the given value.
+        """
+        properties.set_property(self.conn, name, value)
index e8d2e0524308b20f275bfb17d40bcbfbc08df5e2..1d775b1fd6d395ea67be61898187d03c194f0f16 100644 (file)
@@ -5,8 +5,6 @@ These tests just check that the various command line parameters route to the
 correct functionionality. They use a lot of monkeypatching to avoid executing
 the actual functions.
 """
-from pathlib import Path
-
 import pytest
 
 import nominatim.db.properties
@@ -26,19 +24,6 @@ import nominatim.tokenizer.factory
 
 from mocks import MockParamCapture
 
-SRC_DIR = (Path(__file__) / '..' / '..' / '..').resolve()
-
-def call_nominatim(*args):
-    return nominatim.cli.nominatim(module_dir='build/module',
-                                   osm2pgsql_path='build/osm2pgsql/osm2pgsql',
-                                   phplib_dir=str(SRC_DIR / 'lib-php'),
-                                   data_dir=str(SRC_DIR / 'data'),
-                                   phpcgi_path='/usr/bin/php-cgi',
-                                   sqllib_dir=str(SRC_DIR / 'lib-sql'),
-                                   config_dir=str(SRC_DIR / 'settings'),
-                                   cli_args=args)
-
-
 @pytest.fixture
 def mock_run_legacy(monkeypatch):
     mock = MockParamCapture()
@@ -57,273 +42,307 @@ def mock_func_factory(monkeypatch):
     return get_mock
 
 
-@pytest.fixture
-def tokenizer_mock(monkeypatch):
-    class DummyTokenizer:
-        def __init__(self, *args, **kwargs):
-            self.update_sql_functions_called = False
-            self.finalize_import_called = False
 
-        def update_sql_functions(self, *args):
-            self.update_sql_functions_called = True
+class TestCli:
+
+    @pytest.fixture(autouse=True)
+    def setup_cli_call(self, cli_call):
+        self.call_nominatim = cli_call
+
+
+    def test_cli_help(self, capsys):
+        """ Running nominatim tool without arguments prints help.
+        """
+        assert self.call_nominatim() == 1
+
+        captured = capsys.readouterr()
+        assert captured.out.startswith('usage:')
+
+
+    @pytest.mark.parametrize("command,script", [
+                             (('add-data', '--file', 'foo.osm'), 'update'),
+                             (('export',), 'export')
+                             ])
+    def test_legacy_commands_simple(self, mock_run_legacy, command, script):
+        assert self.call_nominatim(*command) == 0
+
+        assert mock_run_legacy.called == 1
+        assert mock_run_legacy.last_args[0] == script + '.php'
+
+
+    @pytest.mark.parametrize("params", [('--warm', ),
+                                        ('--warm', '--reverse-only'),
+                                        ('--warm', '--search-only')])
+    def test_admin_command_legacy(self, mock_func_factory, params):
+        mock_run_legacy = mock_func_factory(nominatim.clicmd.admin, 'run_legacy_script')
+
+        assert self.call_nominatim('admin', *params) == 0
+
+        assert mock_run_legacy.called == 1
+
+
+    def test_admin_command_check_database(self, mock_func_factory):
+        mock = mock_func_factory(nominatim.tools.check_database, 'check_database')
 
-        def finalize_import(self, *args):
-            self.finalize_import_called = True
+        assert self.call_nominatim('admin', '--check-database') == 0
+        assert mock.called == 1
 
-    tok = DummyTokenizer()
-    monkeypatch.setattr(nominatim.tokenizer.factory, 'get_tokenizer_for_db' ,
-                        lambda *args: tok)
-    monkeypatch.setattr(nominatim.tokenizer.factory, 'create_tokenizer' ,
-                        lambda *args: tok)
 
-    return tok
+    @pytest.mark.parametrize("name,oid", [('file', 'foo.osm'), ('diff', 'foo.osc'),
+                                          ('node', 12), ('way', 8), ('relation', 32)])
+    def test_add_data_command(self, mock_run_legacy, name, oid):
+        assert self.call_nominatim('add-data', '--' + name, str(oid)) == 0
 
+        assert mock_run_legacy.called == 1
+        assert mock_run_legacy.last_args == ('update.php', '--import-' + name, oid)
 
-def test_cli_help(capsys):
-    """ Running nominatim tool without arguments prints help.
-    """
-    assert 1 == call_nominatim()
 
-    captured = capsys.readouterr()
-    assert captured.out.startswith('usage:')
+    def test_serve_command(self, mock_func_factory):
+        func = mock_func_factory(nominatim.cli, 'run_php_server')
 
+        self.call_nominatim('serve')
 
-@pytest.mark.parametrize("command,script", [
-                         (('add-data', '--file', 'foo.osm'), 'update'),
-                         (('export',), 'export')
-                         ])
-def test_legacy_commands_simple(mock_run_legacy, command, script):
-    assert 0 == call_nominatim(*command)
+        assert func.called == 1
 
-    assert mock_run_legacy.called == 1
-    assert mock_run_legacy.last_args[0] == script + '.php'
 
+    @pytest.mark.parametrize("params", [('search', '--query', 'new'),
+                                        ('reverse', '--lat', '0', '--lon', '0'),
+                                        ('lookup', '--id', 'N1'),
+                                        ('details', '--node', '1'),
+                                        ('details', '--way', '1'),
+                                        ('details', '--relation', '1'),
+                                        ('details', '--place_id', '10001'),
+                                        ('status',)])
+    def test_api_commands_simple(self, mock_func_factory, params):
+        mock_run_api = mock_func_factory(nominatim.clicmd.api, 'run_api_script')
 
-def test_import_missing_file(temp_db):
-    assert 1 == call_nominatim('import', '--osm-file', 'sfsafegweweggdgw.reh.erh')
+        assert self.call_nominatim(*params) == 0
 
+        assert mock_run_api.called == 1
+        assert mock_run_api.last_args[0] == params[0]
 
-def test_import_bad_file(temp_db):
-    assert 1 == call_nominatim('import', '--osm-file', '.')
 
 
-def test_import_full(temp_db, mock_func_factory, tokenizer_mock):
-    mocks = [
-        mock_func_factory(nominatim.tools.database_import, 'setup_database_skeleton'),
-        mock_func_factory(nominatim.tools.database_import, 'import_osm_data'),
-        mock_func_factory(nominatim.tools.refresh, 'import_wikipedia_articles'),
-        mock_func_factory(nominatim.tools.database_import, 'truncate_data_tables'),
-        mock_func_factory(nominatim.tools.database_import, 'load_data'),
-        mock_func_factory(nominatim.tools.database_import, 'create_tables'),
-        mock_func_factory(nominatim.tools.database_import, 'create_table_triggers'),
-        mock_func_factory(nominatim.tools.database_import, 'create_partition_tables'),
-        mock_func_factory(nominatim.tools.database_import, 'create_search_indices'),
-        mock_func_factory(nominatim.tools.database_import, 'create_country_names'),
-        mock_func_factory(nominatim.tools.refresh, 'load_address_levels_from_file'),
-        mock_func_factory(nominatim.tools.postcodes, 'update_postcodes'),
-        mock_func_factory(nominatim.indexer.indexer.Indexer, 'index_full'),
-        mock_func_factory(nominatim.tools.refresh, 'setup_website'),
-        mock_func_factory(nominatim.db.properties, 'set_property')
-    ]
+class TestCliWithDb:
 
-    cf_mock = mock_func_factory(nominatim.tools.refresh, 'create_functions')
+    @pytest.fixture(autouse=True)
+    def setup_cli_call(self, cli_call, temp_db):
+        self.call_nominatim = cli_call
 
-    assert 0 == call_nominatim('import', '--osm-file', __file__)
-    assert tokenizer_mock.finalize_import_called
 
-    assert cf_mock.called > 1
+    @pytest.fixture(autouse=True)
+    def setup_tokenizer_mock(self, monkeypatch):
+        class DummyTokenizer:
+            def __init__(self, *args, **kwargs):
+                self.update_sql_functions_called = False
+                self.finalize_import_called = False
 
-    for mock in mocks:
-        assert mock.called == 1, "Mock '{}' not called".format(mock.func_name)
+            def update_sql_functions(self, *args):
+                self.update_sql_functions_called = True
 
+            def finalize_import(self, *args):
+                self.finalize_import_called = True
 
-def test_import_continue_load_data(temp_db, mock_func_factory, tokenizer_mock):
-    mocks = [
-        mock_func_factory(nominatim.tools.database_import, 'truncate_data_tables'),
-        mock_func_factory(nominatim.tools.database_import, 'load_data'),
-        mock_func_factory(nominatim.tools.database_import, 'create_search_indices'),
-        mock_func_factory(nominatim.tools.database_import, 'create_country_names'),
-        mock_func_factory(nominatim.tools.postcodes, 'update_postcodes'),
-        mock_func_factory(nominatim.indexer.indexer.Indexer, 'index_full'),
-        mock_func_factory(nominatim.tools.refresh, 'setup_website'),
-        mock_func_factory(nominatim.db.properties, 'set_property')
-    ]
+        tok = DummyTokenizer()
+        monkeypatch.setattr(nominatim.tokenizer.factory, 'get_tokenizer_for_db',
+                            lambda *args: tok)
+        monkeypatch.setattr(nominatim.tokenizer.factory, 'create_tokenizer',
+                            lambda *args: tok)
 
-    assert 0 == call_nominatim('import', '--continue', 'load-data')
-    assert tokenizer_mock.finalize_import_called
+        self.tokenizer_mock = tok
 
-    for mock in mocks:
-        assert mock.called == 1, "Mock '{}' not called".format(mock.func_name)
 
+    def test_import_missing_file(self):
+        assert self.call_nominatim('import', '--osm-file', 'sfsafegwedgw.reh.erh') == 1
 
-def test_import_continue_indexing(temp_db, mock_func_factory, placex_table,
-                                  temp_db_conn, tokenizer_mock):
-    mocks = [
-        mock_func_factory(nominatim.tools.database_import, 'create_search_indices'),
-        mock_func_factory(nominatim.tools.database_import, 'create_country_names'),
-        mock_func_factory(nominatim.indexer.indexer.Indexer, 'index_full'),
-        mock_func_factory(nominatim.tools.refresh, 'setup_website'),
-        mock_func_factory(nominatim.db.properties, 'set_property')
-    ]
 
-    assert 0 == call_nominatim('import', '--continue', 'indexing')
+    def test_import_bad_file(self):
+        assert self.call_nominatim('import', '--osm-file', '.') == 1
 
-    for mock in mocks:
-        assert mock.called == 1, "Mock '{}' not called".format(mock.func_name)
 
-    assert temp_db_conn.index_exists('idx_placex_pendingsector')
+    def test_import_full(self, mock_func_factory):
+        mocks = [
+            mock_func_factory(nominatim.tools.database_import, 'setup_database_skeleton'),
+            mock_func_factory(nominatim.tools.database_import, 'import_osm_data'),
+            mock_func_factory(nominatim.tools.refresh, 'import_wikipedia_articles'),
+            mock_func_factory(nominatim.tools.database_import, 'truncate_data_tables'),
+            mock_func_factory(nominatim.tools.database_import, 'load_data'),
+            mock_func_factory(nominatim.tools.database_import, 'create_tables'),
+            mock_func_factory(nominatim.tools.database_import, 'create_table_triggers'),
+            mock_func_factory(nominatim.tools.database_import, 'create_partition_tables'),
+            mock_func_factory(nominatim.tools.database_import, 'create_search_indices'),
+            mock_func_factory(nominatim.tools.database_import, 'create_country_names'),
+            mock_func_factory(nominatim.tools.refresh, 'load_address_levels_from_file'),
+            mock_func_factory(nominatim.tools.postcodes, 'update_postcodes'),
+            mock_func_factory(nominatim.indexer.indexer.Indexer, 'index_full'),
+            mock_func_factory(nominatim.tools.refresh, 'setup_website'),
+            mock_func_factory(nominatim.db.properties, 'set_property')
+        ]
 
-    # Calling it again still works for the index
-    assert 0 == call_nominatim('import', '--continue', 'indexing')
-    assert temp_db_conn.index_exists('idx_placex_pendingsector')
+        cf_mock = mock_func_factory(nominatim.tools.refresh, 'create_functions')
 
+        assert self.call_nominatim('import', '--osm-file', __file__) == 0
+        assert self.tokenizer_mock.finalize_import_called
 
-def test_import_continue_postprocess(temp_db, mock_func_factory, tokenizer_mock):
-    mocks = [
-        mock_func_factory(nominatim.tools.database_import, 'create_search_indices'),
-        mock_func_factory(nominatim.tools.database_import, 'create_country_names'),
-        mock_func_factory(nominatim.tools.refresh, 'setup_website'),
-        mock_func_factory(nominatim.db.properties, 'set_property')
-    ]
+        assert cf_mock.called > 1
 
-    assert 0 == call_nominatim('import', '--continue', 'db-postprocess')
+        for mock in mocks:
+            assert mock.called == 1, "Mock '{}' not called".format(mock.func_name)
 
-    assert tokenizer_mock.finalize_import_called
 
-    for mock in mocks:
-        assert mock.called == 1, "Mock '{}' not called".format(mock.func_name)
+    def test_import_continue_load_data(self, mock_func_factory):
+        mocks = [
+            mock_func_factory(nominatim.tools.database_import, 'truncate_data_tables'),
+            mock_func_factory(nominatim.tools.database_import, 'load_data'),
+            mock_func_factory(nominatim.tools.database_import, 'create_search_indices'),
+            mock_func_factory(nominatim.tools.database_import, 'create_country_names'),
+            mock_func_factory(nominatim.tools.postcodes, 'update_postcodes'),
+            mock_func_factory(nominatim.indexer.indexer.Indexer, 'index_full'),
+            mock_func_factory(nominatim.tools.refresh, 'setup_website'),
+            mock_func_factory(nominatim.db.properties, 'set_property')
+        ]
 
+        assert self.call_nominatim('import', '--continue', 'load-data') == 0
+        assert self.tokenizer_mock.finalize_import_called
 
-def test_freeze_command(mock_func_factory, temp_db):
-    mock_drop = mock_func_factory(nominatim.tools.freeze, 'drop_update_tables')
-    mock_flatnode = mock_func_factory(nominatim.tools.freeze, 'drop_flatnode_file')
+        for mock in mocks:
+            assert mock.called == 1, "Mock '{}' not called".format(mock.func_name)
 
-    assert 0 == call_nominatim('freeze')
 
-    assert mock_drop.called == 1
-    assert mock_flatnode.called == 1
+    def test_import_continue_indexing(self, mock_func_factory, placex_table,
+                                      temp_db_conn):
+        mocks = [
+            mock_func_factory(nominatim.tools.database_import, 'create_search_indices'),
+            mock_func_factory(nominatim.tools.database_import, 'create_country_names'),
+            mock_func_factory(nominatim.indexer.indexer.Indexer, 'index_full'),
+            mock_func_factory(nominatim.tools.refresh, 'setup_website'),
+            mock_func_factory(nominatim.db.properties, 'set_property')
+        ]
 
+        assert self.call_nominatim('import', '--continue', 'indexing') == 0
 
-@pytest.mark.parametrize("params", [('--warm', ),
-                                    ('--warm', '--reverse-only'),
-                                    ('--warm', '--search-only')])
-def test_admin_command_legacy(mock_func_factory, params):
-    mock_run_legacy = mock_func_factory(nominatim.clicmd.admin, 'run_legacy_script')
+        for mock in mocks:
+            assert mock.called == 1, "Mock '{}' not called".format(mock.func_name)
 
-    assert 0 == call_nominatim('admin', *params)
+        assert temp_db_conn.index_exists('idx_placex_pendingsector')
 
-    assert mock_run_legacy.called == 1
+        # Calling it again still works for the index
+        assert self.call_nominatim('import', '--continue', 'indexing') == 0
+        assert temp_db_conn.index_exists('idx_placex_pendingsector')
 
 
-@pytest.mark.parametrize("func, params", [('analyse_indexing', ('--analyse-indexing', ))])
-def test_admin_command_tool(temp_db, mock_func_factory, func, params):
-    mock = mock_func_factory(nominatim.tools.admin, func)
+    def test_import_continue_postprocess(self, mock_func_factory):
+        mocks = [
+            mock_func_factory(nominatim.tools.database_import, 'create_search_indices'),
+            mock_func_factory(nominatim.tools.database_import, 'create_country_names'),
+            mock_func_factory(nominatim.tools.refresh, 'setup_website'),
+            mock_func_factory(nominatim.db.properties, 'set_property')
+        ]
 
-    assert 0 == call_nominatim('admin', *params)
-    assert mock.called == 1
+        assert self.call_nominatim('import', '--continue', 'db-postprocess') == 0
 
+        assert self.tokenizer_mock.finalize_import_called
 
-def test_admin_command_check_database(mock_func_factory):
-    mock = mock_func_factory(nominatim.tools.check_database, 'check_database')
+        for mock in mocks:
+            assert mock.called == 1, "Mock '{}' not called".format(mock.func_name)
 
-    assert 0 == call_nominatim('admin', '--check-database')
-    assert mock.called == 1
 
+    def test_freeze_command(self, mock_func_factory):
+        mock_drop = mock_func_factory(nominatim.tools.freeze, 'drop_update_tables')
+        mock_flatnode = mock_func_factory(nominatim.tools.freeze, 'drop_flatnode_file')
 
-@pytest.mark.parametrize("name,oid", [('file', 'foo.osm'), ('diff', 'foo.osc'),
-                                      ('node', 12), ('way', 8), ('relation', 32)])
-def test_add_data_command(mock_run_legacy, name, oid):
-    assert 0 == call_nominatim('add-data', '--' + name, str(oid))
+        assert self.call_nominatim('freeze') == 0
 
-    assert mock_run_legacy.called == 1
-    assert mock_run_legacy.last_args == ('update.php', '--import-' + name, oid)
+        assert mock_drop.called == 1
+        assert mock_flatnode.called == 1
 
 
-@pytest.mark.parametrize("params,do_bnds,do_ranks", [
-                          ([], 1, 1),
-                          (['--boundaries-only'], 1, 0),
-                          (['--no-boundaries'], 0, 1),
-                          (['--boundaries-only', '--no-boundaries'], 0, 0)])
-def test_index_command(mock_func_factory, temp_db_cursor, tokenizer_mock,
-                       params, do_bnds, do_ranks):
-    temp_db_cursor.execute("CREATE TABLE import_status (indexed bool)")
-    bnd_mock = mock_func_factory(nominatim.indexer.indexer.Indexer, 'index_boundaries')
-    rank_mock = mock_func_factory(nominatim.indexer.indexer.Indexer, 'index_by_rank')
 
-    assert 0 == call_nominatim('index', *params)
+    @pytest.mark.parametrize("func, params", [('analyse_indexing', ('--analyse-indexing', ))])
+    def test_admin_command_tool(self, mock_func_factory, func, params):
+        mock = mock_func_factory(nominatim.tools.admin, func)
 
-    assert bnd_mock.called == do_bnds
-    assert rank_mock.called == do_ranks
+        assert self.call_nominatim('admin', *params) == 0
+        assert mock.called == 1
 
-def test_special_phrases_command(temp_db, mock_func_factory, tokenizer_mock):
-    func = mock_func_factory(nominatim.clicmd.special_phrases.SpecialPhrasesImporter, 'import_from_wiki')
 
-    call_nominatim('special-phrases', '--import-from-wiki')
+    @pytest.mark.parametrize("params,do_bnds,do_ranks", [
+                              ([], 1, 1),
+                              (['--boundaries-only'], 1, 0),
+                              (['--no-boundaries'], 0, 1),
+                              (['--boundaries-only', '--no-boundaries'], 0, 0)])
+    def test_index_command(self, mock_func_factory, table_factory,
+                           params, do_bnds, do_ranks):
+        table_factory('import_status', 'indexed bool')
+        bnd_mock = mock_func_factory(nominatim.indexer.indexer.Indexer, 'index_boundaries')
+        rank_mock = mock_func_factory(nominatim.indexer.indexer.Indexer, 'index_by_rank')
 
-    assert func.called == 1
+        assert self.call_nominatim('index', *params) == 0
 
-@pytest.mark.parametrize("command,func", [
-                         ('word-counts', 'recompute_word_counts'),
-                         ('address-levels', 'load_address_levels_from_file'),
-                         ('wiki-data', 'import_wikipedia_articles'),
-                         ('importance', 'recompute_importance'),
-                         ('website', 'setup_website'),
-                         ])
-def test_refresh_command(mock_func_factory, temp_db, command, func, tokenizer_mock):
-    func_mock = mock_func_factory(nominatim.tools.refresh, func)
+        assert bnd_mock.called == do_bnds
+        assert rank_mock.called == do_ranks
 
-    assert 0 == call_nominatim('refresh', '--' + command)
-    assert func_mock.called == 1
+    @pytest.mark.parametrize("no_replace", [(True), (False)])
+    def test_special_phrases_wiki_command(self, mock_func_factory, no_replace):
+        func = mock_func_factory(nominatim.clicmd.special_phrases.SPImporter, 'import_phrases')
 
+        if no_replace:
+            self.call_nominatim('special-phrases', '--import-from-wiki', '--no-replace')
+        else:
+            self.call_nominatim('special-phrases', '--import-from-wiki')
 
-def test_refresh_postcodes(mock_func_factory, temp_db, tokenizer_mock):
-    func_mock = mock_func_factory(nominatim.tools.postcodes, 'update_postcodes')
-    idx_mock = mock_func_factory(nominatim.indexer.indexer.Indexer, 'index_postcodes')
+        assert func.called == 1
 
-    assert 0 == call_nominatim('refresh', '--postcodes')
-    assert func_mock.called == 1
+    @pytest.mark.parametrize("no_replace", [(True), (False)])
+    def test_special_phrases_csv_command(self, src_dir, mock_func_factory, no_replace):
+        func = mock_func_factory(nominatim.clicmd.special_phrases.SPImporter, 'import_phrases')
+        testdata = src_dir / 'test' / 'testdb'
+        csv_path = str((testdata / 'full_en_phrases_test.csv').resolve())
 
-def test_refresh_create_functions(mock_func_factory, temp_db, tokenizer_mock):
-    func_mock = mock_func_factory(nominatim.tools.refresh, 'create_functions')
+        if no_replace:
+            self.call_nominatim('special-phrases', '--import-from-csv', csv_path, '--no-replace')
+        else:
+            self.call_nominatim('special-phrases', '--import-from-csv', csv_path)
 
-    assert 0 == call_nominatim('refresh', '--functions')
-    assert func_mock.called == 1
-    assert tokenizer_mock.update_sql_functions_called
+        assert func.called == 1
 
+    @pytest.mark.parametrize("command,func", [
+                             ('word-counts', 'recompute_word_counts'),
+                             ('address-levels', 'load_address_levels_from_file'),
+                             ('wiki-data', 'import_wikipedia_articles'),
+                             ('importance', 'recompute_importance'),
+                             ('website', 'setup_website'),
+                             ])
+    def test_refresh_command(self, mock_func_factory, command, func):
+        func_mock = mock_func_factory(nominatim.tools.refresh, func)
 
-def test_refresh_importance_computed_after_wiki_import(monkeypatch, temp_db, tokenizer_mock):
-    calls = []
-    monkeypatch.setattr(nominatim.tools.refresh, 'import_wikipedia_articles',
-                        lambda *args, **kwargs: calls.append('import') or 0)
-    monkeypatch.setattr(nominatim.tools.refresh, 'recompute_importance',
-                        lambda *args, **kwargs: calls.append('update'))
+        assert self.call_nominatim('refresh', '--' + command) == 0
+        assert func_mock.called == 1
 
-    assert 0 == call_nominatim('refresh', '--importance', '--wiki-data')
 
-    assert calls == ['import', 'update']
+    def test_refresh_postcodes(self, mock_func_factory):
+        func_mock = mock_func_factory(nominatim.tools.postcodes, 'update_postcodes')
+        idx_mock = mock_func_factory(nominatim.indexer.indexer.Indexer, 'index_postcodes')
 
+        assert self.call_nominatim('refresh', '--postcodes') == 0
+        assert func_mock.called == 1
+        assert idx_mock.called == 1
 
-def test_serve_command(mock_func_factory):
-    func = mock_func_factory(nominatim.cli, 'run_php_server')
+    def test_refresh_create_functions(self, mock_func_factory):
+        func_mock = mock_func_factory(nominatim.tools.refresh, 'create_functions')
 
-    call_nominatim('serve')
+        assert self.call_nominatim('refresh', '--functions') == 0
+        assert func_mock.called == 1
+        assert self.tokenizer_mock.update_sql_functions_called
 
-    assert func.called == 1
 
-@pytest.mark.parametrize("params", [
-                         ('search', '--query', 'new'),
-                         ('reverse', '--lat', '0', '--lon', '0'),
-                         ('lookup', '--id', 'N1'),
-                         ('details', '--node', '1'),
-                         ('details', '--way', '1'),
-                         ('details', '--relation', '1'),
-                         ('details', '--place_id', '10001'),
-                         ('status',)
-                         ])
-def test_api_commands_simple(mock_func_factory, params):
-    mock_run_api = mock_func_factory(nominatim.clicmd.api, 'run_api_script')
+    def test_refresh_importance_computed_after_wiki_import(self, monkeypatch):
+        calls = []
+        monkeypatch.setattr(nominatim.tools.refresh, 'import_wikipedia_articles',
+                            lambda *args, **kwargs: calls.append('import') or 0)
+        monkeypatch.setattr(nominatim.tools.refresh, 'recompute_importance',
+                            lambda *args, **kwargs: calls.append('update'))
 
-    assert 0 == call_nominatim(*params)
+        assert self.call_nominatim('refresh', '--importance', '--wiki-data') == 0
 
-    assert mock_run_api.called == 1
-    assert mock_run_api.last_args[0] == params[0]
+        assert calls == ['import', 'update']
index b95e6ede1787dd7619bee512e95a5694c1e30b02..dcaeaf25fb8fff283f3228ac0fe2d12d56a99ad5 100644 (file)
@@ -3,7 +3,6 @@ Tests for replication command of command-line interface wrapper.
 """
 import datetime as dt
 import time
-from pathlib import Path
 
 import pytest
 
@@ -14,18 +13,6 @@ from nominatim.db import status
 
 from mocks import MockParamCapture
 
-SRC_DIR = (Path(__file__) / '..' / '..' / '..').resolve()
-
-def call_nominatim(*args):
-    return nominatim.cli.nominatim(module_dir='build/module',
-                                   osm2pgsql_path='build/osm2pgsql/osm2pgsql',
-                                   phplib_dir=str(SRC_DIR / 'lib-php'),
-                                   data_dir=str(SRC_DIR / 'data'),
-                                   phpcgi_path='/usr/bin/php-cgi',
-                                   sqllib_dir=str(SRC_DIR / 'lib-sql'),
-                                   config_dir=str(SRC_DIR / 'settings'),
-                                   cli_args=['replication'] + list(args))
-
 @pytest.fixture
 def tokenizer_mock(monkeypatch):
     class DummyTokenizer:
@@ -40,23 +27,14 @@ def tokenizer_mock(monkeypatch):
             self.finalize_import_called = True
 
     tok = DummyTokenizer()
-    monkeypatch.setattr(nominatim.tokenizer.factory, 'get_tokenizer_for_db' ,
+    monkeypatch.setattr(nominatim.tokenizer.factory, 'get_tokenizer_for_db',
                         lambda *args: tok)
-    monkeypatch.setattr(nominatim.tokenizer.factory, 'create_tokenizer' ,
+    monkeypatch.setattr(nominatim.tokenizer.factory, 'create_tokenizer',
                         lambda *args: tok)
 
     return tok
 
 
-@pytest.fixture
-def index_mock(monkeypatch, tokenizer_mock):
-    mock = MockParamCapture()
-    monkeypatch.setattr(nominatim.indexer.indexer.Indexer, 'index_boundaries', mock)
-    monkeypatch.setattr(nominatim.indexer.indexer.Indexer, 'index_by_rank', mock)
-
-    return mock
-
-
 @pytest.fixture
 def mock_func_factory(monkeypatch):
     def get_mock(module, func):
@@ -70,80 +48,95 @@ def mock_func_factory(monkeypatch):
 @pytest.fixture
 def init_status(temp_db_conn, status_table):
     status.set_status(temp_db_conn, date=dt.datetime.now(dt.timezone.utc), seq=1)
-    return 1
+
+
+@pytest.fixture
+def index_mock(monkeypatch, tokenizer_mock, init_status):
+    mock = MockParamCapture()
+    monkeypatch.setattr(nominatim.indexer.indexer.Indexer, 'index_boundaries', mock)
+    monkeypatch.setattr(nominatim.indexer.indexer.Indexer, 'index_by_rank', mock)
+
+    return mock
 
 
 @pytest.fixture
 def update_mock(mock_func_factory, init_status, tokenizer_mock):
     return mock_func_factory(nominatim.tools.replication, 'update')
 
-@pytest.mark.parametrize("params,func", [
-                         (('--init', '--no-update-functions'), 'init_replication'),
-                         (('--check-for-updates',), 'check_for_updates')
-                         ])
-def test_replication_command(mock_func_factory, temp_db, params, func):
-    func_mock = mock_func_factory(nominatim.tools.replication, func)
 
-    assert 0 == call_nominatim(*params)
-    assert func_mock.called == 1
+class TestCliReplication:
+
+    @pytest.fixture(autouse=True)
+    def setup_cli_call(self, cli_call, temp_db):
+        self.call_nominatim = lambda *args: cli_call('replication', *args)
+
+    @pytest.mark.parametrize("params,func", [
+                             (('--init', '--no-update-functions'), 'init_replication'),
+                             (('--check-for-updates',), 'check_for_updates')
+                             ])
+    def test_replication_command(self, mock_func_factory, params, func):
+        func_mock = mock_func_factory(nominatim.tools.replication, func)
+
+        assert self.call_nominatim(*params) == 0
+        assert func_mock.called == 1
 
 
-def test_replication_update_bad_interval(monkeypatch, temp_db):
-    monkeypatch.setenv('NOMINATIM_REPLICATION_UPDATE_INTERVAL', 'xx')
+    def test_replication_update_bad_interval(self, monkeypatch):
+        monkeypatch.setenv('NOMINATIM_REPLICATION_UPDATE_INTERVAL', 'xx')
 
-    assert call_nominatim() == 1
+        assert self.call_nominatim() == 1
 
 
-def test_replication_update_bad_interval_for_geofabrik(monkeypatch, temp_db):
-    monkeypatch.setenv('NOMINATIM_REPLICATION_URL',
-                       'https://download.geofabrik.de/europe/ireland-and-northern-ireland-updates')
+    def test_replication_update_bad_interval_for_geofabrik(self, monkeypatch):
+        monkeypatch.setenv('NOMINATIM_REPLICATION_URL',
+                           'https://download.geofabrik.de/europe/italy-updates')
 
-    assert call_nominatim() == 1
+        assert self.call_nominatim() == 1
 
 
-def test_replication_update_once_no_index(update_mock):
-    assert 0 == call_nominatim('--once', '--no-index')
+    def test_replication_update_once_no_index(self, update_mock):
+        assert self.call_nominatim('--once', '--no-index') == 0
 
-    assert str(update_mock.last_args[1]['osm2pgsql']) == 'build/osm2pgsql/osm2pgsql'
+        assert str(update_mock.last_args[1]['osm2pgsql']) == 'OSM2PGSQL NOT AVAILABLE'
 
 
-def test_replication_update_custom_osm2pgsql(monkeypatch, update_mock):
-    monkeypatch.setenv('NOMINATIM_OSM2PGSQL_BINARY', '/secret/osm2pgsql')
-    assert 0 == call_nominatim('--once', '--no-index')
+    def test_replication_update_custom_osm2pgsql(self, monkeypatch, update_mock):
+        monkeypatch.setenv('NOMINATIM_OSM2PGSQL_BINARY', '/secret/osm2pgsql')
+        assert self.call_nominatim('--once', '--no-index') == 0
 
-    assert str(update_mock.last_args[1]['osm2pgsql']) == '/secret/osm2pgsql'
+        assert str(update_mock.last_args[1]['osm2pgsql']) == '/secret/osm2pgsql'
 
 
-def test_replication_update_custom_threads(update_mock):
-    assert 0 == call_nominatim('--once', '--no-index', '--threads', '4')
+    def test_replication_update_custom_threads(self, update_mock):
+        assert self.call_nominatim('--once', '--no-index', '--threads', '4') == 0
 
-    assert update_mock.last_args[1]['threads'] == 4
+        assert update_mock.last_args[1]['threads'] == 4
 
 
-def test_replication_update_continuous(monkeypatch, init_status, index_mock):
-    states = [nominatim.tools.replication.UpdateState.UP_TO_DATE,
-              nominatim.tools.replication.UpdateState.UP_TO_DATE]
-    monkeypatch.setattr(nominatim.tools.replication, 'update',
-                        lambda *args, **kwargs: states.pop())
+    def test_replication_update_continuous(self, monkeypatch, index_mock):
+        states = [nominatim.tools.replication.UpdateState.UP_TO_DATE,
+                  nominatim.tools.replication.UpdateState.UP_TO_DATE]
+        monkeypatch.setattr(nominatim.tools.replication, 'update',
+                            lambda *args, **kwargs: states.pop())
 
-    with pytest.raises(IndexError):
-        call_nominatim()
+        with pytest.raises(IndexError):
+            self.call_nominatim()
 
-    assert index_mock.called == 4
+        assert index_mock.called == 4
 
 
-def test_replication_update_continuous_no_change(monkeypatch, init_status, index_mock):
-    states = [nominatim.tools.replication.UpdateState.NO_CHANGES,
-              nominatim.tools.replication.UpdateState.UP_TO_DATE]
-    monkeypatch.setattr(nominatim.tools.replication, 'update',
-                        lambda *args, **kwargs: states.pop())
+    def test_replication_update_continuous_no_change(self, monkeypatch, index_mock):
+        states = [nominatim.tools.replication.UpdateState.NO_CHANGES,
+                  nominatim.tools.replication.UpdateState.UP_TO_DATE]
+        monkeypatch.setattr(nominatim.tools.replication, 'update',
+                            lambda *args, **kwargs: states.pop())
 
-    sleep_mock = MockParamCapture()
-    monkeypatch.setattr(time, 'sleep', sleep_mock)
+        sleep_mock = MockParamCapture()
+        monkeypatch.setattr(time, 'sleep', sleep_mock)
 
-    with pytest.raises(IndexError):
-        call_nominatim()
+        with pytest.raises(IndexError):
+            self.call_nominatim()
 
-    assert index_mock.called == 2
-    assert sleep_mock.called == 1
-    assert sleep_mock.last_args[0] == 60
+        assert index_mock.called == 2
+        assert sleep_mock.called == 1
+        assert sleep_mock.last_args[0] == 60
index d6911957892dd290ec9fab26aeddcafd929281d1..6729f95411e5d01f813a4165402b7df06df801ac 100644 (file)
@@ -1,66 +1,72 @@
 """
 Test for loading dotenv configuration.
 """
-from pathlib import Path
-
 import pytest
 
 from nominatim.config import Configuration
 from nominatim.errors import UsageError
 
-DEFCFG_DIR = Path(__file__) / '..' / '..' / '..' / 'settings'
+@pytest.fixture
+def make_config(src_dir):
+    """ Create a configuration object from the given project directory.
+    """
+    def _mk_config(project_dir=None):
+        return Configuration(project_dir, src_dir / 'settings')
+
+    return _mk_config
 
-def test_no_project_dir():
-    config = Configuration(None, DEFCFG_DIR)
+
+def test_no_project_dir(make_config):
+    config = make_config()
 
     assert config.DATABASE_WEBUSER == 'www-data'
 
 
 @pytest.mark.parametrize("val", ('apache', '"apache"'))
-def test_prefer_project_setting_over_default(val, tmp_path):
+def test_prefer_project_setting_over_default(make_config, val, tmp_path):
     envfile = tmp_path / '.env'
     envfile.write_text('NOMINATIM_DATABASE_WEBUSER={}\n'.format(val))
 
-    config = Configuration(Path(tmp_path), DEFCFG_DIR)
+    config = make_config(tmp_path)
 
     assert config.DATABASE_WEBUSER == 'apache'
 
 
-def test_prefer_os_environ_over_project_setting(monkeypatch, tmp_path):
+def test_prefer_os_environ_over_project_setting(make_config, monkeypatch, tmp_path):
     envfile = tmp_path / '.env'
     envfile.write_text('NOMINATIM_DATABASE_WEBUSER=apache\n')
 
     monkeypatch.setenv('NOMINATIM_DATABASE_WEBUSER', 'nobody')
 
-    config = Configuration(Path(tmp_path), DEFCFG_DIR)
+    config = make_config(tmp_path)
 
     assert config.DATABASE_WEBUSER == 'nobody'
 
 
-def test_get_os_env_add_defaults(monkeypatch):
-    config = Configuration(None, DEFCFG_DIR)
+def test_get_os_env_add_defaults(make_config, monkeypatch):
+    config = make_config()
 
     monkeypatch.delenv('NOMINATIM_DATABASE_WEBUSER', raising=False)
 
     assert config.get_os_env()['NOMINATIM_DATABASE_WEBUSER'] == 'www-data'
 
 
-def test_get_os_env_prefer_os_environ(monkeypatch):
-    config = Configuration(None, DEFCFG_DIR)
+def test_get_os_env_prefer_os_environ(make_config, monkeypatch):
+    config = make_config()
 
     monkeypatch.setenv('NOMINATIM_DATABASE_WEBUSER', 'nobody')
 
     assert config.get_os_env()['NOMINATIM_DATABASE_WEBUSER'] == 'nobody'
 
 
-def test_get_libpq_dsn_convert_default():
-    config = Configuration(None, DEFCFG_DIR)
+def test_get_libpq_dsn_convert_default(make_config):
+    config = make_config()
 
     assert config.get_libpq_dsn() == 'dbname=nominatim'
 
 
-def test_get_libpq_dsn_convert_php(monkeypatch):
-    config = Configuration(None, DEFCFG_DIR)
+def test_get_libpq_dsn_convert_php(make_config, monkeypatch):
+    config = make_config()
 
     monkeypatch.setenv('NOMINATIM_DATABASE_DSN',
                        'pgsql:dbname=gis;password=foo;host=localhost')
@@ -71,8 +77,8 @@ def test_get_libpq_dsn_convert_php(monkeypatch):
 @pytest.mark.parametrize("val,expect", [('foo bar', "'foo bar'"),
                                         ("xy'z", "xy\\'z"),
                                        ])
-def test_get_libpq_dsn_convert_php_special_chars(monkeypatch, val, expect):
-    config = Configuration(None, DEFCFG_DIR)
+def test_get_libpq_dsn_convert_php_special_chars(make_config, monkeypatch, val, expect):
+    config = make_config()
 
     monkeypatch.setenv('NOMINATIM_DATABASE_DSN',
                        'pgsql:dbname=gis;password={}'.format(val))
@@ -80,10 +86,10 @@ def test_get_libpq_dsn_convert_php_special_chars(monkeypatch, val, expect):
     assert config.get_libpq_dsn() == "dbname=gis password={}".format(expect)
 
 
-def test_get_libpq_dsn_convert_libpq(monkeypatch):
-    config = Configuration(None, DEFCFG_DIR)
+def test_get_libpq_dsn_convert_libpq(make_config, monkeypatch):
+    config = make_config()
 
-    monkeypatch.setenv('NOMINATIM_DATABASE_DSN', 
+    monkeypatch.setenv('NOMINATIM_DATABASE_DSN',
                        'host=localhost dbname=gis password=foo')
 
     assert config.get_libpq_dsn() == 'host=localhost dbname=gis password=foo'
@@ -92,24 +98,24 @@ def test_get_libpq_dsn_convert_libpq(monkeypatch):
 @pytest.mark.parametrize("value,result",
                          [(x, True) for x in ('1', 'true', 'True', 'yes', 'YES')] +
                          [(x, False) for x in ('0', 'false', 'no', 'NO', 'x')])
-def test_get_bool(monkeypatch, value, result):
-    config = Configuration(None, DEFCFG_DIR)
+def test_get_bool(make_config, monkeypatch, value, result):
+    config = make_config()
 
     monkeypatch.setenv('NOMINATIM_FOOBAR', value)
 
     assert config.get_bool('FOOBAR') == result
 
-def test_get_bool_empty():
-    config = Configuration(None, DEFCFG_DIR)
+def test_get_bool_empty(make_config):
+    config = make_config()
 
     assert config.DATABASE_MODULE_PATH == ''
-    assert config.get_bool('DATABASE_MODULE_PATH') == False
+    assert not config.get_bool('DATABASE_MODULE_PATH')
 
 
 @pytest.mark.parametrize("value,result", [('0', 0), ('1', 1),
                                           ('85762513444', 85762513444)])
-def test_get_int_success(monkeypatch, value, result):
-    config = Configuration(None, DEFCFG_DIR)
+def test_get_int_success(make_config, monkeypatch, value, result):
+    config = make_config()
 
     monkeypatch.setenv('NOMINATIM_FOOBAR', value)
 
@@ -117,8 +123,8 @@ def test_get_int_success(monkeypatch, value, result):
 
 
 @pytest.mark.parametrize("value", ['1b', 'fg', '0x23'])
-def test_get_int_bad_values(monkeypatch, value):
-    config = Configuration(None, DEFCFG_DIR)
+def test_get_int_bad_values(make_config, monkeypatch, value):
+    config = make_config()
 
     monkeypatch.setenv('NOMINATIM_FOOBAR', value)
 
@@ -126,8 +132,8 @@ def test_get_int_bad_values(monkeypatch, value):
         config.get_int('FOOBAR')
 
 
-def test_get_int_empty():
-    config = Configuration(None, DEFCFG_DIR)
+def test_get_int_empty(make_config):
+    config = make_config()
 
     assert config.DATABASE_MODULE_PATH == ''
 
@@ -135,19 +141,19 @@ def test_get_int_empty():
         config.get_int('DATABASE_MODULE_PATH')
 
 
-def test_get_import_style_intern(monkeypatch):
-    config = Configuration(None, DEFCFG_DIR)
+def test_get_import_style_intern(make_config, src_dir, monkeypatch):
+    config = make_config()
 
     monkeypatch.setenv('NOMINATIM_IMPORT_STYLE', 'street')
 
-    expected = DEFCFG_DIR / 'import-street.style'
+    expected = src_dir / 'settings' / 'import-street.style'
 
     assert config.get_import_style_file() == expected
 
 
 @pytest.mark.parametrize("value", ['custom', '/foo/bar.stye'])
-def test_get_import_style_intern(monkeypatch, value):
-    config = Configuration(None, DEFCFG_DIR)
+def test_get_import_style_extern(make_config, monkeypatch, value):
+    config = make_config()
 
     monkeypatch.setenv('NOMINATIM_IMPORT_STYLE', value)
 
index 330b86f7cb46e4e324de82467d461523e0dd94b8..7398eac3c0297819adb32a4e9a01b8b1d9ddf2da 100644 (file)
@@ -6,15 +6,14 @@ import concurrent.futures
 
 import pytest
 import psycopg2
-from psycopg2.extras import wait_select
 
 from nominatim.db.async_connection import DBConnection, DeadlockHandler
 
 
 @pytest.fixture
 def conn(temp_db):
-    with closing(DBConnection('dbname=' + temp_db)) as c:
-        yield c
+    with closing(DBConnection('dbname=' + temp_db)) as connection:
+        yield connection
 
 
 @pytest.fixture
@@ -106,5 +105,3 @@ def test_deadlock(simple_conns):
         future.result()
 
         assert len(deadlock_check) == 1
-
-
index 5de686182fc2d3d90c67c68a165ed634f6e40920..41978e59135513682cb04bc088d00338df78c499 100644 (file)
@@ -7,28 +7,28 @@ import psycopg2
 from nominatim.db.connection import connect, get_pg_env
 
 @pytest.fixture
-def db(temp_db):
-    with connect('dbname=' + temp_db) as conn:
+def db(dsn):
+    with connect(dsn) as conn:
         yield conn
 
 
 def test_connection_table_exists(db, table_factory):
-    assert db.table_exists('foobar') == False
+    assert not db.table_exists('foobar')
 
     table_factory('foobar')
 
-    assert db.table_exists('foobar') == True
+    assert db.table_exists('foobar')
 
 
-def test_connection_index_exists(db, temp_db_cursor):
-    assert db.index_exists('some_index') == False
+def test_connection_index_exists(db, table_factory, temp_db_cursor):
+    assert not db.index_exists('some_index')
 
-    temp_db_cursor.execute('CREATE TABLE foobar (id INT)')
+    table_factory('foobar')
     temp_db_cursor.execute('CREATE INDEX some_index ON foobar(id)')
 
-    assert db.index_exists('some_index') == True
-    assert db.index_exists('some_index', table='foobar') == True
-    assert db.index_exists('some_index', table='bar') == False
+    assert db.index_exists('some_index')
+    assert db.index_exists('some_index', table='foobar')
+    assert not db.index_exists('some_index', table='bar')
 
 
 def test_drop_table_existing(db, table_factory):
@@ -55,9 +55,7 @@ def test_connection_server_version_tuple(db):
     assert ver[0] > 8
 
 
-def test_connection_postgis_version_tuple(db, temp_db_cursor):
-    temp_db_cursor.execute('CREATE EXTENSION postgis')
-
+def test_connection_postgis_version_tuple(db, temp_db_with_extensions):
     ver = db.postgis_version_tuple()
 
     assert isinstance(ver, tuple)
index 9621c68cc9db652cb78b2c78c8486ac49d8cde4b..b17d41ea61de1e45c515b765c3b58777de5b6e53 100644 (file)
@@ -6,26 +6,32 @@ import pytest
 from nominatim.db import properties
 
 @pytest.fixture
-def prop_table(table_factory):
-    table_factory('nominatim_properties', 'property TEXT, value TEXT')
+def property_factory(property_table, temp_db_cursor):
+    """ A function fixture that adds a property into the property table.
+    """
+    def _add_property(name, value):
+        temp_db_cursor.execute("INSERT INTO nominatim_properties VALUES(%s, %s)",
+                               (name, value))
 
+    return _add_property
 
-def test_get_property_existing(prop_table, temp_db_conn, temp_db_cursor):
-    temp_db_cursor.execute("INSERT INTO nominatim_properties VALUES('foo', 'bar')")
+
+def test_get_property_existing(property_factory, temp_db_conn):
+    property_factory('foo', 'bar')
 
     assert properties.get_property(temp_db_conn, 'foo') == 'bar'
 
 
-def test_get_property_unknown(prop_table, temp_db_conn, temp_db_cursor):
-    temp_db_cursor.execute("INSERT INTO nominatim_properties VALUES('other', 'bar')")
+def test_get_property_unknown(property_factory, temp_db_conn):
+    property_factory('other', 'bar')
 
     assert properties.get_property(temp_db_conn, 'foo') is None
 
 
 @pytest.mark.parametrize("prefill", (True, False))
-def test_set_property_new(prop_table, temp_db_conn, temp_db_cursor, prefill):
+def test_set_property_new(property_factory, temp_db_conn, temp_db_cursor, prefill):
     if prefill:
-        temp_db_cursor.execute("INSERT INTO nominatim_properties VALUES('something', 'bar')")
+        property_factory('something', 'bar')
 
     properties.set_property(temp_db_conn, 'something', 'else')
 
index 6a254ef3b8e6943342168349dc04aa43598af80e..79f82daccd150a7b9371bcadd02ea6fc121ed8a2 100644 (file)
@@ -1,8 +1,6 @@
 """
 Tests for SQL preprocessing.
 """
-from pathlib import Path
-
 import pytest
 
 @pytest.fixture
index 9f0327637d561314e05819198f6473eef4c54cb1..b6f5a7b19132431c7c5192b84258d994678eca47 100644 (file)
@@ -8,10 +8,6 @@ import pytest
 import nominatim.db.status
 from nominatim.errors import UsageError
 
-def test_compute_database_date_place_empty(status_table, place_table, temp_db_conn):
-    with pytest.raises(UsageError):
-        nominatim.db.status.compute_database_date(temp_db_conn)
-
 OSM_NODE_DATA = """\
 <osm version="0.6" generator="OpenStreetMap server" copyright="OpenStreetMap and contributors" attribution="http://www.openstreetmap.org/copyright" license="http://opendatacommons.org/licenses/odbl/1-0/">
 <node id="45673" visible="true" version="1" changeset="2047" timestamp="2006-01-27T22:09:10Z" user="Foo" uid="111" lat="48.7586670" lon="8.1343060">
@@ -24,7 +20,17 @@ def iso_date(date):
                .replace(tzinfo=dt.timezone.utc)
 
 
-def test_compute_database_date_valid(monkeypatch, status_table, place_row, temp_db_conn):
+@pytest.fixture(autouse=True)
+def setup_status_table(status_table):
+    pass
+
+
+def test_compute_database_date_place_empty(place_table, temp_db_conn):
+    with pytest.raises(UsageError):
+        nominatim.db.status.compute_database_date(temp_db_conn)
+
+
+def test_compute_database_date_valid(monkeypatch, place_row, temp_db_conn):
     place_row(osm_type='N', osm_id=45673)
 
     requested_url = []
@@ -40,7 +46,7 @@ def test_compute_database_date_valid(monkeypatch, status_table, place_row, temp_
     assert date == iso_date('2006-01-27T22:09:10')
 
 
-def test_compute_database_broken_api(monkeypatch, status_table, place_row, temp_db_conn):
+def test_compute_database_broken_api(monkeypatch, place_row, temp_db_conn):
     place_row(osm_type='N', osm_id=45673)
 
     requested_url = []
@@ -51,53 +57,47 @@ def test_compute_database_broken_api(monkeypatch, status_table, place_row, temp_
     monkeypatch.setattr(nominatim.db.status, "get_url", mock_url)
 
     with pytest.raises(UsageError):
-        date = nominatim.db.status.compute_database_date(temp_db_conn)
+        nominatim.db.status.compute_database_date(temp_db_conn)
 
 
-def test_set_status_empty_table(status_table, temp_db_conn, temp_db_cursor):
+def test_set_status_empty_table(temp_db_conn, temp_db_cursor):
     date = dt.datetime.fromordinal(1000000).replace(tzinfo=dt.timezone.utc)
     nominatim.db.status.set_status(temp_db_conn, date=date)
 
-    temp_db_cursor.execute("SELECT * FROM import_status")
+    assert temp_db_cursor.row_set("SELECT * FROM import_status") == \
+             {(date, None, True)}
 
-    assert temp_db_cursor.rowcount == 1
-    assert temp_db_cursor.fetchone() == [date, None, True]
 
-
-def test_set_status_filled_table(status_table, temp_db_conn, temp_db_cursor):
+def test_set_status_filled_table(temp_db_conn, temp_db_cursor):
     date = dt.datetime.fromordinal(1000000).replace(tzinfo=dt.timezone.utc)
     nominatim.db.status.set_status(temp_db_conn, date=date)
 
-    assert 1 == temp_db_cursor.scalar("SELECT count(*) FROM import_status")
+    assert temp_db_cursor.table_rows('import_status') == 1
 
     date = dt.datetime.fromordinal(1000100).replace(tzinfo=dt.timezone.utc)
     nominatim.db.status.set_status(temp_db_conn, date=date, seq=456, indexed=False)
 
-    temp_db_cursor.execute("SELECT * FROM import_status")
-
-    assert temp_db_cursor.rowcount == 1
-    assert temp_db_cursor.fetchone() == [date, 456, False]
+    assert temp_db_cursor.row_set("SELECT * FROM import_status") == \
+             {(date, 456, False)}
 
 
-def test_set_status_missing_date(status_table, temp_db_conn, temp_db_cursor):
+def test_set_status_missing_date(temp_db_conn, temp_db_cursor):
     date = dt.datetime.fromordinal(1000000).replace(tzinfo=dt.timezone.utc)
     nominatim.db.status.set_status(temp_db_conn, date=date)
 
-    assert 1 == temp_db_cursor.scalar("SELECT count(*) FROM import_status")
+    assert temp_db_cursor.table_rows('import_status') == 1
 
     nominatim.db.status.set_status(temp_db_conn, date=None, seq=456, indexed=False)
 
-    temp_db_cursor.execute("SELECT * FROM import_status")
+    assert temp_db_cursor.row_set("SELECT * FROM import_status") == \
+             {(date, 456, False)}
 
-    assert temp_db_cursor.rowcount == 1
-    assert temp_db_cursor.fetchone() == [date, 456, False]
 
-
-def test_get_status_empty_table(status_table, temp_db_conn):
+def test_get_status_empty_table(temp_db_conn):
     assert nominatim.db.status.get_status(temp_db_conn) == (None, None, None)
 
 
-def test_get_status_success(status_table, temp_db_conn):
+def test_get_status_success(temp_db_conn):
     date = dt.datetime.fromordinal(1000000).replace(tzinfo=dt.timezone.utc)
     nominatim.db.status.set_status(temp_db_conn, date=date, seq=667, indexed=False)
 
@@ -107,7 +107,7 @@ def test_get_status_success(status_table, temp_db_conn):
 
 @pytest.mark.parametrize("old_state", [True, False])
 @pytest.mark.parametrize("new_state", [True, False])
-def test_set_indexed(status_table, temp_db_conn, temp_db_cursor, old_state, new_state):
+def test_set_indexed(temp_db_conn, temp_db_cursor, old_state, new_state):
     date = dt.datetime.fromordinal(1000000).replace(tzinfo=dt.timezone.utc)
     nominatim.db.status.set_status(temp_db_conn, date=date, indexed=old_state)
     nominatim.db.status.set_indexed(temp_db_conn, new_state)
@@ -115,18 +115,21 @@ def test_set_indexed(status_table, temp_db_conn, temp_db_cursor, old_state, new_
     assert temp_db_cursor.scalar("SELECT indexed FROM import_status") == new_state
 
 
-def test_set_indexed_empty_status(status_table, temp_db_conn, temp_db_cursor):
+def test_set_indexed_empty_status(temp_db_conn, temp_db_cursor):
     nominatim.db.status.set_indexed(temp_db_conn, True)
 
-    assert temp_db_cursor.scalar("SELECT count(*) FROM import_status") == 0
+    assert temp_db_cursor.table_rows("import_status") == 0
 
 
-def text_log_status(status_table, temp_db_conn):
+def test_log_status(temp_db_conn, temp_db_cursor):
     date = dt.datetime.fromordinal(1000000).replace(tzinfo=dt.timezone.utc)
     start = dt.datetime.now() - dt.timedelta(hours=1)
+
     nominatim.db.status.set_status(temp_db_conn, date=date, seq=56)
     nominatim.db.status.log_status(temp_db_conn, start, 'index')
 
-    assert temp_db_cursor.scalar("SELECT count(*) FROM import_osmosis_log") == 1
-    assert temp_db_cursor.scalar("SELECT seq FROM import_osmosis_log") == 56
-    assert temp_db_cursor.scalar("SELECT date FROM import_osmosis_log") == date
+    temp_db_conn.commit()
+
+    assert temp_db_cursor.table_rows("import_osmosis_log") == 1
+    assert temp_db_cursor.scalar("SELECT batchseq FROM import_osmosis_log") == 56
+    assert temp_db_cursor.scalar("SELECT event FROM import_osmosis_log") == 'index'
index b8a49ccf604801ff673d9606c44e8789c24649ed..d549b70f803ec8f7e873e21e8c79f350e73f7af6 100644 (file)
@@ -1,7 +1,6 @@
 """
 Tests for DB utility functions in db.utils
 """
-import psycopg2
 import pytest
 
 import nominatim.db.utils as db_utils
@@ -13,10 +12,7 @@ def test_execute_file_success(dsn, temp_db_cursor, tmp_path):
 
     db_utils.execute_file(dsn, tmpfile)
 
-    temp_db_cursor.execute('SELECT * FROM test')
-
-    assert temp_db_cursor.rowcount == 1
-    assert temp_db_cursor.fetchone()[0] == 56
+    assert temp_db_cursor.row_set('SELECT * FROM test') == {(56, )}
 
 def test_execute_file_bad_file(dsn, tmp_path):
     with pytest.raises(FileNotFoundError):
@@ -44,10 +40,7 @@ def test_execute_file_with_pre_code(dsn, tmp_path, temp_db_cursor):
 
     db_utils.execute_file(dsn, tmpfile, pre_code='CREATE TABLE test (id INT)')
 
-    temp_db_cursor.execute('SELECT * FROM test')
-
-    assert temp_db_cursor.rowcount == 1
-    assert temp_db_cursor.fetchone()[0] == 4
+    assert temp_db_cursor.row_set('SELECT * FROM test') == {(4, )}
 
 
 def test_execute_file_with_post_code(dsn, tmp_path, temp_db_cursor):
@@ -56,7 +49,4 @@ def test_execute_file_with_post_code(dsn, tmp_path, temp_db_cursor):
 
     db_utils.execute_file(dsn, tmpfile, post_code='INSERT INTO test VALUES(23)')
 
-    temp_db_cursor.execute('SELECT * FROM test')
-
-    assert temp_db_cursor.rowcount == 1
-    assert temp_db_cursor.fetchone()[0] == 23
+    assert temp_db_cursor.row_set('SELECT * FROM test') == {(23, )}
index ff84e37964b660a33be2faa0908ba2ad0c1519a7..9873e7d72673a8aaeb3e356276a4b558d97be3e8 100644 (file)
@@ -2,7 +2,6 @@
 Tests for running the indexing.
 """
 import itertools
-import psycopg2
 import pytest
 
 from nominatim.indexer import indexer
@@ -64,7 +63,8 @@ class IndexerTestDB:
                            END;
                            $$ LANGUAGE plpgsql STABLE;
                         """)
-            cur.execute("""CREATE OR REPLACE FUNCTION get_interpolation_address(in_address HSTORE, wayid BIGINT)
+            cur.execute("""CREATE OR REPLACE FUNCTION
+                             get_interpolation_address(in_address HSTORE, wayid BIGINT)
                            RETURNS HSTORE AS $$
                            BEGIN
                              RETURN in_address;
@@ -120,7 +120,8 @@ class IndexerTestDB:
         return self.scalar('SELECT count(*) from placex where indexed_status > 0')
 
     def osmline_unindexed(self):
-        return self.scalar('SELECT count(*) from location_property_osmline where indexed_status > 0')
+        return self.scalar("""SELECT count(*) from location_property_osmline
+                              WHERE indexed_status > 0""")
 
 
 @pytest.fixture
@@ -140,37 +141,41 @@ def test_index_all_by_rank(test_db, threads, test_tokenizer):
         test_db.add_place(rank_address=rank, rank_search=rank)
     test_db.add_osmline()
 
-    assert 31 == test_db.placex_unindexed()
-    assert 1 == test_db.osmline_unindexed()
+    assert test_db.placex_unindexed() == 31
+    assert test_db.osmline_unindexed() == 1
 
     idx = indexer.Indexer('dbname=test_nominatim_python_unittest', test_tokenizer, threads)
     idx.index_by_rank(0, 30)
 
-    assert 0 == test_db.placex_unindexed()
-    assert 0 == test_db.osmline_unindexed()
+    assert test_db.placex_unindexed() == 0
+    assert test_db.osmline_unindexed() == 0
 
-    assert 0 == test_db.scalar("""SELECT count(*) from placex
-                               WHERE indexed_status = 0 and indexed_date is null""")
+    assert test_db.scalar("""SELECT count(*) from placex
+                             WHERE indexed_status = 0 and indexed_date is null""") == 0
     # ranks come in order of rank address
-    assert 0 == test_db.scalar("""
+    assert test_db.scalar("""
         SELECT count(*) FROM placex p WHERE rank_address > 0
           AND indexed_date >= (SELECT min(indexed_date) FROM placex o
-                               WHERE p.rank_address < o.rank_address)""")
+                               WHERE p.rank_address < o.rank_address)""") == 0
     # placex rank < 30 objects come before interpolations
-    assert 0 == test_db.scalar(
+    assert test_db.scalar(
         """SELECT count(*) FROM placex WHERE rank_address < 30
-             AND indexed_date > (SELECT min(indexed_date) FROM location_property_osmline)""")
+             AND indexed_date >
+                   (SELECT min(indexed_date) FROM location_property_osmline)""") == 0
     # placex rank = 30 objects come after interpolations
-    assert 0 == test_db.scalar(
+    assert test_db.scalar(
         """SELECT count(*) FROM placex WHERE rank_address = 30
-             AND indexed_date < (SELECT max(indexed_date) FROM location_property_osmline)""")
+             AND indexed_date <
+                   (SELECT max(indexed_date) FROM location_property_osmline)""") == 0
     # rank 0 comes after rank 29 and before rank 30
-    assert 0 == test_db.scalar(
+    assert test_db.scalar(
         """SELECT count(*) FROM placex WHERE rank_address < 30
-             AND indexed_date > (SELECT min(indexed_date) FROM placex WHERE rank_address = 0)""")
-    assert 0 == test_db.scalar(
+             AND indexed_date >
+                   (SELECT min(indexed_date) FROM placex WHERE rank_address = 0)""") == 0
+    assert test_db.scalar(
         """SELECT count(*) FROM placex WHERE rank_address = 30
-             AND indexed_date < (SELECT max(indexed_date) FROM placex WHERE rank_address = 0)""")
+             AND indexed_date <
+                   (SELECT max(indexed_date) FROM placex WHERE rank_address = 0)""") == 0
 
 
 @pytest.mark.parametrize("threads", [1, 15])
@@ -179,19 +184,19 @@ def test_index_partial_without_30(test_db, threads, test_tokenizer):
         test_db.add_place(rank_address=rank, rank_search=rank)
     test_db.add_osmline()
 
-    assert 31 == test_db.placex_unindexed()
-    assert 1 == test_db.osmline_unindexed()
+    assert test_db.placex_unindexed() == 31
+    assert test_db.osmline_unindexed() == 1
 
     idx = indexer.Indexer('dbname=test_nominatim_python_unittest',
                           test_tokenizer, threads)
     idx.index_by_rank(4, 15)
 
-    assert 19 == test_db.placex_unindexed()
-    assert 1 == test_db.osmline_unindexed()
+    assert test_db.placex_unindexed() == 19
+    assert test_db.osmline_unindexed() == 1
 
-    assert 0 == test_db.scalar("""
+    assert test_db.scalar("""
                     SELECT count(*) FROM placex
-                      WHERE indexed_status = 0 AND not rank_address between 4 and 15""")
+                      WHERE indexed_status = 0 AND not rank_address between 4 and 15""") == 0
 
 
 @pytest.mark.parametrize("threads", [1, 15])
@@ -200,18 +205,18 @@ def test_index_partial_with_30(test_db, threads, test_tokenizer):
         test_db.add_place(rank_address=rank, rank_search=rank)
     test_db.add_osmline()
 
-    assert 31 == test_db.placex_unindexed()
-    assert 1 == test_db.osmline_unindexed()
+    assert test_db.placex_unindexed() == 31
+    assert test_db.osmline_unindexed() == 1
 
     idx = indexer.Indexer('dbname=test_nominatim_python_unittest', test_tokenizer, threads)
     idx.index_by_rank(28, 30)
 
-    assert 27 == test_db.placex_unindexed()
-    assert 0 == test_db.osmline_unindexed()
+    assert test_db.placex_unindexed() == 27
+    assert test_db.osmline_unindexed() == 0
 
-    assert 0 == test_db.scalar("""
+    assert test_db.scalar("""
                     SELECT count(*) FROM placex
-                      WHERE indexed_status = 0 AND rank_address between 1 and 27""")
+                      WHERE indexed_status = 0 AND rank_address between 1 and 27""") == 0
 
 @pytest.mark.parametrize("threads", [1, 15])
 def test_index_boundaries(test_db, threads, test_tokenizer):
@@ -221,18 +226,18 @@ def test_index_boundaries(test_db, threads, test_tokenizer):
         test_db.add_place(rank_address=rank, rank_search=rank)
     test_db.add_osmline()
 
-    assert 37 == test_db.placex_unindexed()
-    assert 1 == test_db.osmline_unindexed()
+    assert test_db.placex_unindexed() == 37
+    assert test_db.osmline_unindexed() == 1
 
     idx = indexer.Indexer('dbname=test_nominatim_python_unittest', test_tokenizer, threads)
     idx.index_boundaries(0, 30)
 
-    assert 31 == test_db.placex_unindexed()
-    assert 1 == test_db.osmline_unindexed()
+    assert test_db.placex_unindexed() == 31
+    assert test_db.osmline_unindexed() == 1
 
-    assert 0 == test_db.scalar("""
+    assert test_db.scalar("""
                     SELECT count(*) FROM placex
-                      WHERE indexed_status = 0 AND class != 'boundary'""")
+                      WHERE indexed_status = 0 AND class != 'boundary'""") == 0
 
 
 @pytest.mark.parametrize("threads", [1, 15])
@@ -245,8 +250,8 @@ def test_index_postcodes(test_db, threads, test_tokenizer):
     idx = indexer.Indexer('dbname=test_nominatim_python_unittest', test_tokenizer, threads)
     idx.index_postcodes()
 
-    assert 0 == test_db.scalar("""SELECT count(*) FROM location_postcode
-                                  WHERE indexed_status != 0""")
+    assert test_db.scalar("""SELECT count(*) FROM location_postcode
+                                  WHERE indexed_status != 0""") == 0
 
 
 @pytest.mark.parametrize("analyse", [True, False])
@@ -262,10 +267,10 @@ def test_index_full(test_db, analyse, test_tokenizer):
     idx = indexer.Indexer('dbname=test_nominatim_python_unittest', test_tokenizer, 4)
     idx.index_full(analyse=analyse)
 
-    assert 0 == test_db.placex_unindexed()
-    assert 0 == test_db.osmline_unindexed()
-    assert 0 == test_db.scalar("""SELECT count(*) FROM location_postcode
-                                  WHERE indexed_status != 0""")
+    assert test_db.placex_unindexed() == 0
+    assert test_db.osmline_unindexed() == 0
+    assert test_db.scalar("""SELECT count(*) FROM location_postcode
+                             WHERE indexed_status != 0""") == 0
 
 
 @pytest.mark.parametrize("threads", [1, 15])
@@ -278,4 +283,4 @@ def test_index_reopen_connection(test_db, threads, monkeypatch, test_tokenizer):
     idx = indexer.Indexer('dbname=test_nominatim_python_unittest', test_tokenizer, threads)
     idx.index_by_rank(28, 30)
 
-    assert 0 == test_db.placex_unindexed()
+    assert test_db.placex_unindexed() == 0
index 69517e9401739c1e23ae8cdc3d892cf84f9ca2a2..9dc0b7cb5cec8f575f7e4664425d717ca22d53de 100644 (file)
@@ -1,7 +1,6 @@
 """
 Tests for creating new tokenizers.
 """
-import importlib
 import pytest
 
 from nominatim.db import properties
@@ -10,13 +9,12 @@ from nominatim.errors import UsageError
 from dummy_tokenizer import DummyTokenizer
 
 @pytest.fixture
-def test_config(def_config, tmp_path):
+def test_config(def_config, tmp_path, property_table, tokenizer_mock):
     def_config.project_dir = tmp_path
     return def_config
 
 
-def test_setup_dummy_tokenizer(temp_db_conn, test_config,
-                               tokenizer_mock, property_table):
+def test_setup_dummy_tokenizer(temp_db_conn, test_config):
     tokenizer = factory.create_tokenizer(test_config)
 
     assert isinstance(tokenizer, DummyTokenizer)
@@ -26,7 +24,7 @@ def test_setup_dummy_tokenizer(temp_db_conn, test_config,
     assert properties.get_property(temp_db_conn, 'tokenizer') == 'dummy'
 
 
-def test_setup_tokenizer_dir_exists(test_config, tokenizer_mock, property_table):
+def test_setup_tokenizer_dir_exists(test_config):
     (test_config.project_dir / 'tokenizer').mkdir()
 
     tokenizer = factory.create_tokenizer(test_config)
@@ -35,21 +33,22 @@ def test_setup_tokenizer_dir_exists(test_config, tokenizer_mock, property_table)
     assert tokenizer.init_state == "new"
 
 
-def test_setup_tokenizer_dir_failure(test_config, tokenizer_mock, property_table):
+def test_setup_tokenizer_dir_failure(test_config):
     (test_config.project_dir / 'tokenizer').write_text("foo")
 
     with pytest.raises(UsageError):
         factory.create_tokenizer(test_config)
 
 
-def test_setup_bad_tokenizer_name(test_config, monkeypatch):
+def test_setup_bad_tokenizer_name(def_config, tmp_path, monkeypatch):
+    def_config.project_dir = tmp_path
     monkeypatch.setenv('NOMINATIM_TOKENIZER', 'dummy')
 
     with pytest.raises(UsageError):
-        factory.create_tokenizer(test_config)
+        factory.create_tokenizer(def_config)
 
-def test_load_tokenizer(temp_db_conn, test_config,
-                        tokenizer_mock, property_table):
+
+def test_load_tokenizer(test_config):
     factory.create_tokenizer(test_config)
 
     tokenizer = factory.get_tokenizer_for_db(test_config)
@@ -58,7 +57,7 @@ def test_load_tokenizer(temp_db_conn, test_config,
     assert tokenizer.init_state == "loaded"
 
 
-def test_load_no_tokenizer_dir(test_config, tokenizer_mock, property_table):
+def test_load_no_tokenizer_dir(test_config):
     factory.create_tokenizer(test_config)
 
     test_config.project_dir = test_config.project_dir / 'foo'
@@ -67,11 +66,10 @@ def test_load_no_tokenizer_dir(test_config, tokenizer_mock, property_table):
         factory.get_tokenizer_for_db(test_config)
 
 
-def test_load_missing_propoerty(temp_db_cursor, test_config, tokenizer_mock, property_table):
+def test_load_missing_propoerty(temp_db_cursor, test_config):
     factory.create_tokenizer(test_config)
 
     temp_db_cursor.execute("TRUNCATE TABLE nominatim_properties")
 
     with pytest.raises(UsageError):
         factory.get_tokenizer_for_db(test_config)
-
index 15ae50a4ce94175b78fd444d97c9bb0a4e5ab2e5..3cd18fcff53f910210932f56ecd5ed7300d64c98 100644 (file)
@@ -46,7 +46,7 @@ def tokenizer_factory(dsn, tmp_path, property_table):
 
 @pytest.fixture
 def tokenizer_setup(tokenizer_factory, test_config, monkeypatch, sql_preprocessor):
-    monkeypatch.setattr(legacy_tokenizer, '_check_module' , lambda m, c: None)
+    monkeypatch.setattr(legacy_tokenizer, '_check_module', lambda m, c: None)
     tok = tokenizer_factory()
     tok.init_new_db(test_config)
 
@@ -60,7 +60,7 @@ def analyzer(tokenizer_factory, test_config, monkeypatch, sql_preprocessor,
           RETURNS INTEGER AS $$ SELECT 342; $$ LANGUAGE SQL;
         """)
 
-    monkeypatch.setattr(legacy_tokenizer, '_check_module' , lambda m, c: None)
+    monkeypatch.setattr(legacy_tokenizer, '_check_module', lambda m, c: None)
     monkeypatch.setenv('NOMINATIM_TERM_NORMALIZATION', ':: lower();')
     tok = tokenizer_factory()
     tok.init_new_db(test_config)
@@ -86,16 +86,6 @@ def create_postcode_id(temp_db_cursor):
                               $$ LANGUAGE SQL""")
 
 
-@pytest.fixture
-def create_housenumbers(temp_db_cursor):
-    temp_db_cursor.execute("""CREATE OR REPLACE FUNCTION create_housenumbers(
-                                  housenumbers TEXT[],
-                                  OUT tokens TEXT, OUT normtext TEXT)
-                              AS $$
-                              SELECT housenumbers::TEXT, array_to_string(housenumbers, ';')
-                              $$ LANGUAGE SQL""")
-
-
 @pytest.fixture
 def make_keywords(temp_db_cursor, temp_db_with_extensions):
     temp_db_cursor.execute(
@@ -105,7 +95,7 @@ def make_keywords(temp_db_cursor, temp_db_with_extensions):
 def test_init_new(tokenizer_factory, test_config, monkeypatch,
                   temp_db_conn, sql_preprocessor):
     monkeypatch.setenv('NOMINATIM_TERM_NORMALIZATION', 'xxvv')
-    monkeypatch.setattr(legacy_tokenizer, '_check_module' , lambda m, c: None)
+    monkeypatch.setattr(legacy_tokenizer, '_check_module', lambda m, c: None)
 
     tok = tokenizer_factory()
     tok.init_new_db(test_config)
@@ -119,8 +109,7 @@ def test_init_new(tokenizer_factory, test_config, monkeypatch,
     assert outfile.stat().st_mode == 33261
 
 
-def test_init_module_load_failed(tokenizer_factory, test_config,
-                                 monkeypatch, temp_db_conn):
+def test_init_module_load_failed(tokenizer_factory, test_config):
     tok = tokenizer_factory()
 
     with pytest.raises(UsageError):
@@ -134,7 +123,7 @@ def test_init_module_custom(tokenizer_factory, test_config,
     (module_dir/ 'nominatim.so').write_text('CUSTOM nomiantim.so')
 
     monkeypatch.setenv('NOMINATIM_DATABASE_MODULE_PATH', str(module_dir))
-    monkeypatch.setattr(legacy_tokenizer, '_check_module' , lambda m, c: None)
+    monkeypatch.setattr(legacy_tokenizer, '_check_module', lambda m, c: None)
 
     tok = tokenizer_factory()
     tok.init_new_db(test_config)
@@ -154,7 +143,7 @@ def test_update_sql_functions(sql_preprocessor, temp_db_conn,
                               tokenizer_factory, test_config, table_factory,
                               monkeypatch, temp_db_cursor):
     monkeypatch.setenv('NOMINATIM_MAX_WORD_FREQUENCY', '1133')
-    monkeypatch.setattr(legacy_tokenizer, '_check_module' , lambda m, c: None)
+    monkeypatch.setattr(legacy_tokenizer, '_check_module', lambda m, c: None)
     tok = tokenizer_factory()
     tok.init_new_db(test_config)
     monkeypatch.undo()
@@ -174,7 +163,7 @@ def test_update_sql_functions(sql_preprocessor, temp_db_conn,
 
 
 def test_migrate_database(tokenizer_factory, test_config, temp_db_conn, monkeypatch):
-    monkeypatch.setattr(legacy_tokenizer, '_check_module' , lambda m, c: None)
+    monkeypatch.setattr(legacy_tokenizer, '_check_module', lambda m, c: None)
     tok = tokenizer_factory()
     tok.migrate_database(test_config)
 
@@ -221,7 +210,7 @@ def test_update_special_phrase_empty_table(analyzer, word_table, make_standard_n
         ("König bei", "amenity", "royal", "near"),
         ("Könige", "amenity", "royal", "-"),
         ("strasse", "highway", "primary", "in")
-    ])
+    ], True)
 
     assert word_table.get_special() \
                == set(((' könig bei', 'könig bei', 'amenity', 'royal', 'near'),
@@ -229,18 +218,28 @@ def test_update_special_phrase_empty_table(analyzer, word_table, make_standard_n
                        (' strasse', 'strasse', 'highway', 'primary', 'in')))
 
 
-def test_update_special_phrase_delete_all(analyzer, word_table, temp_db_cursor,
-                                          make_standard_name):
+def test_update_special_phrase_delete_all(analyzer, word_table, make_standard_name):
     word_table.add_special(' foo', 'foo', 'amenity', 'prison', 'in')
     word_table.add_special(' bar', 'bar', 'highway', 'road', None)
 
     assert word_table.count_special() == 2
 
-    analyzer.update_special_phrases([])
+    analyzer.update_special_phrases([], True)
 
     assert word_table.count_special() == 0
 
 
+def test_update_special_phrases_no_replace(analyzer, word_table, make_standard_name):
+    word_table.add_special(' foo', 'foo', 'amenity', 'prison', 'in')
+    word_table.add_special(' bar', 'bar', 'highway', 'road', None)
+
+    assert word_table.count_special() == 2
+
+    analyzer.update_special_phrases([], False)
+
+    assert word_table.count_special() == 2
+
+
 def test_update_special_phrase_modify(analyzer, word_table, make_standard_name):
     word_table.add_special(' foo', 'foo', 'amenity', 'prison', 'in')
     word_table.add_special(' bar', 'bar', 'highway', 'road', None)
@@ -248,10 +247,10 @@ def test_update_special_phrase_modify(analyzer, word_table, make_standard_name):
     assert word_table.count_special() == 2
 
     analyzer.update_special_phrases([
-      ('prison', 'amenity', 'prison', 'in'),
-      ('bar', 'highway', 'road', '-'),
-      ('garden', 'leisure', 'garden', 'near')
-    ])
+        ('prison', 'amenity', 'prison', 'in'),
+        ('bar', 'highway', 'road', '-'),
+        ('garden', 'leisure', 'garden', 'near')
+    ], True)
 
     assert word_table.get_special() \
                == set(((' prison', 'prison', 'amenity', 'prison', 'in'),
@@ -260,43 +259,58 @@ def test_update_special_phrase_modify(analyzer, word_table, make_standard_name):
 
 
 def test_process_place_names(analyzer, make_keywords):
-
     info = analyzer.process_place({'name' : {'name' : 'Soft bAr', 'ref': '34'}})
 
     assert info['names'] == '{1,2,3}'
 
 
-@pytest.mark.parametrize('pc', ['12345', 'AB 123', '34-345'])
-def test_process_place_postcode(analyzer, create_postcode_id, word_table, pc):
-    info = analyzer.process_place({'address': {'postcode' : pc}})
+@pytest.mark.parametrize('pcode', ['12345', 'AB 123', '34-345'])
+def test_process_place_postcode(analyzer, create_postcode_id, word_table, pcode):
+    analyzer.process_place({'address': {'postcode' : pcode}})
 
-    assert word_table.get_postcodes() == {pc, }
+    assert word_table.get_postcodes() == {pcode, }
 
 
-@pytest.mark.parametrize('pc', ['12:23', 'ab;cd;f', '123;836'])
-def test_process_place_bad_postcode(analyzer, create_postcode_id, word_table, pc):
-    info = analyzer.process_place({'address': {'postcode' : pc}})
+@pytest.mark.parametrize('pcode', ['12:23', 'ab;cd;f', '123;836'])
+def test_process_place_bad_postcode(analyzer, create_postcode_id, word_table, pcode):
+    analyzer.process_place({'address': {'postcode' : pcode}})
 
     assert not word_table.get_postcodes()
 
 
-@pytest.mark.parametrize('hnr', ['123a', '1', '101'])
-def test_process_place_housenumbers_simple(analyzer, create_housenumbers, hnr):
-    info = analyzer.process_place({'address': {'housenumber' : hnr}})
+class TestHousenumberName:
+
+    @staticmethod
+    @pytest.fixture(autouse=True)
+    def setup_create_housenumbers(temp_db_cursor):
+        temp_db_cursor.execute("""CREATE OR REPLACE FUNCTION create_housenumbers(
+                                      housenumbers TEXT[],
+                                      OUT tokens TEXT, OUT normtext TEXT)
+                                  AS $$
+                                  SELECT housenumbers::TEXT, array_to_string(housenumbers, ';')
+                                  $$ LANGUAGE SQL""")
+
+
+    @staticmethod
+    @pytest.mark.parametrize('hnr', ['123a', '1', '101'])
+    def test_process_place_housenumbers_simple(analyzer, hnr):
+        info = analyzer.process_place({'address': {'housenumber' : hnr}})
 
-    assert info['hnr'] == hnr
-    assert info['hnr_tokens'].startswith("{")
+        assert info['hnr'] == hnr
+        assert info['hnr_tokens'].startswith("{")
 
 
-def test_process_place_housenumbers_lists(analyzer, create_housenumbers):
-    info = analyzer.process_place({'address': {'conscriptionnumber' : '1; 2;3'}})
+    @staticmethod
+    def test_process_place_housenumbers_lists(analyzer):
+        info = analyzer.process_place({'address': {'conscriptionnumber' : '1; 2;3'}})
 
-    assert set(info['hnr'].split(';')) == set(('1', '2', '3'))
+        assert set(info['hnr'].split(';')) == set(('1', '2', '3'))
 
 
-def test_process_place_housenumbers_duplicates(analyzer, create_housenumbers):
-    info = analyzer.process_place({'address': {'housenumber' : '134',
-                                               'conscriptionnumber' : '134',
-                                               'streetnumber' : '99a'}})
+    @staticmethod
+    def test_process_place_housenumbers_duplicates(analyzer):
+        info = analyzer.process_place({'address': {'housenumber' : '134',
+                                                   'conscriptionnumber' : '134',
+                                                   'streetnumber' : '99a'}})
 
-    assert set(info['hnr'].split(';')) == set(('134', '99a'))
+        assert set(info['hnr'].split(';')) == set(('134', '99a'))
index 8dc5c8301d401c1d6022036a51b087a4d1f6d2e6..d8ca2f22660b9bde665a7563372398d9130225f2 100644 (file)
@@ -46,14 +46,14 @@ def db_prop(temp_db_conn):
     return _get_db_property
 
 @pytest.fixture
-def tokenizer_setup(tokenizer_factory, test_config, monkeypatch, sql_preprocessor):
+def tokenizer_setup(tokenizer_factory, test_config):
     tok = tokenizer_factory()
     tok.init_new_db(test_config)
 
 
 @pytest.fixture
-def analyzer(tokenizer_factory, test_config, monkeypatch, sql_preprocessor,
-             word_table, temp_db_with_extensions, tmp_path):
+def analyzer(tokenizer_factory, test_config, monkeypatch,
+             temp_db_with_extensions, tmp_path):
     sql = tmp_path / 'sql' / 'tokenizer' / 'legacy_icu_tokenizer.sql'
     sql.write_text("SELECT 'a';")
 
@@ -74,17 +74,18 @@ def analyzer(tokenizer_factory, test_config, monkeypatch, sql_preprocessor,
 @pytest.fixture
 def getorcreate_term_id(temp_db_cursor):
     temp_db_cursor.execute("""CREATE OR REPLACE FUNCTION getorcreate_term_id(lookup_term TEXT)
-                              RETURNS INTEGER AS $$ SELECT nextval('seq_word')::INTEGER; $$ LANGUAGE SQL""")
+                              RETURNS INTEGER AS $$
+                                SELECT nextval('seq_word')::INTEGER; $$ LANGUAGE SQL""")
 
 
 @pytest.fixture
 def getorcreate_hnr_id(temp_db_cursor):
     temp_db_cursor.execute("""CREATE OR REPLACE FUNCTION getorcreate_hnr_id(lookup_term TEXT)
-                              RETURNS INTEGER AS $$ SELECT -nextval('seq_word')::INTEGER; $$ LANGUAGE SQL""")
+                              RETURNS INTEGER AS $$
+                                SELECT -nextval('seq_word')::INTEGER; $$ LANGUAGE SQL""")
 
 
-def test_init_new(tokenizer_factory, test_config, monkeypatch, db_prop,
-                  sql_preprocessor, place_table, word_table):
+def test_init_new(tokenizer_factory, test_config, monkeypatch, db_prop):
     monkeypatch.setenv('NOMINATIM_TERM_NORMALIZATION', ':: lower();')
 
     tok = tokenizer_factory()
@@ -105,10 +106,9 @@ def test_init_from_project(tokenizer_setup, tokenizer_factory):
     assert tok.abbreviations is not None
 
 
-def test_update_sql_functions(temp_db_conn, db_prop, temp_db_cursor,
+def test_update_sql_functions(db_prop, temp_db_cursor,
                               tokenizer_factory, test_config, table_factory,
-                              monkeypatch,
-                              sql_preprocessor, place_table, word_table):
+                              monkeypatch):
     monkeypatch.setenv('NOMINATIM_MAX_WORD_FREQUENCY', '1133')
     tok = tokenizer_factory()
     tok.init_new_db(test_config)
@@ -128,25 +128,25 @@ def test_update_sql_functions(temp_db_conn, db_prop, temp_db_cursor,
 
 
 def test_make_standard_word(analyzer):
-    with analyzer(abbr=(('STREET', 'ST'), ('tiny', 't'))) as a:
-        assert a.make_standard_word('tiny street') == 'TINY ST'
+    with analyzer(abbr=(('STREET', 'ST'), ('tiny', 't'))) as anl:
+        assert anl.make_standard_word('tiny street') == 'TINY ST'
 
-    with analyzer(abbr=(('STRASSE', 'STR'), ('STR', 'ST'))) as a:
-        assert a.make_standard_word('Hauptstrasse') == 'HAUPTST'
+    with analyzer(abbr=(('STRASSE', 'STR'), ('STR', 'ST'))) as anl:
+        assert anl.make_standard_word('Hauptstrasse') == 'HAUPTST'
 
 
 def test_make_standard_hnr(analyzer):
-    with analyzer(abbr=(('IV', '4'),)) as a:
-        assert a._make_standard_hnr('345') == '345'
-        assert a._make_standard_hnr('iv') == 'IV'
+    with analyzer(abbr=(('IV', '4'),)) as anl:
+        assert anl._make_standard_hnr('345') == '345'
+        assert anl._make_standard_hnr('iv') == 'IV'
 
 
 def test_update_postcodes_from_db_empty(analyzer, table_factory, word_table):
     table_factory('location_postcode', 'postcode TEXT',
                   content=(('1234',), ('12 34',), ('AB23',), ('1234',)))
 
-    with analyzer() as a:
-        a.update_postcodes_from_db()
+    with analyzer() as anl:
+        anl.update_postcodes_from_db()
 
     assert word_table.count() == 3
     assert word_table.get_postcodes() == {'1234', '12 34', 'AB23'}
@@ -158,108 +158,130 @@ def test_update_postcodes_from_db_add_and_remove(analyzer, table_factory, word_t
     word_table.add_postcode(' 1234', '1234')
     word_table.add_postcode(' 5678', '5678')
 
-    with analyzer() as a:
-        a.update_postcodes_from_db()
+    with analyzer() as anl:
+        anl.update_postcodes_from_db()
 
     assert word_table.count() == 3
     assert word_table.get_postcodes() == {'1234', '45BC', 'XX45'}
 
 
-def test_update_special_phrase_empty_table(analyzer, word_table, temp_db_cursor):
-    with analyzer() as a:
-        a.update_special_phrases([
+def test_update_special_phrase_empty_table(analyzer, word_table):
+    with analyzer() as anl:
+        anl.update_special_phrases([
             ("König bei", "amenity", "royal", "near"),
             ("Könige", "amenity", "royal", "-"),
             ("street", "highway", "primary", "in")
-        ])
+        ], True)
 
-    assert temp_db_cursor.row_set("""SELECT word_token, word, class, type, operator
-                                     FROM word WHERE class != 'place'""") \
-               == set(((' KÖNIG BEI', 'könig bei', 'amenity', 'royal', 'near'),
-                       (' KÖNIGE', 'könige', 'amenity', 'royal', None),
-                       (' ST', 'street', 'highway', 'primary', 'in')))
+    assert word_table.get_special() \
+               == {(' KÖNIG BEI', 'könig bei', 'amenity', 'royal', 'near'),
+                   (' KÖNIGE', 'könige', 'amenity', 'royal', None),
+                   (' ST', 'street', 'highway', 'primary', 'in')}
 
 
-def test_update_special_phrase_delete_all(analyzer, word_table, temp_db_cursor):
-    temp_db_cursor.execute("""INSERT INTO word (word_token, word, class, type, operator)
-                              VALUES (' FOO', 'foo', 'amenity', 'prison', 'in'),
-                                     (' BAR', 'bar', 'highway', 'road', null)""")
+def test_update_special_phrase_delete_all(analyzer, word_table):
+    word_table.add_special(' FOO', 'foo', 'amenity', 'prison', 'in')
+    word_table.add_special(' BAR', 'bar', 'highway', 'road', None)
 
-    assert 2 == temp_db_cursor.scalar("SELECT count(*) FROM word WHERE class != 'place'""")
+    assert word_table.count_special() == 2
 
-    with analyzer() as a:
-        a.update_special_phrases([])
+    with analyzer() as anl:
+        anl.update_special_phrases([], True)
 
-    assert 0 == temp_db_cursor.scalar("SELECT count(*) FROM word WHERE class != 'place'""")
+    assert word_table.count_special() == 0
 
 
-def test_update_special_phrase_modify(analyzer, word_table, temp_db_cursor):
-    temp_db_cursor.execute("""INSERT INTO word (word_token, word, class, type, operator)
-                              VALUES (' FOO', 'foo', 'amenity', 'prison', 'in'),
-                                     (' BAR', 'bar', 'highway', 'road', null)""")
+def test_update_special_phrases_no_replace(analyzer, word_table):
+    word_table.add_special(' FOO', 'foo', 'amenity', 'prison', 'in')
+    word_table.add_special(' BAR', 'bar', 'highway', 'road', None)
 
-    assert 2 == temp_db_cursor.scalar("SELECT count(*) FROM word WHERE class != 'place'""")
+    assert word_table.count_special() == 2
 
-    with analyzer() as a:
-        a.update_special_phrases([
-          ('prison', 'amenity', 'prison', 'in'),
-          ('bar', 'highway', 'road', '-'),
-          ('garden', 'leisure', 'garden', 'near')
-        ])
+    with analyzer() as anl:
+        anl.update_special_phrases([], False)
 
-    assert temp_db_cursor.row_set("""SELECT word_token, word, class, type, operator
-                                     FROM word WHERE class != 'place'""") \
-               == set(((' PRISON', 'prison', 'amenity', 'prison', 'in'),
-                       (' BAR', 'bar', 'highway', 'road', None),
-                       (' GARDEN', 'garden', 'leisure', 'garden', 'near')))
+    assert word_table.count_special() == 2
+
+
+def test_update_special_phrase_modify(analyzer, word_table):
+    word_table.add_special(' FOO', 'foo', 'amenity', 'prison', 'in')
+    word_table.add_special(' BAR', 'bar', 'highway', 'road', None)
+
+    assert word_table.count_special() == 2
+
+    with analyzer() as anl:
+        anl.update_special_phrases([
+            ('prison', 'amenity', 'prison', 'in'),
+            ('bar', 'highway', 'road', '-'),
+            ('garden', 'leisure', 'garden', 'near')
+        ], True)
+
+    assert word_table.get_special() \
+               == {(' PRISON', 'prison', 'amenity', 'prison', 'in'),
+                   (' BAR', 'bar', 'highway', 'road', None),
+                   (' GARDEN', 'garden', 'leisure', 'garden', 'near')}
 
 
 def test_process_place_names(analyzer, getorcreate_term_id):
+    with analyzer() as anl:
+        info = anl.process_place({'name' : {'name' : 'Soft bAr', 'ref': '34'}})
+
+    assert info['names'] == '{1,2,3,4,5}'
+
+
+@pytest.mark.parametrize('sep', [',' , ';'])
+def test_full_names_with_separator(analyzer, getorcreate_term_id, sep):
+    with analyzer() as anl:
+        names = anl._compute_full_names({'name' : sep.join(('New York', 'Big Apple'))})
+
+    assert names == set(('NEW YORK', 'BIG APPLE'))
+
 
-    with analyzer() as a:
-        info = a.process_place({'name' : {'name' : 'Soft bAr', 'ref': '34'}})
+def test_full_names_with_bracket(analyzer, getorcreate_term_id):
+    with analyzer() as anl:
+        names = anl._compute_full_names({'name' : 'Houseboat (left)'})
 
-    assert info['names'] == '{1,2,3,4,5,6}'
+    assert names == set(('HOUSEBOAT (LEFT)', 'HOUSEBOAT'))
 
 
-@pytest.mark.parametrize('pc', ['12345', 'AB 123', '34-345'])
-def test_process_place_postcode(analyzer, word_table, pc):
-    with analyzer() as a:
-        info = a.process_place({'address': {'postcode' : pc}})
+@pytest.mark.parametrize('pcode', ['12345', 'AB 123', '34-345'])
+def test_process_place_postcode(analyzer, word_table, pcode):
+    with analyzer() as anl:
+        anl.process_place({'address': {'postcode' : pcode}})
 
-    assert word_table.get_postcodes() == {pc, }
+    assert word_table.get_postcodes() == {pcode, }
 
 
-@pytest.mark.parametrize('pc', ['12:23', 'ab;cd;f', '123;836'])
-def test_process_place_bad_postcode(analyzer, word_table, pc):
-    with analyzer() as a:
-        info = a.process_place({'address': {'postcode' : pc}})
+@pytest.mark.parametrize('pcode', ['12:23', 'ab;cd;f', '123;836'])
+def test_process_place_bad_postcode(analyzer, word_table, pcode):
+    with analyzer() as anl:
+        anl.process_place({'address': {'postcode' : pcode}})
 
     assert not word_table.get_postcodes()
 
 
 @pytest.mark.parametrize('hnr', ['123a', '1', '101'])
 def test_process_place_housenumbers_simple(analyzer, hnr, getorcreate_hnr_id):
-    with analyzer() as a:
-        info = a.process_place({'address': {'housenumber' : hnr}})
+    with analyzer() as anl:
+        info = anl.process_place({'address': {'housenumber' : hnr}})
 
     assert info['hnr'] == hnr.upper()
     assert info['hnr_tokens'] == "{-1}"
 
 
 def test_process_place_housenumbers_lists(analyzer, getorcreate_hnr_id):
-    with analyzer() as a:
-        info = a.process_place({'address': {'conscriptionnumber' : '1; 2;3'}})
+    with analyzer() as anl:
+        info = anl.process_place({'address': {'conscriptionnumber' : '1; 2;3'}})
 
     assert set(info['hnr'].split(';')) == set(('1', '2', '3'))
     assert info['hnr_tokens'] == "{-1,-2,-3}"
 
 
 def test_process_place_housenumbers_duplicates(analyzer, getorcreate_hnr_id):
-    with analyzer() as a:
-        info = a.process_place({'address': {'housenumber' : '134',
-                                               'conscriptionnumber' : '134',
-                                               'streetnumber' : '99a'}})
+    with analyzer() as anl:
+        info = anl.process_place({'address': {'housenumber' : '134',
+                                              'conscriptionnumber' : '134',
+                                              'streetnumber' : '99a'}})
 
     assert set(info['hnr'].split(';')) == set(('134', '99A'))
     assert info['hnr_tokens'] == "{-1,-2}"
index 36c7d6ff0365ecc9e43a4c0a5ecd3efe6e994276..bd47e0e4632ee76215f9aa61bcf6234a8c752c6a 100644 (file)
@@ -3,39 +3,39 @@ Tests for maintenance and analysis functions.
 """
 import pytest
 
-from nominatim.db.connection import connect
 from nominatim.errors import UsageError
 from nominatim.tools import admin
 
-@pytest.fixture
-def db(temp_db, placex_table):
-    with connect('dbname=' + temp_db) as conn:
-        yield conn
+@pytest.fixture(autouse=True)
+def create_placex_table(placex_table):
+    """ All tests in this module require the placex table to be set up.
+    """
 
-def test_analyse_indexing_no_objects(db):
+
+def test_analyse_indexing_no_objects(temp_db_conn):
     with pytest.raises(UsageError):
-        admin.analyse_indexing(db)
+        admin.analyse_indexing(temp_db_conn)
 
 
 @pytest.mark.parametrize("oid", ['1234', 'N123a', 'X123'])
-def test_analyse_indexing_bad_osmid(db, oid):
+def test_analyse_indexing_bad_osmid(temp_db_conn, oid):
     with pytest.raises(UsageError):
-        admin.analyse_indexing(db, osm_id=oid)
+        admin.analyse_indexing(temp_db_conn, osm_id=oid)
 
 
-def test_analyse_indexing_unknown_osmid(db):
+def test_analyse_indexing_unknown_osmid(temp_db_conn):
     with pytest.raises(UsageError):
-        admin.analyse_indexing(db, osm_id='W12345674')
+        admin.analyse_indexing(temp_db_conn, osm_id='W12345674')
 
 
-def test_analyse_indexing_with_place_id(db, temp_db_cursor):
+def test_analyse_indexing_with_place_id(temp_db_conn, temp_db_cursor):
     temp_db_cursor.execute("INSERT INTO placex (place_id) VALUES(12345)")
 
-    admin.analyse_indexing(db, place_id=12345)
+    admin.analyse_indexing(temp_db_conn, place_id=12345)
 
 
-def test_analyse_indexing_with_osm_id(db, temp_db_cursor):
+def test_analyse_indexing_with_osm_id(temp_db_conn, temp_db_cursor):
     temp_db_cursor.execute("""INSERT INTO placex (place_id, osm_type, osm_id)
                               VALUES(9988, 'N', 10000)""")
 
-    admin.analyse_indexing(db, osm_id='N10000')
+    admin.analyse_indexing(temp_db_conn, osm_id='N10000')
index 53001c271691ed5d1d39571c25346203db73a4aa..aed5cb7e98c75fe9a9f0b029cb04db9cceed8efe 100644 (file)
@@ -7,11 +7,11 @@ from nominatim.tools import check_database as chkdb
 
 def test_check_database_unknown_db(def_config, monkeypatch):
     monkeypatch.setenv('NOMINATIM_DATABASE_DSN', 'pgsql:dbname=fjgkhughwgh2423gsags')
-    assert 1 == chkdb.check_database(def_config)
+    assert chkdb.check_database(def_config) == 1
 
 
 def test_check_database_fatal_test(def_config, temp_db):
-    assert 1 == chkdb.check_database(def_config)
+    assert chkdb.check_database(def_config) == 1
 
 
 def test_check_conection_good(temp_db_conn, def_config):
@@ -23,8 +23,8 @@ def test_check_conection_bad(def_config):
     assert chkdb.check_connection(badconn, def_config) == chkdb.CheckState.FATAL
 
 
-def test_check_placex_table_good(temp_db_cursor, temp_db_conn, def_config):
-    temp_db_cursor.execute('CREATE TABLE placex (place_id int)')
+def test_check_placex_table_good(table_factory, temp_db_conn, def_config):
+    table_factory('placex')
     assert chkdb.check_placex_table(temp_db_conn, def_config) == chkdb.CheckState.OK
 
 
@@ -32,14 +32,13 @@ def test_check_placex_table_bad(temp_db_conn, def_config):
     assert chkdb.check_placex_table(temp_db_conn, def_config) == chkdb.CheckState.FATAL
 
 
-def test_check_placex_table_size_good(temp_db_cursor, temp_db_conn, def_config):
-    temp_db_cursor.execute('CREATE TABLE placex (place_id int)')
-    temp_db_cursor.execute('INSERT INTO placex VALUES (1), (2)')
+def test_check_placex_table_size_good(table_factory, temp_db_conn, def_config):
+    table_factory('placex', content=((1, ), (2, )))
     assert chkdb.check_placex_size(temp_db_conn, def_config) == chkdb.CheckState.OK
 
 
-def test_check_placex_table_size_bad(temp_db_cursor, temp_db_conn, def_config):
-    temp_db_cursor.execute('CREATE TABLE placex (place_id int)')
+def test_check_placex_table_size_bad(table_factory, temp_db_conn, def_config):
+    table_factory('placex')
     assert chkdb.check_placex_size(temp_db_conn, def_config) == chkdb.CheckState.FATAL
 
 
@@ -50,26 +49,27 @@ def test_check_tokenizer_missing(temp_db_conn, def_config, tmp_path):
 
 @pytest.mark.parametrize("check_result,state", [(None, chkdb.CheckState.OK),
                                                 ("Something wrong", chkdb.CheckState.FAIL)])
-def test_check_tokenizer(tokenizer_mock, temp_db_conn, def_config, monkeypatch,
+def test_check_tokenizer(temp_db_conn, def_config, monkeypatch,
                          check_result, state):
     class _TestTokenizer:
-        def check_database(self):
+        @staticmethod
+        def check_database():
             return check_result
 
     monkeypatch.setattr(chkdb.tokenizer_factory, 'get_tokenizer_for_db',
-                         lambda *a, **k: _TestTokenizer())
+                        lambda *a, **k: _TestTokenizer())
     assert chkdb.check_tokenizer(temp_db_conn, def_config) == state
 
 
-def test_check_indexing_good(temp_db_cursor, temp_db_conn, def_config):
-    temp_db_cursor.execute('CREATE TABLE placex (place_id int, indexed_status smallint)')
-    temp_db_cursor.execute('INSERT INTO placex VALUES (1, 0), (2, 0)')
+def test_check_indexing_good(table_factory, temp_db_conn, def_config):
+    table_factory('placex', 'place_id int, indexed_status smallint',
+                  content=((1, 0), (2, 0)))
     assert chkdb.check_indexing(temp_db_conn, def_config) == chkdb.CheckState.OK
 
 
-def test_check_indexing_bad(temp_db_cursor, temp_db_conn, def_config):
-    temp_db_cursor.execute('CREATE TABLE placex (place_id int, indexed_status smallint)')
-    temp_db_cursor.execute('INSERT INTO placex VALUES (1, 0), (2, 2)')
+def test_check_indexing_bad(table_factory, temp_db_conn, def_config):
+    table_factory('placex', 'place_id int, indexed_status smallint',
+                  content=((1, 0), (2, 2)))
     assert chkdb.check_indexing(temp_db_conn, def_config) == chkdb.CheckState.FAIL
 
 
@@ -82,12 +82,12 @@ def test_check_database_indexes_valid(temp_db_conn, def_config):
 
 
 def test_check_tiger_table_disabled(temp_db_conn, def_config, monkeypatch):
-    monkeypatch.setenv('NOMINATIM_USE_US_TIGER_DATA' , 'no')
+    monkeypatch.setenv('NOMINATIM_USE_US_TIGER_DATA', 'no')
     assert chkdb.check_tiger_table(temp_db_conn, def_config) == chkdb.CheckState.NOT_APPLICABLE
 
 
 def test_check_tiger_table_enabled(temp_db_cursor, temp_db_conn, def_config, monkeypatch):
-    monkeypatch.setenv('NOMINATIM_USE_US_TIGER_DATA' , 'yes')
+    monkeypatch.setenv('NOMINATIM_USE_US_TIGER_DATA', 'yes')
     assert chkdb.check_tiger_table(temp_db_conn, def_config) == chkdb.CheckState.FAIL
 
     temp_db_cursor.execute('CREATE TABLE location_property_tiger (place_id int)')
@@ -95,4 +95,3 @@ def test_check_tiger_table_enabled(temp_db_cursor, temp_db_conn, def_config, mon
 
     temp_db_cursor.execute('INSERT INTO location_property_tiger VALUES (1), (2)')
     assert chkdb.check_tiger_table(temp_db_conn, def_config) == chkdb.CheckState.OK
-
index e370e084b81795cd24ab70a98d263b31446219af..621610cf6b248cf01faac8660d1cd1bebdb308d4 100644 (file)
@@ -1,10 +1,10 @@
 """
 Tests for functions to import a new database.
 """
+from pathlib import Path
+
 import pytest
 import psycopg2
-import sys
-from pathlib import Path
 
 from nominatim.tools import database_import
 from nominatim.errors import UsageError
@@ -34,9 +34,9 @@ def test_setup_skeleton(src_dir, nonexistant_db, no_partitions):
     try:
         with conn.cursor() as cur:
             cur.execute("SELECT distinct partition FROM country_name")
-            partitions = set([r[0] for r in list(cur)])
+            partitions = set((r[0] for r in list(cur)))
             if no_partitions:
-                assert partitions == set([0])
+                assert partitions == set((0, ))
             else:
                 assert len(partitions) > 10
     finally:
@@ -67,10 +67,11 @@ def test_create_db_missing_ro_user(nonexistant_db):
         database_import.create_db('dbname=' + nonexistant_db, rouser='sdfwkjkjgdugu2;jgsafkljas;')
 
 
-def test_setup_extensions(temp_db_conn, temp_db_cursor):
+def test_setup_extensions(temp_db_conn, table_factory):
     database_import.setup_extensions(temp_db_conn)
 
-    temp_db_cursor.execute('CREATE TABLE t (h HSTORE, geom GEOMETRY(Geometry, 4326))')
+    # Use table creation to check that hstore and geometry types are available.
+    table_factory('t', 'h HSTORE, geom GEOMETRY(Geometry, 4326)')
 
 
 def test_setup_extensions_old_postgis(temp_db_conn, monkeypatch):
@@ -80,42 +81,36 @@ def test_setup_extensions_old_postgis(temp_db_conn, monkeypatch):
         database_import.setup_extensions(temp_db_conn)
 
 
-def test_import_base_data(src_dir, temp_db, temp_db_cursor):
-    temp_db_cursor.execute('CREATE EXTENSION hstore')
-    temp_db_cursor.execute('CREATE EXTENSION postgis')
-    database_import.import_base_data('dbname=' + temp_db, src_dir / 'data')
+def test_import_base_data(dsn, src_dir, temp_db_with_extensions, temp_db_cursor):
+    database_import.import_base_data(dsn, src_dir / 'data')
 
-    assert temp_db_cursor.scalar('SELECT count(*) FROM country_name') > 0
+    assert temp_db_cursor.table_rows('country_name') > 0
 
 
-def test_import_base_data_ignore_partitions(src_dir, temp_db, temp_db_cursor):
-    temp_db_cursor.execute('CREATE EXTENSION hstore')
-    temp_db_cursor.execute('CREATE EXTENSION postgis')
-    database_import.import_base_data('dbname=' + temp_db, src_dir / 'data',
-                                     ignore_partitions=True)
+def test_import_base_data_ignore_partitions(dsn, src_dir, temp_db_with_extensions,
+                                            temp_db_cursor):
+    database_import.import_base_data(dsn, src_dir / 'data', ignore_partitions=True)
 
-    assert temp_db_cursor.scalar('SELECT count(*) FROM country_name') > 0
-    assert temp_db_cursor.scalar('SELECT count(*) FROM country_name WHERE partition != 0') == 0
+    assert temp_db_cursor.table_rows('country_name') > 0
+    assert temp_db_cursor.table_rows('country_name', where='partition != 0') == 0
 
 
-def test_import_osm_data_simple(temp_db_cursor,osm2pgsql_options):
-    temp_db_cursor.execute('CREATE TABLE place (id INT)')
-    temp_db_cursor.execute('INSERT INTO place values (1)')
+def test_import_osm_data_simple(table_factory, osm2pgsql_options):
+    table_factory('place', content=((1, ), ))
 
     database_import.import_osm_data('file.pdf', osm2pgsql_options)
 
 
-def test_import_osm_data_simple_no_data(temp_db_cursor,osm2pgsql_options):
-    temp_db_cursor.execute('CREATE TABLE place (id INT)')
+def test_import_osm_data_simple_no_data(table_factory, osm2pgsql_options):
+    table_factory('place')
 
     with pytest.raises(UsageError, match='No data.*'):
         database_import.import_osm_data('file.pdf', osm2pgsql_options)
 
 
-def test_import_osm_data_drop(temp_db_conn, temp_db_cursor, tmp_path, osm2pgsql_options):
-    temp_db_cursor.execute('CREATE TABLE place (id INT)')
-    temp_db_cursor.execute('CREATE TABLE planet_osm_nodes (id INT)')
-    temp_db_cursor.execute('INSERT INTO place values (1)')
+def test_import_osm_data_drop(table_factory, temp_db_conn, tmp_path, osm2pgsql_options):
+    table_factory('place', content=((1, ), ))
+    table_factory('planet_osm_nodes')
 
     flatfile = tmp_path / 'flatfile'
     flatfile.write_text('touch')
@@ -128,9 +123,8 @@ def test_import_osm_data_drop(temp_db_conn, temp_db_cursor, tmp_path, osm2pgsql_
     assert not temp_db_conn.table_exists('planet_osm_nodes')
 
 
-def test_import_osm_data_default_cache(temp_db_cursor,osm2pgsql_options):
-    temp_db_cursor.execute('CREATE TABLE place (id INT)')
-    temp_db_cursor.execute('INSERT INTO place values (1)')
+def test_import_osm_data_default_cache(table_factory, osm2pgsql_options):
+    table_factory('place', content=((1, ), ))
 
     osm2pgsql_options['osm2pgsql_cache'] = 0
 
@@ -153,7 +147,7 @@ def test_truncate_database_tables(temp_db_conn, temp_db_cursor, table_factory):
 
 
 @pytest.mark.parametrize("threads", (1, 5))
-def test_load_data(dsn, src_dir, place_row, placex_table, osmline_table,
+def test_load_data(dsn, place_row, placex_table, osmline_table,
                    word_table, temp_db_cursor, threads):
     for func in ('precompute_words', 'getorcreate_housenumber_id', 'make_standard_name'):
         temp_db_cursor.execute("""CREATE FUNCTION {} (src TEXT)
index 3abe98181232cfbb1a45e98e20e0d57175defb56..25ccf163439dcd77053ed07c65d58e1b8c5090f5 100644 (file)
@@ -3,97 +3,94 @@ Tests for tools.exec_utils module.
 """
 from pathlib import Path
 import subprocess
-import tempfile
 
 import pytest
 
 import nominatim.tools.exec_utils as exec_utils
 
-@pytest.fixture
-def nominatim_env(tmp_phplib_dir, def_config):
-    class _NominatimEnv:
-        config = def_config
-        phplib_dir = tmp_phplib_dir
-        data_dir = Path('data')
-        project_dir = Path('.')
-        sqllib_dir = Path('lib-sql')
-        config_dir = Path('settings')
-        module_dir = 'module'
-        osm2pgsql_path = 'osm2pgsql'
-
-    return _NominatimEnv
-
-@pytest.fixture
-def test_script(nominatim_env):
-    def _create_file(code):
-        with (nominatim_env.phplib_dir / 'admin' / 't.php').open(mode='w') as fd:
-            fd.write('<?php\n')
-            fd.write(code + '\n')
+class TestRunLegacyScript:
 
-        return 't.php'
+    @pytest.fixture(autouse=True)
+    def setup_nominatim_env(self, tmp_path, def_config):
+        tmp_phplib_dir = tmp_path / 'phplib'
+        tmp_phplib_dir.mkdir()
+        (tmp_phplib_dir / 'admin').mkdir()
+
+        class _NominatimEnv:
+            config = def_config
+            phplib_dir = tmp_phplib_dir
+            data_dir = Path('data')
+            project_dir = Path('.')
+            sqllib_dir = Path('lib-sql')
+            config_dir = Path('settings')
+            module_dir = 'module'
+            osm2pgsql_path = 'osm2pgsql'
 
-    return _create_file
+        self.testenv = _NominatimEnv
 
-@pytest.fixture(params=[0, 1, 15, 255])
-def return_code(request):
-    return request.param
 
-### run_legacy_script
+    def mk_script(self, code):
+        codefile = self.testenv.phplib_dir / 'admin' / 't.php'
+        codefile.write_text('<?php\n' + code + '\n')
 
-def test_run_legacy_return_exit_code(nominatim_env, test_script, return_code):
-    fname = test_script('exit({});'.format(return_code))
-    assert return_code == exec_utils.run_legacy_script(fname,
-                                                       nominatim_env=nominatim_env)
+        return 't.php'
 
 
-def test_run_legacy_return_throw_on_fail(nominatim_env, test_script):
-    fname = test_script('exit(11);')
-    with pytest.raises(subprocess.CalledProcessError):
-        exec_utils.run_legacy_script(fname, nominatim_env=nominatim_env,
-                                     throw_on_fail=True)
+    @pytest.mark.parametrize("return_code", (0, 1, 15, 255))
+    def test_run_legacy_return_exit_code(self, return_code):
+        fname = self.mk_script('exit({});'.format(return_code))
+        assert return_code == \
+                 exec_utils.run_legacy_script(fname, nominatim_env=self.testenv)
 
 
-def test_run_legacy_return_dont_throw_on_success(nominatim_env, test_script):
-    fname = test_script('exit(0);')
-    assert 0 == exec_utils.run_legacy_script(fname, nominatim_env=nominatim_env,
-                                             throw_on_fail=True)
+    def test_run_legacy_return_throw_on_fail(self):
+        fname = self.mk_script('exit(11);')
+        with pytest.raises(subprocess.CalledProcessError):
+            exec_utils.run_legacy_script(fname, nominatim_env=self.testenv,
+                                         throw_on_fail=True)
 
-def test_run_legacy_use_given_module_path(nominatim_env, test_script):
-    fname = test_script("exit($_SERVER['NOMINATIM_DATABASE_MODULE_PATH'] == '' ? 0 : 23);")
 
-    assert 0 == exec_utils.run_legacy_script(fname, nominatim_env=nominatim_env)
+    def test_run_legacy_return_dont_throw_on_success(self):
+        fname = self.mk_script('exit(0);')
+        assert exec_utils.run_legacy_script(fname, nominatim_env=self.testenv,
+                                            throw_on_fail=True) == 0
 
+    def test_run_legacy_use_given_module_path(self):
+        fname = self.mk_script("exit($_SERVER['NOMINATIM_DATABASE_MODULE_PATH'] == '' ? 0 : 23);")
 
-def test_run_legacy_do_not_overwrite_module_path(nominatim_env, test_script, monkeypatch):
-    monkeypatch.setenv('NOMINATIM_DATABASE_MODULE_PATH', 'other')
-    fname = test_script("exit($_SERVER['NOMINATIM_DATABASE_MODULE_PATH'] == 'other' ? 0 : 1);")
+        assert exec_utils.run_legacy_script(fname, nominatim_env=self.testenv) == 0
 
-    assert 0 == exec_utils.run_legacy_script(fname, nominatim_env=nominatim_env)
 
-### run_api_script
+    def test_run_legacy_do_not_overwrite_module_path(self, monkeypatch):
+        monkeypatch.setenv('NOMINATIM_DATABASE_MODULE_PATH', 'other')
+        fname = self.mk_script(
+            "exit($_SERVER['NOMINATIM_DATABASE_MODULE_PATH'] == 'other' ? 0 : 1);")
 
-@pytest.fixture
-def tmp_project_dir():
-    with tempfile.TemporaryDirectory() as tempd:
-        project_dir = Path(tempd)
-        webdir = project_dir / 'website'
-        webdir.mkdir()
+        assert exec_utils.run_legacy_script(fname, nominatim_env=self.testenv) == 0
 
-        with (webdir / 'test.php').open(mode='w') as fd:
-            fd.write("<?php\necho 'OK\n';")
 
-        yield project_dir
+class TestRunApiScript:
+
+    @staticmethod
+    @pytest.fixture(autouse=True)
+    def setup_project_dir(tmp_path):
+        webdir = tmp_path / 'website'
+        webdir.mkdir()
+        (webdir / 'test.php').write_text("<?php\necho 'OK\n';")
+
 
-def test_run_api(tmp_project_dir):
-    assert 0 == exec_utils.run_api_script('test', tmp_project_dir)
+    @staticmethod
+    def test_run_api(tmp_path):
+        assert exec_utils.run_api_script('test', tmp_path) == 0
 
-def test_run_api_execution_error(tmp_project_dir):
-    assert 0 != exec_utils.run_api_script('badname', tmp_project_dir)
+    @staticmethod
+    def test_run_api_execution_error(tmp_path):
+        assert exec_utils.run_api_script('badname', tmp_path) != 0
 
-def test_run_api_with_extra_env(tmp_project_dir):
-    extra_env = dict(SCRIPT_FILENAME=str(tmp_project_dir / 'website' / 'test.php'))
-    assert 0 == exec_utils.run_api_script('badname', tmp_project_dir,
-                                          extra_env=extra_env)
+    @staticmethod
+    def test_run_api_with_extra_env(tmp_path):
+        extra_env = dict(SCRIPT_FILENAME=str(tmp_path / 'website' / 'test.php'))
+        assert exec_utils.run_api_script('badname', tmp_path, extra_env=extra_env) == 0
 
 
 ### run_osm2pgsql
index fcdab23a1dce3dd891d54c6edaab9aa62dab3bb2..19a06a9f7a49041c81d603f5ede9c57869515fd6 100644 (file)
@@ -1,8 +1,6 @@
 """
 Tests for freeze functions (removing unused database parts).
 """
-import pytest
-
 from nominatim.tools import freeze
 
 NOMINATIM_RUNTIME_TABLES = [
@@ -22,9 +20,9 @@ NOMINATIM_DROP_TABLES = [
     'wikipedia_article', 'wikipedia_redirect'
 ]
 
-def test_drop_tables(temp_db_conn, temp_db_cursor):
+def test_drop_tables(temp_db_conn, temp_db_cursor, table_factory):
     for table in NOMINATIM_RUNTIME_TABLES + NOMINATIM_DROP_TABLES:
-        temp_db_cursor.execute('CREATE TABLE {} (id int)'.format(table))
+        table_factory(table)
 
     freeze.drop_update_tables(temp_db_conn)
 
index 24b3318da37d86db31535dbb427f8081dcb5f44b..f0a34b0896db1102c1a54d7dae991f4ee02673fe 100644 (file)
 """
     Tests for import special phrases methods
-    of the class SpecialPhrasesImporter.
+    of the class SPImporter.
 """
-from nominatim.errors import UsageError
-from pathlib import Path
-import tempfile
 from shutil import copyfile
 import pytest
-from nominatim.tools import SpecialPhrasesImporter
+from nominatim.tools.special_phrases.sp_importer import SPImporter
+from nominatim.tools.special_phrases.sp_wiki_loader import SPWikiLoader
+from nominatim.tools.special_phrases.special_phrase import SpecialPhrase
+from nominatim.errors import UsageError
+
+from cursor import CursorForTesting
+
+@pytest.fixture
+def testfile_dir(src_dir):
+    return src_dir / 'test' / 'testfiles'
+
+
+@pytest.fixture
+def sp_importer(temp_db_conn, def_config, temp_phplib_dir_with_migration):
+    """
+        Return an instance of SPImporter.
+    """
+    loader = SPWikiLoader(def_config, ['en'])
+    return SPImporter(def_config, temp_phplib_dir_with_migration, temp_db_conn, loader)
+
+
+@pytest.fixture
+def temp_phplib_dir_with_migration(src_dir, tmp_path):
+    """
+        Return temporary phpdir with migration subdirectory and
+        PhraseSettingsToJson.php script inside.
+    """
+    migration_file = (src_dir / 'lib-php' / 'migration' / 'PhraseSettingsToJson.php').resolve()
+
+    phpdir = tmp_path / 'tempphp'
+    phpdir.mkdir()
+
+    (phpdir / 'migration').mkdir()
+    migration_dest_path = (phpdir / 'migration' / 'PhraseSettingsToJson.php').resolve()
+    copyfile(str(migration_file), str(migration_dest_path))
+
+    return phpdir
+
+
+@pytest.fixture
+def xml_wiki_content(src_dir):
+    """
+        Return the content of the static xml test file.
+    """
+    xml_test_content = src_dir / 'test' / 'testdata' / 'special_phrases_test_content.txt'
+    return xml_test_content.read_text()
 
-TEST_BASE_DIR = Path(__file__) / '..' / '..'
 
-def test_fetch_existing_place_classtype_tables(special_phrases_importer, temp_db_cursor):
+@pytest.fixture
+def default_phrases(table_factory):
+    table_factory('place_classtype_testclasstypetable_to_delete')
+    table_factory('place_classtype_testclasstypetable_to_keep')
+
+
+def test_fetch_existing_place_classtype_tables(sp_importer, table_factory):
     """
         Check for the fetch_existing_place_classtype_tables() method.
         It should return the table just created.
     """
-    temp_db_cursor.execute('CREATE TABLE place_classtype_testclasstypetable()')
+    table_factory('place_classtype_testclasstypetable')
 
-    special_phrases_importer._fetch_existing_place_classtype_tables()
-    contained_table = special_phrases_importer.table_phrases_to_delete.pop()
+    sp_importer._fetch_existing_place_classtype_tables()
+    contained_table = sp_importer.table_phrases_to_delete.pop()
     assert contained_table == 'place_classtype_testclasstypetable'
 
-def test_check_sanity_class(special_phrases_importer):
+def test_check_sanity_class(sp_importer):
     """
         Check for _check_sanity() method.
         If a wrong class or type is given, a UsageError should be raised.
         If a good class and type are given, nothing special happens.
     """
-    
-    assert not special_phrases_importer._check_sanity('en', '', 'type')
-    assert not special_phrases_importer._check_sanity('en', 'class', '')
 
-    assert special_phrases_importer._check_sanity('en', 'class', 'type')
+    assert not sp_importer._check_sanity(SpecialPhrase('en', '', 'type', ''))
+    assert not sp_importer._check_sanity(SpecialPhrase('en', 'class', '', ''))
+
+    assert sp_importer._check_sanity(SpecialPhrase('en', 'class', 'type', ''))
 
-def test_load_white_and_black_lists(special_phrases_importer):
+def test_load_white_and_black_lists(sp_importer):
     """
         Test that _load_white_and_black_lists() returns the
         black list and the white list, and that both are of dict type.
     """
-    black_list, white_list = special_phrases_importer._load_white_and_black_lists()
+    black_list, white_list = sp_importer._load_white_and_black_lists()
 
     assert isinstance(black_list, dict) and isinstance(white_list, dict)
 
-def test_convert_php_settings(special_phrases_importer):
+def test_convert_php_settings(sp_importer, testfile_dir, tmp_path):
     """
         Test that _convert_php_settings_if_needed() convert the given
         php file to a json file.
     """
-    php_file = (TEST_BASE_DIR / 'testfiles' / 'phrase_settings.php').resolve()
+    php_file = (testfile_dir / 'phrase_settings.php').resolve()
 
-    with tempfile.TemporaryDirectory() as temp_dir:
-        temp_settings = (Path(temp_dir) / 'phrase_settings.php').resolve()
-        copyfile(php_file, temp_settings)
-        special_phrases_importer._convert_php_settings_if_needed(temp_settings)
+    temp_settings = (tmp_path / 'phrase_settings.php').resolve()
+    copyfile(php_file, temp_settings)
+    sp_importer._convert_php_settings_if_needed(temp_settings)
 
-        assert (Path(temp_dir) / 'phrase_settings.json').is_file()
+    assert (tmp_path / 'phrase_settings.json').is_file()
 
-def test_convert_settings_wrong_file(special_phrases_importer):
+def test_convert_settings_wrong_file(sp_importer):
     """
         Test that _convert_php_settings_if_needed() raise an exception
         if the given file is not a valid file.
     """
     with pytest.raises(UsageError, match='random_file is not a valid file.'):
-        special_phrases_importer._convert_php_settings_if_needed('random_file')
+        sp_importer._convert_php_settings_if_needed('random_file')
 
-def test_convert_settings_json_already_exist(special_phrases_importer):
+def test_convert_settings_json_already_exist(sp_importer, testfile_dir):
     """
         Test that if we give to '_convert_php_settings_if_needed' a php file path
         and that the corresponding json file already exists, it is returned.
     """
-    php_file = (TEST_BASE_DIR / 'testfiles' / 'phrase_settings.php').resolve()
-    json_file = (TEST_BASE_DIR / 'testfiles' / 'phrase_settings.json').resolve()
+    php_file = (testfile_dir / 'phrase_settings.php').resolve()
+    json_file = (testfile_dir / 'phrase_settings.json').resolve()
 
-    returned = special_phrases_importer._convert_php_settings_if_needed(php_file)
+    returned = sp_importer._convert_php_settings_if_needed(php_file)
 
     assert returned == json_file
 
-def test_convert_settings_giving_json(special_phrases_importer):
+def test_convert_settings_giving_json(sp_importer, testfile_dir):
     """
         Test that if we give to '_convert_php_settings_if_needed' a json file path
         the same path is directly returned
     """
-    json_file = (TEST_BASE_DIR / 'testfiles' / 'phrase_settings.json').resolve()
+    json_file = (testfile_dir / 'phrase_settings.json').resolve()
 
-    returned = special_phrases_importer._convert_php_settings_if_needed(json_file)
+    returned = sp_importer._convert_php_settings_if_needed(json_file)
 
     assert returned == json_file
 
-def test_create_place_classtype_indexes(temp_db_conn, special_phrases_importer):
+def test_create_place_classtype_indexes(temp_db_with_extensions, temp_db_conn,
+                                        table_factory, sp_importer):
     """
         Test that _create_place_classtype_indexes() create the
         place_id index and centroid index on the right place_class_type table.
@@ -97,44 +144,41 @@ def test_create_place_classtype_indexes(temp_db_conn, special_phrases_importer):
     phrase_type = 'type'
     table_name = 'place_classtype_{}_{}'.format(phrase_class, phrase_type)
 
-    with temp_db_conn.cursor() as temp_db_cursor:
-        temp_db_cursor.execute("CREATE EXTENSION postgis;")
-        temp_db_cursor.execute('CREATE TABLE {}(place_id BIGINT, centroid GEOMETRY)'.format(table_name))
+    table_factory(table_name, 'place_id BIGINT, centroid GEOMETRY')
 
-    special_phrases_importer._create_place_classtype_indexes('', phrase_class, phrase_type)
+    sp_importer._create_place_classtype_indexes('', phrase_class, phrase_type)
 
     assert check_placeid_and_centroid_indexes(temp_db_conn, phrase_class, phrase_type)
 
-def test_create_place_classtype_table(temp_db_conn, placex_table, special_phrases_importer):
+def test_create_place_classtype_table(temp_db_conn, placex_table, sp_importer):
     """
         Test that _create_place_classtype_table() create
         the right place_classtype table.
     """
     phrase_class = 'class'
     phrase_type = 'type'
-    special_phrases_importer._create_place_classtype_table('', phrase_class, phrase_type)
+    sp_importer._create_place_classtype_table('', phrase_class, phrase_type)
 
     assert check_table_exist(temp_db_conn, phrase_class, phrase_type)
 
-def test_grant_access_to_web_user(temp_db_conn, def_config, special_phrases_importer):
+def test_grant_access_to_web_user(temp_db_conn, table_factory, def_config, sp_importer):
     """
-        Test that _grant_access_to_webuser() give 
+        Test that _grant_access_to_webuser() give
         right access to the web user.
     """
     phrase_class = 'class'
     phrase_type = 'type'
     table_name = 'place_classtype_{}_{}'.format(phrase_class, phrase_type)
 
-    with temp_db_conn.cursor() as temp_db_cursor:
-        temp_db_cursor.execute('CREATE TABLE {}()'.format(table_name))
+    table_factory(table_name)
 
-    special_phrases_importer._grant_access_to_webuser(phrase_class, phrase_type)
+    sp_importer._grant_access_to_webuser(phrase_class, phrase_type)
 
     assert check_grant_access(temp_db_conn, def_config.DATABASE_WEBUSER, phrase_class, phrase_type)
 
 def test_create_place_classtype_table_and_indexes(
         temp_db_conn, def_config, placex_table,
-        special_phrases_importer):
+        sp_importer):
     """
         Test that _create_place_classtype_table_and_indexes()
         create the right place_classtype tables and place_id indexes
@@ -143,29 +187,15 @@ def test_create_place_classtype_table_and_indexes(
     """
     pairs = set([('class1', 'type1'), ('class2', 'type2')])
 
-    special_phrases_importer._create_place_classtype_table_and_indexes(pairs)
+    sp_importer._create_place_classtype_table_and_indexes(pairs)
 
     for pair in pairs:
         assert check_table_exist(temp_db_conn, pair[0], pair[1])
         assert check_placeid_and_centroid_indexes(temp_db_conn, pair[0], pair[1])
         assert check_grant_access(temp_db_conn, def_config.DATABASE_WEBUSER, pair[0], pair[1])
 
-def test_process_xml_content(temp_db_conn, def_config, special_phrases_importer):
-    """
-        Test that _process_xml_content() process the given xml content right
-        by executing the right SQL functions for amenities and 
-        by returning the right set of pairs.
-    """
-    class_test = 'aerialway'
-    type_test = 'zip_line'
-
-    #Converted output set to a dict for easy assert further.
-    results = dict(special_phrases_importer._process_xml_content(get_test_xml_wiki_content(), 'en'))
-
-    assert results[class_test] and type_test in results.values()
-
-def test_remove_non_existent_tables_from_db(special_phrases_importer, default_phrases,
-                                             temp_db_conn):
+def test_remove_non_existent_tables_from_db(sp_importer, default_phrases,
+                                            temp_db_conn):
     """
         Check for the remove_non_existent_phrases_from_db() method.
 
@@ -176,45 +206,46 @@ def test_remove_non_existent_tables_from_db(special_phrases_importer, default_ph
         place_classtype tables contained in table_phrases_to_delete should
         be deleted.
     """
-    with temp_db_conn.cursor() as temp_db_cursor:
-        special_phrases_importer.table_phrases_to_delete = {
-            'place_classtype_testclasstypetable_to_delete'
-        }
+    sp_importer.table_phrases_to_delete = {
+        'place_classtype_testclasstypetable_to_delete'
+    }
+
+    query_tables = """
+        SELECT table_name
+        FROM information_schema.tables
+        WHERE table_schema='public'
+        AND table_name like 'place_classtype_%';
+    """
 
-        query_tables = """
-            SELECT table_name
-            FROM information_schema.tables
-            WHERE table_schema='public'
-            AND table_name like 'place_classtype_%';
-        """
+    sp_importer._remove_non_existent_tables_from_db()
 
-        special_phrases_importer._remove_non_existent_tables_from_db()
+    # Changes are not committed yet. Use temp_db_conn for checking results.
+    with temp_db_conn.cursor(cursor_factory=CursorForTesting) as cur:
+        assert cur.row_set(query_tables) \
+                 == {('place_classtype_testclasstypetable_to_keep', )}
 
-        temp_db_cursor.execute(query_tables)
-        tables_result = temp_db_cursor.fetchall()
-        assert (len(tables_result) == 1 and
-            tables_result[0][0] == 'place_classtype_testclasstypetable_to_keep'
-        )
 
-def test_import_from_wiki(monkeypatch, temp_db_conn, def_config, special_phrases_importer,
-                          placex_table, tokenizer_mock):
+@pytest.mark.parametrize("should_replace", [(True), (False)])
+def test_import_phrases(monkeypatch, temp_db_conn, def_config, sp_importer,
+                        placex_table, table_factory, tokenizer_mock,
+                        xml_wiki_content, should_replace):
     """
-        Check that the main import_from_wiki() method is well executed.
+        Check that the main import_phrases() method is well executed.
         It should create the place_classtype table, the place_id and centroid indexes,
         grant access to the web user and execute the SQL functions for amenities.
-        It should also update the database well by deleting or preserving existing entries 
+        It should also update the database well by deleting or preserving existing entries
         of the database.
     """
     #Add some data to the database before execution in order to test
     #what is deleted and what is preserved.
-    with temp_db_conn.cursor() as temp_db_cursor:
-        temp_db_cursor.execute("""
-            CREATE TABLE place_classtype_amenity_animal_shelter();
-            CREATE TABLE place_classtype_wrongclass_wrongtype();""")
+    table_factory('place_classtype_amenity_animal_shelter')
+    table_factory('place_classtype_wrongclass_wrongtype')
+
+    monkeypatch.setattr('nominatim.tools.special_phrases.sp_wiki_loader.SPWikiLoader._get_wiki_content',
+                        lambda self, lang: xml_wiki_content)
 
-    monkeypatch.setattr('nominatim.tools.SpecialPhrasesImporter._get_wiki_content', mock_get_wiki_content)
     tokenizer = tokenizer_mock()
-    special_phrases_importer.import_from_wiki(tokenizer, ['en'])
+    sp_importer.import_phrases(tokenizer, should_replace)
 
     assert len(tokenizer.analyser_cache['special_phrases']) == 18
 
@@ -225,66 +256,20 @@ def test_import_from_wiki(monkeypatch, temp_db_conn, def_config, special_phrases
     assert check_placeid_and_centroid_indexes(temp_db_conn, class_test, type_test)
     assert check_grant_access(temp_db_conn, def_config.DATABASE_WEBUSER, class_test, type_test)
     assert check_table_exist(temp_db_conn, 'amenity', 'animal_shelter')
-    assert not check_table_exist(temp_db_conn, 'wrong_class', 'wrong_type')
-
-    #Format (query, should_return_something_bool) use to easily execute all asserts
-    queries_tests = set()
-
-    #Used to check that correct place_classtype table already in the datase before is still there.
-    query_existing_table = """
-        SELECT table_name
-        FROM information_schema.tables
-        WHERE table_schema='public'
-        AND table_name = 'place_classtype_amenity_animal_shelter';
-    """
-    queries_tests.add((query_existing_table, True))
-
-    #Used to check that wrong place_classtype table was deleted from the database.
-    query_wrong_table = """
-        SELECT table_name
-        FROM information_schema.tables
-        WHERE table_schema='public'
-        AND table_name = 'place_classtype_wrongclass_wrongtype';
-    """
-    queries_tests.add((query_wrong_table, False))
-
-    with temp_db_conn.cursor() as temp_db_cursor:
-        for query in queries_tests:
-            temp_db_cursor.execute(query[0])
-            if (query[1] == True):
-                assert temp_db_cursor.fetchone()
-            else:
-                assert not temp_db_cursor.fetchone()
-
-def mock_get_wiki_content(lang):
-    """
-        Mock the _get_wiki_content() method to return
-        static xml test file content.
-    """
-    return get_test_xml_wiki_content()
+    if should_replace:
+        assert not check_table_exist(temp_db_conn, 'wrong_class', 'wrong_type')
 
-def get_test_xml_wiki_content():
-    """
-        return the content of the static xml test file.
-    """
-    xml_test_content_path = (TEST_BASE_DIR / 'testdata' / 'special_phrases_test_content.txt').resolve()
-    with open(xml_test_content_path) as xml_content_reader:
-        return xml_content_reader.read()
+    assert temp_db_conn.table_exists('place_classtype_amenity_animal_shelter')
+    if should_replace:
+        assert not temp_db_conn.table_exists('place_classtype_wrongclass_wrongtype')
 
 def check_table_exist(temp_db_conn, phrase_class, phrase_type):
     """
         Verify that the place_classtype table exists for the given
         phrase_class and phrase_type.
     """
-    table_name = 'place_classtype_{}_{}'.format(phrase_class, phrase_type)
+    return temp_db_conn.table_exists('place_classtype_{}_{}'.format(phrase_class, phrase_type))
 
-    with temp_db_conn.cursor() as temp_db_cursor:
-        temp_db_cursor.execute("""
-            SELECT *
-            FROM information_schema.tables
-            WHERE table_type='BASE TABLE'
-            AND table_name='{}'""".format(table_name))
-        return temp_db_cursor.fetchone()
 
 def check_grant_access(temp_db_conn, user, phrase_class, phrase_type):
     """
@@ -313,31 +298,3 @@ def check_placeid_and_centroid_indexes(temp_db_conn, phrase_class, phrase_type):
         and
         temp_db_conn.index_exists(index_prefix + 'place_id')
     )
-
-@pytest.fixture
-def special_phrases_importer(temp_db_conn, def_config, temp_phplib_dir_with_migration):
-    """
-        Return an instance of SpecialPhrasesImporter.
-    """
-    return SpecialPhrasesImporter(def_config, temp_phplib_dir_with_migration, temp_db_conn)
-
-@pytest.fixture
-def temp_phplib_dir_with_migration():
-    """
-        Return temporary phpdir with migration subdirectory and
-        PhraseSettingsToJson.php script inside.
-    """
-    migration_file = (TEST_BASE_DIR / '..' / 'lib-php' / 'migration'
-                      / 'PhraseSettingsToJson.php').resolve()
-    with tempfile.TemporaryDirectory() as phpdir:
-        (Path(phpdir) / 'migration').mkdir()
-        migration_dest_path = (Path(phpdir) / 'migration' / 'PhraseSettingsToJson.php').resolve()
-        copyfile(migration_file, migration_dest_path)
-
-        yield Path(phpdir)
-
-@pytest.fixture
-def default_phrases(temp_db_cursor):
-    temp_db_cursor.execute("""
-        CREATE TABLE place_classtype_testclasstypetable_to_delete();
-        CREATE TABLE place_classtype_testclasstypetable_to_keep();""")
index adbc0e74b3608096a340fab369b69d749281743b..8f54ad41369af5df80a601d0479c9659d0329da2 100644 (file)
@@ -58,13 +58,13 @@ def postcode_table(temp_db_conn, placex_table, word_table):
     return MockPostcodeTable(temp_db_conn)
 
 
-def test_import_postcodes_empty(dsn, postcode_table, tmp_path, tokenizer):
+def test_postcodes_empty(dsn, postcode_table, tmp_path, tokenizer):
     postcodes.update_postcodes(dsn, tmp_path, tokenizer)
 
     assert not postcode_table.row_set
 
 
-def test_import_postcodes_add_new(dsn, placex_table, postcode_table, tmp_path, tokenizer):
+def test_postcodes_add_new(dsn, placex_table, postcode_table, tmp_path, tokenizer):
     placex_table.add(country='xx', geom='POINT(10 12)',
                      address=dict(postcode='9486'))
     postcode_table.add('yy', '9486', 99, 34)
@@ -74,7 +74,8 @@ def test_import_postcodes_add_new(dsn, placex_table, postcode_table, tmp_path, t
     assert postcode_table.row_set == {('xx', '9486', 10, 12), }
 
 
-def test_import_postcodes_replace_coordinates(dsn, placex_table, postcode_table, tmp_path, tokenizer):
+def test_postcodes_replace_coordinates(dsn, placex_table, postcode_table,
+                                       tmp_path, tokenizer):
     placex_table.add(country='xx', geom='POINT(10 12)',
                      address=dict(postcode='AB 4511'))
     postcode_table.add('xx', 'AB 4511', 99, 34)
@@ -84,7 +85,8 @@ def test_import_postcodes_replace_coordinates(dsn, placex_table, postcode_table,
     assert postcode_table.row_set == {('xx', 'AB 4511', 10, 12)}
 
 
-def test_import_postcodes_replace_coordinates_close(dsn, placex_table, postcode_table, tmp_path, tokenizer):
+def test_postcodes_replace_coordinates_close(dsn, placex_table, postcode_table,
+                                             tmp_path, tokenizer):
     placex_table.add(country='xx', geom='POINT(10 12)',
                      address=dict(postcode='AB 4511'))
     postcode_table.add('xx', 'AB 4511', 10, 11.99999)
@@ -94,7 +96,7 @@ def test_import_postcodes_replace_coordinates_close(dsn, placex_table, postcode_
     assert postcode_table.row_set == {('xx', 'AB 4511', 10, 11.99999)}
 
 
-def test_import_postcodes_remove(dsn, placex_table, postcode_table, tmp_path, tokenizer):
+def test_postcodes_remove(dsn, placex_table, postcode_table, tmp_path, tokenizer):
     placex_table.add(country='xx', geom='POINT(10 12)',
                      address=dict(postcode='AB 4511'))
     postcode_table.add('xx', 'badname', 10, 12)
@@ -104,7 +106,7 @@ def test_import_postcodes_remove(dsn, placex_table, postcode_table, tmp_path, to
     assert postcode_table.row_set == {('xx', 'AB 4511', 10, 12)}
 
 
-def test_import_postcodes_ignore_empty_country(dsn, placex_table, postcode_table, tmp_path, tokenizer):
+def test_postcodes_ignore_empty_country(dsn, placex_table, postcode_table, tmp_path, tokenizer):
     placex_table.add(country=None, geom='POINT(10 12)',
                      address=dict(postcode='AB 4511'))
 
@@ -113,7 +115,7 @@ def test_import_postcodes_ignore_empty_country(dsn, placex_table, postcode_table
     assert not postcode_table.row_set
 
 
-def test_import_postcodes_remove_all(dsn, placex_table, postcode_table, tmp_path, tokenizer):
+def test_postcodes_remove_all(dsn, postcode_table, tmp_path, tokenizer):
     postcode_table.add('ch', '5613', 10, 12)
 
     postcodes.update_postcodes(dsn, tmp_path, tokenizer)
@@ -121,7 +123,7 @@ def test_import_postcodes_remove_all(dsn, placex_table, postcode_table, tmp_path
     assert not postcode_table.row_set
 
 
-def test_import_postcodes_multi_country(dsn, placex_table, postcode_table, tmp_path, tokenizer):
+def test_postcodes_multi_country(dsn, placex_table, postcode_table, tmp_path, tokenizer):
     placex_table.add(country='de', geom='POINT(10 12)',
                      address=dict(postcode='54451'))
     placex_table.add(country='cc', geom='POINT(100 56)',
@@ -140,8 +142,8 @@ def test_import_postcodes_multi_country(dsn, placex_table, postcode_table, tmp_p
 
 
 @pytest.mark.parametrize("gzipped", [True, False])
-def test_import_postcodes_extern(dsn, placex_table, postcode_table, tmp_path,
-                                 tokenizer, gzipped):
+def test_postcodes_extern(dsn, placex_table, postcode_table, tmp_path,
+                          tokenizer, gzipped):
     placex_table.add(country='xx', geom='POINT(10 12)',
                      address=dict(postcode='AB 4511'))
 
@@ -158,8 +160,8 @@ def test_import_postcodes_extern(dsn, placex_table, postcode_table, tmp_path,
                                       ('xx', 'CD 4511', -10, -5)}
 
 
-def test_import_postcodes_extern_bad_column(dsn, placex_table, postcode_table,
-                                            tmp_path, tokenizer):
+def test_postcodes_extern_bad_column(dsn, placex_table, postcode_table,
+                                     tmp_path, tokenizer):
     placex_table.add(country='xx', geom='POINT(10 12)',
                      address=dict(postcode='AB 4511'))
 
@@ -171,8 +173,8 @@ def test_import_postcodes_extern_bad_column(dsn, placex_table, postcode_table,
     assert postcode_table.row_set == {('xx', 'AB 4511', 10, 12)}
 
 
-def test_import_postcodes_extern_bad_number(dsn, placex_table, postcode_table,
-                                            tmp_path, tokenizer):
+def test_postcodes_extern_bad_number(dsn, placex_table, postcode_table,
+                                     tmp_path, tokenizer):
     placex_table.add(country='xx', geom='POINT(10 12)',
                      address=dict(postcode='AB 4511'))
 
index d6c46ad7013d4c3f4ff5ee7771be8e7383d77235..ac79a48fb6f8361adacbbf0e26da433942fa36d3 100644 (file)
@@ -7,20 +7,18 @@ import pytest
 
 from nominatim.tools import refresh
 
-TEST_DIR = (Path(__file__) / '..' / '..').resolve()
-
 def test_refresh_import_wikipedia_not_existing(dsn):
-    assert 1 == refresh.import_wikipedia_articles(dsn, Path('.'))
+    assert refresh.import_wikipedia_articles(dsn, Path('.')) == 1
 
 
 @pytest.mark.parametrize("replace", (True, False))
-def test_refresh_import_wikipedia(dsn, table_factory, temp_db_cursor, replace):
+def test_refresh_import_wikipedia(dsn, src_dir, table_factory, temp_db_cursor, replace):
     if replace:
         table_factory('wikipedia_article')
         table_factory('wikipedia_redirect')
 
     # use the small wikipedia file for the API testdb
-    assert 0 == refresh.import_wikipedia_articles(dsn, TEST_DIR / 'testdb')
+    assert refresh.import_wikipedia_articles(dsn, src_dir / 'test' / 'testdb') == 0
 
-    assert temp_db_cursor.scalar('SELECT count(*) FROM wikipedia_article') > 0
-    assert temp_db_cursor.scalar('SELECT count(*) FROM wikipedia_redirect') > 0
+    assert temp_db_cursor.table_rows('wikipedia_article') > 0
+    assert temp_db_cursor.table_rows('wikipedia_redirect') > 0
index 2bd917209ed517c3438d4417c7a29ebbb22dd9f1..2821222c5712183d962f8feb3cc72fffd558b055 100644 (file)
@@ -11,7 +11,7 @@ from nominatim.tools.refresh import load_address_levels, load_address_levels_fro
 def test_load_ranks_def_config(temp_db_conn, temp_db_cursor, def_config):
     load_address_levels_from_file(temp_db_conn, Path(def_config.ADDRESS_LEVEL_CONFIG))
 
-    assert temp_db_cursor.scalar('SELECT count(*) FROM address_levels') > 0
+    assert temp_db_cursor.table_rows('address_levels') > 0
 
 def test_load_ranks_from_file(temp_db_conn, temp_db_cursor, tmp_path):
     test_file = tmp_path / 'test_levels.json'
@@ -19,7 +19,7 @@ def test_load_ranks_from_file(temp_db_conn, temp_db_cursor, tmp_path):
 
     load_address_levels_from_file(temp_db_conn, test_file)
 
-    assert temp_db_cursor.scalar('SELECT count(*) FROM address_levels') > 0
+    assert temp_db_cursor.table_rows('address_levels') > 0
 
 
 def test_load_ranks_from_broken_file(temp_db_conn, tmp_path):
@@ -35,7 +35,7 @@ def test_load_ranks_country(temp_db_conn, temp_db_cursor):
                         [{"tags": {"place": {"village": 14}}},
                          {"countries": ['de'],
                           "tags": {"place": {"village": 15}}},
-                         {"countries": ['uk', 'us' ],
+                         {"countries": ['uk', 'us'],
                           "tags": {"place": {"village": 16}}}
                         ])
 
@@ -62,9 +62,8 @@ def test_load_ranks_default_value(temp_db_conn, temp_db_cursor):
 
 def test_load_ranks_multiple_keys(temp_db_conn, temp_db_cursor):
     load_address_levels(temp_db_conn, 'levels',
-                        [{"tags":
-                            {"place": {"city": 14},
-                             "boundary": {"administrative2" : 4}}
+                        [{"tags": {"place": {"city": 14},
+                                   "boundary": {"administrative2" : 4}}
                          }])
 
     assert temp_db_cursor.row_set('SELECT * FROM levels') == \
@@ -75,9 +74,8 @@ def test_load_ranks_multiple_keys(temp_db_conn, temp_db_cursor):
 
 def test_load_ranks_address(temp_db_conn, temp_db_cursor):
     load_address_levels(temp_db_conn, 'levels',
-                        [{"tags":
-                            {"place": {"city": 14,
-                                       "town" : [14, 13]}}
+                        [{"tags": {"place": {"city": 14,
+                                             "town" : [14, 13]}}
                          }])
 
     assert temp_db_cursor.row_set('SELECT * FROM levels') == \
index 3f9bccbdd4162f81165dcdddd64f88879b179ad7..00b863ab1e621289c3a38be1dbd4d65a97496e6c 100644 (file)
@@ -31,7 +31,8 @@ def test_create_functions(temp_db_cursor, conn, def_config, sql_tmp_path):
 
 
 @pytest.mark.parametrize("dbg,ret", ((True, 43), (False, 22)))
-def test_create_functions_with_template(temp_db_cursor, conn, def_config, sql_tmp_path, dbg, ret):
+def test_create_functions_with_template(temp_db_cursor, conn, def_config, sql_tmp_path,
+                                        dbg, ret):
     sqlfile = sql_tmp_path / 'functions.sql'
     sqlfile.write_text("""CREATE OR REPLACE FUNCTION test() RETURNS INTEGER
                           AS $$
index 9b60c0e5ae1221d3de612925c64e8b5111186bba..8946bd1feae1790d3e29242e74ef28d07f3f616e 100644 (file)
@@ -1,7 +1,6 @@
 """
 Tests for setting up the website scripts.
 """
-from pathlib import Path
 import subprocess
 
 import pytest
@@ -9,67 +8,65 @@ import pytest
 from nominatim.tools import refresh
 
 @pytest.fixture
-def envdir(tmpdir):
-    (tmpdir / 'php').mkdir()
-    (tmpdir / 'php' / 'website').mkdir()
-    return tmpdir
+def test_script(tmp_path):
+    (tmp_path / 'php').mkdir()
 
+    website_dir = (tmp_path / 'php' / 'website')
+    website_dir.mkdir()
 
-@pytest.fixture
-def test_script(envdir):
     def _create_file(code):
-        outfile = envdir / 'php' / 'website' / 'reverse-only-search.php'
+        outfile = website_dir / 'reverse-only-search.php'
         outfile.write_text('<?php\n{}\n'.format(code), 'utf-8')
 
     return _create_file
 
 
-def run_website_script(envdir, config, conn):
-    config.lib_dir.php = envdir / 'php'
-    config.project_dir = envdir
-    refresh.setup_website(envdir, config, conn)
+@pytest.fixture
+def run_website_script(tmp_path, def_config, temp_db_conn):
+    def_config.lib_dir.php = tmp_path / 'php'
+    def_config.project_dir = tmp_path
+
+    def _runner():
+        refresh.setup_website(tmp_path, def_config, temp_db_conn)
+
+        proc = subprocess.run(['/usr/bin/env', 'php', '-Cq',
+                               tmp_path / 'search.php'], check=False)
 
-    proc = subprocess.run(['/usr/bin/env', 'php', '-Cq',
-                           envdir / 'search.php'], check=False)
+        return proc.returncode
 
-    return proc.returncode
+    return _runner
 
 
 @pytest.mark.parametrize("setting,retval", (('yes', 10), ('no', 20)))
-def test_setup_website_check_bool(def_config, monkeypatch, envdir, test_script,
-                                  setting, retval, temp_db_conn):
+def test_setup_website_check_bool(monkeypatch, test_script, run_website_script,
+                                  setting, retval):
     monkeypatch.setenv('NOMINATIM_CORS_NOACCESSCONTROL', setting)
 
     test_script('exit(CONST_NoAccessControl ? 10 : 20);')
 
-    assert run_website_script(envdir, def_config, temp_db_conn) == retval
+    assert run_website_script() == retval
 
 
 @pytest.mark.parametrize("setting", (0, 10, 99067))
-def test_setup_website_check_int(def_config, monkeypatch, envdir, test_script, setting,
-                                 temp_db_conn):
+def test_setup_website_check_int(monkeypatch, test_script, run_website_script, setting):
     monkeypatch.setenv('NOMINATIM_LOOKUP_MAX_COUNT', str(setting))
 
     test_script('exit(CONST_Places_Max_ID_count == {} ? 10 : 20);'.format(setting))
 
-    assert run_website_script(envdir, def_config, temp_db_conn) == 10
+    assert run_website_script() == 10
 
 
-def test_setup_website_check_empty_str(def_config, monkeypatch, envdir, test_script,
-                                       temp_db_conn):
+def test_setup_website_check_empty_str(monkeypatch, test_script, run_website_script):
     monkeypatch.setenv('NOMINATIM_DEFAULT_LANGUAGE', '')
 
     test_script('exit(CONST_Default_Language === false ? 10 : 20);')
 
-    assert run_website_script(envdir, def_config, temp_db_conn) == 10
+    assert run_website_script() == 10
 
 
-def test_setup_website_check_str(def_config, monkeypatch, envdir, test_script,
-                                 temp_db_conn):
+def test_setup_website_check_str(monkeypatch, test_script, run_website_script):
     monkeypatch.setenv('NOMINATIM_DEFAULT_LANGUAGE', 'ffde 2')
 
     test_script('exit(CONST_Default_Language === "ffde 2" ? 10 : 20);')
 
-    assert run_website_script(envdir, def_config, temp_db_conn) == 10
-
-
+    assert run_website_script() == 10
index affe13174a6256dbb8c960758c636b9bfd7d397e..2bf2901b33a8b8794d05c3e3e637fd0b85147113 100644 (file)
@@ -18,48 +18,51 @@ OSM_NODE_DATA = """\
 </osm>
 """
 
+@pytest.fixture(autouse=True)
+def setup_status_table(status_table):
+    pass
+
 ### init replication
 
-def test_init_replication_bad_base_url(monkeypatch, status_table, place_row, temp_db_conn, temp_db_cursor):
+def test_init_replication_bad_base_url(monkeypatch, place_row, temp_db_conn):
     place_row(osm_type='N', osm_id=100)
 
-    monkeypatch.setattr(nominatim.db.status, "get_url", lambda u : OSM_NODE_DATA)
+    monkeypatch.setattr(nominatim.db.status, "get_url", lambda u: OSM_NODE_DATA)
 
     with pytest.raises(UsageError, match="Failed to reach replication service"):
         nominatim.tools.replication.init_replication(temp_db_conn, 'https://test.io')
 
 
-def test_init_replication_success(monkeypatch, status_table, place_row, temp_db_conn, temp_db_cursor):
+def test_init_replication_success(monkeypatch, place_row, temp_db_conn, temp_db_cursor):
     place_row(osm_type='N', osm_id=100)
 
-    monkeypatch.setattr(nominatim.db.status, "get_url", lambda u : OSM_NODE_DATA)
+    monkeypatch.setattr(nominatim.db.status, "get_url", lambda u: OSM_NODE_DATA)
     monkeypatch.setattr(nominatim.tools.replication.ReplicationServer,
                         "timestamp_to_sequence",
                         lambda self, date: 234)
 
     nominatim.tools.replication.init_replication(temp_db_conn, 'https://test.io')
 
-    temp_db_cursor.execute("SELECT * FROM import_status")
-
     expected_date = dt.datetime.strptime('2006-01-27T19:09:10', status.ISODATE_FORMAT)\
                         .replace(tzinfo=dt.timezone.utc)
-    assert temp_db_cursor.rowcount == 1
-    assert temp_db_cursor.fetchone() == [expected_date, 234, True]
+
+    assert temp_db_cursor.row_set("SELECT * FROM import_status") \
+             == {(expected_date, 234, True)}
 
 
 ### checking for updates
 
-def test_check_for_updates_empty_status_table(status_table, temp_db_conn):
+def test_check_for_updates_empty_status_table(temp_db_conn):
     assert nominatim.tools.replication.check_for_updates(temp_db_conn, 'https://test.io') == 254
 
 
-def test_check_for_updates_seq_not_set(status_table, temp_db_conn):
+def test_check_for_updates_seq_not_set(temp_db_conn):
     status.set_status(temp_db_conn, dt.datetime.now(dt.timezone.utc))
 
     assert nominatim.tools.replication.check_for_updates(temp_db_conn, 'https://test.io') == 254
 
 
-def test_check_for_updates_no_state(monkeypatch, status_table, temp_db_conn):
+def test_check_for_updates_no_state(monkeypatch, temp_db_conn):
     status.set_status(temp_db_conn, dt.datetime.now(dt.timezone.utc), seq=345)
 
     monkeypatch.setattr(nominatim.tools.replication.ReplicationServer,
@@ -69,7 +72,7 @@ def test_check_for_updates_no_state(monkeypatch, status_table, temp_db_conn):
 
 
 @pytest.mark.parametrize("server_sequence,result", [(344, 2), (345, 2), (346, 0)])
-def test_check_for_updates_no_new_data(monkeypatch, status_table, temp_db_conn,
+def test_check_for_updates_no_new_data(monkeypatch, temp_db_conn,
                                        server_sequence, result):
     date = dt.datetime.now(dt.timezone.utc)
     status.set_status(temp_db_conn, date, seq=345)
@@ -86,24 +89,24 @@ def test_check_for_updates_no_new_data(monkeypatch, status_table, temp_db_conn,
 @pytest.fixture
 def update_options(tmpdir):
     return dict(base_url='https://test.io',
-                   indexed_only=False,
-                   update_interval=3600,
-                   import_file=tmpdir / 'foo.osm',
-                   max_diff_size=1)
+                indexed_only=False,
+                update_interval=3600,
+                import_file=tmpdir / 'foo.osm',
+                max_diff_size=1)
 
-def test_update_empty_status_table(status_table, temp_db_conn):
+def test_update_empty_status_table(temp_db_conn):
     with pytest.raises(UsageError):
         nominatim.tools.replication.update(temp_db_conn, {})
 
 
-def test_update_already_indexed(status_table, temp_db_conn):
+def test_update_already_indexed(temp_db_conn):
     status.set_status(temp_db_conn, dt.datetime.now(dt.timezone.utc), seq=34, indexed=False)
 
     assert nominatim.tools.replication.update(temp_db_conn, dict(indexed_only=True)) \
              == nominatim.tools.replication.UpdateState.MORE_PENDING
 
 
-def test_update_no_data_no_sleep(monkeypatch, status_table, temp_db_conn, update_options):
+def test_update_no_data_no_sleep(monkeypatch, temp_db_conn, update_options):
     date = dt.datetime.now(dt.timezone.utc) - dt.timedelta(days=1)
     status.set_status(temp_db_conn, date, seq=34)
 
@@ -112,7 +115,7 @@ def test_update_no_data_no_sleep(monkeypatch, status_table, temp_db_conn, update
                         lambda *args, **kwargs: None)
 
     sleeptime = []
-    monkeypatch.setattr(time, 'sleep', lambda s: sleeptime.append(s))
+    monkeypatch.setattr(time, 'sleep', sleeptime.append)
 
     assert nominatim.tools.replication.update(temp_db_conn, update_options) \
              == nominatim.tools.replication.UpdateState.NO_CHANGES
@@ -120,7 +123,7 @@ def test_update_no_data_no_sleep(monkeypatch, status_table, temp_db_conn, update
     assert not sleeptime
 
 
-def test_update_no_data_sleep(monkeypatch, status_table, temp_db_conn, update_options):
+def test_update_no_data_sleep(monkeypatch, temp_db_conn, update_options):
     date = dt.datetime.now(dt.timezone.utc) - dt.timedelta(minutes=30)
     status.set_status(temp_db_conn, date, seq=34)
 
@@ -129,7 +132,7 @@ def test_update_no_data_sleep(monkeypatch, status_table, temp_db_conn, update_op
                         lambda *args, **kwargs: None)
 
     sleeptime = []
-    monkeypatch.setattr(time, 'sleep', lambda s: sleeptime.append(s))
+    monkeypatch.setattr(time, 'sleep', sleeptime.append)
 
     assert nominatim.tools.replication.update(temp_db_conn, update_options) \
              == nominatim.tools.replication.UpdateState.NO_CHANGES
diff --git a/test/python/test_tools_sp_csv_loader.py b/test/python/test_tools_sp_csv_loader.py
new file mode 100644 (file)
index 0000000..80d5989
--- /dev/null
@@ -0,0 +1,60 @@
+"""
+    Tests for methods of the SPCsvLoader class.
+"""
+import pytest
+
+from nominatim.errors import UsageError
+from nominatim.tools.special_phrases.sp_csv_loader import SPCsvLoader
+
+def test_parse_csv(sp_csv_loader):
+    """
+        Test method parse_csv()
+        Should return the right SpecialPhrase objects.
+    """
+    phrases = sp_csv_loader.parse_csv()
+    assert check_phrases_content(phrases)
+
+def test_next(sp_csv_loader):
+    """
+        Test objects returned from the next() method.
+        It should return all SpecialPhrase objects of
+        the sp_csv_test.csv special phrases.
+    """
+    phrases = next(sp_csv_loader)
+    assert check_phrases_content(phrases)
+
+def test_check_csv_validity(sp_csv_loader):
+    """
+        Test method check_csv_validity()
+        It should raise an exception when file with a
+        different extension than .csv is given.
+    """
+    sp_csv_loader.csv_path = 'test.csv'
+    sp_csv_loader.check_csv_validity()
+    sp_csv_loader.csv_path = 'test.wrong'
+    with pytest.raises(UsageError):
+        assert sp_csv_loader.check_csv_validity()
+
+def check_phrases_content(phrases):
+    """
+        Asserts that the given phrases list contains
+        the right phrases of the sp_csv_test.csv special phrases.
+    """
+    return  len(phrases) > 1 \
+            and any(p.p_label == 'Billboard'
+                    and p.p_class == 'advertising'
+                    and p.p_type == 'billboard'
+                    and p.p_operator == '-' for p in phrases) \
+            and any(p.p_label == 'Zip Lines'
+                    and p.p_class == 'aerialway'
+                    and p.p_type == 'zip_line'
+                    and p.p_operator == '-' for p in phrases)
+
+@pytest.fixture
+def sp_csv_loader(src_dir):
+    """
+        Return an instance of SPCsvLoader.
+    """
+    csv_path = (src_dir / 'test' / 'testdata' / 'sp_csv_test.csv').resolve()
+    loader = SPCsvLoader(csv_path)
+    return loader
diff --git a/test/python/test_tools_sp_wiki_loader.py b/test/python/test_tools_sp_wiki_loader.py
new file mode 100644 (file)
index 0000000..35b413d
--- /dev/null
@@ -0,0 +1,54 @@
+"""
+    Tests for methods of the SPWikiLoader class.
+"""
+import pytest
+from nominatim.tools.special_phrases.sp_wiki_loader import SPWikiLoader
+
+@pytest.fixture
+def xml_wiki_content(src_dir):
+    """
+        Return the content of the static xml test file.
+    """
+    xml_test_content = src_dir / 'test' / 'testdata' / 'special_phrases_test_content.txt'
+    return xml_test_content.read_text()
+
+
+@pytest.fixture
+def sp_wiki_loader(monkeypatch, def_config, xml_wiki_content):
+    """
+        Return an instance of SPWikiLoader.
+    """
+    loader = SPWikiLoader(def_config, ['en'])
+    monkeypatch.setattr('nominatim.tools.special_phrases.sp_wiki_loader.SPWikiLoader._get_wiki_content',
+                        lambda self, lang: xml_wiki_content)
+    return loader
+
+
+def test_parse_xml(sp_wiki_loader, xml_wiki_content):
+    """
+        Test method parse_xml()
+        Should return the right SpecialPhrase objects.
+    """
+    phrases = sp_wiki_loader.parse_xml(xml_wiki_content)
+    assert check_phrases_content(phrases)
+
+
+def test_next(sp_wiki_loader):
+    """
+        Test objects returned from the next() method.
+        It should return all SpecialPhrases objects of
+        the 'en' special phrases.
+    """
+    phrases = next(sp_wiki_loader)
+    assert check_phrases_content(phrases)
+
+def check_phrases_content(phrases):
+    """
+        Asserts that the given phrases list contains
+        the right phrases of the 'en' special phrases.
+    """
+    return  len(phrases) > 1 \
+            and any(p.p_label == 'Embassies' and p.p_class == 'amenity' and p.p_type == 'embassy'
+                    and p.p_operator == '-' for p in phrases) \
+            and any(p.p_label == 'Zip Line' and p.p_class == 'aerialway' and p.p_type == 'zip_line'
+                    and p.p_operator == '-' for p in phrases)
index 17dd676c3f5c631af948b3e5934947aba32750c8..2e5240871e099da66ac1678f94b75d391d3af994 100644 (file)
@@ -1,13 +1,12 @@
 """
 Test for tiger data function
 """
-from pathlib import Path
+import tarfile
 from textwrap import dedent
 
 import pytest
-import tarfile
 
-from nominatim.tools import tiger_data, database_import
+from nominatim.tools import tiger_data
 from nominatim.errors import UsageError
 
 class MockTigerTable:
@@ -42,7 +41,8 @@ def tiger_table(def_config, temp_db_conn, sql_preprocessor,
                                                         stop INTEGER, interpol TEXT,
                                                         token_info JSONB, postcode TEXT)
            RETURNS INTEGER AS $$
-            INSERT INTO tiger VALUES(linegeo, start, stop, interpol, token_info, postcode) RETURNING 1
+            INSERT INTO tiger VALUES(linegeo, start, stop, interpol, token_info, postcode)
+            RETURNING 1
            $$ LANGUAGE SQL;""")
     (def_config.lib_dir.sql / 'tiger_import_finish.sql').write_text(
         """DROP FUNCTION tiger_line_import (linegeo GEOMETRY, in_startnumber INTEGER,
@@ -110,7 +110,7 @@ def test_add_tiger_data_tarfile(def_config, tiger_table, tokenizer_mock,
     tar.add(str(src_dir / 'test' / 'testdb' / 'tiger' / '01001.csv'))
     tar.close()
 
-    tiger_data.add_tiger_data(str(tmp_path / 'sample.tar.gz'), def_config, 1,
+    tiger_data.add_tiger_data(str(tmp_path / 'sample.tar.gz'), def_config, threads,
                               tokenizer_mock())
 
     assert tiger_table.count() == 6213
@@ -126,7 +126,7 @@ def test_add_tiger_data_bad_tarfile(def_config, tiger_table, tokenizer_mock,
 
 
 def test_add_tiger_data_empty_tarfile(def_config, tiger_table, tokenizer_mock,
-                                      tmp_path, src_dir):
+                                      tmp_path):
     tar = tarfile.open(str(tmp_path / 'sample.tar.gz'), "w:gz")
     tar.add(__file__)
     tar.close()
@@ -135,4 +135,3 @@ def test_add_tiger_data_empty_tarfile(def_config, tiger_table, tokenizer_mock,
                               tokenizer_mock())
 
     assert tiger_table.count() == 0
-
diff --git a/test/testdata/sp_csv_test.csv b/test/testdata/sp_csv_test.csv
new file mode 100644 (file)
index 0000000..3dab967
--- /dev/null
@@ -0,0 +1,42 @@
+phrase,class,type,operator,plural 
+Zip Lines,aerialway,zip_line,-,Y 
+Zip Line in,aerialway,zip_line,in,N 
+Zip Lines in,aerialway,zip_line,in,Y 
+Zip Line near,aerialway,zip_line,near,N 
+Zip Lines near,aerialway,zip_line,near,Y 
+Zip Wire,aerialway,zip_line,-,N 
+Zip Wires,aerialway,zip_line,-,Y 
+Zip Wire in,aerialway,zip_line,in,N 
+Zip Wires in,aerialway,zip_line,in,Y 
+Zip Wire near,aerialway,zip_line,near,N 
+Zip Wires near,aerialway,zip_line,near,Y 
+Zipline,aerialway,zip_line,-,N 
+Ziplines,aerialway,zip_line,-,Y 
+Zipline in,aerialway,zip_line,in,N 
+Ziplines in,aerialway,zip_line,in,Y 
+Zipline near,aerialway,zip_line,near,N 
+Ziplines near,aerialway,zip_line,near,Y 
+Zipwire,aerialway,zip_line,-,N 
+Zipwires,aerialway,zip_line,-,Y 
+Zipwire in,aerialway,zip_line,in,N 
+Zipwires in,aerialway,zip_line,in,Y 
+Zipwire near,aerialway,zip_line,near,N 
+Zipwires near,aerialway,zip_line,near,Y 
+Aerodrome,aeroway,aerodrome,-,N 
+Aerodromes,aeroway,aerodrome,-,Y 
+Aerodrome in,aeroway,aerodrome,in,N 
+Aerodromes in,aeroway,aerodrome,in,Y 
+Aerodrome near,aeroway,aerodrome,near,N 
+Aerodromes near,aeroway,aerodrome,near,Y 
+Airport,aeroway,aerodrome,-,N 
+Airports,aeroway,aerodrome,-,Y 
+Airport in,aeroway,aerodrome,in,N 
+Airports in,aeroway,aerodrome,in,Y 
+Airport near,aeroway,aerodrome,near,N 
+Airports near,aeroway,aerodrome,near,Y 
+Billboard,advertising,billboard,-,N 
+Billboards,advertising,billboard,-,Y 
+Billboard in,advertising,billboard,in,N 
+Billboards in,advertising,billboard,in,Y 
+Billboard near,advertising,billboard,near,N 
+Billboards near,advertising,billboard,near,Y 
diff --git a/test/testdb/full_en_phrases_test.csv b/test/testdb/full_en_phrases_test.csv
new file mode 100644 (file)
index 0000000..1ad397b
--- /dev/null
@@ -0,0 +1,2914 @@
+phrase,class,type,operator,plural
+Zip Line,aerialway,zip_line,-,N 
+Zip Lines,aerialway,zip_line,-,Y 
+Zip Line in,aerialway,zip_line,in,N 
+Zip Lines in,aerialway,zip_line,in,Y 
+Zip Line near,aerialway,zip_line,near,N 
+Zip Lines near,aerialway,zip_line,near,Y 
+Zip Wire,aerialway,zip_line,-,N 
+Zip Wires,aerialway,zip_line,-,Y 
+Zip Wire in,aerialway,zip_line,in,N 
+Zip Wires in,aerialway,zip_line,in,Y 
+Zip Wire near,aerialway,zip_line,near,N 
+Zip Wires near,aerialway,zip_line,near,Y 
+Zipline,aerialway,zip_line,-,N 
+Ziplines,aerialway,zip_line,-,Y 
+Zipline in,aerialway,zip_line,in,N 
+Ziplines in,aerialway,zip_line,in,Y 
+Zipline near,aerialway,zip_line,near,N 
+Ziplines near,aerialway,zip_line,near,Y 
+Zipwire,aerialway,zip_line,-,N 
+Zipwires,aerialway,zip_line,-,Y 
+Zipwire in,aerialway,zip_line,in,N 
+Zipwires in,aerialway,zip_line,in,Y 
+Zipwire near,aerialway,zip_line,near,N 
+Zipwires near,aerialway,zip_line,near,Y 
+Aerodrome,aeroway,aerodrome,-,N 
+Aerodromes,aeroway,aerodrome,-,Y 
+Aerodrome in,aeroway,aerodrome,in,N 
+Aerodromes in,aeroway,aerodrome,in,Y 
+Aerodrome near,aeroway,aerodrome,near,N 
+Aerodromes near,aeroway,aerodrome,near,Y 
+Airport,aeroway,aerodrome,-,N 
+Airports,aeroway,aerodrome,-,Y 
+Airport in,aeroway,aerodrome,in,N 
+Airports in,aeroway,aerodrome,in,Y 
+Airport near,aeroway,aerodrome,near,N 
+Airports near,aeroway,aerodrome,near,Y 
+Billboard,advertising,billboard,-,N 
+Billboards,advertising,billboard,-,Y 
+Billboard in,advertising,billboard,in,N 
+Billboards in,advertising,billboard,in,Y 
+Billboard near,advertising,billboard,near,N 
+Billboards near,advertising,billboard,near,Y 
+Hoarding,advertising,billboard,-,N 
+Hoardings,advertising,billboard,-,Y 
+Hoarding in,advertising,billboard,in,N 
+Hoardings in,advertising,billboard,in,Y 
+Hoarding near,advertising,billboard,near,N 
+Hoardings near,advertising,billboard,near,Y 
+Advertising column,advertising,column,-,N 
+Advertising columns,advertising,column,-,Y 
+Advertising column in,advertising,column,in,N 
+Advertising columns in,advertising,column,in,Y 
+Advertising column near,advertising,column,near,N 
+Advertising columns near,advertising,column,near,Y 
+Litfass column,advertising,column,-,N 
+Litfass columns,advertising,column,-,Y 
+Litfass column in,advertising,column,in,N 
+Litfass columns in,advertising,column,in,Y 
+Litfass column near,advertising,column,near,N 
+Litfass columns near,advertising,column,near,Y 
+Morris column,advertising,column,-,N 
+Morris columns,advertising,column,-,Y 
+Morris column in,advertising,column,in,N 
+Morris columns in,advertising,column,in,Y 
+Morris column near,advertising,column,near,N 
+Morris columns near,advertising,column,near,Y 
+Animal boarding facility,amenity,animal_boarding,-,N 
+Animal boarding facilities,amenity,animal_boarding,-,Y 
+Animal boarding facility in,amenity,animal_boarding,in,N 
+Animal boarding facilities in,amenity,animal_boarding,in,Y 
+Animal boarding facility near,amenity,animal_boarding,near,N 
+Animal boarding facilities near,amenity,animal_boarding,near,Y 
+Animal shelter,amenity,animal_shelter,-,N 
+Animal shelters,amenity,animal_shelter,-,Y 
+Animal shelter in,amenity,animal_shelter,in,N 
+Animal shelters in,amenity,animal_shelter,in,Y 
+Animal shelter near,amenity,animal_shelter,near,N 
+Animal shelters near,amenity,animal_shelter,near,Y 
+Arts Centre,amenity,arts_centre,-,N 
+Arts Centres,amenity,arts_centre,-,Y 
+Arts Centre in,amenity,arts_centre,in,N 
+Arts Centres in,amenity,arts_centre,in,Y 
+Arts Centre near,amenity,arts_centre,near,N 
+Arts Centres near,amenity,arts_centre,near,Y 
+Arts Center,amenity,arts_centre,-,N 
+Arts Centers,amenity,arts_centre,-,Y 
+Arts Center in,amenity,arts_centre,in,N 
+Arts Centers in,amenity,arts_centre,in,Y 
+Arts Center near,amenity,arts_centre,near,N 
+Arts Centers near,amenity,arts_centre,near,Y 
+ATM,amenity,atm,-,N 
+ATMs,amenity,atm,-,Y 
+ATM in,amenity,atm,in,N 
+ATMs in,amenity,atm,in,Y 
+ATM near,amenity,atm,near,N 
+ATMs near,amenity,atm,near,Y 
+cash,amenity,atm,-,N 
+cash,amenity,atm,-,Y 
+cash in,amenity,atm,in,N 
+cash in,amenity,atm,in,Y 
+cash near,amenity,atm,near,N 
+cash near,amenity,atm,near,Y 
+cash machine,amenity,atm,-,N 
+cash machines,amenity,atm,-,Y 
+cash machine in,amenity,atm,in,N 
+cash machines in,amenity,atm,in,Y 
+cash machine near,amenity,atm,near,N 
+cash machines near,amenity,atm,near,Y 
+Bank,amenity,bank,-,N 
+Banks,amenity,bank,-,Y 
+Bank in,amenity,bank,in,N 
+Banks in,amenity,bank,in,Y 
+Bank near,amenity,bank,near,N 
+Banks near,amenity,bank,near,Y 
+Bar,amenity,bar,-,N 
+Bars,amenity,bar,-,Y 
+Bar in,amenity,bar,in,N 
+Bars in,amenity,bar,in,Y 
+Bar near,amenity,bar,near,N 
+Bars near,amenity,bar,near,Y 
+Bar,amenity,pub,-,N 
+Bars,amenity,pub,-,Y 
+Bar in,amenity,pub,in,N 
+Bars in,amenity,pub,in,Y 
+Bar near,amenity,pub,near,N 
+Bars near,amenity,pub,near,Y 
+Barbecue,amenity,bbq,-,N 
+Barbecues,amenity,bbq,-,Y 
+Barbecue in,amenity,bbq,in,N 
+Barbecues in,amenity,bbq,in,Y 
+Barbecue near,amenity,bbq,near,N 
+Barbecues near,amenity,bbq,near,Y 
+Barbecue grill,amenity,bbq,-,N 
+Barbecue grills,amenity,bbq,-,Y 
+Barbecue grill in,amenity,bbq,in,N 
+Barbecue grills in,amenity,bbq,in,Y 
+Barbecue grill near,amenity,bbq,near,N 
+Barbecue grills near,amenity,bbq,near,Y 
+Bbq,amenity,bbq,-,N 
+Bbqs,amenity,bbq,-,Y 
+Bbq in,amenity,bbq,in,N 
+Bbqs in,amenity,bbq,in,Y 
+Bbq near,amenity,bbq,near,N 
+Bbqs near,amenity,bbq,near,Y 
+Bench,amenity,bench,-,N 
+Benches,amenity,bench,-,Y 
+Bench in,amenity,bench,in,N 
+Benches in,amenity,bench,in,Y 
+Bench near,amenity,bench,near,N 
+Benches near,amenity,bench,near,Y 
+Cycle Parking,amenity,bicycle_parking,-,N 
+Cycle Parkings,amenity,bicycle_parking,-,Y 
+Cycle Parking in,amenity,bicycle_parking,in,N 
+Cycle Parkings in,amenity,bicycle_parking,in,Y 
+Cycle Parking near,amenity,bicycle_parking,near,N 
+Cycle Parkings near,amenity,bicycle_parking,near,Y 
+Cycle Rental,amenity,bicycle_rental,-,N 
+Cycle Rentals,amenity,bicycle_rental,-,Y 
+Cycle Rental in,amenity,bicycle_rental,in,N 
+Cycle Rentals in,amenity,bicycle_rental,in,Y 
+Cycle Rental near,amenity,bicycle_rental,near,N 
+Cycle Rentals near,amenity,bicycle_rental,near,Y 
+Bicycle Parking,amenity,bicycle_parking,-,N 
+Bicycle Parkings,amenity,bicycle_parking,-,Y 
+Bicycle Parking in,amenity,bicycle_parking,in,N 
+Bicycle Parkings in,amenity,bicycle_parking,in,Y 
+Bicycle Parking near,amenity,bicycle_parking,near,N 
+Bicycle Parkings near,amenity,bicycle_parking,near,Y 
+Bicycle Rental,amenity,bicycle_rental,-,N 
+Bicycle Rentals,amenity,bicycle_rental,-,Y 
+Bicycle Rental in,amenity,bicycle_rental,in,N 
+Bicycle Rentals in,amenity,bicycle_rental,in,Y 
+Bicycle Rental near,amenity,bicycle_rental,near,N 
+Bicycle Rentals near,amenity,bicycle_rental,near,Y 
+Beer garden,amenity,biergarten,-,N 
+Beer gardens,amenity,biergarten,-,Y 
+Beer garden in,amenity,biergarten,in,N 
+Beer gardens in,amenity,biergarten,in,Y 
+Beer garden near,amenity,biergarten,near,N 
+Beer gardens near,amenity,biergarten,near,Y 
+Brothel,amenity,brothel,-,N 
+Brothels,amenity,brothel,-,Y 
+Brothel in,amenity,brothel,in,N 
+Brothels in,amenity,brothel,in,Y 
+Brothel near,amenity,brothel,near,N 
+Brothels near,amenity,brothel,near,Y 
+Bureau de Change,amenity,bureau_de_change,-,N 
+Bureau de Changes,amenity,bureau_de_change,-,Y 
+Bureaus de Change,amenity,bureau_de_change,-,Y 
+Bureau de Change in,amenity,bureau_de_change,in,N 
+Bureau de Changes in,amenity,bureau_de_change,in,Y 
+Bureaus de Change in,amenity,bureau_de_change,in,Y 
+Bureau de Change near,amenity,bureau_de_change,near,N 
+Bureau de Changes near,amenity,bureau_de_change,near,Y 
+Bureaus de Change near,amenity,bureau_de_change,near,Y 
+Bus Station,amenity,bus_station,-,N 
+Bus Stations,amenity,bus_station,-,Y 
+Bus Station in,amenity,bus_station,in,N 
+Bus Stations in,amenity,bus_station,in,Y 
+Bus Station near,amenity,bus_station,near,N 
+Bus Stations near,amenity,bus_station,near,Y 
+Cafe,amenity,cafe,-,N 
+Cafes,amenity,cafe,-,Y 
+Cafe in,amenity,cafe,in,N 
+Cafes in,amenity,cafe,in,Y 
+Cafe near,amenity,cafe,near,N 
+Cafes near,amenity,cafe,near,Y 
+Car Rental,amenity,car_rental,-,N 
+Car Rentals,amenity,car_rental,-,Y 
+Car Rental in,amenity,car_rental,in,N 
+Car Rentals in,amenity,car_rental,in,Y 
+Car Rental near,amenity,car_rental,near,N 
+Car Rentals near,amenity,car_rental,near,Y 
+Car Share,amenity,car_sharing,-,N 
+Car Sharing,amenity,car_sharing,-,N 
+Car Sharings,amenity,car_sharing,-,Y 
+Car Share in,amenity,car_sharing,in,N 
+Car Sharing in,amenity,car_sharing,in,N 
+Car Sharings in,amenity,car_sharing,in,Y 
+Car Share near,amenity,car_sharing,near,N 
+Car Sharing near,amenity,car_sharing,near,N 
+Car Sharings near,amenity,car_sharing,near,Y 
+Car Wash,amenity,car_wash,-,N 
+Car Washes,amenity,car_wash,-,Y 
+Car Wash in,amenity,car_wash,in,N 
+Car Washes in,amenity,car_wash,in,Y 
+Car Wash near,amenity,car_wash,near,N 
+Car Washes near,amenity,car_wash,near,Y 
+Casino,amenity,casino,-,N 
+Casinos,amenity,casino,-,Y 
+Casino in,amenity,casino,in,N 
+Casinos in,amenity,casino,in,Y 
+Casino near,amenity,casino,near,N 
+Casinos near,amenity,casino,near,Y 
+Charging station,amenity,charging_station,-,N 
+Charging stations,amenity,charging_station,-,Y 
+Charging station in,amenity,charging_station,in,N 
+Charging stations in,amenity,charging_station,in,Y 
+Charging station near,amenity,charging_station,near,N 
+Charging stations near,amenity,charging_station,near,Y 
+Cinema,amenity,cinema,-,N 
+Cinemas,amenity,cinema,-,Y 
+Cinema in,amenity,cinema,in,N 
+Cinemas in,amenity,cinema,in,Y 
+Cinema near,amenity,cinema,near,N 
+Cinemas near,amenity,cinema,near,Y 
+Clinic,amenity,clinic,-,N 
+Clinics,amenity,clinic,-,Y 
+Clinic in,amenity,clinic,in,N 
+Clinics in,amenity,clinic,in,Y 
+Clinic near,amenity,clinic,near,N 
+Clinics near,amenity,clinic,near,Y 
+College,amenity,college,-,N 
+Colleges,amenity,college,-,Y 
+College in,amenity,college,in,N 
+Colleges in,amenity,college,in,Y 
+College near,amenity,college,near,N 
+Colleges near,amenity,college,near,Y 
+Conference Centre,amenity,conference_centre,-,N 
+Conference Centres,amenity,conference_centre,-,Y 
+Conference Centre in,amenity,conference_centre,in,N 
+Conference Centres in,amenity,conference_centre,in,Y 
+Conference Centre near,amenity,conference_centre,near,N 
+Conference Centres near,amenity,conference_centre,near,Y 
+Conference Center,amenity,conference_centre,-,N 
+Conference Centers,amenity,conference_centre,-,Y 
+Conference Center in,amenity,conference_centre,in,N 
+Conference Centers in,amenity,conference_centre,in,Y 
+Conference Center near,amenity,conference_centre,near,N 
+Conference Centers near,amenity,conference_centre,near,Y 
+Community Centre,amenity,community_centre,-,N 
+Community Centres,amenity,community_centre,-,Y 
+Community Centre in,amenity,community_centre,in,N 
+Community Centres in,amenity,community_centre,in,Y 
+Community Centre near,amenity,community_centre,near,N 
+Community Centres near,amenity,community_centre,near,Y 
+Community Center,amenity,community_centre,-,N 
+Community Centers,amenity,community_centre,-,Y 
+Community Center in,amenity,community_centre,in,N 
+Community Centers in,amenity,community_centre,in,Y 
+Community Center near,amenity,community_centre,near,N 
+Community Centers near,amenity,community_centre,near,Y 
+Courthouse,amenity,courthouse,-,N 
+Courthouses,amenity,courthouse,-,Y 
+Courthouse in,amenity,courthouse,in,N 
+Courthouses in,amenity,courthouse,in,Y 
+Courthouse near,amenity,courthouse,near,N 
+Courthouses near,amenity,courthouse,near,Y 
+Crematorium,amenity,crematorium,-,N 
+Crematoriums,amenity,crematorium,-,Y 
+Crematorium in,amenity,crematorium,in,N 
+Crematoriums in,amenity,crematorium,in,Y 
+Crematorium near,amenity,crematorium,near,N 
+Crematoriums near,amenity,crematorium,near,Y 
+Dentist,amenity,dentist,-,N 
+Dentists,amenity,dentist,-,Y 
+Dentist in,amenity,dentist,in,N 
+Dentists in,amenity,dentist,in,Y 
+Dentist near,amenity,dentist,near,N 
+Dentists near,amenity,dentist,near,Y 
+Doctor,amenity,doctors,-,N 
+Doctors,amenity,doctors,-,Y 
+Doctor in,amenity,doctors,in,N 
+Doctors in,amenity,doctors,in,Y 
+Doctor near,amenity,doctors,near,N 
+Doctors near,amenity,doctors,near,Y 
+Martial Arts,amenity,dojo,-,N 
+Martial Arts,amenity,dojo,-,Y 
+Martial Arts in,amenity,dojo,in,N 
+Martial Arts in,amenity,dojo,in,Y 
+Martial Arts near,amenity,dojo,near,N 
+Martial Arts near,amenity,dojo,near,Y 
+Dojo,amenity,dojo,-,N 
+Dojos,amenity,dojo,-,Y 
+Dojo in,amenity,dojo,in,N 
+Dojos in,amenity,dojo,in,Y 
+Dojo near,amenity,dojo,near,N 
+Dojos near,amenity,dojo,near,Y 
+Dojang,amenity,dojo,-,N 
+Dojang,amenity,dojo,-,Y 
+Dojang in,amenity,dojo,in,N 
+Dojang in,amenity,dojo,in,Y 
+Dojang near,amenity,dojo,near,N 
+Dojang near,amenity,dojo,near,Y 
+Drinking Water,amenity,drinking_water,-,N 
+Drinking Water in,amenity,drinking_water,in,N 
+Drinking Water near,amenity,drinking_water,near,N 
+Water,amenity,drinking_water,-,N 
+Water in,amenity,drinking_water,in,N 
+Water near,amenity,drinking_water,near,N 
+Driving School,amenity,driving_school,-,N 
+Driving Schools,amenity,driving_school,-,Y 
+Driving School in,amenity,driving_school,in,N 
+Driving Schools in,amenity,driving_school,in,Y 
+Driving School near,amenity,driving_school,near,N 
+Driving Schools near,amenity,driving_school,near,Y 
+Embassy,amenity,embassy,-,N 
+Embassys,amenity,embassy,-,Y 
+Embassies,amenity,embassy,-,Y 
+Embassy in,amenity,embassy,in,N 
+Embassys in,amenity,embassy,in,Y 
+Embassies in,amenity,embassy,in,Y 
+Embassy near,amenity,embassy,near,N 
+Embassys near,amenity,embassy,near,Y 
+Embassies near,amenity,embassy,near,Y 
+Fast Food,amenity,fast_food,-,N 
+Fast Food in,amenity,fast_food,in,N 
+Fast Food near,amenity,fast_food,near,N 
+Food,amenity,restaurant,-,N 
+Food,amenity,fast_food,-,N 
+Food,amenity,restaurant,-,Y 
+Food,amenity,fast_food,-,Y 
+Food in,amenity,restaurant,in,N 
+Food in,amenity,fast_food,in,N 
+Food in,amenity,restaurant,in,Y 
+Food in,amenity,fast_food,in,Y 
+Food near,amenity,restaurant,near,N 
+Food near,amenity,fast_food,near,N 
+Food near,amenity,restaurant,near,Y 
+Food near,amenity,fast_food,near,Y 
+Ferry Terminal,amenity,ferry_terminal,-,N 
+Ferry Terminals,amenity,ferry_terminal,-,Y 
+Ferry Terminal in,amenity,ferry_terminal,in,N 
+Ferry Terminals in,amenity,ferry_terminal,in,Y 
+Ferry Terminal near,amenity,ferry_terminal,near,N 
+Ferry Terminals near,amenity,ferry_terminal,near,Y 
+Fire Station,amenity,fire_station,-,N 
+Fire Stations,amenity,fire_station,-,Y 
+Fire Station in,amenity,fire_station,in,N 
+Fire Stations in,amenity,fire_station,in,Y 
+Fire Station near,amenity,fire_station,near,N 
+Fire Stations near,amenity,fire_station,near,Y 
+Fountain,amenity,fountain,-,N 
+Fountains,amenity,fountain,-,Y 
+Fountain in,amenity,fountain,in,N 
+Fountains in,amenity,fountain,in,Y 
+Fountain near,amenity,fountain,near,N 
+Fountains near,amenity,fountain,near,Y 
+Fuel,amenity,fuel,-,N 
+Fuels,amenity,fuel,-,Y 
+Fuel in,amenity,fuel,in,N 
+Fuels in,amenity,fuel,in,Y 
+Fuel near,amenity,fuel,near,N 
+Fuels near,amenity,fuel,near,Y 
+Fuel Station,amenity,fuel,-,N 
+Fuel Stations,amenity,fuel,-,Y 
+Fuel Station in,amenity,fuel,in,N 
+Fuel Stations in,amenity,fuel,in,Y 
+Fuel Station near,amenity,fuel,near,N 
+Fuel Stations near,amenity,fuel,near,Y 
+Gas,amenity,fuel,-,N 
+Gas,amenity,fuel,-,Y 
+Gas in,amenity,fuel,in,N 
+Gas in,amenity,fuel,in,Y 
+Gas near,amenity,fuel,near,N 
+Gas near,amenity,fuel,near,Y 
+Gas Station,amenity,fuel,-,N 
+Gas Stations,amenity,fuel,-,Y 
+Gas Station in,amenity,fuel,in,N 
+Gas Stations in,amenity,fuel,in,Y 
+Gas Station near,amenity,fuel,near,N 
+Gas Stations near,amenity,fuel,near,Y 
+Petrol,amenity,fuel,-,N 
+Petrol in,amenity,fuel,in,N 
+Petrol near,amenity,fuel,near,N 
+Petrol Stations near,amenity,fuel,near,Y 
+Petrol Station,amenity,fuel,-,N 
+Petrol Stations,amenity,fuel,-,Y 
+Petrol Station in,amenity,fuel,in,N 
+Petrol Stations in,amenity,fuel,in,Y 
+Petrol Station near,amenity,fuel,near,N 
+Petrol Stations near,amenity,fuel,near,Y 
+Grave Yard,amenity,grave_yard,-,N 
+Grave Yards,amenity,grave_yard,-,Y 
+Grave Yard in,amenity,grave_yard,in,N 
+Grave Yards in,amenity,grave_yard,in,Y 
+Grave Yard near,amenity,grave_yard,near,N 
+Grave Yards near,amenity,grave_yard,near,Y 
+Grit bin,amenity,grit_bin,-,N 
+Grit bins,amenity,grit_bin,-,Y 
+Grit bin in,amenity,grit_bin,in,N 
+Grit bins in,amenity,grit_bin,in,Y 
+Grit bin near,amenity,grit_bin,near,N 
+Grit bins near,amenity,grit_bin,near,Y 
+Fitness Centre,amenity,gym,-,N 
+Fitness Centres,amenity,gym,-,Y 
+Fitness Centre in,amenity,gym,in,N 
+Fitness Centres in,amenity,gym,in,Y 
+Fitness Centre near,amenity,gym,near,N 
+Fitness Centres near,amenity,gym,near,Y 
+Fitness Center,amenity,gym,-,N 
+Fitness Centers,amenity,gym,-,Y 
+Fitness Center in,amenity,gym,in,N 
+Fitness Centers in,amenity,gym,in,Y 
+Fitness Center near,amenity,gym,near,N 
+Fitness Centers near,amenity,gym,near,Y 
+Gym,amenity,gym,-,N 
+Gyms,amenity,gym,-,Y 
+Gym in,amenity,gym,in,N 
+Gyms in,amenity,gym,in,Y 
+Gym near,amenity,gym,near,N 
+Gyms near,amenity,gym,near,Y 
+Hospital,amenity,hospital,-,N 
+Hospitals,amenity,hospital,-,Y 
+Hospital in,amenity,hospital,in,N 
+Hospitals in,amenity,hospital,in,Y 
+Hospital near,amenity,hospital,near,N 
+Hospitals near,amenity,hospital,near,Y 
+Hunting Stand,amenity,hunting_stand,-,N 
+Hunting Stands,amenity,hunting_stand,-,Y 
+Hunting Stand in,amenity,hunting_stand,in,N 
+Hunting Stands in,amenity,hunting_stand,in,Y 
+Hunting Stand near,amenity,hunting_stand,near,N 
+Hunting Stands near,amenity,hunting_stand,near,Y 
+Ice Cream,amenity,ice_cream,-,N 
+Ice Cream in,amenity,ice_cream,in,N 
+Ice Cream near,amenity,ice_cream,near,N 
+Karaoke,amenity,karaoke_box,-,N 
+Karaokes,amenity,karaoke_box,-,Y 
+Karaoke in,amenity,karaoke_box,in,N 
+Karaokes in,amenity,karaoke_box,in,Y 
+Karaoke near,amenity,karaoke_box,near,N 
+Karaokes near,amenity,karaoke_box,near,Y 
+Kindergarten,amenity,kindergarten,-,N 
+Kindergartens,amenity,kindergarten,-,Y 
+Kindergarten in,amenity,kindergarten,in,N 
+Kindergartens in,amenity,kindergarten,in,Y 
+Kindergarten near,amenity,kindergarten,near,N 
+Kindergartens near,amenity,kindergarten,near,Y 
+Nursery,amenity,kindergarten,-,N 
+Nurserys,amenity,kindergarten,-,Y 
+Nurseries,amenity,kindergarten,-,Y 
+Nursery in,amenity,kindergarten,in,N 
+Nurserys in,amenity,kindergarten,in,Y 
+Nurseries in,amenity,kindergarten,in,Y 
+Nursery near,amenity,kindergarten,near,N 
+Nurserys near,amenity,kindergarten,near,Y 
+Nurseries near,amenity,kindergarten,near,Y 
+Nursery School,amenity,kindergarten,-,N 
+Nursery Schools,amenity,kindergarten,-,Y 
+Nursery School in,amenity,kindergarten,in,N 
+Nursery Schools in,amenity,kindergarten,in,Y 
+Nursery School near,amenity,kindergarten,near,N 
+Nursery Schools near,amenity,kindergarten,near,Y 
+Kneipp Basin,amenity,kneipp_water_cure,-,N 
+Kneipp Basins,amenity,kneipp_water_cure,-,Y 
+Kneipp Basin in,amenity,kneipp_water_cure,in,N 
+Kneipp Basins in,amenity,kneipp_water_cure,in,Y 
+Kneipp Basin near,amenity,kneipp_water_cure,near,N 
+Kneipp Basins near,amenity,kneipp_water_cure,near,Y 
+Kneipp Bath,amenity,kneipp_water_cure,-,N 
+Kneipp Baths,amenity,kneipp_water_cure,-,Y 
+Kneipp Bath in,amenity,kneipp_water_cure,in,N 
+Kneipp Baths in,amenity,kneipp_water_cure,in,Y 
+Kneipp Bath near,amenity,kneipp_water_cure,near,N 
+Kneipp Baths near,amenity,kneipp_water_cure,near,Y 
+Kneipp Facility,amenity,kneipp_water_cure,-,N 
+Kneipp Facilitys,amenity,kneipp_water_cure,-,Y 
+Kneipp Facilities,amenity,kneipp_water_cure,-,Y 
+Kneipp Facility in,amenity,kneipp_water_cure,in,N 
+Kneipp Facilitys in,amenity,kneipp_water_cure,in,Y 
+Kneipp Facilities in,amenity,kneipp_water_cure,in,Y 
+Kneipp Facility near,amenity,kneipp_water_cure,near,N 
+Kneipp Facilitys near,amenity,kneipp_water_cure,near,Y 
+Kneipp Facilities near,amenity,kneipp_water_cure,near,Y 
+Library,amenity,library,-,N 
+Librarys,amenity,library,-,Y 
+Libraries,amenity,library,-,Y 
+Library in,amenity,library,in,N 
+Librarys in,amenity,library,in,Y 
+Libraries in,amenity,library,in,Y 
+Library near,amenity,library,near,N 
+Librarys near,amenity,library,near,Y 
+Libraries near,amenity,library,near,Y 
+Marketplace,amenity,marketplace,-,N 
+Marketplaces,amenity,marketplace,-,Y 
+Marketplace in,amenity,marketplace,in,N 
+Marketplaces in,amenity,marketplace,in,Y 
+Marketplace near,amenity,marketplace,near,N 
+Marketplaces near,amenity,marketplace,near,Y 
+Motorcycle parking,amenity,motorcycle_parking,-,N 
+Motorcycle parkings,amenity,motorcycle_parking,-,Y 
+Motorcycle parking in,amenity,motorcycle_parking,in,N 
+Motorcycle parkings in,amenity,motorcycle_parking,in,Y 
+Motorcycle parking near,amenity,motorcycle_parking,near,N 
+Motorcycle parkings near,amenity,motorcycle_parking,near,Y 
+Night Club,amenity,nightclub,-,N 
+Night Clubs,amenity,nightclub,-,Y 
+Night Club in,amenity,nightclub,in,N 
+Night Clubs in,amenity,nightclub,in,Y 
+Night Club near,amenity,nightclub,near,N 
+Night Clubs near,amenity,nightclub,near,Y 
+Nursing Home,amenity,nursing_home,-,N 
+Nursing Homes,amenity,nursing_home,-,Y 
+Nursing Home in,amenity,nursing_home,in,N 
+Nursing Homes in,amenity,nursing_home,in,Y 
+Nursing Home near,amenity,nursing_home,near,N 
+Nursing Homes near,amenity,nursing_home,near,Y 
+Pharmacy,amenity,pharmacy,-,N 
+Pharmacys,amenity,pharmacy,-,Y 
+Pharmacies,amenity,pharmacy,-,Y 
+Pharmacy in,amenity,pharmacy,in,N 
+Pharmacys in,amenity,pharmacy,in,Y 
+Pharmacies in,amenity,pharmacy,in,Y 
+Pharmacy near,amenity,pharmacy,near,N 
+Pharmacys near,amenity,pharmacy,near,Y 
+Pharmacies near,amenity,pharmacy,near,Y 
+Parking,amenity,parking,-,N 
+Parkings,amenity,parking,-,Y 
+Parking in,amenity,parking,in,N 
+Parkings in,amenity,parking,in,Y 
+Parking near,amenity,parking,near,N 
+Parkings near,amenity,parking,near,Y 
+Church,amenity,place_of_worship,-,N 
+Churches,amenity,place_of_worship,-,Y 
+Church in,amenity,place_of_worship,in,N 
+Churches in,amenity,place_of_worship,in,Y 
+Church near,amenity,place_of_worship,near,N 
+Churches near,amenity,place_of_worship,near,Y 
+Place of Worship,amenity,place_of_worship,-,N 
+Place of Worships,amenity,place_of_worship,-,Y 
+Places of Worship,amenity,place_of_worship,-,Y 
+Place of Worship in,amenity,place_of_worship,in,N 
+Place of Worships in,amenity,place_of_worship,in,Y 
+Places of Worship in,amenity,place_of_worship,in,Y 
+Place of Worship near,amenity,place_of_worship,near,N 
+Place of Worships near,amenity,place_of_worship,near,Y 
+Places of Worship near,amenity,place_of_worship,near,Y 
+Planetarium,amenity,planetarium,-,N 
+Planetariums,amenity,planetarium,-,Y 
+Planetaria,amenity,planetarium,-,Y 
+Planetarium in,amenity,planetarium,in,N 
+Planetariums in,amenity,planetarium,in,Y 
+Planetaria in,amenity,planetarium,in,Y 
+Planetarium near,amenity,planetarium,near,N 
+Planetariums near,amenity,planetarium,near,Y 
+Planetaria near,amenity,planetarium,near,Y 
+Police,amenity,police,-,N 
+Police in,amenity,police,in,N 
+Police near,amenity,police,near,N 
+Post Box,amenity,post_box,-,N 
+Post Boxes,amenity,post_box,-,Y 
+Post Box in,amenity,post_box,in,N 
+Post Boxes in,amenity,post_box,in,Y 
+Post Box near,amenity,post_box,near,N 
+Post Boxes near,amenity,post_box,near,Y 
+Post Office,amenity,post_office,-,N 
+Post Offices,amenity,post_office,-,Y 
+Post Office in,amenity,post_office,in,N 
+Post Offices in,amenity,post_office,in,Y 
+Post Office near,amenity,post_office,near,N 
+Post Offices near,amenity,post_office,near,Y 
+Prison,amenity,prison,-,N 
+Prisons,amenity,prison,-,Y 
+Prison in,amenity,prison,in,N 
+Prisons in,amenity,prison,in,Y 
+Prison near,amenity,prison,near,N 
+Prisons near,amenity,prison,near,Y 
+Bookcase,amenity,public_bookcase,-,N 
+Bookcases,amenity,public_bookcase,-,Y 
+Bookcase in,amenity,public_bookcase,in,N 
+Bookcases in,amenity,public_bookcase,in,Y 
+Bookcase near,amenity,public_bookcase,near,N 
+Bookcases near,amenity,public_bookcase,near,Y 
+Public Bookcase,amenity,public_bookcase,-,N 
+Public Bookcases,amenity,public_bookcase,-,Y 
+Public Bookcase in,amenity,public_bookcase,in,N 
+Public Bookcases in,amenity,public_bookcase,in,Y 
+Public Bookcase near,amenity,public_bookcase,near,N 
+Public Bookcases near,amenity,public_bookcase,near,Y 
+Pub,amenity,bar,-,N 
+Pubs,amenity,bar,-,Y 
+Pub in,amenity,bar,in,N 
+Pubs in,amenity,bar,in,Y 
+Pub near,amenity,bar,near,N 
+Pubs near,amenity,bar,near,Y 
+Pub,amenity,pub,-,N 
+Pubs,amenity,pub,-,Y 
+Pub in,amenity,pub,in,N 
+Pubs in,amenity,pub,in,Y 
+Pub near,amenity,pub,near,N 
+Pubs near,amenity,pub,near,Y 
+Public Building,amenity,public_building,-,N 
+Public Buildings,amenity,public_building,-,Y 
+Public Building in,amenity,public_building,in,N 
+Public Buildings in,amenity,public_building,in,Y 
+Public Building near,amenity,public_building,near,N 
+Public Buildings near,amenity,public_building,near,Y 
+Recycling Point,amenity,recycling,-,N 
+Recycling Points,amenity,recycling,-,Y 
+Recycling Point in,amenity,recycling,in,N 
+Recycling Points in,amenity,recycling,in,Y 
+Recycling Point near,amenity,recycling,near,N 
+Recycling Points near,amenity,recycling,near,Y 
+Recycling Station,amenity,recycling,-,N 
+Recycling Stations,amenity,recycling,-,Y 
+Recycling Station in,amenity,recycling,in,N 
+Recycling Stations in,amenity,recycling,in,Y 
+Recycling Station near,amenity,recycling,near,N 
+Recycling Stations near,amenity,recycling,near,Y 
+Restaurant,amenity,restaurant,-,N 
+Restaurants,amenity,restaurant,-,Y 
+Restaurant in,amenity,restaurant,in,N 
+Restaurants in,amenity,restaurant,in,Y 
+Restaurant near,amenity,restaurant,near,N 
+Restaurants near,amenity,restaurant,near,Y 
+Retirement Home,amenity,retirement_home,-,N 
+Retirement Homes,amenity,retirement_home,-,Y 
+Retirement Home in,amenity,retirement_home,in,N 
+Retirement Homes in,amenity,retirement_home,in,Y 
+Retirement Home near,amenity,retirement_home,near,N 
+Retirement Homes near,amenity,retirement_home,near,Y 
+Sauna,amenity,sauna,-,N 
+Saunas,amenity,sauna,-,Y 
+Sauna in,amenity,sauna,in,N 
+Saunas in,amenity,sauna,in,Y 
+Sauna near,amenity,sauna,near,N 
+Saunas near,amenity,sauna,near,Y 
+School,amenity,school,-,N 
+Schools,amenity,school,-,Y 
+School in,amenity,school,in,N 
+Schools in,amenity,school,in,Y 
+School near,amenity,school,near,N 
+Schools near,amenity,school,near,Y 
+Shelter,amenity,shelter,-,N 
+Shelters,amenity,shelter,-,Y 
+Shelter in,amenity,shelter,in,N 
+Shelters in,amenity,shelter,in,Y 
+Shelter near,amenity,shelter,near,N 
+Shelters near,amenity,shelter,near,Y 
+Studio,amenity,studio,-,N 
+Studios,amenity,studio,-,Y 
+Studio in,amenity,studio,in,N 
+Studios in,amenity,studio,in,Y 
+Studio near,amenity,studio,near,N 
+Studios near,amenity,studio,near,Y 
+Swinger Club,amenity,swingerclub,-,N 
+Swinger Clubs,amenity,swingerclub,-,Y 
+Swinger Club in,amenity,swingerclub,in,N 
+Swinger Clubs in,amenity,swingerclub,in,Y 
+Swinger Club near,amenity,swingerclub,near,N 
+Swinger Clubs near,amenity,swingerclub,near,Y 
+Taxi,amenity,taxi,-,N 
+Taxis,amenity,taxi,-,Y 
+Taxi in,amenity,taxi,in,N 
+Taxis in,amenity,taxi,in,Y 
+Taxi near,amenity,taxi,near,N 
+Taxis near,amenity,taxi,near,Y 
+Taxi Rank,amenity,taxi,-,N 
+Taxi Ranks,amenity,taxi,-,Y 
+Taxi Rank in,amenity,taxi,in,N 
+Taxi Ranks in,amenity,taxi,in,Y 
+Taxi Rank near,amenity,taxi,near,N 
+Taxi Ranks near,amenity,taxi,near,Y 
+Telephone,amenity,telephone,-,N 
+Telephones,amenity,telephone,-,Y 
+Telephone in,amenity,telephone,in,N 
+Telephones in,amenity,telephone,in,Y 
+Telephone near,amenity,telephone,near,N 
+Telephones near,amenity,telephone,near,Y 
+Public Telephone,amenity,telephone,-,N 
+Public Telephones,amenity,telephone,-,Y 
+Public Telephone in,amenity,telephone,in,N 
+Public Telephones in,amenity,telephone,in,Y 
+Public Telephone near,amenity,telephone,near,N 
+Public Telephones near,amenity,telephone,near,Y 
+Phone Booth,amenity,telephone,-,N 
+Phone Booths,amenity,telephone,-,Y 
+Phone Booth in,amenity,telephone,in,N 
+Phone Booths in,amenity,telephone,in,Y 
+Phone Booth near,amenity,telephone,near,N 
+Phone Booths near,amenity,telephone,near,Y 
+Theatre,amenity,theatre,-,N 
+Theatres,amenity,theatre,-,Y 
+Theatre in,amenity,theatre,in,N 
+Theatres in,amenity,theatre,in,Y 
+Theatre near,amenity,theatre,near,N 
+Theatres near,amenity,theatre,near,Y 
+Toilet,amenity,toilets,-,N 
+Toilets,amenity,toilets,-,Y 
+Toilet in,amenity,toilets,in,N 
+Toilets in,amenity,toilets,in,Y 
+Toilet near,amenity,toilets,near,N 
+Toilets near,amenity,toilets,near,Y 
+Town Hall,amenity,townhall,-,N 
+Town Halls,amenity,townhall,-,Y 
+Town Hall in,amenity,townhall,in,N 
+Town Halls in,amenity,townhall,in,Y 
+Town Hall near,amenity,townhall,near,N 
+Town Halls near,amenity,townhall,near,Y 
+University,amenity,university,-,N 
+Universitys,amenity,university,-,Y 
+Universities,amenity,university,-,Y 
+University in,amenity,university,in,N 
+Universitys in,amenity,university,in,Y 
+Universities in,amenity,university,in,Y 
+University near,amenity,university,near,N 
+Universitys near,amenity,university,near,Y 
+Universities near,amenity,university,near,Y 
+Vending Machine,amenity,vending_machine,-,N 
+Vending Machines,amenity,vending_machine,-,Y 
+Vending Machine in,amenity,vending_machine,in,N 
+Vending Machines in,amenity,vending_machine,in,Y 
+Vending Machine near,amenity,vending_machine,near,N 
+Vending Machines near,amenity,vending_machine,near,Y 
+Veterinary Surgery,amenity,veterinary,-,N 
+Veterinary Surgeries,amenity,veterinary,-,Y 
+Veterinary Surgery in,amenity,veterinary,in,N 
+Veterinary Surgeries in,amenity,veterinary,in,Y 
+Veterinary Surgery near,amenity,veterinary,near,N 
+Veterinary Surgeries near,amenity,veterinary,near,Y 
+Waste Basket,amenity,waste_basket,-,N 
+Waste Baskets,amenity,waste_basket,-,Y 
+Waste Basket in,amenity,waste_basket,in,N 
+Waste Baskets in,amenity,waste_basket,in,Y 
+Waste Basket near,amenity,waste_basket,near,N 
+Waste Baskets near,amenity,waste_basket,near,Y 
+Rubbish Bin,amenity,waste_basket,-,N 
+Rubbish Bins,amenity,waste_basket,-,Y 
+Rubbish Bin in,amenity,waste_basket,in,N 
+Rubbish Bins in,amenity,waste_basket,in,Y 
+Rubbish Bin near,amenity,waste_basket,near,N 
+Rubbish Bins near,amenity,waste_basket,near,Y 
+Bin,amenity,waste_basket,-,N 
+Bins,amenity,waste_basket,-,Y 
+Bin in,amenity,waste_basket,in,N 
+Bins in,amenity,waste_basket,in,Y 
+Bin near,amenity,waste_basket,near,N 
+Bins near,amenity,waste_basket,near,Y 
+Mural,artwork_type,mural,-,N 
+Murals,artwork_type,mural,-,Y 
+Mural in,artwork_type,mural,in,N 
+Murals in,artwork_type,mural,in,Y 
+Mural near,artwork_type,mural,near,N 
+Murals near,artwork_type,mural,near,Y 
+Sculpture,artwork_type,sculpture,-,N 
+Sculptures,artwork_type,sculpture,-,Y 
+Sculpture in,artwork_type,sculpture,in,N 
+Sculptures in,artwork_type,sculpture,in,Y 
+Sculpture near,artwork_type,sculpture,near,N 
+Sculptures near,artwork_type,sculpture,near,Y 
+Statue,artwork_type,statue,-,N 
+Statues,artwork_type,statue,-,Y 
+Statue in,artwork_type,statue,in,N 
+Statues in,artwork_type,statue,in,Y 
+Statue near,artwork_type,statue,near,N 
+Statues near,artwork_type,statue,near,Y 
+ATM,atm,yes,-,N 
+ATMs,atm,yes,-,Y 
+ATM in,atm,yes,in,N 
+ATMs in,atm,yes,in,Y 
+ATM near,atm,yes,near,N 
+ATMs near,atm,yes,near,Y 
+National Park,boundary,national_park,-,N 
+National Parks,boundary,national_park,-,Y 
+National Park in,boundary,national_park,in,N 
+National Parks in,boundary,national_park,in,Y 
+National Park near,boundary,national_park,near,N 
+National Parks near,boundary,national_park,near,Y 
+Apartment Block,building,apartments,-,N 
+Apartment Blocks,building,apartments,-,Y 
+Apartment Block in,building,apartments,in,N 
+Apartment Blocks in,building,apartments,in,Y 
+Apartment Block near,building,apartments,near,N 
+Apartment Blocks near,building,apartments,near,Y 
+Building Block,building,block,-,N 
+Building Blocks,building,block,-,Y 
+Building Block in,building,block,in,N 
+Building Blocks in,building,block,in,Y 
+Building Block near,building,block,near,N 
+Building Blocks near,building,block,near,Y 
+Bunker,building,bunker,-,N 
+Bunkers,building,bunker,-,Y 
+Bunker in,building,bunker,in,N 
+Bunkers in,building,bunker,in,Y 
+Bunker near,building,bunker,near,N 
+Bunkers near,building,bunker,near,Y 
+Cathedral,building,cathedral,-,N 
+Cathedrals,building,cathedral,-,Y 
+Cathedral in,building,cathedral,in,N 
+Cathedrals in,building,cathedral,in,Y 
+Cathedral near,building,cathedral,near,N 
+Cathedrals near,building,cathedral,near,Y 
+Chapel,building,chapel,-,N 
+Chapels,building,chapel,-,Y 
+Chapel in,building,chapel,in,N 
+Chapels in,building,chapel,in,Y 
+Chapel near,building,chapel,near,N 
+Chapels near,building,chapel,near,Y 
+Church,building,church,-,N 
+Churchs,building,church,-,Y 
+Churches,building,church,-,Y 
+Church in,building,church,in,N 
+Churchs in,building,church,in,Y 
+Churches in,building,church,in,Y 
+Church near,building,church,near,N 
+Churchs near,building,church,near,Y 
+Churches near,building,church,near,Y 
+City Hall,building,city_hall,-,N 
+City Halls,building,city_hall,-,Y 
+City Hall in,building,city_hall,in,N 
+City Halls in,building,city_hall,in,Y 
+City Hall near,building,city_hall,near,N 
+City Halls near,building,city_hall,near,Y 
+Civic Building,building,civic,-,N 
+Civic Buildings,building,civic,-,Y 
+Civic Building in,building,civic,in,N 
+Civic Buildings in,building,civic,in,Y 
+Civic Building near,building,civic,near,N 
+Civic Buildings near,building,civic,near,Y 
+Commercial Building,building,commercial,-,N 
+Commercial Buildings,building,commercial,-,Y 
+Commercial Building in,building,commercial,in,N 
+Commercial Buildings in,building,commercial,in,Y 
+Commercial Building near,building,commercial,near,N 
+Commercial Buildings near,building,commercial,near,Y 
+Dormitory,building,dormitory,-,N 
+Dormitorys,building,dormitory,-,Y 
+Dormitory in,building,dormitory,in,N 
+Dormitorys in,building,dormitory,in,Y 
+Dormitory near,building,dormitory,near,N 
+Dormitorys near,building,dormitory,near,Y 
+Building Entrance,building,entrance,-,N 
+Building Entrances,building,entrance,-,Y 
+Building Entrance in,building,entrance,in,N 
+Building Entrances in,building,entrance,in,Y 
+Building Entrance near,building,entrance,near,N 
+Building Entrances near,building,entrance,near,Y 
+Faculty Building,building,faculty,-,N 
+Faculty Buildings,building,faculty,-,Y 
+Faculty Building in,building,faculty,in,N 
+Faculty Buildings in,building,faculty,in,Y 
+Faculty Building near,building,faculty,near,N 
+Faculty Buildings near,building,faculty,near,Y 
+Farm Building,building,farm_auxiliary,-,N 
+Farm Buildings,building,farm_auxiliary,-,Y 
+Farm Building in,building,farm_auxiliary,in,N 
+Farm Buildings in,building,farm_auxiliary,in,Y 
+Farm Building near,building,farm_auxiliary,near,N 
+Farm Buildings near,building,farm_auxiliary,near,Y 
+Farm Building,building,farm,-,N 
+Farm Buildings,building,farm,-,Y 
+Farm Building in,building,farm,in,N 
+Farm Buildings in,building,farm,in,Y 
+Farm Building near,building,farm,near,N 
+Farm Buildings near,building,farm,near,Y 
+Flat,building,flats,-,N 
+Flats,building,flats,-,Y 
+Flat in,building,flats,in,N 
+Flats in,building,flats,in,Y 
+Flat near,building,flats,near,N 
+Flats near,building,flats,near,Y 
+Glass House,building,greenhouse,-,N 
+Glass Houses,building,greenhouse,-,Y 
+Glass House in,building,greenhouse,in,N 
+Glass Houses in,building,greenhouse,in,Y 
+Glass House near,building,greenhouse,near,N 
+Glass Houses near,building,greenhouse,near,Y 
+Glasshouse,building,greenhouse,-,N 
+Glasshouses,building,greenhouse,-,Y 
+Glasshouse in,building,greenhouse,in,N 
+Glasshouses in,building,greenhouse,in,Y 
+Glasshouse near,building,greenhouse,near,N 
+Glasshouses near,building,greenhouse,near,Y 
+Green House,building,greenhouse,-,N 
+Green Houses,building,greenhouse,-,Y 
+Green House in,building,greenhouse,in,N 
+Green Houses in,building,greenhouse,in,Y 
+Green House near,building,greenhouse,near,N 
+Green Houses near,building,greenhouse,near,Y 
+Greenhouse,building,greenhouse,-,N 
+Greenhouses,building,greenhouse,-,Y 
+Greenhouse in,building,greenhouse,in,N 
+Greenhouses in,building,greenhouse,in,Y 
+Greenhouse near,building,greenhouse,near,N 
+Greenhouses near,building,greenhouse,near,Y 
+Garage,building,garage,-,N 
+Garages,building,garage,-,Y 
+Garage in,building,garage,in,N 
+Garages in,building,garage,in,Y 
+Garage near,building,garage,near,N 
+Garages near,building,garage,near,Y 
+Hall,building,hall,-,N 
+Halls,building,hall,-,Y 
+Hall in,building,hall,in,N 
+Halls in,building,hall,in,Y 
+Hall near,building,hall,near,N 
+Halls near,building,hall,near,Y 
+Hospital Building,building,hospital,-,N 
+Hospital Buildings,building,hospital,-,Y 
+Hospital Building in,building,hospital,in,N 
+Hospital Buildings in,building,hospital,in,Y 
+Hospital Building near,building,hospital,near,N 
+Hospital Buildings near,building,hospital,near,Y 
+Hotel,building,hotel,-,N 
+Hotels,building,hotel,-,Y 
+Hotel in,building,hotel,in,N 
+Hotels in,building,hotel,in,Y 
+Hotel near,building,hotel,near,N 
+Hotels near,building,hotel,near,Y 
+House,building,house,-,N 
+Houses,building,house,-,Y 
+House in,building,house,in,N 
+Houses in,building,house,in,Y 
+House near,building,house,near,N 
+Houses near,building,house,near,Y 
+Industrial Building,building,industrial,-,N 
+Industrial Buildings,building,industrial,-,Y 
+Industrial Building in,building,industrial,in,N 
+Industrial Buildings in,building,industrial,in,Y 
+Industrial Building near,building,industrial,near,N 
+Industrial Buildings near,building,industrial,near,Y 
+Mosque,building,mosque,-,N 
+Mosques,building,mosque,-,Y 
+Mosque in,building,mosque,in,N 
+Mosques in,building,mosque,in,Y 
+Mosque near,building,mosque,near,N 
+Mosques near,building,mosque,near,Y 
+Office Building,building,office,-,N 
+Office Buildings,building,office,-,Y 
+Office Building in,building,office,in,N 
+Office Buildings in,building,office,in,Y 
+Office Building near,building,office,near,N 
+Office Buildings near,building,office,near,Y 
+Public Building,building,public,-,N 
+Public Buildings,building,public,-,Y 
+Public Building in,building,public,in,N 
+Public Buildings in,building,public,in,Y 
+Public Building near,building,public,near,N 
+Public Buildings near,building,public,near,Y 
+Residential Building,building,residential,-,N 
+Residential Buildings,building,residential,-,Y 
+Residential Building in,building,residential,in,N 
+Residential Buildings in,building,residential,in,Y 
+Residential Building near,building,residential,near,N 
+Residential Buildings near,building,residential,near,Y 
+Retail Building,building,retail,-,N 
+Retail Buildings,building,retail,-,Y 
+Retail Building in,building,retail,in,N 
+Retail Buildings in,building,retail,in,Y 
+Retail Building near,building,retail,near,N 
+Retail Buildings near,building,retail,near,Y 
+School Building,building,school,-,N 
+School Buildings,building,school,-,Y 
+School Building in,building,school,in,N 
+School Buildings in,building,school,in,Y 
+School Building near,building,school,near,N 
+School Buildings near,building,school,near,Y 
+Shop,building,shop,-,N 
+Shops,building,shop,-,Y 
+Shop in,building,shop,in,N 
+Shops in,building,shop,in,Y 
+Shop near,building,shop,near,N 
+Shops near,building,shop,near,Y 
+Stadium,building,stadium,-,N 
+Stadiums,building,stadium,-,Y 
+Stadium in,building,stadium,in,N 
+Stadiums in,building,stadium,in,Y 
+Stadium near,building,stadium,near,N 
+Stadiums near,building,stadium,near,Y 
+Synagogue,building,synagogue,-,N 
+Synagogues,building,synagogue,-,Y 
+Synagogue in,building,synagogue,in,N 
+Synagogues in,building,synagogue,in,Y 
+Synagogue near,building,synagogue,near,N 
+Synagogues near,building,synagogue,near,Y 
+Store,building,store,-,N 
+Stores,building,store,-,Y 
+Store in,building,store,in,N 
+Stores in,building,store,in,Y 
+Store near,building,store,near,N 
+Stores near,building,store,near,Y 
+Terrace,building,terrace,-,N 
+Terraces,building,terrace,-,Y 
+Terrace in,building,terrace,in,N 
+Terraces in,building,terrace,in,Y 
+Terrace near,building,terrace,near,N 
+Terraces near,building,terrace,near,Y 
+Tower,building,tower,-,N 
+Towers,building,tower,-,Y 
+Tower in,building,tower,in,N 
+Towers in,building,tower,in,Y 
+Tower near,building,tower,near,N 
+Towers near,building,tower,near,Y 
+Railway Station,building,train_station,-,N 
+Railway Stations,building,train_station,-,Y 
+Railway Station in,building,train_station,in,N 
+Railway Stations in,building,train_station,in,Y 
+Railway Station near,building,train_station,near,N 
+Railway Stations near,building,train_station,near,Y 
+Station,building,train_station,-,N 
+Stations,building,train_station,-,Y 
+Station in,building,train_station,in,N 
+Stations in,building,train_station,in,Y 
+Station near,building,train_station,near,N 
+Stations near,building,train_station,near,Y 
+University Building,building,university,-,N 
+University Buildings,building,university,-,Y 
+University Building in,building,university,in,N 
+University Buildings in,building,university,in,Y 
+University Building near,building,university,near,N 
+University Buildings near,building,university,near,Y 
+Building,building,yes,-,N 
+Buildings,building,yes,-,Y 
+Building in,building,yes,in,N 
+Buildings in,building,yes,in,Y 
+Building near,building,yes,near,N 
+Buildings near,building,yes,near,Y 
+Bridleway,highway,bridleway,-,N 
+Bridleways,highway,bridleway,-,Y 
+Bridleway in,highway,bridleway,in,N 
+Bridleways in,highway,bridleway,in,Y 
+Bridleway near,highway,bridleway,near,N 
+Bridleways near,highway,bridleway,near,Y 
+Guided Bus Lane,highway,bus_guideway,-,N 
+Guided Bus Lanes,highway,bus_guideway,-,Y 
+Guided Bus Lane in,highway,bus_guideway,in,N 
+Guided Bus Lanes in,highway,bus_guideway,in,Y 
+Guided Bus Lane near,highway,bus_guideway,near,N 
+Guided Bus Lanes near,highway,bus_guideway,near,Y 
+Bus Stop,highway,bus_stop,-,N 
+Bus Stops,highway,bus_stop,-,Y 
+Bus Stop in,highway,bus_stop,in,N 
+Bus Stops in,highway,bus_stop,in,Y 
+Bus Stop near,highway,bus_stop,near,N 
+Bus Stops near,highway,bus_stop,near,Y 
+Byway,highway,byway,-,N 
+Byways,highway,byway,-,Y 
+Byway in,highway,byway,in,N 
+Byways in,highway,byway,in,Y 
+Byway near,highway,byway,near,N 
+Byways near,highway,byway,near,Y 
+Changing table,changing_table,yes,-,N 
+Changing tables,changing_table,yes,-,Y 
+Changing table in,changing_table,yes,in,N 
+Changing tables in,changing_table,yes,in,Y 
+Changing table near,changing_table,yes,near,N 
+Changing tables near,changing_table,yes,near,Y 
+Brewery,craft,brewery,-,N 
+Brewerys,craft,brewery,-,Y 
+Breweries,craft,brewery,-,Y 
+Brewery in,craft,brewery,in,N 
+Brewerys in,craft,brewery,in,Y 
+Breweries in,craft,brewery,in,Y 
+Brewery near,craft,brewery,near,N 
+Brewerys near,craft,brewery,near,Y 
+Breweries near,craft,brewery,near,Y 
+Carpenter,craft,carpenter,-,N 
+Carpenters,craft,carpenter,-,Y 
+Carpenter in,craft,carpenter,in,N 
+Carpenters in,craft,carpenter,in,Y 
+Carpenter near,craft,carpenter,near,N 
+Carpenters near,craft,carpenter,near,Y 
+Distillery,craft,distillery,-,N 
+Distillerys,craft,distillery,-,Y 
+Distilleries,craft,distillery,-,Y 
+Distillery in,craft,distillery,in,N 
+Distillerys in,craft,distillery,in,Y 
+Distilleries in,craft,distillery,in,Y 
+Distillery near,craft,distillery,near,N 
+Distillerys near,craft,distillery,near,Y 
+Distilleries near,craft,distillery,near,Y 
+Key Cutter,craft,key_cutter,-,N 
+Key Cutters,craft,key_cutter,-,Y 
+Key Cutter in,craft,key_cutter,in,N 
+Key Cutters in,craft,key_cutter,in,Y 
+Key Cutter near,craft,key_cutter,near,N 
+Key Cutters near,craft,key_cutter,near,Y 
+Key Duplication,craft,key_cutter,-,N 
+Key Duplication in,craft,key_cutter,in,N 
+Key Duplication near,craft,key_cutter,near,N 
+Electrician,craft,electrician,-,N 
+Electricians,craft,electrician,-,Y 
+Electrician in,craft,electrician,in,N 
+Electricians in,craft,electrician,in,Y 
+Electrician near,craft,electrician,near,N 
+Electricians near,craft,electrician,near,Y 
+Photographer,craft,photographer,-,N 
+Photographers,craft,photographer,-,Y 
+Photographer in,craft,photographer,in,N 
+Photographers in,craft,photographer,in,Y 
+Photographer near,craft,photographer,near,N 
+Photographers near,craft,photographer,near,Y 
+Shoe Maker,craft,shoemaker,-,N 
+Shoe Makers,craft,shoemaker,-,Y 
+Shoe Maker in,craft,shoemaker,in,N 
+Shoe Makers in,craft,shoemaker,in,Y 
+Shoe Maker near,craft,shoemaker,near,N 
+Shoe Makers near,craft,shoemaker,near,Y 
+Shoemaker,craft,shoemaker,-,N 
+Shoemakers,craft,shoemaker,-,Y 
+Shoemaker in,craft,shoemaker,in,N 
+Shoemakers in,craft,shoemaker,in,Y 
+Shoemaker near,craft,shoemaker,near,N 
+Shoemakers near,craft,shoemaker,near,Y 
+Tailor,craft,tailor,-,N 
+Tailors,craft,tailor,-,Y 
+Tailor in,craft,tailor,in,N 
+Tailors in,craft,tailor,in,Y 
+Tailor near,craft,tailor,near,N 
+Tailors near,craft,tailor,near,Y 
+Winery,craft,winery,-,N 
+Winerys,craft,winery,-,Y 
+Wineries,craft,winery,-,Y 
+Winery in,craft,winery,in,N 
+Winerys in,craft,winery,in,Y 
+Wineries in,craft,winery,in,Y 
+Winery near,craft,winery,near,N 
+Winerys near,craft,winery,near,Y 
+Wineries near,craft,winery,near,Y 
+Ambulance Station,emergency,ambulance_station,-,N 
+Ambulance Stations,emergency,ambulance_station,-,Y 
+Ambulance Station in,emergency,ambulance_station,in,N 
+Ambulance Stations in,emergency,ambulance_station,in,Y 
+Ambulance Station near,emergency,ambulance_station,near,N 
+Ambulance Stations near,emergency,ambulance_station,near,Y 
+Defibrillator,emergency,defibrillator,-,N 
+Defibrillators,emergency,defibrillator,-,Y 
+Defibrillator in,emergency,defibrillator,in,N 
+Defibrillators in,emergency,defibrillator,in,Y 
+Defibrillator near,emergency,defibrillator,near,N 
+Defibrillators near,emergency,defibrillator,near,Y 
+Fire Hydrant,emergency,fire_hydrant,-,N 
+Fire Hydrants,emergency,fire_hydrant,-,Y 
+Fire Hydrant in,emergency,fire_hydrant,in,N 
+Fire Hydrants in,emergency,fire_hydrant,in,Y 
+Fire Hydrant near,emergency,fire_hydrant,near,N 
+Fire Hydrants near,emergency,fire_hydrant,near,Y 
+Emergency Phone,emergency,phone,-,N 
+Emergency Phones,emergency,phone,-,Y 
+Emergency Phone in,emergency,phone,in,N 
+Emergency Phones in,emergency,phone,in,Y 
+Emergency Phone near,emergency,phone,near,N 
+Emergency Phones near,emergency,phone,near,Y 
+Highway under Construction,highway,construction,-,N 
+Highways under Construction,highway,construction,-,Y 
+Highway under Construction in,highway,construction,in,N 
+Highways under Construction in,highway,construction,in,Y 
+Highway under Construction near,highway,construction,near,N 
+Highways under Construction near,highway,construction,near,Y 
+Cycle Path,highway,cycleway,-,N 
+Cycle Paths,highway,cycleway,-,Y 
+Cycle Path in,highway,cycleway,in,N 
+Cycle Paths in,highway,cycleway,in,Y 
+Cycle Path near,highway,cycleway,near,N 
+Cycle Paths near,highway,cycleway,near,Y 
+Distance Marker,highway,distance_marker,-,N 
+Distance Markers,highway,distance_marker,-,Y 
+Distance Marker in,highway,distance_marker,in,N 
+Distance Markers in,highway,distance_marker,in,Y 
+Distance Marker near,highway,distance_marker,near,N 
+Distance Markers near,highway,distance_marker,near,Y 
+Emergency Access Point,highway,emergency_access_point,-,N 
+Emergency Access Points,highway,emergency_access_point,-,Y 
+Emergency Access Point in,highway,emergency_access_point,in,N 
+Emergency Access Points in,highway,emergency_access_point,in,Y 
+Emergency Access Point near,highway,emergency_access_point,near,N 
+Emergency Access Points near,highway,emergency_access_point,near,Y 
+Radar Trap,highway,speed_camera,-,N 
+Radar Traps,highway,speed_camera,-,Y 
+Radar Trap in,highway,speed_camera,in,N 
+Radar Traps in,highway,speed_camera,in,Y 
+Radar Trap near,highway,speed_camera,near,N 
+Radar Traps near,highway,speed_camera,near,Y 
+Speed Camera,highway,speed_camera,-,N 
+Speed Cameras,highway,speed_camera,-,Y 
+Speed Camera in,highway,speed_camera,in,N 
+Speed Cameras in,highway,speed_camera,in,Y 
+Speed Camera near,highway,speed_camera,near,N 
+Speed Cameras near,highway,speed_camera,near,Y 
+Speed Trap,highway,speed_camera,-,N 
+Speed Traps,highway,speed_camera,-,Y 
+Speed Trap in,highway,speed_camera,in,N 
+Speed Traps in,highway,speed_camera,in,Y 
+Speed Trap near,highway,speed_camera,near,N 
+Speed Traps near,highway,speed_camera,near,Y 
+Traffic Enforcement Camera,highway,speed_camera,-,N 
+Traffic Enforcement Cameras,highway,speed_camera,-,Y 
+Traffic Enforcement Camera in,highway,speed_camera,in,N 
+Traffic Enforcement Cameras in,highway,speed_camera,in,Y 
+Traffic Enforcement Camera near,highway,speed_camera,near,N 
+Traffic Enforcement Cameras near,highway,speed_camera,near,Y 
+Stoplights,highway,traffic_signals,-,Y 
+Stoplights in,highway,traffic_signals,in,Y 
+Stoplights near,highway,traffic_signals,near,Y 
+Traffic Lights,highway,traffic_signals,-,Y 
+Traffic Lights in,highway,traffic_signals,in,Y 
+Traffic Lights near,highway,traffic_signals,near,Y 
+Traffic Signals,highway,traffic_signals,-,Y 
+Traffic Signals in,highway,traffic_signals,in,Y 
+Traffic Signals near,highway,traffic_signals,near,Y 
+Footpath,highway,footway,-,N 
+Footpaths,highway,footway,-,Y 
+Footpath in,highway,footway,in,N 
+Footpaths in,highway,footway,in,Y 
+Footpath near,highway,footway,near,N 
+Footpaths near,highway,footway,near,Y 
+Ford,highway,ford,-,N 
+Fords,highway,ford,-,Y 
+Ford in,highway,ford,in,N 
+Fords in,highway,ford,in,Y 
+Ford near,highway,ford,near,N 
+Fords near,highway,ford,near,Y 
+Gate,highway,gate,-,N 
+Gates,highway,gate,-,Y 
+Gate in,highway,gate,in,N 
+Gates in,highway,gate,in,Y 
+Gate near,highway,gate,near,N 
+Gates near,highway,gate,near,Y 
+Living Street,highway,living_street,-,N 
+Living Streets,highway,living_street,-,Y 
+Living Street in,highway,living_street,in,N 
+Living Streets in,highway,living_street,in,Y 
+Living Street near,highway,living_street,near,N 
+Living Streets near,highway,living_street,near,Y 
+Minor Road,highway,minor,-,N 
+Minor Roads,highway,minor,-,Y 
+Minor Road in,highway,minor,in,N 
+Minor Roads in,highway,minor,in,Y 
+Minor Road near,highway,minor,near,N 
+Minor Roads near,highway,minor,near,Y 
+Motorway,highway,motorway,-,N 
+Motorways,highway,motorway,-,Y 
+Motorway in,highway,motorway,in,N 
+Motorways in,highway,motorway,in,Y 
+Motorway near,highway,motorway,near,N 
+Motorways near,highway,motorway,near,Y 
+Motorway Junction,highway,motorway_junction,-,N 
+Motorway Junctions,highway,motorway_junction,-,Y 
+Motorway Junction in,highway,motorway_junction,in,N 
+Motorway Junctions in,highway,motorway_junction,in,Y 
+Motorway Junction near,highway,motorway_junction,near,N 
+Motorway Junctions near,highway,motorway_junction,near,Y 
+Motorway Road,highway,motorway_link,-,N 
+Motorway Roads,highway,motorway_link,-,Y 
+Motorway Road in,highway,motorway_link,in,N 
+Motorway Roads in,highway,motorway_link,in,Y 
+Motorway Road near,highway,motorway_link,near,N 
+Motorway Roads near,highway,motorway_link,near,Y 
+Path,highway,path,-,N 
+Paths,highway,path,-,Y 
+Path in,highway,path,in,N 
+Paths in,highway,path,in,Y 
+Path near,highway,path,near,N 
+Paths near,highway,path,near,Y 
+Pedestrian Way,highway,pedestrian,-,N 
+Pedestrian Ways,highway,pedestrian,-,Y 
+Pedestrian Way in,highway,pedestrian,in,N 
+Pedestrian Ways in,highway,pedestrian,in,Y 
+Pedestrian Way near,highway,pedestrian,near,N 
+Pedestrian Ways near,highway,pedestrian,near,Y 
+Platform,highway,platform,-,N 
+Platforms,highway,platform,-,Y 
+Platform in,highway,platform,in,N 
+Platforms in,highway,platform,in,Y 
+Platform near,highway,platform,near,N 
+Platforms near,highway,platform,near,Y 
+Primary Road,highway,primary,-,N 
+Primary Roads,highway,primary,-,Y 
+Primary Road in,highway,primary,in,N 
+Primary Roads in,highway,primary,in,Y 
+Primary Road near,highway,primary,near,N 
+Primary Roads near,highway,primary,near,Y 
+Primary Road,highway,primary_link,-,N 
+Primary Roads,highway,primary_link,-,Y 
+Primary Road in,highway,primary_link,in,N 
+Primary Roads in,highway,primary_link,in,Y 
+Primary Road near,highway,primary_link,near,N 
+Primary Roads near,highway,primary_link,near,Y 
+Raceway,highway,raceway,-,N 
+Raceways,highway,raceway,-,Y 
+Raceway in,highway,raceway,in,N 
+Raceways in,highway,raceway,in,Y 
+Raceway near,highway,raceway,near,N 
+Raceways near,highway,raceway,near,Y 
+Residential,highway,residential,-,N 
+Residentials,highway,residential,-,Y 
+Residential in,highway,residential,in,N 
+Residentials in,highway,residential,in,Y 
+Residential near,highway,residential,near,N 
+Residentials near,highway,residential,near,Y 
+Residential Road,highway,residential,-,N 
+Residential Roads,highway,residential,-,Y 
+Residential Road in,highway,residential,in,N 
+Residential Roads in,highway,residential,in,Y 
+Residential Road near,highway,residential,near,N 
+Residential Roads near,highway,residential,near,Y 
+Rest Area,highway,rest_area,-,N 
+Rest Stop,highway,rest_area,-,N 
+Road,highway,road,-,N 
+Roads,highway,road,-,Y 
+Road in,highway,road,in,N 
+Roads in,highway,road,in,Y 
+Road near,highway,road,near,N 
+Roads near,highway,road,near,Y 
+Secondary Road,highway,secondary,-,N 
+Secondary Roads,highway,secondary,-,Y 
+Secondary Road in,highway,secondary,in,N 
+Secondary Roads in,highway,secondary,in,Y 
+Secondary Road near,highway,secondary,near,N 
+Secondary Roads near,highway,secondary,near,Y 
+Secondary Road,highway,secondary_link,-,N 
+Secondary Roads,highway,secondary_link,-,Y 
+Secondary Road in,highway,secondary_link,in,N 
+Secondary Roads in,highway,secondary_link,in,Y 
+Secondary Road near,highway,secondary_link,near,N 
+Secondary Roads near,highway,secondary_link,near,Y 
+Service Road,highway,service,-,N 
+Service Roads,highway,service,-,Y 
+Service Road in,highway,service,in,N 
+Service Roads in,highway,service,in,Y 
+Service Road near,highway,service,near,N 
+Service Roads near,highway,service,near,Y 
+Service Area,highway,services,-,N 
+Motorway Services,highway,services,-,N 
+Motorway Services,highway,services,-,Y 
+Motorway Services in,highway,services,in,N 
+Motorway Services in,highway,services,in,Y 
+Motorway Services near,highway,services,near,N 
+Motorway Services near,highway,services,near,Y 
+Steps,highway,steps,-,N 
+Steps,highway,steps,-,Y 
+Steps in,highway,steps,in,N 
+Steps in,highway,steps,in,Y 
+Steps near,highway,steps,near,N 
+Steps near,highway,steps,near,Y 
+Stile,highway,stile,-,N 
+Stiles,highway,stile,-,Y 
+Stile in,highway,stile,in,N 
+Stiles in,highway,stile,in,Y 
+Stile near,highway,stile,near,N 
+Stiles near,highway,stile,near,Y 
+Tertiary Road,highway,tertiary,-,N 
+Tertiary Roads,highway,tertiary,-,Y 
+Tertiary Road in,highway,tertiary,in,N 
+Tertiary Roads in,highway,tertiary,in,Y 
+Tertiary Road near,highway,tertiary,near,N 
+Tertiary Roads near,highway,tertiary,near,Y 
+Track,highway,track,-,N 
+Tracks,highway,track,-,Y 
+Track in,highway,track,in,N 
+Tracks in,highway,track,in,Y 
+Track near,highway,track,near,N 
+Tracks near,highway,track,near,Y 
+Trail,highway,trail,-,N 
+Trails,highway,trail,-,Y 
+Trail in,highway,trail,in,N 
+Trails in,highway,trail,in,Y 
+Trail near,highway,trail,near,N 
+Trails near,highway,trail,near,Y 
+Trunk Road,highway,trunk,-,N 
+Trunk Roads,highway,trunk,-,Y 
+Trunk Road in,highway,trunk,in,N 
+Trunk Roads in,highway,trunk,in,Y 
+Trunk Road near,highway,trunk,near,N 
+Trunk Roads near,highway,trunk,near,Y 
+Trunk Road,highway,trunk_link,-,N 
+Trunk Roads,highway,trunk_link,-,Y 
+Trunk Road in,highway,trunk_link,in,N 
+Trunk Roads in,highway,trunk_link,in,Y 
+Trunk Road near,highway,trunk_link,near,N 
+Trunk Roads near,highway,trunk_link,near,Y 
+Unclassified Road,highway,unclassified,-,N 
+Unclassified Roads,highway,unclassified,-,Y 
+Unclassified Road in,highway,unclassified,in,N 
+Unclassified Roads in,highway,unclassified,in,Y 
+Unclassified Road near,highway,unclassified,near,N 
+Unclassified Roads near,highway,unclassified,near,Y 
+Unsurfaced Road,highway,unsurfaced,-,N 
+Unsurfaced Roads,highway,unsurfaced,-,Y 
+Unsurfaced Road in,highway,unsurfaced,in,N 
+Unsurfaced Roads in,highway,unsurfaced,in,Y 
+Unsurfaced Road near,highway,unsurfaced,near,N 
+Unsurfaced Roads near,highway,unsurfaced,near,Y 
+Archaeological Site,historic,archaeological_site,-,N 
+Archaeological Sites,historic,archaeological_site,-,Y 
+Archaeological Site in,historic,archaeological_site,in,N 
+Archaeological Sites in,historic,archaeological_site,in,Y 
+Archaeological Site near,historic,archaeological_site,near,N 
+Archaeological Sites near,historic,archaeological_site,near,Y 
+Battlefield,historic,battlefield,-,N 
+Battlefields,historic,battlefield,-,Y 
+Battlefield in,historic,battlefield,in,N 
+Battlefields in,historic,battlefield,in,Y 
+Battlefield near,historic,battlefield,near,N 
+Battlefields near,historic,battlefield,near,Y 
+Boundary Stone,historic,boundary_stone,-,N 
+Boundary Stones,historic,boundary_stone,-,Y 
+Boundary Stone in,historic,boundary_stone,in,N 
+Boundary Stones in,historic,boundary_stone,in,Y 
+Boundary Stone near,historic,boundary_stone,near,N 
+Boundary Stones near,historic,boundary_stone,near,Y 
+Historic Building,historic,building,-,N 
+Historic Buildings,historic,building,-,Y 
+Historic Building in,historic,building,in,N 
+Historic Buildings in,historic,building,in,Y 
+Historic Building near,historic,building,near,N 
+Historic Buildings near,historic,building,near,Y 
+Castle,historic,castle,-,N 
+Castles,historic,castle,-,Y 
+Castle in,historic,castle,in,N 
+Castles in,historic,castle,in,Y 
+Castle near,historic,castle,near,N 
+Castles near,historic,castle,near,Y 
+Manor,historic,manor,-,N 
+Manors,historic,manor,-,Y 
+Manor in,historic,manor,in,N 
+Manors in,historic,manor,in,Y 
+Manor near,historic,manor,near,N 
+Manors near,historic,manor,near,Y 
+Memorial,historic,memorial,-,N 
+Memorials,historic,memorial,-,Y 
+Memorial in,historic,memorial,in,N 
+Memorials in,historic,memorial,in,Y 
+Memorial near,historic,memorial,near,N 
+Memorials near,historic,memorial,near,Y 
+Mine,historic,mine,-,N 
+Mines,historic,mine,-,Y 
+Mine in,historic,mine,in,N 
+Mines in,historic,mine,in,Y 
+Mine near,historic,mine,near,N 
+Mines near,historic,mine,near,Y 
+Monument,historic,monument,-,N 
+Monuments,historic,monument,-,Y 
+Monument in,historic,monument,in,N 
+Monuments in,historic,monument,in,Y 
+Monument near,historic,monument,near,N 
+Monuments near,historic,monument,near,Y 
+Ruin,historic,ruins,-,N 
+Ruins,historic,ruins,-,Y 
+Ruin in,historic,ruins,in,N 
+Ruins in,historic,ruins,in,Y 
+Ruin near,historic,ruins,near,N 
+Ruins near,historic,ruins,near,Y 
+Wayside Cross,historic,wayside_cross,-,N 
+Wayside Crosses,historic,wayside_cross,-,Y 
+Wayside Cross in,historic,wayside_cross,in,N 
+Wayside Crosses in,historic,wayside_cross,in,Y 
+Wayside Cross near,historic,wayside_cross,near,N 
+Wayside Crosses near,historic,wayside_cross,near,Y 
+Wayside Shrine,historic,wayside_shrine,-,N 
+Wayside Shrines,historic,wayside_shrine,-,Y 
+Wayside Shrine in,historic,wayside_shrine,in,N 
+Wayside Shrines in,historic,wayside_shrine,in,Y 
+Wayside Shrine near,historic,wayside_shrine,near,N 
+Wayside Shrines near,historic,wayside_shrine,near,Y 
+Wreck,historic,wreck,-,N 
+Wrecks,historic,wreck,-,Y 
+Wreck in,historic,wreck,in,N 
+Wrecks in,historic,wreck,in,Y 
+Wreck near,historic,wreck,near,N 
+Wrecks near,historic,wreck,near,Y 
+Allotment,landuse,allotments,-,N 
+Allotments,landuse,allotments,-,Y 
+Allotment in,landuse,allotments,in,N 
+Allotments in,landuse,allotments,in,Y 
+Allotment near,landuse,allotments,near,N 
+Allotments near,landuse,allotments,near,Y 
+Roundabout,junction,roundabout,-,N 
+Roundabouts,junction,roundabout,-,Y 
+Roundabout in,junction,roundabout,in,N 
+Roundabouts in,junction,roundabout,in,Y 
+Roundabout near,junction,roundabout,near,N 
+Roundabouts near,junction,roundabout,near,Y 
+Basin,landuse,basin,-,N 
+Basins,landuse,basin,-,Y 
+Basin in,landuse,basin,in,N 
+Basins in,landuse,basin,in,Y 
+Basin near,landuse,basin,near,N 
+Basins near,landuse,basin,near,Y 
+Brownfield Land,landuse,brownfield,-,N 
+Brownfield Lands,landuse,brownfield,-,Y 
+Brownfield Land in,landuse,brownfield,in,N 
+Brownfield Lands in,landuse,brownfield,in,Y 
+Brownfield Land near,landuse,brownfield,near,N 
+Brownfield Lands near,landuse,brownfield,near,Y 
+Cemetery,landuse,cemetery,-,N 
+Cemeterys,landuse,cemetery,-,Y 
+Cemeteries,landuse,cemetery,-,Y 
+Cemetery in,landuse,cemetery,in,N 
+Cemeterys in,landuse,cemetery,in,Y 
+Cemeteries in,landuse,cemetery,in,Y 
+Cemetery near,landuse,cemetery,near,N 
+Cemeterys near,landuse,cemetery,near,Y 
+Cemeteries near,landuse,cemetery,near,Y 
+Commercial Area,landuse,commercial,-,N 
+Commercial Areas,landuse,commercial,-,Y 
+Commercial Area in,landuse,commercial,in,N 
+Commercial Areas in,landuse,commercial,in,Y 
+Commercial Area near,landuse,commercial,near,N 
+Commercial Areas near,landuse,commercial,near,Y 
+Conservation,landuse,conservation,-,N 
+Conservations,landuse,conservation,-,Y 
+Conservation in,landuse,conservation,in,N 
+Conservations in,landuse,conservation,in,Y 
+Conservation near,landuse,conservation,near,N 
+Conservations near,landuse,conservation,near,Y 
+Construction,landuse,construction,-,N 
+Constructions,landuse,construction,-,Y 
+Construction in,landuse,construction,in,N 
+Constructions in,landuse,construction,in,Y 
+Construction near,landuse,construction,near,N 
+Constructions near,landuse,construction,near,Y 
+Farm,landuse,farm,-,N 
+Farms,landuse,farm,-,Y 
+Farm in,landuse,farm,in,N 
+Farms in,landuse,farm,in,Y 
+Farm near,landuse,farm,near,N 
+Farms near,landuse,farm,near,Y 
+Farmland,landuse,farmland,-,N 
+Farmlands,landuse,farmland,-,Y 
+Farmland in,landuse,farmland,in,N 
+Farmlands in,landuse,farmland,in,Y 
+Farmland near,landuse,farmland,near,N 
+Farmlands near,landuse,farmland,near,Y 
+Farmyard,landuse,farmyard,-,N 
+Farmyards,landuse,farmyard,-,Y 
+Farmyard in,landuse,farmyard,in,N 
+Farmyards in,landuse,farmyard,in,Y 
+Farmyard near,landuse,farmyard,near,N 
+Farmyards near,landuse,farmyard,near,Y 
+Forest,landuse,forest,-,N 
+Forests,landuse,forest,-,Y 
+Forest in,landuse,forest,in,N 
+Forests in,landuse,forest,in,Y 
+Forest near,landuse,forest,near,N 
+Forests near,landuse,forest,near,Y 
+Wood,landuse,forest,-,N 
+Woods,landuse,forest,-,Y 
+Wood in,landuse,forest,in,N 
+Woods in,landuse,forest,in,Y 
+Wood near,landuse,forest,near,N 
+Woods near,landuse,forest,near,Y 
+Grass,landuse,grass,-,N 
+Grasses,landuse,grass,-,Y 
+Grass in,landuse,grass,in,N 
+Grasses in,landuse,grass,in,Y 
+Grass near,landuse,grass,near,N 
+Grasses near,landuse,grass,near,Y 
+Greenfield Land,landuse,greenfield,-,N 
+Greenfield Lands,landuse,greenfield,-,Y 
+Greenfield Land in,landuse,greenfield,in,N 
+Greenfield Lands in,landuse,greenfield,in,Y 
+Greenfield Land near,landuse,greenfield,near,N 
+Greenfield Lands near,landuse,greenfield,near,Y 
+Industrial Area,landuse,industrial,-,N 
+Industrial Areas,landuse,industrial,-,Y 
+Industrial Area in,landuse,industrial,in,N 
+Industrial Areas in,landuse,industrial,in,Y 
+Industrial Area near,landuse,industrial,near,N 
+Industrial Areas near,landuse,industrial,near,Y 
+Landfill,landuse,landfill,-,N 
+Landfills,landuse,landfill,-,Y 
+Landfill in,landuse,landfill,in,N 
+Landfills in,landuse,landfill,in,Y 
+Landfill near,landuse,landfill,near,N 
+Landfills near,landuse,landfill,near,Y 
+Meadow,landuse,meadow,-,N 
+Meadows,landuse,meadow,-,Y 
+Meadow in,landuse,meadow,in,N 
+Meadows in,landuse,meadow,in,Y 
+Meadow near,landuse,meadow,near,N 
+Meadows near,landuse,meadow,near,Y 
+Military Area,landuse,military,-,N 
+Military Areas,landuse,military,-,Y 
+Military Area in,landuse,military,in,N 
+Military Areas in,landuse,military,in,Y 
+Military Area near,landuse,military,near,N 
+Military Areas near,landuse,military,near,Y 
+Piste,landuse,piste,-,N 
+Pistes,landuse,piste,-,Y 
+Piste in,landuse,piste,in,N 
+Pistes in,landuse,piste,in,Y 
+Piste near,landuse,piste,near,N 
+Pistes near,landuse,piste,near,Y 
+Quarry,landuse,quarry,-,N 
+Quarrys,landuse,quarry,-,Y 
+Quarries,landuse,quarry,-,Y 
+Quarry in,landuse,quarry,in,N 
+Quarrys in,landuse,quarry,in,Y 
+Quarries in,landuse,quarry,in,Y 
+Quarry near,landuse,quarry,near,N 
+Quarrys near,landuse,quarry,near,Y 
+Quarries near,landuse,quarry,near,Y 
+Railway,landuse,railway,-,N 
+Railways,landuse,railway,-,Y 
+Railway in,landuse,railway,in,N 
+Railways in,landuse,railway,in,Y 
+Railway near,landuse,railway,near,N 
+Railways near,landuse,railway,near,Y 
+Recreation Ground,landuse,recreation_ground,-,N 
+Recreation Grounds,landuse,recreation_ground,-,Y 
+Recreation Ground in,landuse,recreation_ground,in,N 
+Recreation Grounds in,landuse,recreation_ground,in,Y 
+Recreation Ground near,landuse,recreation_ground,near,N 
+Recreation Grounds near,landuse,recreation_ground,near,Y 
+Reservoir,landuse,reservoir,-,N 
+Reservoirs,landuse,reservoir,-,Y 
+Reservoir in,landuse,reservoir,in,N 
+Reservoirs in,landuse,reservoir,in,Y 
+Reservoir near,landuse,reservoir,near,N 
+Reservoirs near,landuse,reservoir,near,Y 
+Residential Area,landuse,residential,-,N 
+Residential Areas,landuse,residential,-,Y 
+Residential Area in,landuse,residential,in,N 
+Residential Areas in,landuse,residential,in,Y 
+Residential Area near,landuse,residential,near,N 
+Residential Areas near,landuse,residential,near,Y 
+Retail,landuse,retail,-,N 
+Retails,landuse,retail,-,Y 
+Retail in,landuse,retail,in,N 
+Retails in,landuse,retail,in,Y 
+Retail near,landuse,retail,near,N 
+Retails near,landuse,retail,near,Y 
+Village Green,landuse,village_green,-,N 
+Village Greens,landuse,village_green,-,Y 
+Village Green in,landuse,village_green,in,N 
+Village Greens in,landuse,village_green,in,Y 
+Village Green near,landuse,village_green,near,N 
+Village Greens near,landuse,village_green,near,Y 
+Vineyard,landuse,vineyard,-,N 
+Vineyards,landuse,vineyard,-,Y 
+Vineyard in,landuse,vineyard,in,N 
+Vineyards in,landuse,vineyard,in,Y 
+Vineyard near,landuse,vineyard,near,N 
+Vineyards near,landuse,vineyard,near,Y 
+Beach Resort,leisure,beach_resort,-,N 
+Beach Resorts,leisure,beach_resort,-,Y 
+Beach Resort in,leisure,beach_resort,in,N 
+Beach Resorts in,leisure,beach_resort,in,Y 
+Beach Resort near,leisure,beach_resort,near,N 
+Beach Resorts near,leisure,beach_resort,near,Y 
+Common Land,leisure,common,-,N 
+Common Lands,leisure,common,-,Y 
+Common Land in,leisure,common,in,N 
+Common Lands in,leisure,common,in,Y 
+Common Land near,leisure,common,near,N 
+Common Lands near,leisure,common,near,Y 
+Fishing Area,leisure,fishing,-,N 
+Fishing Areas,leisure,fishing,-,Y 
+Fishing Area in,leisure,fishing,in,N 
+Fishing Areas in,leisure,fishing,in,Y 
+Fishing Area near,leisure,fishing,near,N 
+Fishing Areas near,leisure,fishing,near,Y 
+Garden,leisure,garden,-,N 
+Gardens,leisure,garden,-,Y 
+Garden in,leisure,garden,in,N 
+Gardens in,leisure,garden,in,Y 
+Garden near,leisure,garden,near,N 
+Gardens near,leisure,garden,near,Y 
+Golf Course,leisure,golf_course,-,N 
+Golf Courses,leisure,golf_course,-,Y 
+Golf Course in,leisure,golf_course,in,N 
+Golf Courses in,leisure,golf_course,in,Y 
+Golf Course near,leisure,golf_course,near,N 
+Golf Courses near,leisure,golf_course,near,Y 
+Hackerspace,leisure,hackerspace,-,N 
+Hackerspaces,leisure,hackerspace,-,Y 
+Hackerspace in,leisure,hackerspace,in,N 
+Hackerspaces in,leisure,hackerspace,in,Y 
+Hackerspace near,leisure,hackerspace,near,N 
+Hackerspaces near,leisure,hackerspace,near,Y 
+Ice Rink,leisure,ice_rink,-,N 
+Ice Rinks,leisure,ice_rink,-,Y 
+Ice Rink in,leisure,ice_rink,in,N 
+Ice Rinks in,leisure,ice_rink,in,Y 
+Ice Rink near,leisure,ice_rink,near,N 
+Ice Rinks near,leisure,ice_rink,near,Y 
+Marina,leisure,marina,-,N 
+Marinas,leisure,marina,-,Y 
+Marina in,leisure,marina,in,N 
+Marinas in,leisure,marina,in,Y 
+Marina near,leisure,marina,near,N 
+Marinas near,leisure,marina,near,Y 
+Miniature Golf,leisure,miniature_golf,-,N 
+Miniature Golfs,leisure,miniature_golf,-,Y 
+Miniature Golf in,leisure,miniature_golf,in,N 
+Miniature Golfs in,leisure,miniature_golf,in,Y 
+Miniature Golf near,leisure,miniature_golf,near,N 
+Miniature Golfs near,leisure,miniature_golf,near,Y 
+Nature Reserve,leisure,nature_reserve,-,N 
+Nature Reserves,leisure,nature_reserve,-,Y 
+Nature Reserve in,leisure,nature_reserve,in,N 
+Nature Reserves in,leisure,nature_reserve,in,Y 
+Nature Reserve near,leisure,nature_reserve,near,N 
+Nature Reserves near,leisure,nature_reserve,near,Y 
+Park,leisure,park,-,N 
+Parks,leisure,park,-,Y 
+Park in,leisure,park,in,N 
+Parks in,leisure,park,in,Y 
+Park near,leisure,park,near,N 
+Parks near,leisure,park,near,Y 
+Sports Pitch,leisure,pitch,-,N 
+Sports Pitches,leisure,pitch,-,Y 
+Sports Pitch in,leisure,pitch,in,N 
+Sports Pitches in,leisure,pitch,in,Y 
+Sports Pitch near,leisure,pitch,near,N 
+Sports Pitches near,leisure,pitch,near,Y 
+Playground,leisure,playground,-,N 
+Playgrounds,leisure,playground,-,Y 
+Playground in,leisure,playground,in,N 
+Playgrounds in,leisure,playground,in,Y 
+Playground near,leisure,playground,near,N 
+Playgrounds near,leisure,playground,near,Y 
+Recreation Ground,leisure,recreation_ground,-,N 
+Recreation Grounds,leisure,recreation_ground,-,Y 
+Recreation Ground in,leisure,recreation_ground,in,N 
+Recreation Grounds in,leisure,recreation_ground,in,Y 
+Recreation Ground near,leisure,recreation_ground,near,N 
+Recreation Grounds near,leisure,recreation_ground,near,Y 
+Slipway,leisure,slipway,-,N 
+Slipways,leisure,slipway,-,Y 
+Slipway in,leisure,slipway,in,N 
+Slipways in,leisure,slipway,in,Y 
+Slipway near,leisure,slipway,near,N 
+Slipways near,leisure,slipway,near,Y 
+Sports Centre,leisure,sports_centre,-,N 
+Sports Centres,leisure,sports_centre,-,Y 
+Sports Centre in,leisure,sports_centre,in,N 
+Sports Centres in,leisure,sports_centre,in,Y 
+Sports Centre near,leisure,sports_centre,near,N 
+Sports Centres near,leisure,sports_centre,near,Y 
+Sports Center,leisure,sports_centre,-,N 
+Sports Centers,leisure,sports_centre,-,Y 
+Sports Center in,leisure,sports_centre,in,N 
+Sports Centers in,leisure,sports_centre,in,Y 
+Sports Center near,leisure,sports_centre,near,N 
+Sports Centers near,leisure,sports_centre,near,Y 
+Stadium,leisure,stadium,-,N 
+Stadiums,leisure,stadium,-,Y 
+Stadium in,leisure,stadium,in,N 
+Stadiums in,leisure,stadium,in,Y 
+Stadium near,leisure,stadium,near,N 
+Stadiums near,leisure,stadium,near,Y 
+Swimming Pool,leisure,swimming_pool,-,N 
+Swimming Pools,leisure,swimming_pool,-,Y 
+Swimming Pool in,leisure,swimming_pool,in,N 
+Swimming Pools in,leisure,swimming_pool,in,Y 
+Swimming Pool near,leisure,swimming_pool,near,N 
+Swimming Pools near,leisure,swimming_pool,near,Y 
+Running Track,leisure,track,-,N 
+Running Tracks,leisure,track,-,Y 
+Running Track in,leisure,track,in,N 
+Running Tracks in,leisure,track,in,Y 
+Running Track near,leisure,track,near,N 
+Running Tracks near,leisure,track,near,Y 
+Water Park,leisure,water_park,-,N 
+Water Parks,leisure,water_park,-,Y 
+Water Park in,leisure,water_park,in,N 
+Water Parks in,leisure,water_park,in,Y 
+Water Park near,leisure,water_park,near,N 
+Water Parks near,leisure,water_park,near,Y 
+Water well,man_made,water_well,-,N 
+Water wells,man_made,water_well,-,Y 
+Water well in,man_made,water_well,in,N 
+Water wells in,man_made,water_well,in,Y 
+Water well near,man_made,water_well,near,N 
+Water wells near,man_made,water_well,near,Y 
+Windmill,man_made,windmill,-,N 
+Windmills,man_made,windmill,-,Y 
+Windmill in,man_made,windmill,in,N 
+Windmills in,man_made,windmill,in,Y 
+Windmill near,man_made,windmill,near,N 
+Windmills near,man_made,windmill,near,Y 
+Maypole,man_made,maypole,-,N 
+Maypoles,man_made,maypole,-,Y 
+Maypole in,man_made,maypole,in,N 
+Maypoles in,man_made,maypole,in,Y 
+Maypole near,man_made,maypole,near,N 
+Maypoles near,man_made,maypole,near,Y 
+Plaque,memorial,plaque,-,N 
+Plaques,memorial,plaque,-,Y 
+Plaque in,memorial,plaque,in,N 
+Plaques in,memorial,plaque,in,Y 
+Plaque near,memorial,plaque,near,N 
+Plaques near,memorial,plaque,near,Y 
+Statue,memorial,statue,-,N 
+Statues,memorial,statue,-,Y 
+Statue in,memorial,statue,in,N 
+Statues in,memorial,statue,in,Y 
+Statue near,memorial,statue,near,N 
+Statues near,memorial,statue,near,Y 
+Stolperstein,memorial,stolperstein,-,N 
+Stolpersteins,memorial,stolperstein,-,Y 
+Stolpersteine,memorial,stolperstein,-,Y 
+Stolperstein in,memorial,stolperstein,in,N 
+Stolpersteins in,memorial,stolperstein,in,Y 
+Stolpersteine in,memorial,stolperstein,in,Y 
+Stolperstein near,memorial,stolperstein,near,N 
+Stolpersteins near,memorial,stolperstein,near,Y 
+Stolpersteine near,memorial,stolperstein,near,Y 
+War Memorial,memorial,war_memorial,-,N 
+War Memorials,memorial,war_memorial,-,Y 
+War Memorial in,memorial,war_memorial,in,N 
+War Memorials in,memorial,war_memorial,in,Y 
+War Memorial near,memorial,war_memorial,near,N 
+War Memorials near,memorial,war_memorial,near,Y 
+Bay,natural,bay,-,N 
+Bays,natural,bay,-,Y 
+Bay in,natural,bay,in,N 
+Bays in,natural,bay,in,Y 
+Bay near,natural,bay,near,N 
+Bays near,natural,bay,near,Y 
+Beach,natural,beach,-,N 
+Beachs,natural,beach,-,Y 
+Beaches,natural,beach,-,Y 
+Beach in,natural,beach,in,N 
+Beachs in,natural,beach,in,Y 
+Beaches in,natural,beach,in,Y 
+Beach near,natural,beach,near,N 
+Beachs near,natural,beach,near,Y 
+Beaches near,natural,beach,near,Y 
+Cape,natural,cape,-,N 
+Capes,natural,cape,-,Y 
+Cape in,natural,cape,in,N 
+Capes in,natural,cape,in,Y 
+Cape near,natural,cape,near,N 
+Capes near,natural,cape,near,Y 
+Cave Entrance,natural,cave_entrance,-,N 
+Cave Entrances,natural,cave_entrance,-,Y 
+Cave Entrance in,natural,cave_entrance,in,N 
+Cave Entrances in,natural,cave_entrance,in,Y 
+Cave Entrance near,natural,cave_entrance,near,N 
+Cave Entrances near,natural,cave_entrance,near,Y 
+Cliff,natural,cliff,-,N 
+Cliffs,natural,cliff,-,Y 
+Cliff in,natural,cliff,in,N 
+Cliffs in,natural,cliff,in,Y 
+Cliff near,natural,cliff,near,N 
+Cliffs near,natural,cliff,near,Y 
+Coastline,natural,coastline,-,N 
+Coastlines,natural,coastline,-,Y 
+Coastline in,natural,coastline,in,N 
+Coastlines in,natural,coastline,in,Y 
+Coastline near,natural,coastline,near,N 
+Coastlines near,natural,coastline,near,Y 
+Desert,natural,desert,-,N 
+Deserts,natural,desert,-,Y 
+Desert in,natural,desert,in,N 
+Deserts in,natural,desert,in,Y 
+Desert near,natural,desert,near,N 
+Deserts near,natural,desert,near,Y 
+Fell,natural,fell,-,N 
+Fells,natural,fell,-,Y 
+Fell in,natural,fell,in,N 
+Fells in,natural,fell,in,Y 
+Fell near,natural,fell,near,N 
+Fells near,natural,fell,near,Y 
+Glacier,natural,glacier,-,N 
+Glaciers,natural,glacier,-,Y 
+Glacier in,natural,glacier,in,N 
+Glaciers in,natural,glacier,in,Y 
+Glacier near,natural,glacier,near,N 
+Glaciers near,natural,glacier,near,Y 
+Heath,natural,heath,-,N 
+Heaths,natural,heath,-,Y 
+Heath in,natural,heath,in,N 
+Heaths in,natural,heath,in,Y 
+Heath near,natural,heath,near,N 
+Heaths near,natural,heath,near,Y 
+Land,natural,land,-,N 
+Lands,natural,land,-,Y 
+Land in,natural,land,in,N 
+Lands in,natural,land,in,Y 
+Land near,natural,land,near,N 
+Lands near,natural,land,near,Y 
+Marsh,natural,marsh,-,N 
+Marshs,natural,marsh,-,Y 
+Marshes,natural,marsh,-,Y 
+Marsh in,natural,marsh,in,N 
+Marshs in,natural,marsh,in,Y 
+Marshes in,natural,marsh,in,Y 
+Marsh near,natural,marsh,near,N 
+Marshs near,natural,marsh,near,Y 
+Marshes near,natural,marsh,near,Y 
+Moor,natural,moor,-,N 
+Moors,natural,moor,-,Y 
+Moor in,natural,moor,in,N 
+Moors in,natural,moor,in,Y 
+Moor near,natural,moor,near,N 
+Moors near,natural,moor,near,Y 
+Mud,natural,mud,-,N 
+Muds,natural,mud,-,Y 
+Mud in,natural,mud,in,N 
+Muds in,natural,mud,in,Y 
+Mud near,natural,mud,near,N 
+Muds near,natural,mud,near,Y 
+Peak,natural,peak,-,N 
+Peaks,natural,peak,-,Y 
+Peak in,natural,peak,in,N 
+Peaks in,natural,peak,in,Y 
+Peak near,natural,peak,near,N 
+Peaks near,natural,peak,near,Y 
+Reef,natural,reef,-,N 
+Reefs,natural,reef,-,Y 
+Reef in,natural,reef,in,N 
+Reefs in,natural,reef,in,Y 
+Reef near,natural,reef,near,N 
+Reefs near,natural,reef,near,Y 
+Ridge,natural,ridge,-,N 
+Ridges,natural,ridge,-,Y 
+Ridge in,natural,ridge,in,N 
+Ridges in,natural,ridge,in,Y 
+Ridge near,natural,ridge,near,N 
+Ridges near,natural,ridge,near,Y 
+Rock,natural,rock,-,N 
+Rocks,natural,rock,-,Y 
+Rock in,natural,rock,in,N 
+Rocks in,natural,rock,in,Y 
+Rock near,natural,rock,near,N 
+Rocks near,natural,rock,near,Y 
+Scree,natural,scree,-,N 
+Screes,natural,scree,-,Y 
+Scree in,natural,scree,in,N 
+Screes in,natural,scree,in,Y 
+Scree near,natural,scree,near,N 
+Screes near,natural,scree,near,Y 
+Scrub,natural,scrub,-,N 
+Scrubs,natural,scrub,-,Y 
+Scrub in,natural,scrub,in,N 
+Scrubs in,natural,scrub,in,Y 
+Scrub near,natural,scrub,near,N 
+Scrubs near,natural,scrub,near,Y 
+Shoal,natural,shoal,-,N 
+Shoals,natural,shoal,-,Y 
+Shoal in,natural,shoal,in,N 
+Shoals in,natural,shoal,in,Y 
+Shoal near,natural,shoal,near,N 
+Shoals near,natural,shoal,near,Y 
+Spring,natural,spring,-,N 
+Springs,natural,spring,-,Y 
+Spring in,natural,spring,in,N 
+Springs in,natural,spring,in,Y 
+Spring near,natural,spring,near,N 
+Springs near,natural,spring,near,Y 
+Tree,natural,tree,-,N 
+Trees,natural,tree,-,Y 
+Tree in,natural,tree,in,N 
+Trees in,natural,tree,in,Y 
+Tree near,natural,tree,near,N 
+Trees near,natural,tree,near,Y 
+Valley,natural,valley,-,N 
+Valleys,natural,valley,-,Y 
+Valley in,natural,valley,in,N 
+Valleys in,natural,valley,in,Y 
+Valley near,natural,valley,near,N 
+Valleys near,natural,valley,near,Y 
+Volcano,natural,volcano,-,N 
+Volcanos,natural,volcano,-,Y 
+Volcano in,natural,volcano,in,N 
+Volcanos in,natural,volcano,in,Y 
+Volcano near,natural,volcano,near,N 
+Volcanos near,natural,volcano,near,Y 
+Water,natural,water,-,N 
+Waters,natural,water,-,Y 
+Water in,natural,water,in,N 
+Waters in,natural,water,in,Y 
+Water near,natural,water,near,N 
+Waters near,natural,water,near,Y 
+Wetland,natural,wetland,-,N 
+Wetlands,natural,wetland,-,Y 
+Wetland in,natural,wetland,in,N 
+Wetlands in,natural,wetland,in,Y 
+Wetland near,natural,wetland,near,N 
+Wetlands near,natural,wetland,near,Y 
+Wood,natural,wood,-,N 
+Woods,natural,wood,-,Y 
+Wood in,natural,wood,in,N 
+Woods in,natural,wood,in,Y 
+Wood near,natural,wood,near,N 
+Woods near,natural,wood,near,Y 
+City,place,city,-,N 
+Citys,place,city,-,Y 
+Cities,place,city,-,Y 
+City in,place,city,in,N 
+Citys in,place,city,in,Y 
+Cities in,place,city,in,Y 
+City near,place,city,near,N 
+Citys near,place,city,near,Y 
+Cities near,place,city,near,Y 
+Country,place,country,-,N 
+Countrys,place,country,-,Y 
+Countries,place,country,-,Y 
+Country in,place,country,in,N 
+Countrys in,place,country,in,Y 
+Countries in,place,country,in,Y 
+Country near,place,country,near,N 
+Countrys near,place,country,near,Y 
+Countries near,place,country,near,Y 
+County,place,county,-,N 
+Countys,place,county,-,Y 
+Counties,place,county,-,Y 
+County in,place,county,in,N 
+Countys in,place,county,in,Y 
+Counties in,place,county,in,Y 
+County near,place,county,near,N 
+Countys near,place,county,near,Y 
+Counties near,place,county,near,Y 
+Farm,place,farm,-,N 
+Farms,place,farm,-,Y 
+Farm in,place,farm,in,N 
+Farms in,place,farm,in,Y 
+Farm near,place,farm,near,N 
+Farms near,place,farm,near,Y 
+Hamlet,place,hamlet,-,N 
+Hamlets,place,hamlet,-,Y 
+Hamlet in,place,hamlet,in,N 
+Hamlets in,place,hamlet,in,Y 
+Hamlet near,place,hamlet,near,N 
+Hamlets near,place,hamlet,near,Y 
+Houses,place,houses,-,N 
+Houses,place,houses,-,Y 
+Houses in,place,houses,in,N 
+Houses in,place,houses,in,Y 
+Houses near,place,houses,near,N 
+Houses near,place,houses,near,Y 
+Island,place,island,-,N 
+Islands,place,island,-,Y 
+Island in,place,island,in,N 
+Islands in,place,island,in,Y 
+Island near,place,island,near,N 
+Islands near,place,island,near,Y 
+Islet,place,islet,-,N 
+Islets,place,islet,-,Y 
+Islet in,place,islet,in,N 
+Islets in,place,islet,in,Y 
+Islet near,place,islet,near,N 
+Islets near,place,islet,near,Y 
+Locality,place,locality,-,N 
+Localitys,place,locality,-,Y 
+Localities,place,locality,-,Y 
+Locality in,place,locality,in,N 
+Localitys in,place,locality,in,Y 
+Localities in,place,locality,in,Y 
+Locality near,place,locality,near,N 
+Localitys near,place,locality,near,Y 
+Localities near,place,locality,near,Y 
+Municipality,place,municipality,-,N 
+Municipalitys,place,municipality,-,Y 
+Municipalities,place,municipality,-,Y 
+Municipality in,place,municipality,in,N 
+Municipalitys in,place,municipality,in,Y 
+Municipalities in,place,municipality,in,Y 
+Municipality near,place,municipality,near,N 
+Municipalitys near,place,municipality,near,Y 
+Municipalities near,place,municipality,near,Y 
+Region,place,region,-,N 
+Regions,place,region,-,Y 
+Region in,place,region,in,N 
+Regions in,place,region,in,Y 
+Region near,place,region,near,N 
+Regions near,place,region,near,Y 
+Sea,place,sea,-,N 
+Seas,place,sea,-,Y 
+Sea in,place,sea,in,N 
+Seas in,place,sea,in,Y 
+Sea near,place,sea,near,N 
+Seas near,place,sea,near,Y 
+State,place,state,-,N 
+States,place,state,-,Y 
+State in,place,state,in,N 
+States in,place,state,in,Y 
+State near,place,state,near,N 
+States near,place,state,near,Y 
+Suburb,place,suburb,-,N 
+Suburbs,place,suburb,-,Y 
+Suburb in,place,suburb,in,N 
+Suburbs in,place,suburb,in,Y 
+Suburb near,place,suburb,near,N 
+Suburbs near,place,suburb,near,Y 
+Town,place,town,-,N 
+Towns,place,town,-,Y 
+Town in,place,town,in,N 
+Towns in,place,town,in,Y 
+Town near,place,town,near,N 
+Towns near,place,town,near,Y 
+Village,place,village,-,N 
+Villages,place,village,-,Y 
+Village in,place,village,in,N 
+Villages in,place,village,in,Y 
+Village near,place,village,near,N 
+Villages near,place,village,near,Y 
+Abandoned Railway,railway,abandoned,-,N 
+Abandoned Railways,railway,abandoned,-,Y 
+Abandoned Railway in,railway,abandoned,in,N 
+Abandoned Railways in,railway,abandoned,in,Y 
+Abandoned Railway near,railway,abandoned,near,N 
+Abandoned Railways near,railway,abandoned,near,Y 
+Railway under Construction,railway,construction,-,N 
+Railway under Constructions,railway,construction,-,Y 
+Railway under Construction in,railway,construction,in,N 
+Railway under Constructions in,railway,construction,in,Y 
+Railway under Construction near,railway,construction,near,N 
+Railway under Constructions near,railway,construction,near,Y 
+Disused Railway,railway,disused,-,N 
+Disused Railways,railway,disused,-,Y 
+Disused Railway in,railway,disused,in,N 
+Disused Railways in,railway,disused,in,Y 
+Disused Railway near,railway,disused,near,N 
+Disused Railways near,railway,disused,near,Y 
+Funicular Railway,railway,funicular,-,N 
+Funicular Railways,railway,funicular,-,Y 
+Funicular Railway in,railway,funicular,in,N 
+Funicular Railways in,railway,funicular,in,Y 
+Funicular Railway near,railway,funicular,near,N 
+Funicular Railways near,railway,funicular,near,Y 
+Train Stop,railway,halt,-,N 
+Train Stops,railway,halt,-,Y 
+Train Stop in,railway,halt,in,N 
+Train Stops in,railway,halt,in,Y 
+Train Stop near,railway,halt,near,N 
+Train Stops near,railway,halt,near,Y 
+Level Crossing,railway,level_crossing,-,N 
+Level Crossings,railway,level_crossing,-,Y 
+Level Crossing in,railway,level_crossing,in,N 
+Level Crossings in,railway,level_crossing,in,Y 
+Level Crossing near,railway,level_crossing,near,N 
+Level Crossings near,railway,level_crossing,near,Y 
+Light Rail,railway,light_rail,-,N 
+Light Rails,railway,light_rail,-,Y 
+Light Rail in,railway,light_rail,in,N 
+Light Rails in,railway,light_rail,in,Y 
+Light Rail near,railway,light_rail,near,N 
+Light Rails near,railway,light_rail,near,Y 
+Monorail,railway,monorail,-,N 
+Monorails,railway,monorail,-,Y 
+Monorail in,railway,monorail,in,N 
+Monorails in,railway,monorail,in,Y 
+Monorail near,railway,monorail,near,N 
+Monorails near,railway,monorail,near,Y 
+Narrow Gauge Railway,railway,narrow_gauge,-,N 
+Narrow Gauge Railways,railway,narrow_gauge,-,Y 
+Narrow Gauge Railway in,railway,narrow_gauge,in,N 
+Narrow Gauge Railways in,railway,narrow_gauge,in,Y 
+Narrow Gauge Railway near,railway,narrow_gauge,near,N 
+Narrow Gauge Railways near,railway,narrow_gauge,near,Y 
+Railway Platform,railway,platform,-,N 
+Railway Platforms,railway,platform,-,Y 
+Railway Platform in,railway,platform,in,N 
+Railway Platforms in,railway,platform,in,Y 
+Railway Platform near,railway,platform,near,N 
+Railway Platforms near,railway,platform,near,Y 
+Preserved Railway,railway,preserved,-,N 
+Preserved Railways,railway,preserved,-,Y 
+Preserved Railway in,railway,preserved,in,N 
+Preserved Railways in,railway,preserved,in,Y 
+Preserved Railway near,railway,preserved,near,N 
+Preserved Railways near,railway,preserved,near,Y 
+Railway Station,railway,station,-,N 
+Railway Stations,railway,station,-,Y 
+Railway Station in,railway,station,in,N 
+Railway Stations in,railway,station,in,Y 
+Railway Station near,railway,station,near,N 
+Railway Stations near,railway,station,near,Y 
+Station,railway,station,-,N 
+Stations,railway,station,-,Y 
+Station in,railway,station,in,N 
+Stations in,railway,station,in,Y 
+Station near,railway,station,near,N 
+Stations near,railway,station,near,Y 
+Subway Station,railway,subway,-,N 
+Subway Stations,railway,subway,-,Y 
+Subway Station in,railway,subway,in,N 
+Subway Stations in,railway,subway,in,Y 
+Subway Station near,railway,subway,near,N 
+Subway Stations near,railway,subway,near,Y 
+Subway Entrance,railway,subway_entrance,-,N 
+Subway Entrances,railway,subway_entrance,-,Y 
+Subway Entrance in,railway,subway_entrance,in,N 
+Subway Entrances in,railway,subway_entrance,in,Y 
+Subway Entrance near,railway,subway_entrance,near,N 
+Subway Entrances near,railway,subway_entrance,near,Y 
+Railway Points,railway,switch,-,N 
+Railway Points,railway,switch,-,Y 
+Railway Points in,railway,switch,in,N 
+Railway Points in,railway,switch,in,Y 
+Railway Points near,railway,switch,near,N 
+Railway Points near,railway,switch,near,Y 
+Tramway,railway,tram,-,N 
+Tramways,railway,tram,-,Y 
+Tramway in,railway,tram,in,N 
+Tramways in,railway,tram,in,Y 
+Tramway near,railway,tram,near,N 
+Tramways near,railway,tram,near,Y 
+Tram Stop,railway,tram_stop,-,N 
+Tram Stops,railway,tram_stop,-,Y 
+Tram Stop in,railway,tram_stop,in,N 
+Tram Stops in,railway,tram_stop,in,Y 
+Tram Stop near,railway,tram_stop,near,N 
+Tram Stops near,railway,tram_stop,near,Y 
+Off Licence,shop,alcohol,-,N 
+Off Licences,shop,alcohol,-,Y 
+Off Licence in,shop,alcohol,in,N 
+Off Licences in,shop,alcohol,in,Y 
+Off Licence near,shop,alcohol,near,N 
+Off Licences near,shop,alcohol,near,Y 
+Off License,shop,alcohol,-,N 
+Off Licenses,shop,alcohol,-,Y 
+Off License in,shop,alcohol,in,N 
+Off Licenses in,shop,alcohol,in,Y 
+Off License near,shop,alcohol,near,N 
+Off Licenses near,shop,alcohol,near,Y 
+Art Shop,shop,art,-,N 
+Art Shops,shop,art,-,Y 
+Art Shop in,shop,art,in,N 
+Art Shops in,shop,art,in,Y 
+Art Shop near,shop,art,near,N 
+Art Shops near,shop,art,near,Y 
+Bakery,shop,bakery,-,N 
+Bakerys,shop,bakery,-,Y 
+Bakeries,shop,bakery,-,Y 
+Bakery in,shop,bakery,in,N 
+Bakerys in,shop,bakery,in,Y 
+Bakeries in,shop,bakery,in,Y 
+Bakery near,shop,bakery,near,N 
+Bakerys near,shop,bakery,near,Y 
+Bakeries near,shop,bakery,near,Y 
+Beauty Shop,shop,beauty,-,N 
+Beauty Shops,shop,beauty,-,Y 
+Beauty Shop in,shop,beauty,in,N 
+Beauty Shops in,shop,beauty,in,Y 
+Beauty Shop near,shop,beauty,near,N 
+Beauty Shops near,shop,beauty,near,Y 
+Beverages Shop,shop,beverages,-,N 
+Beverages Shops,shop,beverages,-,Y 
+Beverages Shop in,shop,beverages,in,N 
+Beverages Shops in,shop,beverages,in,Y 
+Beverages Shop near,shop,beverages,near,N 
+Beverages Shops near,shop,beverages,near,Y 
+Bicycle Shop,shop,bicycle,-,N 
+Bicycle Shops,shop,bicycle,-,Y 
+Bicycle Shop in,shop,bicycle,in,N 
+Bicycle Shops in,shop,bicycle,in,Y 
+Bicycle Shop near,shop,bicycle,near,N 
+Bicycle Shops near,shop,bicycle,near,Y 
+Book Shop,shop,books,-,N 
+Book Shops,shop,books,-,Y 
+Book Shop in,shop,books,in,N 
+Book Shops in,shop,books,in,Y 
+Book Shop near,shop,books,near,N 
+Book Shops near,shop,books,near,Y 
+Butcher,shop,butcher,-,N 
+Butchers,shop,butcher,-,Y 
+Butcher in,shop,butcher,in,N 
+Butchers in,shop,butcher,in,Y 
+Butcher near,shop,butcher,near,N 
+Butchers near,shop,butcher,near,Y 
+Car Shop,shop,car,-,N 
+Car Shops,shop,car,-,Y 
+Car Shop in,shop,car,in,N 
+Car Shops in,shop,car,in,Y 
+Car Shop near,shop,car,near,N 
+Car Shops near,shop,car,near,Y 
+Car Parts,shop,car_parts,-,N 
+Car Parts,shop,car_parts,-,Y 
+Car Parts in,shop,car_parts,in,N 
+Car Parts in,shop,car_parts,in,Y 
+Car Parts near,shop,car_parts,near,N 
+Car Parts near,shop,car_parts,near,Y 
+Carpet Shop,shop,carpet,-,N 
+Carpet Shops,shop,carpet,-,Y 
+Carpet Shop in,shop,carpet,in,N 
+Carpet Shops in,shop,carpet,in,Y 
+Carpet Shop near,shop,carpet,near,N 
+Carpet Shops near,shop,carpet,near,Y 
+Car Repair,shop,car_repair,-,N 
+Car Repairs,shop,car_repair,-,Y 
+Car Repair in,shop,car_repair,in,N 
+Car Repairs in,shop,car_repair,in,Y 
+Car Repair near,shop,car_repair,near,N 
+Car Repairs near,shop,car_repair,near,Y 
+Charity Shop,shop,charity,-,N 
+Charity Shops,shop,charity,-,Y 
+Charity Shop in,shop,charity,in,N 
+Charity Shops in,shop,charity,in,Y 
+Charity Shop near,shop,charity,near,N 
+Charity Shops near,shop,charity,near,Y 
+Chemist,shop,chemist,-,N 
+Chemists,shop,chemist,-,Y 
+Chemist in,shop,chemist,in,N 
+Chemists in,shop,chemist,in,Y 
+Chemist near,shop,chemist,near,N 
+Chemists near,shop,chemist,near,Y 
+Clothes Shop,shop,clothes,-,N 
+Clothes Shops,shop,clothes,-,Y 
+Clothes Shop in,shop,clothes,in,N 
+Clothes Shops in,shop,clothes,in,Y 
+Clothes Shop near,shop,clothes,near,N 
+Clothes Shops near,shop,clothes,near,Y 
+Computer Shop,shop,computer,-,N 
+Computer Shops,shop,computer,-,Y 
+Computer Shop in,shop,computer,in,N 
+Computer Shops in,shop,computer,in,Y 
+Computer Shop near,shop,computer,near,N 
+Computer Shops near,shop,computer,near,Y 
+Confectionery Shop,shop,confectionery,-,N 
+Confectionery Shops,shop,confectionery,-,Y 
+Confectionery Shop in,shop,confectionery,in,N 
+Confectionery Shops in,shop,confectionery,in,Y 
+Confectionery Shop near,shop,confectionery,near,N 
+Confectionery Shops near,shop,confectionery,near,Y 
+Convenience Store,shop,convenience,-,N 
+Convenience Stores,shop,convenience,-,Y 
+Convenience Store in,shop,convenience,in,N 
+Convenience Stores in,shop,convenience,in,Y 
+Convenience Store near,shop,convenience,near,N 
+Convenience Stores near,shop,convenience,near,Y 
+Copy Shop,shop,copyshop,-,N 
+Copy Shops,shop,copyshop,-,Y 
+Copy Shop in,shop,copyshop,in,N 
+Copy Shops in,shop,copyshop,in,Y 
+Copy Shop near,shop,copyshop,near,N 
+Copy Shops near,shop,copyshop,near,Y 
+Cosmetics,shop,cosmetics,-,Y 
+Cosmetics in,shop,cosmetics,in,Y 
+Cosmetics near,shop,cosmetics,near,Y 
+Cosmetics Shop,shop,cosmetics,-,N 
+Cosmetics Shops,shop,cosmetics,-,Y 
+Cosmetics Shop in,shop,cosmetics,in,N 
+Cosmetics Shops in,shop,cosmetics,in,Y 
+Cosmetics Shop near,shop,cosmetics,near,N 
+Cosmetics Shops near,shop,cosmetics,near,Y 
+Delicatessen,shop,deli,-,N 
+Delicatessen,shop,deli,-,Y 
+Delicatessen in,shop,deli,in,N 
+Delicatessen in,shop,deli,in,Y 
+Delicatessen near,shop,deli,near,N 
+Delicatessen near,shop,deli,near,Y 
+Department Store,shop,department_store,-,N 
+Department Stores,shop,department_store,-,Y 
+Department Store in,shop,department_store,in,N 
+Department Stores in,shop,department_store,in,Y 
+Department Store near,shop,department_store,near,N 
+Department Stores near,shop,department_store,near,Y 
+Fish Shop,shop,seafood,-,N 
+Fish Shops,shop,seafood,-,Y 
+Fish Shop in,shop,seafood,in,N 
+Fish Shops in,shop,seafood,in,Y 
+Fish Shop near,shop,seafood,near,N 
+Fish Shops near,shop,seafood,near,Y 
+Seafood Shop,shop,seafood,-,N 
+Seafood Shops,shop,seafood,-,Y 
+Seafood Shop in,shop,seafood,in,N 
+Seafood Shops in,shop,seafood,in,Y 
+Seafood Shop near,shop,seafood,near,N 
+Seafood Shops near,shop,seafood,near,Y 
+Do-It-Yourself,shop,doityourself,-,N 
+Do-It-Yourselfs,shop,doityourself,-,Y 
+Do-It-Yourselves,shop,doityourself,-,Y 
+Do-It-Yourself in,shop,doityourself,in,N 
+Do-It-Yourselfs in,shop,doityourself,in,Y 
+Do-It-Yourselves in,shop,doityourself,in,Y 
+Do-It-Yourself near,shop,doityourself,near,N 
+Do-It-Yourselfs near,shop,doityourself,near,Y 
+Do-It-Yourselves near,shop,doityourself,near,Y 
+Dry Cleaning,shop,dry_cleaning,-,N 
+Dry Cleanings,shop,dry_cleaning,-,Y 
+Dry Cleaning in,shop,dry_cleaning,in,N 
+Dry Cleanings in,shop,dry_cleaning,in,Y 
+Dry Cleaning near,shop,dry_cleaning,near,N 
+Dry Cleanings near,shop,dry_cleaning,near,Y 
+Electronics Shop,shop,electronics,-,N 
+Electronics Shops,shop,electronics,-,Y 
+Electronics Shop in,shop,electronics,in,N 
+Electronics Shops in,shop,electronics,in,Y 
+Electronics Shop near,shop,electronics,near,N 
+Electronics Shops near,shop,electronics,near,Y 
+Erotic Shop,shop,erotic,-,N 
+Erotic Shops,shop,erotic,-,Y 
+Erotic Shop in,shop,erotic,in,N 
+Erotic Shops in,shop,erotic,in,Y 
+Erotic Shop near,shop,erotic,near,N 
+Erotic Shops near,shop,erotic,near,Y 
+Sex Shop,shop,erotic,-,N 
+Sex Shops,shop,erotic,-,Y 
+Sex Shop in,shop,erotic,in,N 
+Sex Shops in,shop,erotic,in,Y 
+Sex Shop near,shop,erotic,near,N 
+Sex Shops near,shop,erotic,near,Y 
+Estate Agent,shop,estate_agent,-,N 
+Estate Agents,shop,estate_agent,-,Y 
+Estate Agent in,shop,estate_agent,in,N 
+Estate Agents in,shop,estate_agent,in,Y 
+Estate Agent near,shop,estate_agent,near,N 
+Estate Agents near,shop,estate_agent,near,Y 
+Farm Shop,shop,farm,-,N 
+Farm Shops,shop,farm,-,Y 
+Farm Shop in,shop,farm,in,N 
+Farm Shops in,shop,farm,in,Y 
+Farm Shop near,shop,farm,near,N 
+Farm Shops near,shop,farm,near,Y 
+Fashion Shop,shop,fashion,-,N 
+Fashion Shops,shop,fashion,-,Y 
+Fashion Shop in,shop,fashion,in,N 
+Fashion Shops in,shop,fashion,in,Y 
+Fashion Shop near,shop,fashion,near,N 
+Fashion Shops near,shop,fashion,near,Y 
+Florist,shop,florist,-,N 
+Florists,shop,florist,-,Y 
+Florist in,shop,florist,in,N 
+Florists in,shop,florist,in,Y 
+Florist near,shop,florist,near,N 
+Florists near,shop,florist,near,Y 
+Food Shop,shop,food,-,N 
+Food Shops,shop,food,-,Y 
+Food Shop in,shop,food,in,N 
+Food Shops in,shop,food,in,Y 
+Food Shop near,shop,food,near,N 
+Food Shops near,shop,food,near,Y 
+Funeral Director,shop,funeral_directors,-,N 
+Funeral Directors,shop,funeral_directors,-,Y 
+Funeral Director in,shop,funeral_directors,in,N 
+Funeral Directors in,shop,funeral_directors,in,Y 
+Funeral Director near,shop,funeral_directors,near,N 
+Funeral Directors near,shop,funeral_directors,near,Y 
+Furniture,shop,furniture,-,N 
+Furnitures,shop,furniture,-,Y 
+Furniture in,shop,furniture,in,N 
+Furnitures in,shop,furniture,in,Y 
+Furniture near,shop,furniture,near,N 
+Furnitures near,shop,furniture,near,Y 
+Garden Centre,shop,garden_centre,-,N 
+Garden Centres,shop,garden_centre,-,Y 
+Garden Centre in,shop,garden_centre,in,N 
+Garden Centres in,shop,garden_centre,in,Y 
+Garden Centre near,shop,garden_centre,near,N 
+Garden Centres near,shop,garden_centre,near,Y 
+Garden Center,shop,garden_centre,-,N 
+Garden Centers,shop,garden_centre,-,Y 
+Garden Center in,shop,garden_centre,in,N 
+Garden Centers in,shop,garden_centre,in,Y 
+Garden Center near,shop,garden_centre,near,N 
+Garden Centers near,shop,garden_centre,near,Y 
+General Store,shop,general,-,N 
+General Stores,shop,general,-,Y 
+General Store in,shop,general,in,N 
+General Stores in,shop,general,in,Y 
+General Store near,shop,general,near,N 
+General Stores near,shop,general,near,Y 
+Gift Shop,shop,gift,-,N 
+Gift Shops,shop,gift,-,Y 
+Gift Shop in,shop,gift,in,N 
+Gift Shops in,shop,gift,in,Y 
+Gift Shop near,shop,gift,near,N 
+Gift Shops near,shop,gift,near,Y 
+Greengrocer,shop,greengrocer,-,N 
+Greengrocers,shop,greengrocer,-,Y 
+Greengrocer in,shop,greengrocer,in,N 
+Greengrocers in,shop,greengrocer,in,Y 
+Greengrocer near,shop,greengrocer,near,N 
+Greengrocers near,shop,greengrocer,near,Y 
+Hairdresser,shop,hairdresser,-,N 
+Hairdressers,shop,hairdresser,-,Y 
+Hairdresser in,shop,hairdresser,in,N 
+Hairdressers in,shop,hairdresser,in,Y 
+Hairdresser near,shop,hairdresser,near,N 
+Hairdressers near,shop,hairdresser,near,Y 
+Hardware Store,shop,hardware,-,N 
+Hardware Stores,shop,hardware,-,Y 
+Hardware Store in,shop,hardware,in,N 
+Hardware Stores in,shop,hardware,in,Y 
+Hardware Store near,shop,hardware,near,N 
+Hardware Stores near,shop,hardware,near,Y 
+Hi-Fi,shop,hifi,-,N 
+Hi-Fis,shop,hifi,-,Y 
+Hi-Fi in,shop,hifi,in,N 
+Hi-Fis in,shop,hifi,in,Y 
+Hi-Fi near,shop,hifi,near,N 
+Hi-Fis near,shop,hifi,near,Y 
+Insurance,office,insurance,-,N 
+Insurances,office,insurance,-,Y 
+Insurance in,office,insurance,in,N 
+Insurances in,office,insurance,in,Y 
+Insurance near,office,insurance,near,N 
+Insurances near,office,insurance,near,Y 
+Jewelry Shop,shop,jewelry,-,N 
+Jewelry Shops,shop,jewelry,-,Y 
+Jewelry Shop in,shop,jewelry,in,N 
+Jewelry Shops in,shop,jewelry,in,Y 
+Jewelry Shop near,shop,jewelry,near,N 
+Jewelry Shops near,shop,jewelry,near,Y 
+Kiosk Shop,shop,kiosk,-,N 
+Kiosk Shops,shop,kiosk,-,Y 
+Kiosk Shop in,shop,kiosk,in,N 
+Kiosk Shops in,shop,kiosk,in,Y 
+Kiosk Shop near,shop,kiosk,near,N 
+Kiosk Shops near,shop,kiosk,near,Y 
+Laundry,shop,laundry,-,N 
+Laundrys,shop,laundry,-,Y 
+Laundries,shop,laundry,-,Y 
+Laundry in,shop,laundry,in,N 
+Laundrys in,shop,laundry,in,Y 
+Laundries in,shop,laundry,in,Y 
+Laundry near,shop,laundry,near,N 
+Laundrys near,shop,laundry,near,Y 
+Laundries near,shop,laundry,near,Y 
+Mall,shop,mall,-,N 
+Malls,shop,mall,-,Y 
+Mall in,shop,mall,in,N 
+Malls in,shop,mall,in,Y 
+Mall near,shop,mall,near,N 
+Malls near,shop,mall,near,Y 
+Massage Shop,shop,massage,-,N 
+Massage Shops,shop,massage,-,Y 
+Massage Shop in,shop,massage,in,N 
+Massage Shops in,shop,massage,in,Y 
+Massage Shop near,shop,massage,near,N 
+Massage Shops near,shop,massage,near,Y 
+Mobile Phone Shop,shop,mobile_phone,-,N 
+Mobile Phone Shops,shop,mobile_phone,-,Y 
+Mobile Phone Shop in,shop,mobile_phone,in,N 
+Mobile Phone Shops in,shop,mobile_phone,in,Y 
+Mobile Phone Shop near,shop,mobile_phone,near,N 
+Mobile Phone Shops near,shop,mobile_phone,near,Y 
+Motorcycle Shop,shop,motorcycle,-,N 
+Motorcycle Shops,shop,motorcycle,-,Y 
+Motorcycle Shop in,shop,motorcycle,in,N 
+Motorcycle Shops in,shop,motorcycle,in,Y 
+Motorcycle Shop near,shop,motorcycle,near,N 
+Motorcycle Shops near,shop,motorcycle,near,Y 
+Music Shop,shop,music,-,N 
+Music Shops,shop,music,-,Y 
+Music Shop in,shop,music,in,N 
+Music Shops in,shop,music,in,Y 
+Music Shop near,shop,music,near,N 
+Music Shops near,shop,music,near,Y 
+Newsagent,shop,newsagent,-,N 
+Newsagents,shop,newsagent,-,Y 
+Newsagent in,shop,newsagent,in,N 
+Newsagents in,shop,newsagent,in,Y 
+Newsagent near,shop,newsagent,near,N 
+Newsagents near,shop,newsagent,near,Y 
+Optician,shop,optician,-,N 
+Opticians,shop,optician,-,Y 
+Optician in,shop,optician,in,N 
+Opticians in,shop,optician,in,Y 
+Optician near,shop,optician,near,N 
+Opticians near,shop,optician,near,Y 
+Organic Food Shop,shop,organic,-,N 
+Organic Food Shops,shop,organic,-,Y 
+Organic Food Shop in,shop,organic,in,N 
+Organic Food Shops in,shop,organic,in,Y 
+Organic Food Shop near,shop,organic,near,N 
+Organic Food Shops near,shop,organic,near,Y 
+Outdoor Shop,shop,outdoor,-,N 
+Outdoor Shops,shop,outdoor,-,Y 
+Outdoor Shop in,shop,outdoor,in,N 
+Outdoor Shops in,shop,outdoor,in,Y 
+Outdoor Shop near,shop,outdoor,near,N 
+Outdoor Shops near,shop,outdoor,near,Y 
+Pet Shop,shop,pet,-,N 
+Pet Shops,shop,pet,-,Y 
+Pet Shop in,shop,pet,in,N 
+Pet Shops in,shop,pet,in,Y 
+Pet Shop near,shop,pet,near,N 
+Pet Shops near,shop,pet,near,Y 
+Photo Shop,shop,photo,-,N 
+Photo Shops,shop,photo,-,Y 
+Photo Shop in,shop,photo,in,N 
+Photo Shops in,shop,photo,in,Y 
+Photo Shop near,shop,photo,near,N 
+Photo Shops near,shop,photo,near,Y 
+Salon,shop,salon,-,N 
+Salons,shop,salon,-,Y 
+Salon in,shop,salon,in,N 
+Salons in,shop,salon,in,Y 
+Salon near,shop,salon,near,N 
+Salons near,shop,salon,near,Y 
+Shoe Shop,shop,shoes,-,N 
+Shoe Shops,shop,shoes,-,Y 
+Shoe Shop in,shop,shoes,in,N 
+Shoe Shops in,shop,shoes,in,Y 
+Shoe Shop near,shop,shoes,near,N 
+Shoe Shops near,shop,shoes,near,Y 
+Shopping Centre,shop,shopping_centre,-,N 
+Shopping Centres,shop,shopping_centre,-,Y 
+Shopping Centre in,shop,shopping_centre,in,N 
+Shopping Centres in,shop,shopping_centre,in,Y 
+Shopping Centre near,shop,shopping_centre,near,N 
+Shopping Centres near,shop,shopping_centre,near,Y 
+Shopping Center,shop,shopping_centre,-,N 
+Shopping Centers,shop,shopping_centre,-,Y 
+Shopping Center in,shop,shopping_centre,in,N 
+Shopping Centers in,shop,shopping_centre,in,Y 
+Shopping Center near,shop,shopping_centre,near,N 
+Shopping Centers near,shop,shopping_centre,near,Y 
+Sports Shop,shop,sports,-,N 
+Sports Shops,shop,sports,-,Y 
+Sports Shop in,shop,sports,in,N 
+Sports Shops in,shop,sports,in,Y 
+Sports Shop near,shop,sports,near,N 
+Sports Shops near,shop,sports,near,Y 
+Stationery Shop,shop,stationery,-,N 
+Stationery Shops,shop,stationery,-,Y 
+Stationery Shop in,shop,stationery,in,N 
+Stationery Shops in,shop,stationery,in,Y 
+Stationery Shop near,shop,stationery,near,N 
+Stationery Shops near,shop,stationery,near,Y 
+Supermarket,shop,supermarket,-,N 
+Supermarkets,shop,supermarket,-,Y 
+Supermarket in,shop,supermarket,in,N 
+Supermarkets in,shop,supermarket,in,Y 
+Supermarket near,shop,supermarket,near,N 
+Supermarkets near,shop,supermarket,near,Y 
+Tattoo Studio,shop,tattoo,-,N 
+Tattoo Studios,shop,tattoo,-,Y 
+Tattoo Studio in,shop,tattoo,in,N 
+Tattoo Studios in,shop,tattoo,in,Y 
+Tattoo Studio near,shop,tattoo,near,N 
+Tattoo Studios near,shop,tattoo,near,Y 
+Tobacco Shop,shop,tobacco,-,N 
+Tobacco Shops,shop,tobacco,-,Y 
+Tobacco Shop in,shop,tobacco,in,N 
+Tobacco Shops in,shop,tobacco,in,Y 
+Tobacco Shop near,shop,tobacco,near,N 
+Tobacco Shops near,shop,tobacco,near,Y 
+Toy Shop,shop,toys,-,N 
+Toy Shops,shop,toys,-,Y 
+Toy Shop in,shop,toys,in,N 
+Toy Shops in,shop,toys,in,Y 
+Toy Shop near,shop,toys,near,N 
+Toy Shops near,shop,toys,near,Y 
+Travel Agency,shop,travel_agency,-,N 
+Travel Agencys,shop,travel_agency,-,Y 
+Travel Agencies,shop,travel_agency,-,Y 
+Travel Agency in,shop,travel_agency,in,N 
+Travel Agencys in,shop,travel_agency,in,Y 
+Travel Agencies in,shop,travel_agency,in,Y 
+Travel Agency near,shop,travel_agency,near,N 
+Travel Agencys near,shop,travel_agency,near,Y 
+Travel Agencies near,shop,travel_agency,near,Y 
+Video Shop,shop,video,-,N 
+Video Shops,shop,video,-,Y 
+Video Shop in,shop,video,in,N 
+Video Shops in,shop,video,in,Y 
+Video Shop near,shop,video,near,N 
+Video Shops near,shop,video,near,Y 
+Off Licence,shop,wine,-,N 
+Off Licences,shop,wine,-,Y 
+Off Licence in,shop,wine,in,N 
+Off Licences in,shop,wine,in,Y 
+Off Licence near,shop,wine,near,N 
+Off Licences near,shop,wine,near,Y 
+Off License,shop,wine,-,N 
+Off Licenses,shop,wine,-,Y 
+Off License in,shop,wine,in,N 
+Off Licenses in,shop,wine,in,Y 
+Off License near,shop,wine,near,N 
+Off Licenses near,shop,wine,near,Y 
+Wine Shop,shop,wine,-,N 
+Wine Shops,shop,wine,-,Y 
+Wine Shop in,shop,wine,in,N 
+Wine Shops in,shop,wine,in,Y 
+Wine Shop near,shop,wine,near,N 
+Wine Shops near,shop,wine,near,Y 
+Nursing Home,social_facility,nursing_home,-,N 
+Nursing Homes,social_facility,nursing_home,-,Y 
+Nursing Home in,social_facility,nursing_home,in,N 
+Nursing Homes in,social_facility,nursing_home,in,Y 
+Nursing Home near,social_facility,nursing_home,near,N 
+Nursing Homes near,social_facility,nursing_home,near,Y 
+Alpine Hut,tourism,alpine_hut,-,N 
+Alpine Huts,tourism,alpine_hut,-,Y 
+Alpine Hut in,tourism,alpine_hut,in,N 
+Alpine Huts in,tourism,alpine_hut,in,Y 
+Alpine Hut near,tourism,alpine_hut,near,N 
+Alpine Huts near,tourism,alpine_hut,near,Y 
+Aquarium,tourism,aquarium,-,N 
+Aquariums,tourism,aquarium,-,Y 
+Aquarium in,tourism,aquarium,in,N 
+Aquariums in,tourism,aquarium,in,Y 
+Aquarium near,tourism,aquarium,near,N 
+Aquariums near,tourism,aquarium,near,Y 
+Artwork,tourism,artwork,-,N 
+Artworks,tourism,artwork,-,Y 
+Artwork in,tourism,artwork,in,N 
+Artworks in,tourism,artwork,in,Y 
+Artwork near,tourism,artwork,near,N 
+Artworks near,tourism,artwork,near,Y 
+Attraction,tourism,attraction,-,N 
+Attractions,tourism,attraction,-,Y 
+Attraction in,tourism,attraction,in,N 
+Attractions in,tourism,attraction,in,Y 
+Attraction near,tourism,attraction,near,N 
+Attractions near,tourism,attraction,near,Y 
+Camp Site,tourism,camp_site,-,N 
+Camp Sites,tourism,camp_site,-,Y 
+Camp Site in,tourism,camp_site,in,N 
+Camp Sites in,tourism,camp_site,in,Y 
+Camp Site near,tourism,camp_site,near,N 
+Camp Sites near,tourism,camp_site,near,Y 
+Caravan Site,tourism,caravan_site,-,N 
+Caravan Sites,tourism,caravan_site,-,Y 
+Caravan Site in,tourism,caravan_site,in,N 
+Caravan Sites in,tourism,caravan_site,in,Y 
+Caravan Site near,tourism,caravan_site,near,N 
+Caravan Sites near,tourism,caravan_site,near,Y 
+Chalet,tourism,chalet,-,N 
+Chalets,tourism,chalet,-,Y 
+Chalet in,tourism,chalet,in,N 
+Chalets in,tourism,chalet,in,Y 
+Chalet near,tourism,chalet,near,N 
+Chalets near,tourism,chalet,near,Y 
+Guest House,tourism,guest_house,-,N 
+Guest Houses,tourism,guest_house,-,Y 
+Guest House in,tourism,guest_house,in,N 
+Guest Houses in,tourism,guest_house,in,Y 
+Guest House near,tourism,guest_house,near,N 
+Guest Houses near,tourism,guest_house,near,Y 
+Hostel,tourism,hostel,-,N 
+Hostels,tourism,hostel,-,Y 
+Hostel in,tourism,hostel,in,N 
+Hostels in,tourism,hostel,in,Y 
+Hostel near,tourism,hostel,near,N 
+Hostels near,tourism,hostel,near,Y 
+Hotel,tourism,hotel,-,N 
+Hotels,tourism,hotel,-,Y 
+Hotel in,tourism,hotel,in,N 
+Hotels in,tourism,hotel,in,Y 
+Hotel near,tourism,hotel,near,N 
+Hotels near,tourism,hotel,near,Y 
+Information,tourism,information,-,N 
+Informations,tourism,information,-,Y 
+Information in,tourism,information,in,N 
+Informations in,tourism,information,in,Y 
+Information near,tourism,information,near,N 
+Informations near,tourism,information,near,Y 
+Motel,tourism,motel,-,N 
+Motels,tourism,motel,-,Y 
+Motel in,tourism,motel,in,N 
+Motels in,tourism,motel,in,Y 
+Motel near,tourism,motel,near,N 
+Motels near,tourism,motel,near,Y 
+Museum,tourism,museum,-,N 
+Museums,tourism,museum,-,Y 
+Museum in,tourism,museum,in,N 
+Museums in,tourism,museum,in,Y 
+Museum near,tourism,museum,near,N 
+Museums near,tourism,museum,near,Y 
+Picnic Site,tourism,picnic_site,-,N 
+Picnic Sites,tourism,picnic_site,-,Y 
+Picnic Site in,tourism,picnic_site,in,N 
+Picnic Sites in,tourism,picnic_site,in,Y 
+Picnic Site near,tourism,picnic_site,near,N 
+Picnic Sites near,tourism,picnic_site,near,Y 
+Theme Park,tourism,theme_park,-,N 
+Theme Parks,tourism,theme_park,-,Y 
+Theme Park in,tourism,theme_park,in,N 
+Theme Parks in,tourism,theme_park,in,Y 
+Theme Park near,tourism,theme_park,near,N 
+Theme Parks near,tourism,theme_park,near,Y 
+Viewpoint,tourism,viewpoint,-,N 
+Viewpoints,tourism,viewpoint,-,Y 
+Viewpoint in,tourism,viewpoint,in,N 
+Viewpoints in,tourism,viewpoint,in,Y 
+Viewpoint near,tourism,viewpoint,near,N 
+Viewpoints near,tourism,viewpoint,near,Y 
+Zoo,tourism,zoo,-,N 
+Zoos,tourism,zoo,-,Y 
+Zoo in,tourism,zoo,in,N 
+Zoos in,tourism,zoo,in,Y 
+Zoo near,tourism,zoo,near,N 
+Zoos near,tourism,zoo,near,Y 
+Boatyard,waterway,boatyard,-,N 
+Boatyards,waterway,boatyard,-,Y 
+Boatyard in,waterway,boatyard,in,N 
+Boatyards in,waterway,boatyard,in,Y 
+Boatyard near,waterway,boatyard,near,N 
+Boatyards near,waterway,boatyard,near,Y 
+Boat Ramp,leisure,slipway,-,N 
+Boat Ramps,leisure,slipway,-,Y 
+Boat Ramp in,leisure,slipway,in,N 
+Boat Ramps in,leisure,slipway,in,Y 
+Boat Ramp near,leisure,slipway,near,N 
+Boat Ramps near,leisure,slipway,near,Y 
+Canal,waterway,canal,-,N 
+Canals,waterway,canal,-,Y 
+Canal in,waterway,canal,in,N 
+Canals in,waterway,canal,in,Y 
+Canal near,waterway,canal,near,N 
+Canals near,waterway,canal,near,Y 
+Dam,waterway,dam,-,N 
+Dams,waterway,dam,-,Y 
+Dam in,waterway,dam,in,N 
+Dams in,waterway,dam,in,Y 
+Dam near,waterway,dam,near,N 
+Dams near,waterway,dam,near,Y 
+Derelict Canal,waterway,derelict_canal,-,N 
+Derelict Canals,waterway,derelict_canal,-,Y 
+Derelict Canal in,waterway,derelict_canal,in,N 
+Derelict Canals in,waterway,derelict_canal,in,Y 
+Derelict Canal near,waterway,derelict_canal,near,N 
+Derelict Canals near,waterway,derelict_canal,near,Y 
+Ditch,waterway,ditch,-,N 
+Ditchs,waterway,ditch,-,Y 
+Ditches,waterway,ditch,-,Y 
+Ditch in,waterway,ditch,in,N 
+Ditchs in,waterway,ditch,in,Y 
+Ditches in,waterway,ditch,in,Y 
+Ditch near,waterway,ditch,near,N 
+Ditchs near,waterway,ditch,near,Y 
+Ditches near,waterway,ditch,near,Y 
+Dock,waterway,dock,-,N 
+Docks,waterway,dock,-,Y 
+Dock in,waterway,dock,in,N 
+Docks in,waterway,dock,in,Y 
+Dock near,waterway,dock,near,N 
+Docks near,waterway,dock,near,Y 
+Drain,waterway,drain,-,N 
+Drains,waterway,drain,-,Y 
+Drain in,waterway,drain,in,N 
+Drains in,waterway,drain,in,Y 
+Drain near,waterway,drain,near,N 
+Drains near,waterway,drain,near,Y 
+Rapids,waterway,rapids,-,N 
+Rapids,waterway,rapids,-,Y 
+Rapids in,waterway,rapids,in,N 
+Rapids in,waterway,rapids,in,Y 
+Rapids near,waterway,rapids,near,N 
+Rapids near,waterway,rapids,near,Y 
+River,waterway,river,-,N 
+Rivers,waterway,river,-,Y 
+River in,waterway,river,in,N 
+Rivers in,waterway,river,in,Y 
+River near,waterway,river,near,N 
+Rivers near,waterway,river,near,Y 
+Riverbank,waterway,riverbank,-,N 
+Riverbanks,waterway,riverbank,-,Y 
+Riverbank in,waterway,riverbank,in,N 
+Riverbanks in,waterway,riverbank,in,Y 
+Riverbank near,waterway,riverbank,near,N 
+Riverbanks near,waterway,riverbank,near,Y 
+Stream,waterway,stream,-,N 
+Streams,waterway,stream,-,Y 
+Stream in,waterway,stream,in,N 
+Streams in,waterway,stream,in,Y 
+Stream near,waterway,stream,near,N 
+Streams near,waterway,stream,near,Y 
+Wadi,waterway,wadi,-,N 
+Wadis,waterway,wadi,-,Y 
+Wadi in,waterway,wadi,in,N 
+Wadis in,waterway,wadi,in,Y 
+Wadi near,waterway,wadi,near,N 
+Wadis near,waterway,wadi,near,Y 
+Waterfall,waterway,waterfall,-,N 
+Waterfalls,waterway,waterfall,-,Y 
+Waterfall in,waterway,waterfall,in,N 
+Waterfalls in,waterway,waterfall,in,Y 
+Waterfall near,waterway,waterfall,near,N 
+Waterfalls near,waterway,waterfall,near,Y 
+Water Point,waterway,water_point,-,N 
+Water Points,waterway,water_point,-,Y 
+Water Point in,waterway,water_point,in,N 
+Water Points in,waterway,water_point,in,Y 
+Water Point near,waterway,water_point,near,N 
+Water Points near,waterway,water_point,near,Y 
+Weir,waterway,weir,-,N 
+Weirs,waterway,weir,-,Y 
+Weir in,waterway,weir,in,N 
+Weirs in,waterway,weir,in,Y 
+Weir near,waterway,weir,near,N 
+Weirs near,waterway,weir,near,Y 
+Coworking,office,coworking,-,N 
+Coworkings,office,coworking,-,Y 
+Coworking in,office,coworking,in,N 
+Coworkings in,office,coworking,in,Y 
+Coworking near,office,coworking,near,N 
+Coworkings near,office,coworking,near,Y 
+Coworking,amenity,coworking_space,-,N 
+Coworkings,amenity,coworking_space,-,Y 
+Coworking in,amenity,coworking_space,in,N 
+Coworkings in,amenity,coworking_space,in,Y 
+Coworking near,amenity,coworking_space,near,N 
+Coworkings near,amenity,coworking_space,near,Y