--- /dev/null
+# https://github.com/codespell-project/codespell
+
+[codespell]
+skip = ./man/nominatim.1,data,./docs/styles.css,lib-php,module,munin,osm2pgsql,./test,./settings/*.lua,./settings/*.yaml,./settings/**/*.yaml,./settings/icu-rules,./nominatim/tokenizer/token_analysis/config_variants.py
+# Need to be lowercase in the list
+# Unter = Unter den Linden (an example address)
+ignore-words-list = inout,unter
--- /dev/null
+[flake8]
+max-line-length = 100
+max-doc-length = 100
+extend-ignore =
+ # something == None constructs are needed for SQLAlchemy
+ E711
+per-file-ignores =
+ __init__.py: F401
- name: Nominatim Discussions
url: https://github.com/osm-search/Nominatim/discussions
about: Ask questions, get support, share ideas and discuss with community members.
+ - name: Discussions about OpenStreetMap data
+ url: https://community.openstreetmap.org/
+ about: Ask questions about the data used by Nominatim and discuss with the OSM community.
## What result did you expect?
-**When the result in the right place and just named wrongly:**
+**When the result is in the right place and just named wrongly:**
<!-- Please tell us the display name you expected. -->
-**When the result missing completely:**
+**When the result is missing completely:**
<!-- Make sure that the data you are looking for is in OpenStreetMap. Provide a link to the OpenStreetMap object or if you cannot get it, a link to the map on https://openstreetmap.org where you expect the result to be.
---
-<!-- Note: if you are installing Nominatim through a docker image, you should report issues with the installation process with the docker repository first. -->
+<!-- Note: if you are installing Nominatim through a docker image, you should report issues with the installation process with the docker repository first.
+
+     Do not send screenshots! Copy any console output directly into the issue.
+ -->
**Describe the bug**
-<!-- A clear and concise description of what the bug is. -->
+<!-- A clear and concise description of what the bug is. -->
**To Reproduce**
<!-- Please describe what you did to get to the issue. -->
- RAM:
- number of CPUs:
- type and size of disks:
-- bare metal/AWS/other cloud service:
**Postgresql Configuration:**
<!-- List any configuration items you changed in your postgresql configuration. -->
+**Nominatim Configuration:**
+
+<!-- List the contents of your customized `.env` file. -->
+
**Additional context**
<!-- Add any other context about the problem here. -->
name: 'Build Nominatim'
inputs:
- ubuntu:
- description: 'Version of Ubuntu to install on'
+ dependencies:
+ description: 'Where to install dependencies from (pip/apt)'
required: false
- default: '20'
- cmake-args:
- description: 'Additional options to hand to cmake'
- required: false
- default: ''
+ default: 'pip'
runs:
using: "composite"
sudo rm -rf /opt/hostedtoolcache/go /opt/hostedtoolcache/CodeQL /usr/lib/jvm /usr/local/share/chromium /usr/local/lib/android
df -h
shell: bash
- - name: Install prerequisites
+ - name: Install general prerequisites
run: |
- sudo apt-get install -y -qq libboost-system-dev libboost-filesystem-dev libexpat1-dev zlib1g-dev libbz2-dev libpq-dev libproj-dev libicu-dev
- if [ "x$UBUNTUVER" == "x18" ]; then
- pip3 install python-dotenv psycopg2==2.7.7 jinja2==2.8 psutil==5.4.2 pyicu==2.9 osmium PyYAML==5.1 datrie
- else
- sudo apt-get install -y -qq python3-icu python3-datrie python3-pyosmium python3-jinja2 python3-psutil python3-psycopg2 python3-dotenv python3-yaml
- fi
+ sudo apt-get install -y -qq libspatialite-dev libsqlite3-mod-spatialite libicu-dev virtualenv python3-dev osm2pgsql
shell: bash
- env:
- UBUNTUVER: ${{ inputs.ubuntu }}
- CMAKE_ARGS: ${{ inputs.cmake-args }}
- - name: Configure
- run: mkdir build && cd build && cmake $CMAKE_ARGS ../Nominatim
+ - name: Install prerequisites from apt
+ run: |
+ sudo apt-get install -y -qq python3-icu python3-datrie python3-jinja2 python3-psutil python3-dotenv python3-yaml python3-sqlalchemy python3-psycopg python3-asyncpg
shell: bash
- env:
- CMAKE_ARGS: ${{ inputs.cmake-args }}
+ if: inputs.dependencies == 'apt'
- - name: Build
+ - name: Setup virtual environment (for pip)
run: |
- make -j2 all
- sudo make install
+ virtualenv venv
+ ./venv/bin/pip install -U pip
+ shell: bash
+ if: inputs.dependencies == 'pip'
+
+ - name: Setup virtual environment (for apt)
+ run: |
+ virtualenv venv --system-site-packages
+ shell: bash
+ if: inputs.dependencies == 'apt'
+
+ - name: Build nominatim
+ run: ./venv/bin/pip install Nominatim/packaging/nominatim-{api,db}
shell: bash
- working-directory: build
postgresql-version:
description: 'Version of PostgreSQL to install'
required: true
- postgis-version:
- description: 'Version of Postgis to install'
- required: true
runs:
using: "composite"
- name: Remove existing PostgreSQL
run: |
sudo apt-get purge -yq postgresql*
- sudo sh -c 'echo "deb http://apt.postgresql.org/pub/repos/apt $(lsb_release -cs)-pgdg main" > /etc/apt/sources.list.d/pgdg.list'
+ sudo apt install curl ca-certificates gnupg
+ curl https://www.postgresql.org/media/keys/ACCC4CF8.asc | gpg --dearmor | sudo tee /etc/apt/trusted.gpg.d/apt.postgresql.org.gpg >/dev/null
+ sudo sh -c 'echo "deb https://apt.postgresql.org/pub/repos/apt $(lsb_release -cs)-pgdg main" > /etc/apt/sources.list.d/pgdg.list'
sudo apt-get update -qq
shell: bash
- name: Install PostgreSQL
run: |
- sudo apt-get install -y -qq --no-install-suggests --no-install-recommends postgresql-client-${PGVER} postgresql-${PGVER}-postgis-${POSTGISVER} postgresql-${PGVER}-postgis-${POSTGISVER}-scripts postgresql-contrib-${PGVER} postgresql-${PGVER}
+ sudo apt-get install -y -qq --no-install-suggests --no-install-recommends postgresql-client-${PGVER} postgresql-${PGVER}-postgis-3 postgresql-${PGVER}-postgis-3-scripts postgresql-contrib-${PGVER} postgresql-${PGVER}
shell: bash
env:
PGVER: ${{ inputs.postgresql-version }}
- POSTGISVER: ${{ inputs.postgis-version }}
- name: Adapt postgresql configuration
run: |
runs-on: ubuntu-latest
steps:
- - uses: actions/checkout@v2
+ - uses: actions/checkout@v4
with:
submodules: true
- - uses: actions/cache@v2
+ - uses: actions/cache@v4
with:
path: |
data/country_osm_grid.sql.gz
mv nominatim-src.tar.bz2 Nominatim
- name: 'Upload Artifact'
- uses: actions/upload-artifact@v2
+ uses: actions/upload-artifact@v4
with:
name: full-source
path: nominatim-src.tar.bz2
needs: create-archive
strategy:
matrix:
- ubuntu: [18, 20, 22]
+ flavour: ["ubuntu-20", "ubuntu-24"]
include:
- - ubuntu: 18
- postgresql: 9.6
- postgis: 2.5
- pytest: pytest
- php: 7.2
- - ubuntu: 20
- postgresql: 13
- postgis: 3
- pytest: py.test-3
- php: 7.4
- - ubuntu: 22
- postgresql: 14
- postgis: 3
- pytest: py.test-3
- php: 8.1
+ - flavour: ubuntu-20
+ ubuntu: 20
+ postgresql: 12
+ lua: '5.1'
+ dependencies: pip
+ - flavour: ubuntu-24
+ ubuntu: 24
+ postgresql: 17
+ lua: '5.3'
+ dependencies: apt
runs-on: ubuntu-${{ matrix.ubuntu }}.04
steps:
- - uses: actions/download-artifact@v2
+ - uses: actions/download-artifact@v4
with:
name: full-source
- name: Unpack Nominatim
run: tar xf nominatim-src.tar.bz2
- - name: Setup PHP
- uses: shivammathur/setup-php@v2
- with:
- php-version: ${{ matrix.php }}
- tools: phpunit, phpcs, composer
- ini-values: opcache.jit=disable
-
- - uses: actions/setup-python@v2
- with:
- python-version: 3.6
- if: matrix.ubuntu == 18
-
- uses: ./Nominatim/.github/actions/setup-postgresql
with:
postgresql-version: ${{ matrix.postgresql }}
- postgis-version: ${{ matrix.postgis }}
- uses: ./Nominatim/.github/actions/build-nominatim
with:
- ubuntu: ${{ matrix.ubuntu }}
+ dependencies: ${{ matrix.dependencies }}
- - name: Install test prerequsites
- run: sudo apt-get install -y -qq python3-pytest python3-behave
- if: matrix.ubuntu == 20
+ - name: Compile osm2pgsql
+ run: |
+ sudo apt-get install -y -qq libboost-system-dev libboost-filesystem-dev libexpat1-dev zlib1g-dev libbz2-dev libpq-dev libproj-dev libicu-dev liblua${LUA_VERSION}-dev lua-dkjson nlohmann-json3-dev
+ mkdir osm2pgsql-build
+ cd osm2pgsql-build
+ git clone https://github.com/osm2pgsql-dev/osm2pgsql
+ mkdir build
+ cd build
+ cmake ../osm2pgsql
+ make
+ sudo make install
+ cd ../..
+ rm -rf osm2pgsql-build
+ if: matrix.ubuntu == '20'
+ env:
+ LUA_VERSION: ${{ matrix.lua }}
- - name: Install test prerequsites
- run: pip3 install pylint pytest behave==1.2.6
- if: ${{ (matrix.ubuntu == 18) || (matrix.ubuntu == 22) }}
+ - name: Install test prerequisites
+ run: ./venv/bin/pip install behave==1.2.6
- - name: Install test prerequsites
- run: sudo apt-get install -y -qq python3-pytest
- if: matrix.ubuntu == 22
+ - name: Install test prerequisites (apt)
+ run: sudo apt-get install -y -qq python3-pytest python3-pytest-asyncio uvicorn python3-falcon python3-aiosqlite python3-pyosmium
+ if: matrix.dependencies == 'apt'
- - name: Install latest pylint/mypy
- run: pip3 install -U pylint mypy types-PyYAML types-jinja2 types-psycopg2 types-psutil typing-extensions
+ - name: Install test prerequisites (pip)
+ run: ./venv/bin/pip install pytest-asyncio falcon starlette asgi_lifespan aiosqlite osmium uvicorn
+ if: matrix.dependencies == 'pip'
- - name: PHP linting
- run: phpcs --report-width=120 .
- working-directory: Nominatim
+ - name: Install latest flake8
+ run: ./venv/bin/pip install -U flake8
- name: Python linting
- run: pylint nominatim
+ run: ../venv/bin/python -m flake8 src
working-directory: Nominatim
+ - name: Install mypy and typechecking info
+ run: ./venv/bin/pip install -U mypy types-PyYAML types-jinja2 types-psutil types-requests types-ujson types-Pygments typing-extensions
+ if: matrix.dependencies == 'pip'
+
- name: Python static typechecking
- run: mypy --strict nominatim
+ run: ../venv/bin/python -m mypy --strict --python-version 3.8 src
working-directory: Nominatim
-
-
- - name: PHP unit tests
- run: phpunit ./
- working-directory: Nominatim/test/php
- if: ${{ (matrix.ubuntu == 20) || (matrix.ubuntu == 22) }}
+ if: matrix.dependencies == 'pip'
- name: Python unit tests
- run: $PYTEST test/python
+ run: ../venv/bin/python -m pytest test/python
working-directory: Nominatim
- env:
- PYTEST: ${{ matrix.pytest }}
- name: BDD tests
run: |
- behave -DREMOVE_TEMPLATE=1 -DBUILDDIR=$GITHUB_WORKSPACE/build --format=progress3
+ ../../../venv/bin/python -m behave -DREMOVE_TEMPLATE=1 --format=progress3
working-directory: Nominatim/test/bdd
-
- legacy-test:
- needs: create-archive
- runs-on: ubuntu-20.04
-
- steps:
- - uses: actions/download-artifact@v2
- with:
- name: full-source
-
- - name: Unpack Nominatim
- run: tar xf nominatim-src.tar.bz2
-
- - name: Setup PHP
- uses: shivammathur/setup-php@v2
- with:
- php-version: 7.4
-
- - uses: ./Nominatim/.github/actions/setup-postgresql
- with:
- postgresql-version: 13
- postgis-version: 3
-
- - name: Install Postgresql server dev
- run: sudo apt-get install postgresql-server-dev-13
-
- - uses: ./Nominatim/.github/actions/build-nominatim
- with:
- ubuntu: 20
- cmake-args: -DBUILD_MODULE=on
-
- - name: Install test prerequsites
- run: sudo apt-get install -y -qq python3-behave
-
- - name: BDD tests (legacy tokenizer)
- run: |
- behave -DREMOVE_TEMPLATE=1 -DBUILDDIR=$GITHUB_WORKSPACE/build -DTOKENIZER=legacy --format=progress3
- working-directory: Nominatim/test/bdd
-
-
install:
runs-on: ubuntu-latest
needs: create-archive
strategy:
matrix:
- name: [Ubuntu-18, Ubuntu-20, Ubuntu-22]
+ name: [Ubuntu-22, Ubuntu-24]
include:
- - name: Ubuntu-18
- flavour: ubuntu
- image: "ubuntu:18.04"
- ubuntu: 18
- install_mode: install-nginx
- - name: Ubuntu-20
- flavour: ubuntu
- image: "ubuntu:20.04"
- ubuntu: 20
- install_mode: install-apache
- name: Ubuntu-22
- flavour: ubuntu
image: "ubuntu:22.04"
ubuntu: 22
install_mode: install-apache
+ - name: Ubuntu-24
+ image: "ubuntu:24.04"
+ ubuntu: 24
+ install_mode: install-apache
container:
image: ${{ matrix.image }}
apt-get install -y git sudo wget
ln -snf /usr/share/zoneinfo/$CONTAINER_TIMEZONE /etc/localtime && echo $CONTAINER_TIMEZONE > /etc/timezone
shell: bash
- if: matrix.flavour == 'ubuntu'
-
- - name: Prepare container (CentOS)
- run: |
- dnf update -y
- dnf install -y sudo glibc-langpack-en
- shell: bash
- if: matrix.flavour == 'centos'
- name: Setup import user
run: |
OS: ${{ matrix.name }}
INSTALL_MODE: ${{ matrix.install_mode }}
- - uses: actions/download-artifact@v2
+ - uses: actions/download-artifact@v4
with:
name: full-source
path: /home/nominatim
mkdir data-env-reverse
working-directory: /home/nominatim
- - name: Prepare import environment (CentOS)
+ - name: Add nominatim to path
run: |
- sudo ln -s /usr/local/bin/nominatim /usr/bin/nominatim
- echo NOMINATIM_DATABASE_WEBUSER="apache" > nominatim-project/.env
- cp nominatim-project/.env data-env-reverse/.env
- working-directory: /home/nominatim
- if: matrix.flavour == 'centos'
+ sudo ln -s /home/nominatim/nominatim-venv/bin/nominatim /usr/local/bin/nominatim
+
+ - name: Need lua binary
+ run: |
+ sudo apt-get install -y lua5.4 lua-dkjson
- name: Print version
run: nominatim --version
working-directory: /home/nominatim/nominatim-project
+ - name: Print taginfo
+ run: lua ./nominatim-venv/lib/*/site-packages/nominatim_db/resources/lib-lua/taginfo.lua
+ working-directory: /home/nominatim
+
- name: Collect host OS information
run: nominatim admin --collect-os-info
working-directory: /home/nominatim/nominatim-project
-
+
- name: Import
run: nominatim import --osm-file ../test.pbf
working-directory: /home/nominatim/nominatim-project
run: nominatim admin --warm
working-directory: /home/nominatim/nominatim-project
- - name: Prepare update (Ubuntu)
- run: apt-get install -y python3-pip
- shell: bash
- if: matrix.flavour == 'ubuntu'
+ - name: Install osmium
+ run: |
+ /home/nominatim/nominatim-venv/bin/pip install osmium
- name: Run update
run: |
- pip3 install --user osmium
nominatim replication --init
NOMINATIM_REPLICATION_MAX_DIFF=1 nominatim replication --once
working-directory: /home/nominatim/nominatim-project
- name: Clean up database (reverse-only import)
run: nominatim refresh --postcodes --word-tokens
working-directory: /home/nominatim/nominatim-project
+
+ install-no-superuser:
+ runs-on: ubuntu-24.04
+ needs: create-archive
+
+ steps:
+ - uses: actions/download-artifact@v4
+ with:
+ name: full-source
+
+ - name: Unpack Nominatim
+ run: tar xf nominatim-src.tar.bz2
+
+ - uses: ./Nominatim/.github/actions/setup-postgresql
+ with:
+ postgresql-version: 16
+
+ - uses: ./Nominatim/.github/actions/build-nominatim
+
+ - name: Prepare import environment
+ run: |
+ mv Nominatim/test/testdb/apidb-test-data.pbf test.pbf
+ rm -rf Nominatim
+
+ - name: Prepare Database
+ run: |
+ ./venv/bin/nominatim import --prepare-database
+
+ - name: Create import user
+ run: |
+ sudo -u postgres createuser osm-import
+ psql -d nominatim -c "ALTER USER \"osm-import\" WITH PASSWORD 'osm-import'"
+ psql -d nominatim -c 'GRANT CREATE ON SCHEMA public TO "osm-import"'
+
+ - name: Run import
+ run: |
+ NOMINATIM_DATABASE_DSN="pgsql:host=127.0.0.1;dbname=nominatim;user=osm-import;password=osm-import" ./venv/bin/nominatim import --continue import-from-file --osm-file test.pbf
+
+ - name: Check full import
+ run: ./venv/bin/nominatim admin --check-database
+
+ migrate:
+ runs-on: ubuntu-24.04
+ needs: create-archive
+
+ steps:
+ - uses: actions/download-artifact@v4
+ with:
+ name: full-source
+
+ - name: Unpack Nominatim
+ run: tar xf nominatim-src.tar.bz2
+
+ - uses: ./Nominatim/.github/actions/setup-postgresql
+ with:
+ postgresql-version: 17
+
+ - name: Install Python dependencies
+ run: |
+ sudo apt-get install --no-install-recommends virtualenv osm2pgsql
+
+ - name: Install Nominatim master version
+ run: |
+ virtualenv master
+ cd Nominatim
+ ../master/bin/pip install packaging/nominatim-db
+
+ - name: Install Nominatim from pypi
+ run: |
+ virtualenv release
+ ./release/bin/pip install nominatim-db
+
+ - name: Import Nominatim database using release
+ run: |
+ ./release/bin/nominatim import --osm-file Nominatim/test/testdb/apidb-test-data.pbf
+ ./release/bin/nominatim add-data --file Nominatim/test/testdb/additional_api_test.data.osm
+
+ - name: Migrate to master version
+ run: |
+ ./master/bin/nominatim admin --migrate
+ ./release/bin/nominatim add-data --file Nominatim/test/testdb/additional_api_test.data.osm
+
+ codespell:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: codespell-project/actions-codespell@v2
+ with:
+ only_warn: 1
*.log
*.pyc
+*.swp
docs/develop/*.png
+site-html
build
+dist
+.coverage
.vagrant
data/country_osm_grid.sql.gz
-[submodule "osm2pgsql"]
- path = osm2pgsql
- url = https://github.com/openstreetmap/osm2pgsql.git
- ignore = dirty
[mypy]
+plugins = sqlalchemy.ext.mypy.plugin
+
+[mypy-sanic_cors.*]
+ignore_missing_imports = True
[mypy-icu.*]
ignore_missing_imports = True
-[mypy-osmium.*]
+[mypy-asyncpg.*]
ignore_missing_imports = True
[mypy-datrie.*]
[mypy-dotenv.*]
ignore_missing_imports = True
+
+[mypy-falcon.*]
+ignore_missing_imports = True
+
+[mypy-geoalchemy2.*]
+ignore_missing_imports = True
+++ /dev/null
-[MASTER]
-
-extension-pkg-whitelist=osmium
-ignored-modules=icu,datrie
-
-[MESSAGES CONTROL]
-
-[TYPECHECK]
-
-# closing added here because it sometimes triggers a false positive with
-# 'with' statements.
-ignored-classes=NominatimArgs,closing
-# 'too-many-ancestors' is triggered already by deriving from UserDict
-# 'not-context-manager' disabled because it causes false positives once
-# typed Python is enabled. See also https://github.com/PyCQA/pylint/issues/5273
-disable=too-few-public-methods,duplicate-code,too-many-ancestors,bad-option-value,no-self-use,not-context-manager
-
-good-names=i,x,y,fd,db,cc
+++ /dev/null
-#-----------------------------------------------------------------------------
-#
-# CMake Config
-#
-# Nominatim
-#
-#-----------------------------------------------------------------------------
-
-cmake_minimum_required(VERSION 3.0 FATAL_ERROR)
-list(APPEND CMAKE_MODULE_PATH "${CMAKE_SOURCE_DIR}/cmake")
-
-
-#-----------------------------------------------------------------------------
-#
-# Project version
-#
-#-----------------------------------------------------------------------------
-
-project(nominatim)
-
-set(NOMINATIM_VERSION_MAJOR 4)
-set(NOMINATIM_VERSION_MINOR 1)
-set(NOMINATIM_VERSION_PATCH 0)
-
-set(NOMINATIM_VERSION "${NOMINATIM_VERSION_MAJOR}.${NOMINATIM_VERSION_MINOR}.${NOMINATIM_VERSION_PATCH}")
-
-add_definitions(-DNOMINATIM_VERSION="${NOMINATIM_VERSION}")
-
-# Setting GIT_HASH
-find_package(Git)
-if (GIT_FOUND)
- execute_process(
- COMMAND "${GIT_EXECUTABLE}" log -1 --format=%h
- WORKING_DIRECTORY ${CMAKE_CURRENT_LIST_DIR}
- OUTPUT_VARIABLE GIT_HASH
- OUTPUT_STRIP_TRAILING_WHITESPACE
- ERROR_QUIET
- )
-endif()
-
-#-----------------------------------------------------------------------------
-# Configuration
-#-----------------------------------------------------------------------------
-
-set(BUILD_IMPORTER on CACHE BOOL "Build everything for importing/updating the database")
-set(BUILD_API on CACHE BOOL "Build everything for the API server")
-set(BUILD_MODULE off CACHE BOOL "Build PostgreSQL module for legacy tokenizer")
-set(BUILD_TESTS on CACHE BOOL "Build test suite")
-set(BUILD_DOCS on CACHE BOOL "Build documentation")
-set(BUILD_MANPAGE on CACHE BOOL "Build Manual Page")
-set(BUILD_OSM2PGSQL on CACHE BOOL "Build osm2pgsql (expert only)")
-set(INSTALL_MUNIN_PLUGINS on CACHE BOOL "Install Munin plugins for supervising Nominatim")
-
-#-----------------------------------------------------------------------------
-# osm2pgsql (imports/updates only)
-#-----------------------------------------------------------------------------
-
-if (BUILD_IMPORTER AND BUILD_OSM2PGSQL)
- if (NOT EXISTS "${CMAKE_SOURCE_DIR}/osm2pgsql/CMakeLists.txt")
- message(FATAL_ERROR "The osm2pgsql directory is empty.\
- Did you forget to check out Nominatim recursively?\
- \nTry updating submodules with: git submodule update --init")
- endif()
- set(BUILD_TESTS_SAVED "${BUILD_TESTS}")
- set(BUILD_TESTS off)
- set(WITH_LUA off CACHE BOOL "")
- add_subdirectory(osm2pgsql)
- set(BUILD_TESTS ${BUILD_TESTS_SAVED})
-endif()
-
-
-#-----------------------------------------------------------------------------
-# python (imports/updates only)
-#-----------------------------------------------------------------------------
-
-if (BUILD_IMPORTER)
- find_package(PythonInterp 3.6 REQUIRED)
-endif()
-
-#-----------------------------------------------------------------------------
-# PHP
-#-----------------------------------------------------------------------------
-
-# Setting PHP binary variable as to command line (prevailing) or auto detect
-
-if (BUILD_API OR BUILD_IMPORTER)
- if (NOT PHP_BIN)
- find_program (PHP_BIN php)
- endif()
- # sanity check if PHP binary exists
- if (NOT EXISTS ${PHP_BIN})
- message(FATAL_ERROR "PHP binary not found. Install php or provide location with -DPHP_BIN=/path/php ")
- else()
- message (STATUS "Using PHP binary " ${PHP_BIN})
- endif()
- if (NOT PHPCGI_BIN)
- find_program (PHPCGI_BIN php-cgi)
- endif()
- # sanity check if PHP binary exists
- if (NOT EXISTS ${PHPCGI_BIN})
- message(WARNING "php-cgi binary not found. nominatim tool will not provide query functions.")
- set (PHPCGI_BIN "")
- else()
- message (STATUS "Using php-cgi binary " ${PHPCGI_BIN})
- endif()
-endif()
-
-#-----------------------------------------------------------------------------
-# import scripts and utilities (importer only)
-#-----------------------------------------------------------------------------
-
-if (BUILD_IMPORTER)
- find_file(COUNTRY_GRID_FILE country_osm_grid.sql.gz
- PATHS ${PROJECT_SOURCE_DIR}/data
- NO_DEFAULT_PATH
- DOC "Location of the country grid file."
- )
-
- if (NOT COUNTRY_GRID_FILE)
- message(FATAL_ERROR "\nYou need to download the country_osm_grid first:\n"
- " wget -O ${PROJECT_SOURCE_DIR}/data/country_osm_grid.sql.gz https://www.nominatim.org/data/country_grid.sql.gz")
- endif()
-
- configure_file(${PROJECT_SOURCE_DIR}/cmake/tool.tmpl
- ${PROJECT_BINARY_DIR}/nominatim)
-endif()
-
-#-----------------------------------------------------------------------------
-# Tests
-#-----------------------------------------------------------------------------
-
-if (BUILD_TESTS)
- include(CTest)
-
- set(TEST_BDD db osm2pgsql api)
-
- find_program(PYTHON_BEHAVE behave)
- find_program(PYLINT NAMES pylint3 pylint)
- find_program(PYTEST NAMES pytest py.test-3 py.test)
- find_program(PHPCS phpcs)
- find_program(PHPUNIT phpunit)
-
- if (PYTHON_BEHAVE)
- message(STATUS "Using Python behave binary ${PYTHON_BEHAVE}")
- foreach (test ${TEST_BDD})
- add_test(NAME bdd_${test}
- COMMAND ${PYTHON_BEHAVE} ${test}
- WORKING_DIRECTORY ${PROJECT_SOURCE_DIR}/test/bdd)
- set_tests_properties(bdd_${test}
- PROPERTIES ENVIRONMENT "NOMINATIM_DIR=${PROJECT_BINARY_DIR}")
- endforeach()
- else()
- message(WARNING "behave not found. BDD tests disabled." )
- endif()
-
- if (PHPUNIT)
- message(STATUS "Using phpunit binary ${PHPUNIT}")
- add_test(NAME php
- COMMAND ${PHPUNIT} ./
- WORKING_DIRECTORY ${PROJECT_SOURCE_DIR}/test/php)
- else()
- message(WARNING "phpunit not found. PHP unit tests disabled." )
- endif()
-
- if (PHPCS)
- message(STATUS "Using phpcs binary ${PHPCS}")
- add_test(NAME phpcs
- COMMAND ${PHPCS} --report-width=120 --colors lib-php
- WORKING_DIRECTORY ${PROJECT_SOURCE_DIR})
- else()
- message(WARNING "phpcs not found. PHP linting tests disabled." )
- endif()
-
- if (PYLINT)
- message(STATUS "Using pylint binary ${PYLINT}")
- add_test(NAME pylint
- COMMAND ${PYLINT} nominatim
- WORKING_DIRECTORY ${PROJECT_SOURCE_DIR})
- else()
- message(WARNING "pylint not found. Python linting tests disabled.")
- endif()
-
- if (PYTEST)
- message(STATUS "Using pytest binary ${PYTEST}")
- add_test(NAME pytest
- COMMAND ${PYTEST} test/python
- WORKING_DIRECTORY ${PROJECT_SOURCE_DIR})
- else()
- message(WARNING "pytest not found. Python tests disabled." )
- endif()
-endif()
-
-#-----------------------------------------------------------------------------
-# Postgres module
-#-----------------------------------------------------------------------------
-
-if (BUILD_MODULE)
- add_subdirectory(module)
-endif()
-
-#-----------------------------------------------------------------------------
-# Documentation
-#-----------------------------------------------------------------------------
-
-if (BUILD_DOCS)
- add_subdirectory(docs)
-endif()
-
-#-----------------------------------------------------------------------------
-# Manual page
-#-----------------------------------------------------------------------------
-
-if (BUILD_MANPAGE)
- add_subdirectory(man)
-endif()
-
-#-----------------------------------------------------------------------------
-# Installation
-#-----------------------------------------------------------------------------
-
-
-include(GNUInstallDirs)
-set(NOMINATIM_DATADIR ${CMAKE_INSTALL_FULL_DATADIR}/${PROJECT_NAME})
-set(NOMINATIM_LIBDIR ${CMAKE_INSTALL_FULL_LIBDIR}/${PROJECT_NAME})
-set(NOMINATIM_CONFIGDIR ${CMAKE_INSTALL_FULL_SYSCONFDIR}/${PROJECT_NAME})
-set(NOMINATIM_MUNINDIR ${CMAKE_INSTALL_FULL_DATADIR}/munin/plugins)
-
-if (BUILD_IMPORTER)
- configure_file(${PROJECT_SOURCE_DIR}/cmake/tool-installed.tmpl installed.bin)
- install(PROGRAMS ${PROJECT_BINARY_DIR}/installed.bin
- DESTINATION ${CMAKE_INSTALL_BINDIR}
- RENAME nominatim)
-
- install(DIRECTORY nominatim
- DESTINATION ${NOMINATIM_LIBDIR}/lib-python
- FILES_MATCHING PATTERN "*.py"
- PATTERN __pycache__ EXCLUDE)
- install(DIRECTORY lib-sql DESTINATION ${NOMINATIM_LIBDIR})
-
- install(FILES ${COUNTRY_GRID_FILE}
- data/words.sql
- DESTINATION ${NOMINATIM_DATADIR})
-endif()
-
-if (BUILD_OSM2PGSQL)
- if (${CMAKE_VERSION} VERSION_LESS 3.13)
- # Installation of subdirectory targets was only introduced in 3.13.
- # So just copy the osm2pgsql file for older versions.
- install(PROGRAMS ${PROJECT_BINARY_DIR}/osm2pgsql/osm2pgsql
- DESTINATION ${NOMINATIM_LIBDIR})
- else()
- install(TARGETS osm2pgsql RUNTIME DESTINATION ${NOMINATIM_LIBDIR})
- endif()
-endif()
-
-if (BUILD_MODULE)
- install(PROGRAMS ${PROJECT_BINARY_DIR}/module/nominatim.so
- DESTINATION ${NOMINATIM_LIBDIR}/module)
-endif()
-
-if (BUILD_API)
- install(DIRECTORY lib-php DESTINATION ${NOMINATIM_LIBDIR})
-endif()
-
-install(FILES settings/env.defaults
- settings/address-levels.json
- settings/phrase-settings.json
- settings/import-admin.style
- settings/import-street.style
- settings/import-address.style
- settings/import-full.style
- settings/import-extratags.style
- settings/icu_tokenizer.yaml
- settings/country_settings.yaml
- DESTINATION ${NOMINATIM_CONFIGDIR})
-
-install(DIRECTORY settings/icu-rules
- DESTINATION ${NOMINATIM_CONFIGDIR})
-install(DIRECTORY settings/country-names
- DESTINATION ${NOMINATIM_CONFIGDIR})
-
-if (INSTALL_MUNIN_PLUGINS)
- install(FILES munin/nominatim_importlag
- munin/nominatim_query_speed
- munin/nominatim_requests
- DESTINATION ${NOMINATIM_MUNINDIR})
-endif()
an issue first or comment on the appropriate issue already existing so
that duplicate work can be avoided.
+### Using AI-assisted code generators
+
+PRs that include AI-generated content, be it in code, in the PR
+description or in documentation need to
+
+1. clearly mark the AI-generated sections as such, for example, by
+ mentioning all use of AI in the PR description, and
+2. include proof that you have run the generated code on an actual
+   installation of Nominatim. Adding and executing tests will not be
+ sufficient. You need to show that the code actually solves the problem
+ the PR claims to solve.
+
+
## Coding style
Nominatim historically hasn't followed a particular coding style but we
* no spaces after opening and before closing bracket
* leave out space between a function name and bracket
but add one between control statement(if, while, etc.) and bracket
- * for PHP variables use CamelCase with a prefixing letter indicating the type
- (i - integer, f - float, a - array, s - string, o - object)
-The coding style is enforced with PHPCS and pylint. It can be tested with:
+The coding style is enforced with flake8. It can be tested with:
```
-phpcs --report-width=120 --colors .
-pylint3 --extension-pkg-whitelist=osmium nominatim
+make lint
```
## Testing
Before submitting a pull request make sure that the tests pass:
```
- cd build
- make test
+ make tests
```
+
+## Releases
+
+Nominatim follows semantic versioning. Major releases are done for large changes
+that require (or at least strongly recommend) a reimport of the databases.
+Minor releases can usually be applied to existing databases. Patch releases
+contain bug fixes only and are released from a separate branch where the
+relevant changes are cherry-picked from the master branch.
+
+Checklist for releases:
+
+* [ ] increase versions in
+ * `src/nominatim_api/version.py`
+ * `src/nominatim_db/version.py`
+* [ ] update `ChangeLog` (copy information from patch releases from release branch)
+* [ ] complete `docs/admin/Migration.md`
+* [ ] update EOL dates in `SECURITY.md`
+* [ ] commit and make sure CI tests pass
+* [ ] update OSMF production repo and release new version -post1 there
+* [ ] test migration
+ * download, build and import previous version
+ * migrate using master version
+ * run updates using master version
+* [ ] prepare tarball:
+ * `git clone https://github.com/osm-search/Nominatim` (switch to right branch!)
+ * `rm -r .git*`
+ * copy country data into `data/`
+ * add version to base directory and package
+* [ ] upload tarball to https://nominatim.org
+* [ ] prepare documentation
+ * check out new docs branch
+ * change git checkout instructions to tarball download instructions or adapt version on existing ones
+ * build documentation and copy to https://github.com/osm-search/nominatim-org-site
+ * add new version to history
+* [ ] check release tarball
+ * download tarball as per new documentation instructions
+ * compile and import Nominatim
+ * run `nominatim --version` to confirm correct version
+* [ ] tag new release and add a release on github.com
+* [ ] build pip packages and upload to pypi
- GNU GENERAL PUBLIC LICENSE
- Version 2, June 1991
-
- Copyright (C) 1989, 1991 Free Software Foundation, Inc.,
- 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
- Everyone is permitted to copy and distribute verbatim copies
- of this license document, but changing it is not allowed.
-
- Preamble
-
- The licenses for most software are designed to take away your
-freedom to share and change it. By contrast, the GNU General Public
-License is intended to guarantee your freedom to share and change free
-software--to make sure the software is free for all its users. This
-General Public License applies to most of the Free Software
-Foundation's software and to any other program whose authors commit to
-using it. (Some other Free Software Foundation software is covered by
-the GNU Lesser General Public License instead.) You can apply it to
-your programs, too.
-
- When we speak of free software, we are referring to freedom, not
-price. Our General Public Licenses are designed to make sure that you
-have the freedom to distribute copies of free software (and charge for
-this service if you wish), that you receive source code or can get it
-if you want it, that you can change the software or use pieces of it
-in new free programs; and that you know you can do these things.
-
- To protect your rights, we need to make restrictions that forbid
-anyone to deny you these rights or to ask you to surrender the rights.
-These restrictions translate to certain responsibilities for you if you
-distribute copies of the software, or if you modify it.
-
- For example, if you distribute copies of such a program, whether
-gratis or for a fee, you must give the recipients all the rights that
-you have. You must make sure that they, too, receive or can get the
-source code. And you must show them these terms so they know their
-rights.
-
- We protect your rights with two steps: (1) copyright the software, and
-(2) offer you this license which gives you legal permission to copy,
-distribute and/or modify the software.
-
- Also, for each author's protection and ours, we want to make certain
-that everyone understands that there is no warranty for this free
-software. If the software is modified by someone else and passed on, we
-want its recipients to know that what they have is not the original, so
-that any problems introduced by others will not reflect on the original
-authors' reputations.
-
- Finally, any free program is threatened constantly by software
-patents. We wish to avoid the danger that redistributors of a free
-program will individually obtain patent licenses, in effect making the
-program proprietary. To prevent this, we have made it clear that any
-patent must be licensed for everyone's free use or not licensed at all.
-
- The precise terms and conditions for copying, distribution and
-modification follow.
-
- GNU GENERAL PUBLIC LICENSE
- TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION
-
- 0. This License applies to any program or other work which contains
-a notice placed by the copyright holder saying it may be distributed
-under the terms of this General Public License. The "Program", below,
-refers to any such program or work, and a "work based on the Program"
-means either the Program or any derivative work under copyright law:
-that is to say, a work containing the Program or a portion of it,
-either verbatim or with modifications and/or translated into another
-language. (Hereinafter, translation is included without limitation in
-the term "modification".) Each licensee is addressed as "you".
-
-Activities other than copying, distribution and modification are not
-covered by this License; they are outside its scope. The act of
-running the Program is not restricted, and the output from the Program
-is covered only if its contents constitute a work based on the
-Program (independent of having been made by running the Program).
-Whether that is true depends on what the Program does.
-
- 1. You may copy and distribute verbatim copies of the Program's
-source code as you receive it, in any medium, provided that you
-conspicuously and appropriately publish on each copy an appropriate
-copyright notice and disclaimer of warranty; keep intact all the
-notices that refer to this License and to the absence of any warranty;
-and give any other recipients of the Program a copy of this License
-along with the Program.
-
-You may charge a fee for the physical act of transferring a copy, and
-you may at your option offer warranty protection in exchange for a fee.
-
- 2. You may modify your copy or copies of the Program or any portion
-of it, thus forming a work based on the Program, and copy and
-distribute such modifications or work under the terms of Section 1
-above, provided that you also meet all of these conditions:
-
- a) You must cause the modified files to carry prominent notices
- stating that you changed the files and the date of any change.
-
- b) You must cause any work that you distribute or publish, that in
- whole or in part contains or is derived from the Program or any
- part thereof, to be licensed as a whole at no charge to all third
- parties under the terms of this License.
-
- c) If the modified program normally reads commands interactively
- when run, you must cause it, when started running for such
- interactive use in the most ordinary way, to print or display an
- announcement including an appropriate copyright notice and a
- notice that there is no warranty (or else, saying that you provide
- a warranty) and that users may redistribute the program under
- these conditions, and telling the user how to view a copy of this
- License. (Exception: if the Program itself is interactive but
- does not normally print such an announcement, your work based on
- the Program is not required to print an announcement.)
-
-These requirements apply to the modified work as a whole. If
-identifiable sections of that work are not derived from the Program,
-and can be reasonably considered independent and separate works in
-themselves, then this License, and its terms, do not apply to those
-sections when you distribute them as separate works. But when you
-distribute the same sections as part of a whole which is a work based
-on the Program, the distribution of the whole must be on the terms of
-this License, whose permissions for other licensees extend to the
-entire whole, and thus to each and every part regardless of who wrote it.
-
-Thus, it is not the intent of this section to claim rights or contest
-your rights to work written entirely by you; rather, the intent is to
-exercise the right to control the distribution of derivative or
-collective works based on the Program.
-
-In addition, mere aggregation of another work not based on the Program
-with the Program (or with a work based on the Program) on a volume of
-a storage or distribution medium does not bring the other work under
-the scope of this License.
-
- 3. You may copy and distribute the Program (or a work based on it,
-under Section 2) in object code or executable form under the terms of
-Sections 1 and 2 above provided that you also do one of the following:
-
- a) Accompany it with the complete corresponding machine-readable
- source code, which must be distributed under the terms of Sections
- 1 and 2 above on a medium customarily used for software interchange; or,
-
- b) Accompany it with a written offer, valid for at least three
- years, to give any third party, for a charge no more than your
- cost of physically performing source distribution, a complete
- machine-readable copy of the corresponding source code, to be
- distributed under the terms of Sections 1 and 2 above on a medium
- customarily used for software interchange; or,
-
- c) Accompany it with the information you received as to the offer
- to distribute corresponding source code. (This alternative is
- allowed only for noncommercial distribution and only if you
- received the program in object code or executable form with such
- an offer, in accord with Subsection b above.)
-
-The source code for a work means the preferred form of the work for
-making modifications to it. For an executable work, complete source
-code means all the source code for all modules it contains, plus any
-associated interface definition files, plus the scripts used to
-control compilation and installation of the executable. However, as a
-special exception, the source code distributed need not include
-anything that is normally distributed (in either source or binary
-form) with the major components (compiler, kernel, and so on) of the
-operating system on which the executable runs, unless that component
-itself accompanies the executable.
-
-If distribution of executable or object code is made by offering
-access to copy from a designated place, then offering equivalent
-access to copy the source code from the same place counts as
-distribution of the source code, even though third parties are not
-compelled to copy the source along with the object code.
-
- 4. You may not copy, modify, sublicense, or distribute the Program
-except as expressly provided under this License. Any attempt
-otherwise to copy, modify, sublicense or distribute the Program is
-void, and will automatically terminate your rights under this License.
-However, parties who have received copies, or rights, from you under
-this License will not have their licenses terminated so long as such
-parties remain in full compliance.
-
- 5. You are not required to accept this License, since you have not
-signed it. However, nothing else grants you permission to modify or
-distribute the Program or its derivative works. These actions are
-prohibited by law if you do not accept this License. Therefore, by
-modifying or distributing the Program (or any work based on the
-Program), you indicate your acceptance of this License to do so, and
-all its terms and conditions for copying, distributing or modifying
-the Program or works based on it.
-
- 6. Each time you redistribute the Program (or any work based on the
-Program), the recipient automatically receives a license from the
-original licensor to copy, distribute or modify the Program subject to
-these terms and conditions. You may not impose any further
-restrictions on the recipients' exercise of the rights granted herein.
-You are not responsible for enforcing compliance by third parties to
-this License.
-
- 7. If, as a consequence of a court judgment or allegation of patent
-infringement or for any other reason (not limited to patent issues),
-conditions are imposed on you (whether by court order, agreement or
-otherwise) that contradict the conditions of this License, they do not
-excuse you from the conditions of this License. If you cannot
-distribute so as to satisfy simultaneously your obligations under this
-License and any other pertinent obligations, then as a consequence you
-may not distribute the Program at all. For example, if a patent
-license would not permit royalty-free redistribution of the Program by
-all those who receive copies directly or indirectly through you, then
-the only way you could satisfy both it and this License would be to
-refrain entirely from distribution of the Program.
-
-If any portion of this section is held invalid or unenforceable under
-any particular circumstance, the balance of the section is intended to
-apply and the section as a whole is intended to apply in other
-circumstances.
-
-It is not the purpose of this section to induce you to infringe any
-patents or other property right claims or to contest validity of any
-such claims; this section has the sole purpose of protecting the
-integrity of the free software distribution system, which is
-implemented by public license practices. Many people have made
-generous contributions to the wide range of software distributed
-through that system in reliance on consistent application of that
-system; it is up to the author/donor to decide if he or she is willing
-to distribute software through any other system and a licensee cannot
-impose that choice.
-
-This section is intended to make thoroughly clear what is believed to
-be a consequence of the rest of this License.
-
- 8. If the distribution and/or use of the Program is restricted in
-certain countries either by patents or by copyrighted interfaces, the
-original copyright holder who places the Program under this License
-may add an explicit geographical distribution limitation excluding
-those countries, so that distribution is permitted only in or among
-countries not thus excluded. In such case, this License incorporates
-the limitation as if written in the body of this License.
-
- 9. The Free Software Foundation may publish revised and/or new versions
-of the General Public License from time to time. Such new versions will
-be similar in spirit to the present version, but may differ in detail to
-address new problems or concerns.
-
-Each version is given a distinguishing version number. If the Program
-specifies a version number of this License which applies to it and "any
-later version", you have the option of following the terms and conditions
-either of that version or of any later version published by the Free
-Software Foundation. If the Program does not specify a version number of
-this License, you may choose any version ever published by the Free Software
-Foundation.
-
- 10. If you wish to incorporate parts of the Program into other free
-programs whose distribution conditions are different, write to the author
-to ask for permission. For software which is copyrighted by the Free
-Software Foundation, write to the Free Software Foundation; we sometimes
-make exceptions for this. Our decision will be guided by the two goals
-of preserving the free status of all derivatives of our free software and
-of promoting the sharing and reuse of software generally.
-
- NO WARRANTY
-
- 11. BECAUSE THE PROGRAM IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY
-FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN
-OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES
-PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED
-OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
-MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS
-TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE
-PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING,
-REPAIR OR CORRECTION.
-
- 12. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
-WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR
-REDISTRIBUTE THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES,
-INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING
-OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED
-TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY
-YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER
-PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE
-POSSIBILITY OF SUCH DAMAGES.
-
- END OF TERMS AND CONDITIONS
-
- How to Apply These Terms to Your New Programs
-
- If you develop a new program, and you want it to be of the greatest
-possible use to the public, the best way to achieve this is to make it
-free software which everyone can redistribute and change under these terms.
-
- To do so, attach the following notices to the program. It is safest
-to attach them to the start of each source file to most effectively
-convey the exclusion of warranty; and each file should have at least
-the "copyright" line and a pointer to where the full notice is found.
-
- <one line to give the program's name and a brief idea of what it does.>
- Copyright (C) <year> <name of author>
-
- This program is free software; you can redistribute it and/or modify
- it under the terms of the GNU General Public License as published by
- the Free Software Foundation; either version 2 of the License, or
- (at your option) any later version.
-
- This program is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- GNU General Public License for more details.
-
- You should have received a copy of the GNU General Public License along
- with this program; if not, write to the Free Software Foundation, Inc.,
- 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+GNU GENERAL PUBLIC LICENSE
+Version 3, 29 June 2007
-Also add information on how to contact you by electronic and paper mail.
+Copyright © 2007 Free Software Foundation, Inc. <https://fsf.org/>
+
+Everyone is permitted to copy and distribute verbatim copies of this license document, but changing it is not allowed.
+
+Preamble
+
+The GNU General Public License is a free, copyleft license for software and other kinds of works.
+
+The licenses for most software and other practical works are designed to take away your freedom to share and change the works. By contrast, the GNU General Public License is intended to guarantee your freedom to share and change all versions of a program--to make sure it remains free software for all its users. We, the Free Software Foundation, use the GNU General Public License for most of our software; it applies also to any other work released this way by its authors. You can apply it to your programs, too.
+
+When we speak of free software, we are referring to freedom, not price. Our General Public Licenses are designed to make sure that you have the freedom to distribute copies of free software (and charge for them if you wish), that you receive source code or can get it if you want it, that you can change the software or use pieces of it in new free programs, and that you know you can do these things.
+
+To protect your rights, we need to prevent others from denying you these rights or asking you to surrender the rights. Therefore, you have certain responsibilities if you distribute copies of the software, or if you modify it: responsibilities to respect the freedom of others.
+
+For example, if you distribute copies of such a program, whether gratis or for a fee, you must pass on to the recipients the same freedoms that you received. You must make sure that they, too, receive or can get the source code. And you must show them these terms so they know their rights.
+
+Developers that use the GNU GPL protect your rights with two steps: (1) assert copyright on the software, and (2) offer you this License giving you legal permission to copy, distribute and/or modify it.
+
+For the developers' and authors' protection, the GPL clearly explains that there is no warranty for this free software. For both users' and authors' sake, the GPL requires that modified versions be marked as changed, so that their problems will not be attributed erroneously to authors of previous versions.
+
+Some devices are designed to deny users access to install or run modified versions of the software inside them, although the manufacturer can do so. This is fundamentally incompatible with the aim of protecting users' freedom to change the software. The systematic pattern of such abuse occurs in the area of products for individuals to use, which is precisely where it is most unacceptable. Therefore, we have designed this version of the GPL to prohibit the practice for those products. If such problems arise substantially in other domains, we stand ready to extend this provision to those domains in future versions of the GPL, as needed to protect the freedom of users.
+
+Finally, every program is threatened constantly by software patents. States should not allow patents to restrict development and use of software on general-purpose computers, but in those that do, we wish to avoid the special danger that patents applied to a free program could make it effectively proprietary. To prevent this, the GPL assures that patents cannot be used to render the program non-free.
+
+The precise terms and conditions for copying, distribution and modification follow.
+
+TERMS AND CONDITIONS
+
+0. Definitions.
+
+“This License” refers to version 3 of the GNU General Public License.
+
+“Copyright” also means copyright-like laws that apply to other kinds of works, such as semiconductor masks.
+
+“The Program” refers to any copyrightable work licensed under this License. Each licensee is addressed as “you”. “Licensees” and “recipients” may be individuals or organizations.
+
+To “modify” a work means to copy from or adapt all or part of the work in a fashion requiring copyright permission, other than the making of an exact copy. The resulting work is called a “modified version” of the earlier work or a work “based on” the earlier work.
+
+A “covered work” means either the unmodified Program or a work based on the Program.
+
+To “propagate” a work means to do anything with it that, without permission, would make you directly or secondarily liable for infringement under applicable copyright law, except executing it on a computer or modifying a private copy. Propagation includes copying, distribution (with or without modification), making available to the public, and in some countries other activities as well.
+
+To “convey” a work means any kind of propagation that enables other parties to make or receive copies. Mere interaction with a user through a computer network, with no transfer of a copy, is not conveying.
+
+An interactive user interface displays “Appropriate Legal Notices” to the extent that it includes a convenient and prominently visible feature that (1) displays an appropriate copyright notice, and (2) tells the user that there is no warranty for the work (except to the extent that warranties are provided), that licensees may convey the work under this License, and how to view a copy of this License. If the interface presents a list of user commands or options, such as a menu, a prominent item in the list meets this criterion.
+
+1. Source Code.
+The “source code” for a work means the preferred form of the work for making modifications to it. “Object code” means any non-source form of a work.
+
+A “Standard Interface” means an interface that either is an official standard defined by a recognized standards body, or, in the case of interfaces specified for a particular programming language, one that is widely used among developers working in that language.
+
+The “System Libraries” of an executable work include anything, other than the work as a whole, that (a) is included in the normal form of packaging a Major Component, but which is not part of that Major Component, and (b) serves only to enable use of the work with that Major Component, or to implement a Standard Interface for which an implementation is available to the public in source code form. A “Major Component”, in this context, means a major essential component (kernel, window system, and so on) of the specific operating system (if any) on which the executable work runs, or a compiler used to produce the work, or an object code interpreter used to run it.
+
+The “Corresponding Source” for a work in object code form means all the source code needed to generate, install, and (for an executable work) run the object code and to modify the work, including scripts to control those activities. However, it does not include the work's System Libraries, or general-purpose tools or generally available free programs which are used unmodified in performing those activities but which are not part of the work. For example, Corresponding Source includes interface definition files associated with source files for the work, and the source code for shared libraries and dynamically linked subprograms that the work is specifically designed to require, such as by intimate data communication or control flow between those subprograms and other parts of the work.
+
+The Corresponding Source need not include anything that users can regenerate automatically from other parts of the Corresponding Source.
+
+The Corresponding Source for a work in source code form is that same work.
+
+2. Basic Permissions.
+All rights granted under this License are granted for the term of copyright on the Program, and are irrevocable provided the stated conditions are met. This License explicitly affirms your unlimited permission to run the unmodified Program. The output from running a covered work is covered by this License only if the output, given its content, constitutes a covered work. This License acknowledges your rights of fair use or other equivalent, as provided by copyright law.
+
+You may make, run and propagate covered works that you do not convey, without conditions so long as your license otherwise remains in force. You may convey covered works to others for the sole purpose of having them make modifications exclusively for you, or provide you with facilities for running those works, provided that you comply with the terms of this License in conveying all material for which you do not control copyright. Those thus making or running the covered works for you must do so exclusively on your behalf, under your direction and control, on terms that prohibit them from making any copies of your copyrighted material outside their relationship with you.
+
+Conveying under any other circumstances is permitted solely under the conditions stated below. Sublicensing is not allowed; section 10 makes it unnecessary.
+
+3. Protecting Users' Legal Rights From Anti-Circumvention Law.
+No covered work shall be deemed part of an effective technological measure under any applicable law fulfilling obligations under article 11 of the WIPO copyright treaty adopted on 20 December 1996, or similar laws prohibiting or restricting circumvention of such measures.
+
+When you convey a covered work, you waive any legal power to forbid circumvention of technological measures to the extent such circumvention is effected by exercising rights under this License with respect to the covered work, and you disclaim any intention to limit operation or modification of the work as a means of enforcing, against the work's users, your or third parties' legal rights to forbid circumvention of technological measures.
+
+4. Conveying Verbatim Copies.
+You may convey verbatim copies of the Program's source code as you receive it, in any medium, provided that you conspicuously and appropriately publish on each copy an appropriate copyright notice; keep intact all notices stating that this License and any non-permissive terms added in accord with section 7 apply to the code; keep intact all notices of the absence of any warranty; and give all recipients a copy of this License along with the Program.
+
+You may charge any price or no price for each copy that you convey, and you may offer support or warranty protection for a fee.
+
+5. Conveying Modified Source Versions.
+You may convey a work based on the Program, or the modifications to produce it from the Program, in the form of source code under the terms of section 4, provided that you also meet all of these conditions:
+
+ a) The work must carry prominent notices stating that you modified it, and giving a relevant date.
+
+ b) The work must carry prominent notices stating that it is released under this License and any conditions added under section 7. This requirement modifies the requirement in section 4 to “keep intact all notices”.
+
+ c) You must license the entire work, as a whole, under this License to anyone who comes into possession of a copy. This License will therefore apply, along with any applicable section 7 additional terms, to the whole of the work, and all its parts, regardless of how they are packaged. This License gives no permission to license the work in any other way, but it does not invalidate such permission if you have separately received it.
+
+ d) If the work has interactive user interfaces, each must display Appropriate Legal Notices; however, if the Program has interactive interfaces that do not display Appropriate Legal Notices, your work need not make them do so.
+
+A compilation of a covered work with other separate and independent works, which are not by their nature extensions of the covered work, and which are not combined with it such as to form a larger program, in or on a volume of a storage or distribution medium, is called an “aggregate” if the compilation and its resulting copyright are not used to limit the access or legal rights of the compilation's users beyond what the individual works permit. Inclusion of a covered work in an aggregate does not cause this License to apply to the other parts of the aggregate.
+
+6. Conveying Non-Source Forms.
+You may convey a covered work in object code form under the terms of sections 4 and 5, provided that you also convey the machine-readable Corresponding Source under the terms of this License, in one of these ways:
+
+ a) Convey the object code in, or embodied in, a physical product (including a physical distribution medium), accompanied by the Corresponding Source fixed on a durable physical medium customarily used for software interchange.
+
+ b) Convey the object code in, or embodied in, a physical product (including a physical distribution medium), accompanied by a written offer, valid for at least three years and valid for as long as you offer spare parts or customer support for that product model, to give anyone who possesses the object code either (1) a copy of the Corresponding Source for all the software in the product that is covered by this License, on a durable physical medium customarily used for software interchange, for a price no more than your reasonable cost of physically performing this conveying of source, or (2) access to copy the Corresponding Source from a network server at no charge.
+
+ c) Convey individual copies of the object code with a copy of the written offer to provide the Corresponding Source. This alternative is allowed only occasionally and noncommercially, and only if you received the object code with such an offer, in accord with subsection 6b.
+
+ d) Convey the object code by offering access from a designated place (gratis or for a charge), and offer equivalent access to the Corresponding Source in the same way through the same place at no further charge. You need not require recipients to copy the Corresponding Source along with the object code. If the place to copy the object code is a network server, the Corresponding Source may be on a different server (operated by you or a third party) that supports equivalent copying facilities, provided you maintain clear directions next to the object code saying where to find the Corresponding Source. Regardless of what server hosts the Corresponding Source, you remain obligated to ensure that it is available for as long as needed to satisfy these requirements.
+
+ e) Convey the object code using peer-to-peer transmission, provided you inform other peers where the object code and Corresponding Source of the work are being offered to the general public at no charge under subsection 6d.
+
+A separable portion of the object code, whose source code is excluded from the Corresponding Source as a System Library, need not be included in conveying the object code work.
+
+A “User Product” is either (1) a “consumer product”, which means any tangible personal property which is normally used for personal, family, or household purposes, or (2) anything designed or sold for incorporation into a dwelling. In determining whether a product is a consumer product, doubtful cases shall be resolved in favor of coverage. For a particular product received by a particular user, “normally used” refers to a typical or common use of that class of product, regardless of the status of the particular user or of the way in which the particular user actually uses, or expects or is expected to use, the product. A product is a consumer product regardless of whether the product has substantial commercial, industrial or non-consumer uses, unless such uses represent the only significant mode of use of the product.
-If the program is interactive, make it output a short notice like this
-when it starts in an interactive mode:
+“Installation Information” for a User Product means any methods, procedures, authorization keys, or other information required to install and execute modified versions of a covered work in that User Product from a modified version of its Corresponding Source. The information must suffice to ensure that the continued functioning of the modified object code is in no case prevented or interfered with solely because modification has been made.
- Gnomovision version 69, Copyright (C) year name of author
- Gnomovision comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
- This is free software, and you are welcome to redistribute it
- under certain conditions; type `show c' for details.
+If you convey an object code work under this section in, or with, or specifically for use in, a User Product, and the conveying occurs as part of a transaction in which the right of possession and use of the User Product is transferred to the recipient in perpetuity or for a fixed term (regardless of how the transaction is characterized), the Corresponding Source conveyed under this section must be accompanied by the Installation Information. But this requirement does not apply if neither you nor any third party retains the ability to install modified object code on the User Product (for example, the work has been installed in ROM).
+
+The requirement to provide Installation Information does not include a requirement to continue to provide support service, warranty, or updates for a work that has been modified or installed by the recipient, or for the User Product in which it has been modified or installed. Access to a network may be denied when the modification itself materially and adversely affects the operation of the network or violates the rules and protocols for communication across the network.
+
+Corresponding Source conveyed, and Installation Information provided, in accord with this section must be in a format that is publicly documented (and with an implementation available to the public in source code form), and must require no special password or key for unpacking, reading or copying.
+
+7. Additional Terms.
+“Additional permissions” are terms that supplement the terms of this License by making exceptions from one or more of its conditions. Additional permissions that are applicable to the entire Program shall be treated as though they were included in this License, to the extent that they are valid under applicable law. If additional permissions apply only to part of the Program, that part may be used separately under those permissions, but the entire Program remains governed by this License without regard to the additional permissions.
+
+When you convey a copy of a covered work, you may at your option remove any additional permissions from that copy, or from any part of it. (Additional permissions may be written to require their own removal in certain cases when you modify the work.) You may place additional permissions on material, added by you to a covered work, for which you have or can give appropriate copyright permission.
+
+Notwithstanding any other provision of this License, for material you add to a covered work, you may (if authorized by the copyright holders of that material) supplement the terms of this License with terms:
+
+ a) Disclaiming warranty or limiting liability differently from the terms of sections 15 and 16 of this License; or
+
+ b) Requiring preservation of specified reasonable legal notices or author attributions in that material or in the Appropriate Legal Notices displayed by works containing it; or
+
+ c) Prohibiting misrepresentation of the origin of that material, or requiring that modified versions of such material be marked in reasonable ways as different from the original version; or
+
+ d) Limiting the use for publicity purposes of names of licensors or authors of the material; or
+
+ e) Declining to grant rights under trademark law for use of some trade names, trademarks, or service marks; or
+
+ f) Requiring indemnification of licensors and authors of that material by anyone who conveys the material (or modified versions of it) with contractual assumptions of liability to the recipient, for any liability that these contractual assumptions directly impose on those licensors and authors.
+
+All other non-permissive additional terms are considered “further restrictions” within the meaning of section 10. If the Program as you received it, or any part of it, contains a notice stating that it is governed by this License along with a term that is a further restriction, you may remove that term. If a license document contains a further restriction but permits relicensing or conveying under this License, you may add to a covered work material governed by the terms of that license document, provided that the further restriction does not survive such relicensing or conveying.
+
+If you add terms to a covered work in accord with this section, you must place, in the relevant source files, a statement of the additional terms that apply to those files, or a notice indicating where to find the applicable terms.
+
+Additional terms, permissive or non-permissive, may be stated in the form of a separately written license, or stated as exceptions; the above requirements apply either way.
+
+8. Termination.
+You may not propagate or modify a covered work except as expressly provided under this License. Any attempt otherwise to propagate or modify it is void, and will automatically terminate your rights under this License (including any patent licenses granted under the third paragraph of section 11).
+
+However, if you cease all violation of this License, then your license from a particular copyright holder is reinstated (a) provisionally, unless and until the copyright holder explicitly and finally terminates your license, and (b) permanently, if the copyright holder fails to notify you of the violation by some reasonable means prior to 60 days after the cessation.
+
+Moreover, your license from a particular copyright holder is reinstated permanently if the copyright holder notifies you of the violation by some reasonable means, this is the first time you have received notice of violation of this License (for any work) from that copyright holder, and you cure the violation prior to 30 days after your receipt of the notice.
+
+Termination of your rights under this section does not terminate the licenses of parties who have received copies or rights from you under this License. If your rights have been terminated and not permanently reinstated, you do not qualify to receive new licenses for the same material under section 10.
+
+9. Acceptance Not Required for Having Copies.
+You are not required to accept this License in order to receive or run a copy of the Program. Ancillary propagation of a covered work occurring solely as a consequence of using peer-to-peer transmission to receive a copy likewise does not require acceptance. However, nothing other than this License grants you permission to propagate or modify any covered work. These actions infringe copyright if you do not accept this License. Therefore, by modifying or propagating a covered work, you indicate your acceptance of this License to do so.
+
+10. Automatic Licensing of Downstream Recipients.
+Each time you convey a covered work, the recipient automatically receives a license from the original licensors, to run, modify and propagate that work, subject to this License. You are not responsible for enforcing compliance by third parties with this License.
+
+An “entity transaction” is a transaction transferring control of an organization, or substantially all assets of one, or subdividing an organization, or merging organizations. If propagation of a covered work results from an entity transaction, each party to that transaction who receives a copy of the work also receives whatever licenses to the work the party's predecessor in interest had or could give under the previous paragraph, plus a right to possession of the Corresponding Source of the work from the predecessor in interest, if the predecessor has it or can get it with reasonable efforts.
+
+You may not impose any further restrictions on the exercise of the rights granted or affirmed under this License. For example, you may not impose a license fee, royalty, or other charge for exercise of rights granted under this License, and you may not initiate litigation (including a cross-claim or counterclaim in a lawsuit) alleging that any patent claim is infringed by making, using, selling, offering for sale, or importing the Program or any portion of it.
+
+11. Patents.
+A “contributor” is a copyright holder who authorizes use under this License of the Program or a work on which the Program is based. The work thus licensed is called the contributor's “contributor version”.
+
+A contributor's “essential patent claims” are all patent claims owned or controlled by the contributor, whether already acquired or hereafter acquired, that would be infringed by some manner, permitted by this License, of making, using, or selling its contributor version, but do not include claims that would be infringed only as a consequence of further modification of the contributor version. For purposes of this definition, “control” includes the right to grant patent sublicenses in a manner consistent with the requirements of this License.
+
+Each contributor grants you a non-exclusive, worldwide, royalty-free patent license under the contributor's essential patent claims, to make, use, sell, offer for sale, import and otherwise run, modify and propagate the contents of its contributor version.
+
+In the following three paragraphs, a “patent license” is any express agreement or commitment, however denominated, not to enforce a patent (such as an express permission to practice a patent or covenant not to sue for patent infringement). To “grant” such a patent license to a party means to make such an agreement or commitment not to enforce a patent against the party.
+
+If you convey a covered work, knowingly relying on a patent license, and the Corresponding Source of the work is not available for anyone to copy, free of charge and under the terms of this License, through a publicly available network server or other readily accessible means, then you must either (1) cause the Corresponding Source to be so available, or (2) arrange to deprive yourself of the benefit of the patent license for this particular work, or (3) arrange, in a manner consistent with the requirements of this License, to extend the patent license to downstream recipients. “Knowingly relying” means you have actual knowledge that, but for the patent license, your conveying the covered work in a country, or your recipient's use of the covered work in a country, would infringe one or more identifiable patents in that country that you have reason to believe are valid.
+
+If, pursuant to or in connection with a single transaction or arrangement, you convey, or propagate by procuring conveyance of, a covered work, and grant a patent license to some of the parties receiving the covered work authorizing them to use, propagate, modify or convey a specific copy of the covered work, then the patent license you grant is automatically extended to all recipients of the covered work and works based on it.
+
+A patent license is “discriminatory” if it does not include within the scope of its coverage, prohibits the exercise of, or is conditioned on the non-exercise of one or more of the rights that are specifically granted under this License. You may not convey a covered work if you are a party to an arrangement with a third party that is in the business of distributing software, under which you make payment to the third party based on the extent of your activity of conveying the work, and under which the third party grants, to any of the parties who would receive the covered work from you, a discriminatory patent license (a) in connection with copies of the covered work conveyed by you (or copies made from those copies), or (b) primarily for and in connection with specific products or compilations that contain the covered work, unless you entered into that arrangement, or that patent license was granted, prior to 28 March 2007.
+
+Nothing in this License shall be construed as excluding or limiting any implied license or other defenses to infringement that may otherwise be available to you under applicable patent law.
+
+12. No Surrender of Others' Freedom.
+If conditions are imposed on you (whether by court order, agreement or otherwise) that contradict the conditions of this License, they do not excuse you from the conditions of this License. If you cannot convey a covered work so as to satisfy simultaneously your obligations under this License and any other pertinent obligations, then as a consequence you may not convey it at all. For example, if you agree to terms that obligate you to collect a royalty for further conveying from those to whom you convey the Program, the only way you could satisfy both those terms and this License would be to refrain entirely from conveying the Program.
+
+13. Use with the GNU Affero General Public License.
+Notwithstanding any other provision of this License, you have permission to link or combine any covered work with a work licensed under version 3 of the GNU Affero General Public License into a single combined work, and to convey the resulting work. The terms of this License will continue to apply to the part which is the covered work, but the special requirements of the GNU Affero General Public License, section 13, concerning interaction through a network will apply to the combination as such.
+
+14. Revised Versions of this License.
+The Free Software Foundation may publish revised and/or new versions of the GNU General Public License from time to time. Such new versions will be similar in spirit to the present version, but may differ in detail to address new problems or concerns.
+
+Each version is given a distinguishing version number. If the Program specifies that a certain numbered version of the GNU General Public License “or any later version” applies to it, you have the option of following the terms and conditions either of that numbered version or of any later version published by the Free Software Foundation. If the Program does not specify a version number of the GNU General Public License, you may choose any version ever published by the Free Software Foundation.
+
+If the Program specifies that a proxy can decide which future versions of the GNU General Public License can be used, that proxy's public statement of acceptance of a version permanently authorizes you to choose that version for the Program.
+
+Later license versions may give you additional or different permissions. However, no additional obligations are imposed on any author or copyright holder as a result of your choosing to follow a later version.
+
+15. Disclaimer of Warranty.
+THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM “AS IS” WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
+
+16. Limitation of Liability.
+IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES.
+
+17. Interpretation of Sections 15 and 16.
+If the disclaimer of warranty and limitation of liability provided above cannot be given local legal effect according to their terms, reviewing courts shall apply local law that most closely approximates an absolute waiver of all civil liability in connection with the Program, unless a warranty or assumption of liability accompanies a copy of the Program in return for a fee.
+
+END OF TERMS AND CONDITIONS
+
+How to Apply These Terms to Your New Programs
+
+If you develop a new program, and you want it to be of the greatest possible use to the public, the best way to achieve this is to make it free software which everyone can redistribute and change under these terms.
+
+To do so, attach the following notices to the program. It is safest to attach them to the start of each source file to most effectively state the exclusion of warranty; and each file should have at least the “copyright” line and a pointer to where the full notice is found.
+
+ <one line to give the program's name and a brief idea of what it does.>
+ Copyright (C) <year> <name of author>
+
+ This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version.
+
+ This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
+
+ You should have received a copy of the GNU General Public License along with this program. If not, see <https://www.gnu.org/licenses/>.
+
+Also add information on how to contact you by electronic and paper mail.
-The hypothetical commands `show w' and `show c' should show the appropriate
-parts of the General Public License. Of course, the commands you use may
-be called something other than `show w' and `show c'; they could even be
-mouse-clicks or menu items--whatever suits your program.
+If the program does terminal interaction, make it output a short notice like this when it starts in an interactive mode:
-You should also get your employer (if you work as a programmer) or your
-school, if any, to sign a "copyright disclaimer" for the program, if
-necessary. Here is a sample; alter the names:
+ <program> Copyright (C) <year> <name of author>
+ This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
+ This is free software, and you are welcome to redistribute it under certain conditions; type `show c' for details.
- Yoyodyne, Inc., hereby disclaims all copyright interest in the program
- `Gnomovision' (which makes passes at compilers) written by James Hacker.
+The hypothetical commands `show w' and `show c' should show the appropriate parts of the General Public License. Of course, your program's commands might be different; for a GUI interface, you would use an “about box”.
- <signature of Ty Coon>, 1 April 1989
- Ty Coon, President of Vice
+You should also get your employer (if you work as a programmer) or school, if any, to sign a “copyright disclaimer” for the program, if necessary. For more information on this, and how to apply and follow the GNU GPL, see <https://www.gnu.org/licenses/>.
-This General Public License does not permit incorporating your program into
-proprietary programs. If your program is a subroutine library, you may
-consider it more useful to permit linking proprietary applications with the
-library. If this is what you want to do, use the GNU Lesser General
-Public License instead of this License.
+The GNU General Public License does not permit incorporating your program into proprietary programs. If your program is a subroutine library, you may consider it more useful to permit linking proprietary applications with the library. If this is what you want to do, use the GNU Lesser General Public License instead of this License. But first, please read <https://www.gnu.org/philosophy/why-not-lgpl.html>.
+5.0.0
+ * increase required versions for PostgreSQL (12+), PostGIS (3.0+)
+ * remove installation via cmake and debundle osm2pgsql
+ * remove deprecated PHP frontend
+ * remove deprecated legacy tokenizer
+ * add configurable pre-processing of queries
+ * add query pre-processor to split up Japanese addresses
+ * rewrite of osm2pgsql style implementation
+ (also adds support for osm2pgsql-themepark)
+ * reduce the number of SQL queries needed to complete a 'lookup' call
+ * improve computation of centroid for lines with only two points
+ * improve bbox output for postcode areas
+ * improve result order by returning the largest object when other things are
+ equal
+ * add fallback for reverse geocoding to default country tables
+ * exclude postcode areas from reverse geocoding
+ * disable search endpoint when database is reverse-only (regression)
+ * minor performance improvements to area split algorithm
+ * switch table and index creation to use autocommit mode to avoid deadlocks
+ * drop overly long ways during import
+ * restrict automatic migrations to versions 4.3+
+ * switch linting from pylint to flake8
+ * switch tests to use a wikimedia test file in the new CSV style
+ * various fixes and improvements to documentation
+
+4.5.0
+ * allow building Nominatim as a pip package
+ * make osm2pgsql building optional
+ * switch importer to psycopg3
+ * allow output format of web search to be customized in self-installations
+ * look up potential postcode areas for postcode results
+ * add word usage statistics for address terms
+ * implement more light-weight CSV format for wiki importance tables
+ * rewrite SQL for place search to use window functions
+ * increase search radius when filtering by postcode
+ * prefer POI points over POI areas
+ * reintroduce full terms for address terms in search_name table
+ * reindex postcodes when their parent is deleted
+ * indexing: precompute counts of affected rows
+ * ensure consistent country assignments for overlapping countries
+ * make Nominatim[Async]API context manager to ensure proper calling of
+ close()
+ * make usage of project dir optional for library
+ * drop interpolations when no parent can be found
+ * style tweaks to reflect OSM usage (man_made, highway and others)
+ * deprecation of: bundled osm2pgsql, legacy tokenizer, PHP frontend
+ * make documentation buildable without CMake
+ * various fixes and improvements to documentation
+
+4.4.1
+ * fix geocodejson output: admin level output should only print boundaries
+ * updating: restrict invalidation of child objects on large street features
+ * restrict valid interpolation house numbers to 0-999999
+ * fix import error when SQLAlchemy 1.4 and psycopg3 are installed
+ * various typo fixes in the documentation
+
+4.4.0
+ * add export to SQLite database and SQLite support for the frontend
+ * switch to Python frontend as the default frontend
+ * update to osm2pgsql 1.11.0
+ * add support for new osm2pgsql middle table format
+ * simplify geometry for large polygon objects not used in addresses
+ * various performance tweaks for search in Python frontend
+ * fix regression in search with categories where it was confused with near
+ search
+ * partially roll back use of SQLAlchemy lambda statements due to bugs
+ in SQLAlchemy
+ * fix handling of timezones for timestamps from the database
+ * fix handling of full address searches in connection with a viewbox
+ * fix postcode computation of highway areas
+ * fix handling of timeout errors for Python <= 3.10
+ * fix address computation for postcode areas
+ * fix variable shadowing in osm2pgsql flex script, causing bugs with LuaJIT
+ * make sure extratags are always null when empty
+ * reduce importance of places without wikipedia reference
+ * improve performance of word count computations
+ * drop support for wikipedia tags with full URLs
+ * replace get_addressdata() SQL implementation with a Python function
+ * improve display name for non-address features
+ * fix postcode validation for postcodes with country code
+ (thanks @pawel-wroniszewski)
+ * add possibility to run imports without superuser database rights
+ (thanks @robbe-haesendonck)
+ * new CLI command for cleaning deleted relations (thanks @lujoh)
+ * add check for database version in the CLI check command
+ * updates to import styles ignoring more unused objects
+ * various typo fixes (thanks @kumarUjjawal)
+
+4.3.2
+ * fix potential SQL injection issue for 'nominatim admin --collect-os-info'
+ * PHP frontend: fix on-the-fly lookup of postcode areas near boundaries
+ * Python frontend: improve handling of viewbox
+ * Python frontend: correct deployment instructions
+
+4.3.1
+ * reintroduce result rematching
+ * improve search of multi-part names
+ * fix accidentally switched meaning of --reverse-only and --search-only in
+ warm command
+
+4.3.0
+ * fix failing importance recalculation command
+ * fix merging of linked names into unnamed boundaries
+ * fix a number of corner cases with interpolation splitting resulting in
+ invalid geometries
+ * fix failure in website generation when password contains curly brackets
+ * fix broken use of ST_Project in PostGIS 3.4
+ * new NOMINATIM_SEARCH_WITHIN_COUNTRIES setting to restrict reverse lookups
+ to known countries (thanks @alfmarcua)
+ * allow negative OSM IDs (thanks @alfmarcua)
+ * disallow import of Tiger data in a frozen DB
+ * avoid UPDATE to change settings to be compatible with r/o DBs (thanks @t-tomek)
+ * update bundled osm2pgsql to 1.9.2
+ * reorganise osm2pgsql flex style and make it the default
+ * exclude names ending in :wikipedia from indexing
+ * no longer accept comma as a list separator in name tags
+ * process forward dependencies on update to catch updates in geometries
+ of ways and relations
+ * fix handling of isolated silent letters during transliteration
+ * no longer assign postcodes to large linear features like rivers
+ * introduce nominatim.paths module for finding data and libraries
+ * documentation layout changed to material theme
+ * new documentation section for library
+ * various smaller fixes to existing documentation
+ (thanks @woodpeck, @bloom256, @biswajit-k)
+ * updates to vagrant install scripts, drop support for Ubuntu 18
+ (thanks @n-timofeev)
+ * removed obsolete configuration variables from env.defaults
+ * add script for generating a taginfo description (thanks @biswajit-k)
+ * modernize Python code around BDD test and add testing of Python frontend
+ * lots of new BDD tests for API output
+
+4.2.3
+
+ * fix deletion handling for 'nominatim add-data'
+ * adapt place_force_delete() to new deletion handling
+ * flex style: avoid dropping of postcode areas
+ * fix update errors on address interpolation handling
+
+4.2.2
+
+ * extend flex-style library to fully support all default styles
+ * fix handling of Hebrew aleph
+ * do not assign postcodes to rivers
+ * fix string matching in PHP code
+ * update osm2pgsql (various updates to flex)
+ * fix slow query when deleting places on update
+ * fix CLI details query
+ * fix recalculation of importance values
+ * fix polygon simplification in reverse results
+ * add class/type information to reverse geocodejson result
+ * minor improvements to default tokenizer configuration
+ * various smaller fixes to documentation
+
+4.2.1
+
+ * fix XSS vulnerability in debug view
+
+4.2.0
+
+ * add experimental support for osm2pgsql flex style
+ * introduce secondary importance value to be retrieved from a raster data file
+ (currently still unused, to replace address importance, thanks to @tareqpi)
+ * add new report tool `nominatim admin --collect-os-info`
+ (thanks @micahcochran, @tareqpi)
+ * reorganise index to improve lookup performance and size
+ * run index creation after import in parallel
+ * run ANALYZE more selectively to speed up continuation of indexing
+ * fix crash on update when addr:interpolation receives an illegal value
+ * fix minimum number of retrieved results to be at least 10
 * fix search for combinations of special term + name (e.g. Hotel Bellevue)
+ * do not return interpolations without a parent street on reverse search
+ * improve invalidation of linked places on updates
+ * fix address parsing for interpolation lines
+ * make sure socket timeouts are respected during replication
+ (working around a bug in some versions of pyosmium)
+ * update bundled osm2pgsql to 1.7.1
+ * add support for PostgreSQL 15
+ * typing fixes to work with latest type annotations from typeshed
+ * smaller improvements to documentation (thanks to @mausch)
+
+4.1.1
+
+ * fix XSS vulnerability in debug view
+
4.1.0
* switch to ICU tokenizer as default
* add setup instructions for updates and systemd
* drop support for PostgreSQL 9.5
+4.0.2
+
+ * fix XSS vulnerability in debug view
+
4.0.1
* fix initialisation error in replication script
* add testing of installation scripts via CI
* drop support for Python < 3.6 and Postgresql < 9.5
+3.7.3
+
+ * fix XSS vulnerability in debug view
+
3.7.2
* fix database check for reverse-only imports
* increase splitting for large geometries to improve indexing speed
* remove deprecated get_magic_quotes_gpc() function
* make sure that all postcodes have an entry in word and are thus searchable
- * remove use of ST_Covers in conjunction woth ST_Intersects,
+ * remove use of ST_Covers in conjunction with ST_Intersects,
causes bad query planning and slow updates in Postgis3
* update osm2pgsql
* exclude postcode ranges separated by colon from centre point calculation
* update osm2pgsql, better handling of imports without flatnode file
* switch to more efficient algorithm for word set computation
- * use only boundries for country and state parts of addresses
+ * use only boundaries for country and state parts of addresses
* improve updates of addresses with housenumbers and interpolations
* remove country from place_addressline table and use country_code instead
* optimise indexes on search_name partition tables
* complete rewrite of reverse search algorithm
* add new geojson and geocodejson output formats
- * add simple export script to exprot addresses to CSV
+ * add simple export script to export addresses to CSV
* remove is_in terms from address computation
* remove unused search_name_country tables
* various smaller fixes to query parsing
* move installation documentation into this repo
* add self-documenting vagrant scripts
* remove --create-website, recommend to use website directory in build
- * add accessor functions for URL parameters and improve erro checking
+ * add accessor functions for URL parameters and improve error checking
* remove IP blocking and rate-limiting code
* enable CI via travis
* reformatting for more consistent coding style
 * update to refactored osm2pgsql which uses libosmium-based types
* switch from osmosis to pyosmium for updates
* be more strict when matching against special search terms
- * handle postcode entries with mutliple values correctly
+ * handle postcode entries with multiple values correctly
2.5
--- /dev/null
+
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright [yyyy] [name of copyright owner]
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
--- /dev/null
+ GNU GENERAL PUBLIC LICENSE
+ Version 2, June 1991
+
+ Copyright (C) 1989, 1991 Free Software Foundation, Inc.,
+ 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
+ Everyone is permitted to copy and distribute verbatim copies
+ of this license document, but changing it is not allowed.
+
+ Preamble
+
+ The licenses for most software are designed to take away your
+freedom to share and change it. By contrast, the GNU General Public
+License is intended to guarantee your freedom to share and change free
+software--to make sure the software is free for all its users. This
+General Public License applies to most of the Free Software
+Foundation's software and to any other program whose authors commit to
+using it. (Some other Free Software Foundation software is covered by
+the GNU Lesser General Public License instead.) You can apply it to
+your programs, too.
+
+ When we speak of free software, we are referring to freedom, not
+price. Our General Public Licenses are designed to make sure that you
+have the freedom to distribute copies of free software (and charge for
+this service if you wish), that you receive source code or can get it
+if you want it, that you can change the software or use pieces of it
+in new free programs; and that you know you can do these things.
+
+ To protect your rights, we need to make restrictions that forbid
+anyone to deny you these rights or to ask you to surrender the rights.
+These restrictions translate to certain responsibilities for you if you
+distribute copies of the software, or if you modify it.
+
+ For example, if you distribute copies of such a program, whether
+gratis or for a fee, you must give the recipients all the rights that
+you have. You must make sure that they, too, receive or can get the
+source code. And you must show them these terms so they know their
+rights.
+
+ We protect your rights with two steps: (1) copyright the software, and
+(2) offer you this license which gives you legal permission to copy,
+distribute and/or modify the software.
+
+ Also, for each author's protection and ours, we want to make certain
+that everyone understands that there is no warranty for this free
+software. If the software is modified by someone else and passed on, we
+want its recipients to know that what they have is not the original, so
+that any problems introduced by others will not reflect on the original
+authors' reputations.
+
+ Finally, any free program is threatened constantly by software
+patents. We wish to avoid the danger that redistributors of a free
+program will individually obtain patent licenses, in effect making the
+program proprietary. To prevent this, we have made it clear that any
+patent must be licensed for everyone's free use or not licensed at all.
+
+ The precise terms and conditions for copying, distribution and
+modification follow.
+
+ GNU GENERAL PUBLIC LICENSE
+ TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION
+
+ 0. This License applies to any program or other work which contains
+a notice placed by the copyright holder saying it may be distributed
+under the terms of this General Public License. The "Program", below,
+refers to any such program or work, and a "work based on the Program"
+means either the Program or any derivative work under copyright law:
+that is to say, a work containing the Program or a portion of it,
+either verbatim or with modifications and/or translated into another
+language. (Hereinafter, translation is included without limitation in
+the term "modification".) Each licensee is addressed as "you".
+
+Activities other than copying, distribution and modification are not
+covered by this License; they are outside its scope. The act of
+running the Program is not restricted, and the output from the Program
+is covered only if its contents constitute a work based on the
+Program (independent of having been made by running the Program).
+Whether that is true depends on what the Program does.
+
+ 1. You may copy and distribute verbatim copies of the Program's
+source code as you receive it, in any medium, provided that you
+conspicuously and appropriately publish on each copy an appropriate
+copyright notice and disclaimer of warranty; keep intact all the
+notices that refer to this License and to the absence of any warranty;
+and give any other recipients of the Program a copy of this License
+along with the Program.
+
+You may charge a fee for the physical act of transferring a copy, and
+you may at your option offer warranty protection in exchange for a fee.
+
+ 2. You may modify your copy or copies of the Program or any portion
+of it, thus forming a work based on the Program, and copy and
+distribute such modifications or work under the terms of Section 1
+above, provided that you also meet all of these conditions:
+
+ a) You must cause the modified files to carry prominent notices
+ stating that you changed the files and the date of any change.
+
+ b) You must cause any work that you distribute or publish, that in
+ whole or in part contains or is derived from the Program or any
+ part thereof, to be licensed as a whole at no charge to all third
+ parties under the terms of this License.
+
+ c) If the modified program normally reads commands interactively
+ when run, you must cause it, when started running for such
+ interactive use in the most ordinary way, to print or display an
+ announcement including an appropriate copyright notice and a
+ notice that there is no warranty (or else, saying that you provide
+ a warranty) and that users may redistribute the program under
+ these conditions, and telling the user how to view a copy of this
+ License. (Exception: if the Program itself is interactive but
+ does not normally print such an announcement, your work based on
+ the Program is not required to print an announcement.)
+
+These requirements apply to the modified work as a whole. If
+identifiable sections of that work are not derived from the Program,
+and can be reasonably considered independent and separate works in
+themselves, then this License, and its terms, do not apply to those
+sections when you distribute them as separate works. But when you
+distribute the same sections as part of a whole which is a work based
+on the Program, the distribution of the whole must be on the terms of
+this License, whose permissions for other licensees extend to the
+entire whole, and thus to each and every part regardless of who wrote it.
+
+Thus, it is not the intent of this section to claim rights or contest
+your rights to work written entirely by you; rather, the intent is to
+exercise the right to control the distribution of derivative or
+collective works based on the Program.
+
+In addition, mere aggregation of another work not based on the Program
+with the Program (or with a work based on the Program) on a volume of
+a storage or distribution medium does not bring the other work under
+the scope of this License.
+
+ 3. You may copy and distribute the Program (or a work based on it,
+under Section 2) in object code or executable form under the terms of
+Sections 1 and 2 above provided that you also do one of the following:
+
+ a) Accompany it with the complete corresponding machine-readable
+ source code, which must be distributed under the terms of Sections
+ 1 and 2 above on a medium customarily used for software interchange; or,
+
+ b) Accompany it with a written offer, valid for at least three
+ years, to give any third party, for a charge no more than your
+ cost of physically performing source distribution, a complete
+ machine-readable copy of the corresponding source code, to be
+ distributed under the terms of Sections 1 and 2 above on a medium
+ customarily used for software interchange; or,
+
+ c) Accompany it with the information you received as to the offer
+ to distribute corresponding source code. (This alternative is
+ allowed only for noncommercial distribution and only if you
+ received the program in object code or executable form with such
+ an offer, in accord with Subsection b above.)
+
+The source code for a work means the preferred form of the work for
+making modifications to it. For an executable work, complete source
+code means all the source code for all modules it contains, plus any
+associated interface definition files, plus the scripts used to
+control compilation and installation of the executable. However, as a
+special exception, the source code distributed need not include
+anything that is normally distributed (in either source or binary
+form) with the major components (compiler, kernel, and so on) of the
+operating system on which the executable runs, unless that component
+itself accompanies the executable.
+
+If distribution of executable or object code is made by offering
+access to copy from a designated place, then offering equivalent
+access to copy the source code from the same place counts as
+distribution of the source code, even though third parties are not
+compelled to copy the source along with the object code.
+
+ 4. You may not copy, modify, sublicense, or distribute the Program
+except as expressly provided under this License. Any attempt
+otherwise to copy, modify, sublicense or distribute the Program is
+void, and will automatically terminate your rights under this License.
+However, parties who have received copies, or rights, from you under
+this License will not have their licenses terminated so long as such
+parties remain in full compliance.
+
+ 5. You are not required to accept this License, since you have not
+signed it. However, nothing else grants you permission to modify or
+distribute the Program or its derivative works. These actions are
+prohibited by law if you do not accept this License. Therefore, by
+modifying or distributing the Program (or any work based on the
+Program), you indicate your acceptance of this License to do so, and
+all its terms and conditions for copying, distributing or modifying
+the Program or works based on it.
+
+ 6. Each time you redistribute the Program (or any work based on the
+Program), the recipient automatically receives a license from the
+original licensor to copy, distribute or modify the Program subject to
+these terms and conditions. You may not impose any further
+restrictions on the recipients' exercise of the rights granted herein.
+You are not responsible for enforcing compliance by third parties to
+this License.
+
+ 7. If, as a consequence of a court judgment or allegation of patent
+infringement or for any other reason (not limited to patent issues),
+conditions are imposed on you (whether by court order, agreement or
+otherwise) that contradict the conditions of this License, they do not
+excuse you from the conditions of this License. If you cannot
+distribute so as to satisfy simultaneously your obligations under this
+License and any other pertinent obligations, then as a consequence you
+may not distribute the Program at all. For example, if a patent
+license would not permit royalty-free redistribution of the Program by
+all those who receive copies directly or indirectly through you, then
+the only way you could satisfy both it and this License would be to
+refrain entirely from distribution of the Program.
+
+If any portion of this section is held invalid or unenforceable under
+any particular circumstance, the balance of the section is intended to
+apply and the section as a whole is intended to apply in other
+circumstances.
+
+It is not the purpose of this section to induce you to infringe any
+patents or other property right claims or to contest validity of any
+such claims; this section has the sole purpose of protecting the
+integrity of the free software distribution system, which is
+implemented by public license practices. Many people have made
+generous contributions to the wide range of software distributed
+through that system in reliance on consistent application of that
+system; it is up to the author/donor to decide if he or she is willing
+to distribute software through any other system and a licensee cannot
+impose that choice.
+
+This section is intended to make thoroughly clear what is believed to
+be a consequence of the rest of this License.
+
+ 8. If the distribution and/or use of the Program is restricted in
+certain countries either by patents or by copyrighted interfaces, the
+original copyright holder who places the Program under this License
+may add an explicit geographical distribution limitation excluding
+those countries, so that distribution is permitted only in or among
+countries not thus excluded. In such case, this License incorporates
+the limitation as if written in the body of this License.
+
+ 9. The Free Software Foundation may publish revised and/or new versions
+of the General Public License from time to time. Such new versions will
+be similar in spirit to the present version, but may differ in detail to
+address new problems or concerns.
+
+Each version is given a distinguishing version number. If the Program
+specifies a version number of this License which applies to it and "any
+later version", you have the option of following the terms and conditions
+either of that version or of any later version published by the Free
+Software Foundation. If the Program does not specify a version number of
+this License, you may choose any version ever published by the Free Software
+Foundation.
+
+ 10. If you wish to incorporate parts of the Program into other free
+programs whose distribution conditions are different, write to the author
+to ask for permission. For software which is copyrighted by the Free
+Software Foundation, write to the Free Software Foundation; we sometimes
+make exceptions for this. Our decision will be guided by the two goals
+of preserving the free status of all derivatives of our free software and
+of promoting the sharing and reuse of software generally.
+
+ NO WARRANTY
+
+ 11. BECAUSE THE PROGRAM IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY
+FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN
+OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES
+PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED
+OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS
+TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE
+PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING,
+REPAIR OR CORRECTION.
+
+ 12. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
+WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR
+REDISTRIBUTE THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES,
+INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING
+OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED
+TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY
+YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER
+PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE
+POSSIBILITY OF SUCH DAMAGES.
+
+ END OF TERMS AND CONDITIONS
+
+ How to Apply These Terms to Your New Programs
+
+ If you develop a new program, and you want it to be of the greatest
+possible use to the public, the best way to achieve this is to make it
+free software which everyone can redistribute and change under these terms.
+
+ To do so, attach the following notices to the program. It is safest
+to attach them to the start of each source file to most effectively
+convey the exclusion of warranty; and each file should have at least
+the "copyright" line and a pointer to where the full notice is found.
+
+ <one line to give the program's name and a brief idea of what it does.>
+ Copyright (C) <year> <name of author>
+
+ This program is free software; you can redistribute it and/or modify
+ it under the terms of the GNU General Public License as published by
+ the Free Software Foundation; either version 2 of the License, or
+ (at your option) any later version.
+
+ This program is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ GNU General Public License for more details.
+
+ You should have received a copy of the GNU General Public License along
+ with this program; if not, write to the Free Software Foundation, Inc.,
+ 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+
+Also add information on how to contact you by electronic and paper mail.
+
+If the program is interactive, make it output a short notice like this
+when it starts in an interactive mode:
+
+ Gnomovision version 69, Copyright (C) year name of author
+ Gnomovision comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
+ This is free software, and you are welcome to redistribute it
+ under certain conditions; type `show c' for details.
+
+The hypothetical commands `show w' and `show c' should show the appropriate
+parts of the General Public License. Of course, the commands you use may
+be called something other than `show w' and `show c'; they could even be
+mouse-clicks or menu items--whatever suits your program.
+
+You should also get your employer (if you work as a programmer) or your
+school, if any, to sign a "copyright disclaimer" for the program, if
+necessary. Here is a sample; alter the names:
+
+ Yoyodyne, Inc., hereby disclaims all copyright interest in the program
+ `Gnomovision' (which makes passes at compilers) written by James Hacker.
+
+ <signature of Ty Coon>, 1 April 1989
+ Ty Coon, President of Vice
+
+This General Public License does not permit incorporating your program into
+proprietary programs. If your program is a subroutine library, you may
+consider it more useful to permit linking proprietary applications with the
+library. If this is what you want to do, use the GNU Lesser General
+Public License instead of this License.
--- /dev/null
+all:
+
+# Building of wheels
+
+build: clean-build build-db build-api
+
+clean-build:
+ rm -f dist/*
+
+build-db:
+ python3 -m build packaging/nominatim-db --outdir dist/
+
+build-api:
+ python3 -m build packaging/nominatim-api --outdir dist/
+
+# Tests
+
+tests: mypy lint pytest bdd
+
+mypy:
+ mypy --strict --python-version 3.8 src
+
+pytest:
+ pytest test/python
+
+lint:
+ flake8 src
+
+bdd:
+ cd test/bdd; behave -DREMOVE_TEMPLATE=1
+
+# Documentation
+
+doc:
+ mkdocs build
+
+serve-doc:
+ mkdocs serve
+
+manpage:
+ argparse-manpage --pyfile man/create-manpage.py --function get_parser --project-name Nominatim --url https://nominatim.org > man/nominatim.1 --author 'the Nominatim developer community' --author-email info@nominatim.org
+
+
+.PHONY: all tests mypy pytest lint bdd build clean-build build-db build-api doc serve-doc manpage
[](https://github.com/osm-search/Nominatim/actions?query=workflow%3A%22CI+Tests%22)
-[](https://codecov.io/gh/osm-search/Nominatim)
Nominatim
=========
A quick summary of the necessary steps:
-1. Compile Nominatim:
+1. Create a Python virtualenv and install the packages:
- mkdir build
- cd build
- cmake ..
- make
- sudo make install
+ python3 -m venv nominatim-venv
+ ./nominatim-venv/bin/pip install packaging/nominatim-{api,db}
2. Create a project directory, get OSM data and import:
mkdir nominatim-project
cd nominatim-project
- nominatim import --osm-file <your planet file>
+ ../nominatim-venv/bin/nominatim import --osm-file <your planet file>
-3. Point your webserver to the nominatim-project/website directory.
+3. Start the webserver:
+
+ ./nominatim-venv/bin/pip install uvicorn falcon
+ ../nominatim-venv/bin/nominatim serve
License
=======
-The source code is available under a GPLv2 license.
+The Python source code is available under a GPL license version 3 or later.
+The Lua configuration files for osm2pgsql are released under the
+Apache License, Version 2.0. All other files are under a GPLv2 license.
Contributing
============
-Contributions, bugreport and pull requests are welcome.
-For details see [contribution guide](CONTRIBUTING.md).
+Contributions, bug reports and pull requests are welcome. When reporting a
+bug, please use one of the
+[issue templates](https://github.com/osm-search/Nominatim/issues/new/choose)
+and make sure to provide all the information requested. If you are not
+sure if you have really found a bug, please ask for help in the forums
+first (see 'Questions' below).
+
+For details on contributing, have a look at the
+[contribution guide](CONTRIBUTING.md).
Questions and help
==================
-For questions, community help and discussions you can use the
-[Github discussions forum](https://github.com/osm-search/Nominatim/discussions)
-or join the
-[geocoding mailing list](https://lists.openstreetmap.org/listinfo/geocoding).
+If you have questions about search results and the OpenStreetMap data
+used in the search, use the [OSM Forum](https://community.openstreetmap.org/).
+
+For questions, community help and discussions around the software and
+your own installation of Nominatim, use the
+[Github discussions forum](https://github.com/osm-search/Nominatim/discussions).
| Version | End of support for security updates |
| ------- | ----------------------------------- |
-| 4.1.x | 2024-08-05 |
-| 4.0.x | 2023-11-02 |
-| 3.7.x | 2023-04-05 |
-| 3.6.x | 2022-12-12 |
+| 5.0.x   | 2027-02-06 |
+| 4.5.x | 2026-09-12 |
+| 4.4.x | 2026-03-07 |
+| 4.3.x | 2025-09-07 |
## Reporting a Vulnerability
## List of Previous Incidents
+* 2023-11-20 - [SQL injection vulnerability](https://nominatim.org/2023/11/20/release-432.html)
+* 2023-02-21 - [cross-site scripting vulnerability](https://nominatim.org/2023/02/21/release-421.html)
* 2020-05-04 - [SQL injection issue on /details endpoint](https://lists.openstreetmap.org/pipermail/geocoding/2020-May/002012.html)
# Install Nominatim in a virtual machine for development and testing
-This document describes how you can install Nominatim inside a Ubuntu 16
-This document describes how you can install Nominatim inside an Ubuntu 24
virtual machine on your desktop/laptop (host machine). The goal is to give
you a development environment to easily edit code and run the test suite
without affecting the rest of your system.
2. [Vagrant](https://www.vagrantup.com/downloads.html)
-3. Nominatim
-
- git clone --recursive https://github.com/openstreetmap/Nominatim.git
-
- If you forgot `--recursive`, it you can later load the submodules using
-
- git submodule init
- git submodule update
-
+3. Nominatim
+
+ git clone https://github.com/openstreetmap/Nominatim.git
## Installation
1. Start the virtual machine
- vagrant up ubuntu
+ vagrant up ubuntu24-nginx
2. Log into the virtual machine
- vagrant ssh ubuntu
+ vagrant ssh ubuntu24-nginx
3. Import a small country (Monaco)
-
+
See the FAQ how to skip this step and point Nominatim to an existing database.
```
You edit code on your host machine in any editor you like. There is no need to
restart any software: just refresh your browser window.
-Note that the webserver uses files from the /build directory. If you change
-files in Nominatim/website or Nominatim/utils for example you first need to
-copy them into the /build directory by running the `cmake` step from the
-installation.
-
-PHP errors are written to `/var/log/apache2/error.log`.
-
-With `echo` and `var_dump()` you write into the output (HTML/XML/JSON) when
-you either add `&debug=1` to the URL (preferred) or set
-`@define('CONST_Debug', true);` in `settings/local.php`.
+Use the functions of the `log()` object to create temporary debug output.
+Add `&debug=1` to the URL to see the output.
In the Python BDD test you can use `logger.info()` for temporary debug
statements.
-
-
-## Running unit tests
-
- cd ~/Nominatim/tests/php
- phpunit ./
-
-
-## Running PHP code style tests
-
- cd ~/Nominatim
- phpcs --colors .
-
-
-## Running functional tests
-
-Tests in `test/bdd/db` and `test/bdd/osm2pgsql` have to pass 100%. Other
-tests might require full planet-wide data. Sadly even if you have your own
-planet-wide data there will be enough differences to the openstreetmap.org
-installation to cause false positives in the other tests (see FAQ).
-
-To run the full test suite
-
- cd ~/Nominatim/test/bdd
- behave -DBUILDDIR=/home/vagrant/build/ db osm2pgsql
-
-To run a single file
-
- behave -DBUILDDIR=/home/vagrant/build/ api/lookup/simple.feature
-
-Or a single test by line number
-
- behave -DBUILDDIR=/home/vagrant/build/ api/lookup/simple.feature:34
-
-To run specific groups of tests you can add tags just before the `Scenario line`, e.g.
-
- @bug-34
- Scenario: address lookup for non-existing or invalid node, way, relation
-
-and then
-
- behave -DBUILDDIR=/home/vagrant/build/ --tags @bug-34
-
-
-
-
+For more information on running tests, see
+https://nominatim.org/release-docs/develop/develop/Testing/
## FAQ
##### Will it run on Windows?
-Yes, Vagrant and Virtualbox can be installed on MS Windows just fine. You need a 64bit
-version of Windows.
+Yes, Vagrant and Virtualbox can be installed on MS Windows just fine. You need
+a 64bit version of Windows.
+##### Will it run on Apple Silicon?
+
+You might need to replace Virtualbox with [Parallels](https://www.parallels.com/products/desktop/).
+There is no free/open source version of Parallels.
##### Why Monaco, can I use another country?
-Of course! The Monaco import takes less than 30 minutes and works with 2GB RAM.
+Of course! The Monaco import takes less than 10 minutes and works with 2GB RAM.
##### Will the results be the same as those from nominatim.openstreetmap.org?
-No. Long running Nominatim installations will differ once new import features (or
+No. Long-running Nominatim installations will differ once new import features (or
bug fixes) get added since those usually only get applied to new/changed data.
Also this document skips the optional Wikipedia data import which affects ranking
-of search results. See [Nominatim installation](https://nominatim.org/release-docs/latest/admin/Installation) for details.
+of search results. See [Nominatim installation](https://nominatim.org/release-docs/latest/admin/Installation)
+for details.
##### Why Ubuntu? Can I test CentOS/Fedora/CoreOS/FreeBSD?
-There is a Vagrant script for CentOS available, but the Nominatim directory
+There used to be a Vagrant script for CentOS available, but the Nominatim directory
isn't symlinked/mounted to the host which makes development trickier. We used
it mainly for debugging installation with SELinux.
name, location of files. We chose Ubuntu because that is closest to the
nominatim.openstreetmap.org production environment.
-You can configure/download other Vagrant boxes from [https://app.vagrantup.com/boxes/search](https://app.vagrantup.com/boxes/search).
+You can configure/download other Vagrant boxes from
+[https://app.vagrantup.com/boxes/search](https://app.vagrantup.com/boxes/search).
##### How can I connect to an existing database?
-Let's say you have a Postgres database named `nominatim_it` on server `your-server.com` and port `5432`. The Postgres username is `postgres`. You can edit `settings/local.php` and point Nominatim to it.
+Let's say you have a Postgres database named `nominatim_it` on server `your-server.com`
+and port `5432`. The Postgres username is `postgres`. You can edit the `.env` in your
+project directory and point Nominatim to it.
+
+    NOMINATIM_DATABASE_DSN="pgsql:host=your-server.com;port=5432;user=postgres;dbname=nominatim_it"
- pgsql:host=your-server.com;port=5432;user=postgres;dbname=nominatim_it
-
No data import or restarting necessary.
If the Postgres installation is behind a firewall, you can try
ssh -L 9999:localhost:5432 your-username@your-server.com
inside the virtual machine. It will map the port to `localhost:9999` and then
-you edit `settings/local.php` with
+you edit `.env` file with
- @define('CONST_Database_DSN', 'pgsql:host=localhost;port=9999;user=postgres;dbname=nominatim_it');
+ NOMINATIM_DATABASE_DSN="pgsql:host=localhost;port=9999;user=postgres;dbname=nominatim_it"
-To access postgres directly remember to specify the hostname, e.g. `psql --host localhost --port 9999 nominatim_it`
+To access postgres directly remember to specify the hostname,
+e.g. `psql --host localhost --port 9999 nominatim_it`
##### My computer is slow and the import takes too long. Can I start the virtual machine "in the cloud"?
Yes. It's possible to start the virtual machine on [Amazon AWS (plugin)](https://github.com/mitchellh/vagrant-aws)
or [DigitalOcean (plugin)](https://github.com/smdahlen/vagrant-digitalocean).
-
-
-
-
checkout = "no"
end
+ config.vm.provider "hyperv" do |hv, override|
+ hv.memory = 2048
+ hv.linked_clone = true
+ if ENV['CHECKOUT'] != 'y' then
+ override.vm.synced_folder ".", "/home/vagrant/Nominatim", type: "smb", smb_host: ENV['SMB_HOST'] || ENV['COMPUTERNAME']
+ end
+ end
+
config.vm.provider "virtualbox" do |vb, override|
vb.gui = false
vb.memory = 2048
end
end
+ config.vm.provider "parallels" do |prl, override|
+ prl.update_guest_tools = false
+ prl.memory = 2048
+ if ENV['CHECKOUT'] != 'y' then
+ override.vm.synced_folder ".", "/home/vagrant/Nominatim"
+ end
+ end
+
config.vm.provider "libvirt" do |lv, override|
lv.memory = 2048
lv.nested = true
if ENV['CHECKOUT'] != 'y' then
- override.vm.synced_folder ".", "/home/vagrant/Nominatim", type: 'nfs'
+ override.vm.synced_folder ".", "/home/vagrant/Nominatim", type: 'nfs', nfs_udp: false
end
end
- config.vm.define "ubuntu", primary: true do |sub|
- sub.vm.box = "generic/ubuntu2004"
+ config.vm.define "ubuntu22", primary: true do |sub|
+ sub.vm.box = "generic/ubuntu2204"
sub.vm.provision :shell do |s|
- s.path = "vagrant/Install-on-Ubuntu-20.sh"
+ s.path = "vagrant/Install-on-Ubuntu-22.sh"
s.privileged = false
s.args = [checkout]
end
end
- config.vm.define "ubuntu-apache" do |sub|
- sub.vm.box = "generic/ubuntu2004"
+ config.vm.define "ubuntu22-apache" do |sub|
+ sub.vm.box = "generic/ubuntu2204"
sub.vm.provision :shell do |s|
- s.path = "vagrant/Install-on-Ubuntu-20.sh"
+ s.path = "vagrant/Install-on-Ubuntu-22.sh"
s.privileged = false
s.args = [checkout, "install-apache"]
end
end
- config.vm.define "ubuntu-nginx" do |sub|
- sub.vm.box = "generic/ubuntu2004"
+ config.vm.define "ubuntu22-nginx" do |sub|
+ sub.vm.box = "generic/ubuntu2204"
sub.vm.provision :shell do |s|
- s.path = "vagrant/Install-on-Ubuntu-20.sh"
+ s.path = "vagrant/Install-on-Ubuntu-22.sh"
s.privileged = false
s.args = [checkout, "install-nginx"]
end
end
- config.vm.define "ubuntu18" do |sub|
- sub.vm.box = "generic/ubuntu1804"
+ config.vm.define "ubuntu24" do |sub|
+ sub.vm.box = "bento/ubuntu-24.04"
+ if RUBY_PLATFORM.include?('darwin') && RUBY_PLATFORM.include?('arm64')
+ # Apple M processor
+ sub.vm.box = 'gutehall/ubuntu24-04'
+ end
sub.vm.provision :shell do |s|
- s.path = "vagrant/Install-on-Ubuntu-18.sh"
+ s.path = "vagrant/Install-on-Ubuntu-24.sh"
s.privileged = false
s.args = [checkout]
end
end
- config.vm.define "ubuntu18-apache" do |sub|
- sub.vm.box = "generic/ubuntu1804"
+ config.vm.define "ubuntu24-apache" do |sub|
+ sub.vm.box = "bento/ubuntu-24.04"
sub.vm.provision :shell do |s|
- s.path = "vagrant/Install-on-Ubuntu-18.sh"
+ s.path = "vagrant/Install-on-Ubuntu-24.sh"
s.privileged = false
s.args = [checkout, "install-apache"]
end
end
- config.vm.define "ubuntu18-nginx" do |sub|
- sub.vm.box = "generic/ubuntu1804"
+ config.vm.define "ubuntu24-nginx" do |sub|
+ sub.vm.box = "bento/ubuntu-24.04"
sub.vm.provision :shell do |s|
- s.path = "vagrant/Install-on-Ubuntu-18.sh"
+ s.path = "vagrant/Install-on-Ubuntu-24.sh"
s.privileged = false
s.args = [checkout, "install-nginx"]
end
end
-
- config.vm.define "centos7" do |sub|
- sub.vm.box = "centos/7"
- sub.vm.provision :shell do |s|
- s.path = "vagrant/Install-on-Centos-7.sh"
- s.privileged = false
- s.args = [checkout]
- end
- end
-
- config.vm.define "centos" do |sub|
- sub.vm.box = "generic/centos8"
- sub.vm.provision :shell do |s|
- s.path = "vagrant/Install-on-Centos-8.sh"
- s.privileged = false
- s.args = [checkout]
- end
- end
-
-
end
+++ /dev/null
-#!/usr/bin/env python3
-import sys
-import os
-
-sys.path.insert(1, '@NOMINATIM_LIBDIR@/lib-python')
-
-os.environ['NOMINATIM_NOMINATIM_TOOL'] = os.path.abspath(__file__)
-
-from nominatim import cli
-from nominatim import version
-
-version.GIT_COMMIT_HASH = '@GIT_HASH@'
-
-exit(cli.nominatim(module_dir='@NOMINATIM_LIBDIR@/module',
- osm2pgsql_path='@NOMINATIM_LIBDIR@/osm2pgsql',
- phplib_dir='@NOMINATIM_LIBDIR@/lib-php',
- sqllib_dir='@NOMINATIM_LIBDIR@/lib-sql',
- data_dir='@NOMINATIM_DATADIR@',
- config_dir='@NOMINATIM_CONFIGDIR@',
- phpcgi_path='@PHPCGI_BIN@'))
+++ /dev/null
-#!/usr/bin/env python3
-import sys
-import os
-
-sys.path.insert(1, '@CMAKE_SOURCE_DIR@')
-
-os.environ['NOMINATIM_NOMINATIM_TOOL'] = os.path.abspath(__file__)
-
-from nominatim import cli
-from nominatim import version
-
-version.GIT_COMMIT_HASH = '@GIT_HASH@'
-
-exit(cli.nominatim(module_dir='@CMAKE_BINARY_DIR@/module',
- osm2pgsql_path='@CMAKE_BINARY_DIR@/osm2pgsql/osm2pgsql',
- phplib_dir='@CMAKE_SOURCE_DIR@/lib-php',
- sqllib_dir='@CMAKE_SOURCE_DIR@/lib-sql',
- data_dir='@CMAKE_SOURCE_DIR@/data',
- config_dir='@CMAKE_SOURCE_DIR@/settings',
- phpcgi_path='@PHPCGI_BIN@'))
+++ /dev/null
-# Auto-generated vagrant install documentation
-
-
-# build the actual documentation
-
-configure_file(mkdocs.yml ../mkdocs.yml)
-file(MAKE_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}/appendix)
-
-set (DOC_SOURCES
- admin
- develop
- api
- customize
- index.md
- extra.css
- styles.css
- )
-
-foreach (src ${DOC_SOURCES})
- execute_process(
- COMMAND ${CMAKE_COMMAND} -E create_symlink ${CMAKE_CURRENT_SOURCE_DIR}/${src} ${CMAKE_CURRENT_BINARY_DIR}/${src}
- )
-endforeach()
-
-ADD_CUSTOM_TARGET(doc
- COMMAND ${CMAKE_CURRENT_SOURCE_DIR}/bash2md.sh ${PROJECT_SOURCE_DIR}/vagrant/Install-on-Ubuntu-18.sh ${CMAKE_CURRENT_BINARY_DIR}/appendix/Install-on-Ubuntu-18.md
- COMMAND ${CMAKE_CURRENT_SOURCE_DIR}/bash2md.sh ${PROJECT_SOURCE_DIR}/vagrant/Install-on-Ubuntu-20.sh ${CMAKE_CURRENT_BINARY_DIR}/appendix/Install-on-Ubuntu-20.md
- COMMAND ${CMAKE_CURRENT_SOURCE_DIR}/bash2md.sh ${PROJECT_SOURCE_DIR}/vagrant/Install-on-Ubuntu-22.sh ${CMAKE_CURRENT_BINARY_DIR}/appendix/Install-on-Ubuntu-22.md
- COMMAND PYTHONPATH=${PROJECT_SOURCE_DIR} mkdocs build -d ${CMAKE_CURRENT_BINARY_DIR}/../site-html -f ${CMAKE_CURRENT_BINARY_DIR}/../mkdocs.yml
-)
-
-ADD_CUSTOM_TARGET(serve-doc
- COMMAND PYTHONPATH=${PROJECT_SOURCE_DIR} mkdocs serve
- WORKING_DIRECTORY ${PROJECT_BINARY_DIR}
-)
installed the Nominatim software itself, if not return to the
[installation page](Installation.md).
+## Importing with a database user without superuser rights
+
+Nominatim usually creates its own PostgreSQL database at the beginning of the
+import process. This makes usage easier for the user but means that the
+database user doing the import needs the appropriate rights.
+
+If you prefer to run the import with a database user with limited rights,
+you can do so by changing the import process as follows:
+
+1. Run the command for database preparation with a database user with
+ superuser rights. For example, to use a db user 'dbadmin' for a
+ database 'nominatim', execute:
+
+ ```
+ NOMINATIM_DATABASE_DSN="pgsql:dbname=nominatim;user=dbadmin" nominatim import --prepare-database
+ ```
+
+2. Grant the import user the right to create tables. For example, for user 'import-user':
+
+ ```
+ psql -d nominatim -c 'GRANT CREATE ON SCHEMA public TO "import-user"'
+ ```
+
+3. Now run the remainder of the import with the import user:
+
+ ```
+ NOMINATIM_DATABASE_DSN="pgsql:dbname=nominatim;user=import-user" nominatim import --continue import-from-file --osm-file file.pbf
+ ```
+
## Importing multiple regions (without updates)
To import multiple regions in your database you can simply give multiple
```bash
update
- ├── europe
- │ ├── andorra
- │ │ └── sequence.state
- │ └── monaco
- │ └── sequence.state
- └── tmp
- └── europe
- ├── andorra-latest.osm.pbf
- └── monaco-latest.osm.pbf
-
+ ├── europe
+ │ ├── andorra
+ │ │ └── sequence.state
+ │ └── monaco
+ │ └── sequence.state
+ └── tmp
+ └── europe
+ ├── andorra-latest.osm.pbf
+ └── monaco-latest.osm.pbf
```
This will get diffs from the replication server, import diffs and index
the database. The default replication server in the
-script([Geofabrik](https://download.geofabrik.de)) provides daily updates.
+script ([Geofabrik](https://download.geofabrik.de)) provides daily updates.
## Using an external PostgreSQL database
-You can install Nominatim using a database that runs on a different server when
-you have physical access to the file system on the other server. Nominatim
-uses a custom normalization library that needs to be made accessible to the
-PostgreSQL server. This section explains how to set up the normalization
-library.
-
-!!! note
- The external module is only needed when using the legacy tokenizer.
- If you have chosen the ICU tokenizer, then you can ignore this section
- and follow the standard import documentation.
-
-### Option 1: Compiling the library on the database server
-
-The most sure way to get a working library is to compile it on the database
-server. From the prerequisites you need at least cmake, gcc and the
-PostgreSQL server package.
-
-Clone or unpack the Nominatim source code, enter the source directory and
-create and enter a build directory.
-
-```sh
-cd Nominatim
-mkdir build
-cd build
-```
-
-Now configure cmake to only build the PostgreSQL module and build it:
-
-```
-cmake -DBUILD_IMPORTER=off -DBUILD_API=off -DBUILD_TESTS=off -DBUILD_DOCS=off -DBUILD_OSM2PGSQL=off ..
-make
-```
-
-When done, you find the normalization library in `build/module/nominatim.so`.
-Copy it to a place where it is readable and executable by the PostgreSQL server
-process.
-
-### Option 2: Compiling the library on the import machine
-
-You can also compile the normalization library on the machine from where you
-run the import.
-
-!!! important
- You can only do this when the database server and the import machine have
- the same architecture and run the same version of Linux. Otherwise there is
- no guarantee that the compiled library is compatible with the PostgreSQL
- server running on the database server.
-
-Make sure that the PostgreSQL server package is installed on the machine
-**with the same version as on the database server**. You do not need to install
-the PostgreSQL server itself.
-
-Download and compile Nominatim as per standard instructions. Once done, you find
-the normalization library in `build/module/nominatim.so`. Copy the file to
-the database server at a location where it is readable and executable by the
-PostgreSQL server process.
-
-### Running the import
-
-On the client side you now need to configure the import to point to the
-correct location of the library **on the database server**. Add the following
-line to your your `.env` file:
-
-```php
-NOMINATIM_DATABASE_MODULE_PATH="<directory on the database server where nominatim.so resides>"
-```
-
-Now change the `NOMINATIM_DATABASE_DSN` to point to your remote server and continue
-to follow the [standard instructions for importing](Import.md).
+You can install Nominatim using a database that runs on a different server.
+Simply point the configuration variable `NOMINATIM_DATABASE_DSN` to the
+server and follow the standard import documentation.
+The import will be faster, if the import is run directly from the database
+machine. You can easily switch to a different machine for the query frontend
+after the import.
## Moving the database to another machine
data updates but the resulting database is only about a third of the size
of a full database.
-Next install Nominatim on the target machine by following the standard installation
-instructions. Again, make sure to use the same version as the source machine.
+Next install nominatim-api on the target machine by following the standard
+installation instructions. Again, make sure to use the same version as the
+source machine.
Create a project directory on your destination machine and set up the `.env`
-file to match the configuration on the source machine. Finally run
-
- nominatim refresh --website
-
-to make sure that the local installation of Nominatim will be used.
-
-If you are using the legacy tokenizer you might also have to switch to the
-PostgreSQL module that was compiled on your target machine. If you get errors
-that PostgreSQL cannot find or access `nominatim.so` then rerun
-
- nominatim refresh --functions
-
-on the target machine to update the the location of the module.
+file to match the configuration on the source machine. That's all.
--- /dev/null
+# Deploying the Nominatim Python frontend
+
+Nominatim can be run as a Python-based
+[ASGI web application](https://asgi.readthedocs.io/en/latest/). You have the
+choice between [Falcon](https://falcon.readthedocs.io/en/stable/)
+and [Starlette](https://www.starlette.io/) as the ASGI framework.
+
+This section gives a quick overview on how to configure Nginx to serve
+Nominatim. Please refer to the documentation of
+[Nginx](https://nginx.org/en/docs/) for background information on how
+to configure it.
+
+!!! Note
+ Throughout this page, we assume your Nominatim project directory is
+ located in `/srv/nominatim-project`. If you have put it somewhere else,
+ you need to adjust the commands and configuration accordingly.
+
+
+### Installing the required packages
+
+The Nominatim frontend is best run from its own virtual environment. If
+you have already created one for the database backend during the
+[installation](Installation.md#building-nominatim), you can use that. Otherwise
+create one now with:
+
+```sh
+sudo apt-get install virtualenv
+virtualenv /srv/nominatim-venv
+```
+
+The Nominatim frontend is contained in the 'nominatim-api' package. To
+install directly from the source tree run:
+
+```sh
+cd Nominatim
+/srv/nominatim-venv/bin/pip install packaging/nominatim-api
+```
+
+The recommended way to deploy a Python ASGI application is to run
+the ASGI runner [uvicorn](https://www.uvicorn.org/)
+together with the [gunicorn](https://gunicorn.org/) HTTP server. We use
+Falcon here as the web framework.
+
+Add the necessary packages to your virtual environment:
+
+``` sh
+/srv/nominatim-venv/bin/pip install falcon uvicorn gunicorn
+```
+
+### Setting up Nominatim as a systemd job
+
+Next you need to set up the service that runs the Nominatim frontend. This is
+easiest done with a systemd job.
+
+First you need to tell systemd to create a socket file to be used by
+gunicorn. Create the following file `/etc/systemd/system/nominatim.socket`:
+
+``` systemd
+[Unit]
+Description=Gunicorn socket for Nominatim
+
+[Socket]
+ListenStream=/run/nominatim.sock
+SocketUser=www-data
+
+[Install]
+WantedBy=multi-user.target
+```
+
+Now you can add the systemd service for Nominatim itself.
+Create the following file `/etc/systemd/system/nominatim.service`:
+
+``` systemd
+[Unit]
+Description=Nominatim running as a gunicorn application
+After=network.target
+Requires=nominatim.socket
+
+[Service]
+Type=simple
+User=www-data
+Group=www-data
+WorkingDirectory=/srv/nominatim-project
+ExecStart=/srv/nominatim-venv/bin/gunicorn -b unix:/run/nominatim.sock -w 4 -k uvicorn.workers.UvicornWorker "nominatim_api.server.falcon.server:run_wsgi()"
+ExecReload=/bin/kill -s HUP $MAINPID
+StandardOutput=append:/var/log/gunicorn-nominatim.log
+StandardError=inherit
+PrivateTmp=true
+TimeoutStopSec=5
+KillMode=mixed
+
+[Install]
+WantedBy=multi-user.target
+```
+
+This sets up gunicorn with 4 workers (`-w 4` in ExecStart). Each worker runs
+its own Python process using
+[`NOMINATIM_API_POOL_SIZE`](../customize/Settings.md#nominatim_api_pool_size)
+connections to the database to serve requests in parallel.
+
+Make the new services known to systemd and start it:
+
+``` sh
+sudo systemctl daemon-reload
+sudo systemctl enable nominatim.socket
+sudo systemctl start nominatim.socket
+sudo systemctl enable nominatim.service
+sudo systemctl start nominatim.service
+```
+
+This sets the service up, so that Nominatim is automatically started
+on reboot.
+
+### Configuring nginx
+
+To make the service available to the world, you need to proxy it through
+nginx. Add the following definition to the default configuration:
+
+``` nginx
+upstream nominatim_service {
+ server unix:/run/nominatim.sock fail_timeout=0;
+}
+
+server {
+ listen 80;
+ listen [::]:80;
+
+ root /var/www/html;
+ index /search;
+
+ location / {
+ proxy_set_header Host $http_host;
+ proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
+ proxy_set_header X-Forwarded-Proto $scheme;
+ proxy_redirect off;
+ proxy_pass http://nominatim_service;
+ }
+}
+```
+
+Reload nginx with
+
+```
+sudo systemctl reload nginx
+```
+
+and you should be able to see the status of your server under
+`http://localhost/status`.
+++ /dev/null
-# Deploying Nominatim
-
-The Nominatim API is implemented as a PHP application. The `website/` directory
-in the project directory contains the configured website. You can serve this
-in a production environment with any web server that is capable to run
-PHP scripts.
-
-This section gives a quick overview on how to configure Apache and Nginx to
-serve Nominatim. It is not meant as a full system administration guide on how
-to run a web service. Please refer to the documentation of
-[Apache](http://httpd.apache.org/docs/current/) and
-[Nginx](https://nginx.org/en/docs/)
-for background information on configuring the services.
-
-!!! Note
- Throughout this page, we assume that your Nominatim project directory is
- located in `/srv/nominatim-project` and that you have installed Nominatim
- using the default installation prefix `/usr/local`. If you have put it
- somewhere else, you need to adjust the commands and configuration
- accordingly.
-
- We further assume that your web server runs as user `www-data`. Older
- versions of CentOS may still use the user name `apache`. You also need
- to adapt the instructions in this case.
-
-## Making the website directory accessible
-
-You need to make sure that the `website` directory is accessible for the
-web server user. You can check that the permissions are correct by accessing
-on of the php files as the web server user:
-
-``` sh
-sudo -u www-data head -n 1 /srv/nominatim-project/website/search.php
-```
-
-If this shows a permission error, then you need to adapt the permissions of
-each directory in the path so that it is executable for `www-data`.
-
-If you have SELinux enabled, further adjustments may be necessary to give the
-web server access. At a minimum the following SELinux labelling should be done
-for Nominatim:
-
-``` sh
-sudo semanage fcontext -a -t httpd_sys_content_t "/usr/local/nominatim/lib/lib-php(/.*)?"
-sudo semanage fcontext -a -t httpd_sys_content_t "/srv/nominatim-project/website(/.*)?"
-sudo semanage fcontext -a -t lib_t "/srv/nominatim-project/module/nominatim.so"
-sudo restorecon -R -v /usr/local/lib/nominatim
-sudo restorecon -R -v /srv/nominatim-project
-```
-
-## Nominatim with Apache
-
-### Installing the required packages
-
-With Apache you can use the PHP module to run Nominatim.
-
-Under Ubuntu/Debian install them with:
-
-``` sh
-sudo apt install apache2 libapache2-mod-php
-```
-
-### Configuring Apache
-
-Make sure your Apache configuration contains the required permissions for the
-directory and create an alias:
-
-``` apache
-<Directory "/srv/nominatim-project/website">
- Options FollowSymLinks MultiViews
- AddType text/html .php
- DirectoryIndex search.php
- Require all granted
-</Directory>
-Alias /nominatim /srv/nominatim-project/website
-```
-
-After making changes in the apache config you need to restart apache.
-The website should now be available on `http://localhost/nominatim`.
-
-## Nominatim with Nginx
-
-### Installing the required packages
-
-Nginx has no built-in PHP interpreter. You need to use php-fpm as a daemon for
-serving PHP cgi.
-
-On Ubuntu/Debian install nginx and php-fpm with:
-
-``` sh
-sudo apt install nginx php-fpm
-```
-
-### Configure php-fpm and Nginx
-
-By default php-fpm listens on a network socket. If you want it to listen to a
-Unix socket instead, change the pool configuration
-(`/etc/php/<php version>/fpm/pool.d/www.conf`) as follows:
-
-``` ini
-; Replace the tcp listener and add the unix socket
-listen = /var/run/php-fpm.sock
-
-; Ensure that the daemon runs as the correct user
-listen.owner = www-data
-listen.group = www-data
-listen.mode = 0666
-```
-
-Tell nginx that php files are special and to fastcgi_pass to the php-fpm
-unix socket by adding the location definition to the default configuration.
-
-``` nginx
-root /srv/nominatim-project/website;
-index search.php;
-location / {
- try_files $uri $uri/ @php;
-}
-
-location @php {
- fastcgi_param SCRIPT_FILENAME "$document_root$uri.php";
- fastcgi_param PATH_TRANSLATED "$document_root$uri.php";
- fastcgi_param QUERY_STRING $args;
- fastcgi_pass unix:/var/run/php-fpm.sock;
- fastcgi_index index.php;
- include fastcgi_params;
-}
-
-location ~ [^/]\.php(/|$) {
- fastcgi_split_path_info ^(.+?\.php)(/.*)$;
- if (!-f $document_root$fastcgi_script_name) {
- return 404;
- }
- fastcgi_pass unix:/var/run/php-fpm.sock;
- fastcgi_index search.php;
- include fastcgi.conf;
-}
-```
-
-Restart the nginx and php-fpm services and the website should now be available
-at `http://localhost/`.
-
-## Nominatim with other webservers
-
-Users have created instructions for other webservers:
-
-* [Caddy](https://github.com/osm-search/Nominatim/discussions/2580)
-
Otherwise it's best to start the full setup from the beginning.
-### PHP "open_basedir restriction in effect" warnings
-
- PHP Warning: file_get_contents(): open_basedir restriction in effect.
-
-You need to adjust the
-[open_basedir](https://www.php.net/manual/en/ini.core.php#ini.open-basedir)
-setting in your PHP configuration (`php.ini` file). By default this setting may
-look like this:
-
- open_basedir = /srv/http/:/home/:/tmp/:/usr/share/pear/
-
-Either add reported directories to the list or disable this setting temporarily
-by adding ";" at the beginning of the line. Don't forget to enable this setting
-again once you are done with the PHP command line operations.
-
-
-### PHP timezeone warnings
-
-The Apache log may contain lots of PHP warnings like this:
- `PHP Warning: date_default_timezone_set() function.`
-
-You should set the default time zone as instructed in the warning in
-your `php.ini` file. Find the entry about timezone and set it to
-something like this:
-
- ; Defines the default timezone used by the date functions
- ; https://php.net/date.timezone
- date.timezone = 'America/Denver'
-
-Or
-
-```
-echo "date.timezone = 'America/Denver'" > /etc/php.d/timezone.ini
-```
-
-### nominatim.so version mismatch
-
-When running the import you may get a version mismatch:
-`COPY_END for place failed: ERROR: incompatible library "/srv/Nominatim/nominatim/build/module/nominatim.so": version mismatch`
-
-pg_config seems to use bad includes sometimes when multiple versions
-of PostgreSQL are available in the system. Make sure you remove the
-server development libraries (`postgresql-server-dev-13` on Ubuntu)
-and recompile (`cmake .. && make`).
-
-
-### I see the error "ERROR: permission denied for language c"
-
-`nominatim.so`, written in C, is required to be installed on the database
-server. Some managed database (cloud) services like Amazon RDS do not allow
-this. There is currently no work-around other than installing a database
-on a non-managed machine.
-
-
-### I see the error: "function transliteration(text) does not exist"
-
-Reinstall the nominatim functions with `nominatim refresh --functions`
-and check for any errors, e.g. a missing `nominatim.so` file.
-
### I see the error: "ERROR: mmap (remap) failed"
This may be a simple out-of-memory error. Try reducing the memory used
GRANT SELECT ON ALL TABLES IN SCHEMA public TO "www-data";
```
-### Website reports "Could not load library "nominatim.so"
+### Setup fails with "DB Error: extension not found"
-Example error message
+Make sure you have the PostgreSQL extensions "hstore" and "postgis" installed.
+See the installation instructions for a full list of required packages.
-```
- SELECT make_standard_name('3039 E MEADOWLARK LN') [nativecode=ERROR: could not
- load library "/srv/nominatim/Nominatim-3.1.0/build/module/nominatim.so":
- /srv/nominatim/Nominatim-3.1.0/build/module/nominatim.so: cannot open shared
- object file: Permission denied
- CONTEXT: PL/pgSQL function make_standard_name(text) line 5 at assignment]
-```
-The PostgreSQL database, i.e. user `postgres`, needs to have access to that file.
+### UnicodeEncodeError: 'ascii' codec can't encode character
+
+Make sure that the operating system's locale is UTF-8. With some prebuilt
+images (e.g. LXC containers from Proxmox, see
+[discussion](https://github.com/osm-search/Nominatim/discussions/2343)) or
+images that optimize for size it might be missing.
-The permission need to be read & executable by everybody, but not writeable
-by everybody, e.g.
+On Ubuntu you can check the locale is installed:
```
- -rwxr-xr-x 1 nominatim nominatim 297984 build/module/nominatim.so
+ grep UTF-8 /etc/default/locale
```
-Try `chmod a+r nominatim.so; chmod a+x nominatim.so`.
-
-When you recently updated your operating system, updated PostgreSQL to
-a new version or moved files (e.g. the build directory) you should
-recreate `nominatim.so`. Try
+And install it using
```
- cd build
- rm -r module/
- cmake $main_Nominatim_path && make
+ dpkg-reconfigure locales
```
-### Setup.php fails with "DB Error: extension not found"
-
-Make sure you have the PostgreSQL extensions "hstore" and "postgis" installed.
-See the installation instructions for a full list of required packages.
-
-
### I forgot to delete the flatnodes file before starting an import.
That's fine. For each import the flatnodes file get overwritten.
### Can I import negative OSM ids into Nominatim?
-See [this question of Stackoverflow](https://help.openstreetmap.org/questions/64662/nominatim-flatnode-with-negative-id).
+No, negative IDs are no longer supported by osm2pgsql. You can use
+large 64-bit IDs that are guaranteed not to clash with OSM IDs. However,
+you will not be able to use a flatnode file with them.
directory apart from the Nominatim software and change into the directory:
```
-mkdir ~/nominatim-planet
-cd ~/nominatim-planet
+mkdir ~/nominatim-project
+cd ~/nominatim-project
```
In the following, we refer to the project directory as `$PROJECT_DIR`. To be
able to copy&paste instructions, you can export the appropriate variable:
```
-export PROJECT_DIR=~/nominatim-planet
+export PROJECT_DIR=~/nominatim-project
```
The Nominatim tool assumes per default that the current working directory is
This data is available as a binary download. Put it into your project directory:
cd $PROJECT_DIR
- wget https://www.nominatim.org/data/wikimedia-importance.sql.gz
+ wget https://nominatim.org/data/wikimedia-importance.csv.gz
+ wget -O secondary_importance.sql.gz https://nominatim.org/data/wikimedia-secondary-importance.sql.gz
-The file is about 400MB and adds around 4GB to the Nominatim database.
+The files are about 400MB and add around 4GB to the Nominatim database. For
+more information about importance,
+see [Importance Customization](../customize/Importance.md).
!!! tip
If you forgot to download the wikipedia rankings, then you can
also add importances after the import. Download the SQL files, then
- run `nominatim refresh --wiki-data --importance`. Updating
- importances for a planet will take a couple of hours.
+ run `nominatim refresh --wiki-data --secondary-importance --importance`.
+ Updating importances for a planet will take a couple of hours.
### External postcodes
This data can be optionally downloaded into the project directory:
cd $PROJECT_DIR
- wget https://www.nominatim.org/data/gb_postcodes.csv.gz
- wget https://www.nominatim.org/data/us_postcodes.csv.gz
+ wget https://nominatim.org/data/gb_postcodes.csv.gz
+ wget https://nominatim.org/data/us_postcodes.csv.gz
You can also add your own custom postcode sources, see
[Customization of postcodes](../customize/Postcodes.md).
[photon](https://photon.komoot.io/) database, then you can set up a database
without search indexes. Add `--reverse-only` to your setup command above.
-This saves about 5% of disk space.
+This saves about 5% of disk space; import time won't be significantly faster.
### Filtering Imported Data
in terms of RAM usage. osm2pgsql and PostgreSQL are running in parallel at
this point. PostgreSQL blocks at least the part of RAM that has been configured
with the `shared_buffers` parameter during
-[PostgreSQL tuning](Installation.md#postgresql-tuning)
+[PostgreSQL tuning](Installation.md#tuning-the-postgresql-database)
and needs some memory on top of that. osm2pgsql needs at least 2GB of RAM for
its internal data structures, potentially more when it has to process very large
relations. In addition it needs to maintain a cache for node locations. The size
nominatim admin --check-database
```
-Now you can try out your installation by running:
+If you have installed the `nominatim-api` package, then you can try out
+your installation by executing a simple query on the command line:
-```sh
+``` sh
+nominatim search --query Berlin
+```
+
+or, when you have a reverse-only installation:
+
+``` sh
+nominatim reverse --lat 51 --lon 45
+```
+
+If you want to run Nominatim as a service, make sure you have installed
+the right packages as per [Installation](Installation.md#software).
+
+#### Testing the Python frontend
+
+To run the test server against the Python frontend, you must choose a
+web framework to use, either starlette or falcon. Make sure the appropriate
+packages are installed. Then run
+
+``` sh
nominatim serve
```
-This runs a small test server normally used for development. You can use it
-to verify that your installation is working. Go to
-`http://localhost:8088/status.php` and you should see the message `OK`.
-You can also run a search query, e.g. `http://localhost:8088/search.php?q=Berlin`.
+or, if you prefer to use Starlette instead of Falcon as webserver,
+
+``` sh
+nominatim serve --engine starlette
+```
+
+Go to `http://localhost:8088/status` and you should see the message `OK`.
+You can also run a search query, e.g. `http://localhost:8088/search?q=Berlin`
+or, for reverse-only installations a reverse query,
+e.g. `http://localhost:8088/reverse?lat=27.1750090510034&lon=78.04209025`.
-Note that search query is not supported for reverse-only imports. You can run a
-reverse query, e.g. `http://localhost:8088/reverse.php?lat=27.1750090510034&lon=78.04209025`.
+Do not use this test server in production.
+To run Nominatim via webservers like Apache or nginx, please continue reading
+[Deploy the Python frontend](Deployment-Python.md).
-To run Nominatim via webservers like Apache or nginx, please read the
-[Deployment chapter](Deployment.md).
-## Adding search through category phrases
+## Enabling search by category phrases
-If you want to be able to search for places by their type through
+To be able to search for places by their type using
[special phrases](https://wiki.openstreetmap.org/wiki/Nominatim/Special_Phrases)
you also need to import these key phrases like this:
prerequisites. There are also step-by-step instructions available for
the following operating systems:
- * [Ubuntu 22.04](../appendix/Install-on-Ubuntu-22.md)
- * [Ubuntu 20.04](../appendix/Install-on-Ubuntu-20.md)
- * [Ubuntu 18.04](../appendix/Install-on-Ubuntu-18.md)
+ * [Ubuntu 24.04](Install-on-Ubuntu-24.md)
+ * [Ubuntu 22.04](Install-on-Ubuntu-22.md)
These OS-specific instructions can also be found in executable form
in the `vagrant/` directory.
### Software
-!!! Warning
- For larger installations you **must have** PostgreSQL 11+ and PostGIS 3+
- otherwise import and queries will be slow to the point of being unusable.
- Query performance has marked improvements with PostgreSQL 13+ and PostGIS 3.2+.
-
-For compiling:
+For running Nominatim:
- * [cmake](https://cmake.org/)
- * [expat](https://libexpat.github.io/)
- * [proj](https://proj.org/)
- * [bzip2](http://www.bzip.org/)
- * [zlib](https://www.zlib.net/)
- * [ICU](http://site.icu-project.org/)
- * [Boost libraries](https://www.boost.org/), including system and filesystem
- * PostgreSQL client libraries
- * a recent C++ compiler (gcc 5+ or Clang 3.8+)
+ * [PostgreSQL](https://www.postgresql.org) (12+ will work, 13+ strongly recommended)
+ * [PostGIS](https://postgis.net) (3.0+ will work, 3.2+ strongly recommended)
+ * [osm2pgsql](https://osm2pgsql.org) (1.8+)
+ * [Python 3](https://www.python.org/) (3.7+)
-For running Nominatim:
+Furthermore the following Python libraries are required:
- * [PostgreSQL](https://www.postgresql.org) (9.6+ will work, 11+ strongly recommended)
- * [PostGIS](https://postgis.net) (2.2+ will work, 3.0+ strongly recommended)
- * [Python 3](https://www.python.org/) (3.6+)
- * [Psycopg2](https://www.psycopg.org) (2.7+)
+ * [Psycopg3](https://www.psycopg.org)
* [Python Dotenv](https://github.com/theskumar/python-dotenv)
* [psutil](https://github.com/giampaolo/psutil)
* [Jinja2](https://palletsprojects.com/p/jinja/)
* [PyICU](https://pypi.org/project/PyICU/)
* [PyYaml](https://pyyaml.org/) (5.1+)
* [datrie](https://github.com/pytries/datrie)
- * [PHP](https://php.net) (7.0 or later)
- * PHP-pgsql
- * PHP-intl (bundled with PHP)
- * PHP-cgi (for running queries from the command line)
+
+These will be installed automatically when using pip installation.
For running continuous updates:
* [pyosmium](https://osmcode.org/pyosmium/)
+For running the Python frontend:
+
+ * [SQLAlchemy](https://www.sqlalchemy.org/) (1.4.31+ with greenlet support)
+ * [asyncpg](https://magicstack.github.io/asyncpg) (0.8+, only when using SQLAlchemy < 2.0)
+ * one of the following web frameworks:
+ * [falcon](https://falconframework.org/) (3.0+)
+ * [starlette](https://www.starlette.io/)
+ * [uvicorn](https://www.uvicorn.org/)
+
For dependencies for running tests and building documentation, see
the [Development section](../develop/Development-Environment.md).
Fast disks are essential. Using NVME disks is recommended.
Even on a well configured machine the import of a full planet takes
-around 2 days. On traditional spinning disks, 7-8 days are more realistic.
+around 2.5 days. When using traditional SSDs, 4-5 days are more realistic.
## Tuning the PostgreSQL database
maintenance_work_mem = (10GB)
autovacuum_work_mem = 2GB
work_mem = (50MB)
- effective_cache_size = (24GB)
synchronous_commit = off
max_wal_size = 1GB
- checkpoint_timeout = 10min
+ checkpoint_timeout = 60min
checkpoint_completion_target = 0.9
+ random_page_cost = 1.0
+ wal_level = minimal
+ max_wal_senders = 0
The numbers in brackets behind some parameters seem to work fine for
-64GB RAM machine. Adjust to your setup. A higher number for `max_wal_size`
+128GB RAM machine. Adjust to your setup. A higher number for `max_wal_size`
means that PostgreSQL needs to run checkpoints less often but it does require
the additional space on your disk.
and even reduce `autovacuum_work_mem` further. This will reduce the amount
of memory that autovacuum takes away from the import process.
-For the initial import, you should also set:
+## Installing the latest release
- fsync = off
- full_page_writes = off
+Nominatim is most easily installed directly from PyPI. Make sure you have installed
+osm2pgsql, PostgreSQL/PostGIS and libICU together with its header files.
-Don't forget to re-enable them after the initial import or you risk database
-corruption.
+Then you can install Nominatim with:
+ pip install nominatim-db nominatim-api
## Downloading and building Nominatim
### Downloading the latest development version
-If you want to install latest development version from github, make sure to
-also check out the osm2pgsql subproject:
+If you want to install the latest development version from GitHub:
```
-git clone --recursive https://github.com/openstreetmap/Nominatim.git
+git clone https://github.com/osm-search/Nominatim.git
```
The development version does not include the country grid. Download it separately:
```
-wget -O Nominatim/data/country_osm_grid.sql.gz https://www.nominatim.org/data/country_grid.sql.gz
+wget -O Nominatim/data/country_osm_grid.sql.gz https://nominatim.org/data/country_grid.sql.gz
```
### Building Nominatim
-The code must be built in a separate directory. Create the directory and
-change into it.
+Nominatim is easiest to run from its own virtual environment. To create one, run:
-```
-mkdir build
-cd build
-```
+ sudo apt-get install virtualenv
+ virtualenv /srv/nominatim-venv
-Nominatim uses cmake and make for building. Assuming that you have created the
-build at the same level as the Nominatim source directory run:
-
-```
-cmake ../Nominatim
-make
-sudo make install
-```
-
-!!! warning
- The default installation no longer compiles the PostgreSQL module that
- is needed for the legacy tokenizer from older Nominatim versions. If you
- are upgrading an older database or want to run the
- [legacy tokenizer](../customize/Tokenizers.md#legacy-tokenizer) for
- some other reason, you need to enable the PostgreSQL module via
- cmake: `cmake -DBUILD_MODULE=on ../Nominatim`. To compile the module
- you need to have the server development headers for PostgreSQL installed.
- On Ubuntu/Debian run: `sudo apt install postgresql-server-dev-<postgresql version>`
+To install Nominatim directly from the source tree into the virtual environment, run:
+ /srv/nominatim-venv/bin/pip install packaging/nominatim-{db,api}
-Nominatim installs itself into `/usr/local` per default. To choose a different
-installation directory add `-DCMAKE_INSTALL_PREFIX=<install root>` to the
-cmake command. Make sure that the `bin` directory is available in your path
-in that case, e.g.
-
-```
-export PATH=<install root>/bin:$PATH
-```
Now continue with [importing the database](Import.md).
## Removing large deleted objects
+Command: `nominatim admin --clean-deleted <PostgreSQL Time Interval>`
+
Nominatim refuses to delete very large areas because often these deletions are
accidental and are reverted within hours. Instead the deletions are logged in
the `import_polygon_delete` table and left to the administrator to clean up.
-There is currently no command to do that. You can use the following SQL
-query to force a deletion on all objects that have been deleted more than
-a certain timespan ago (here: 1 month):
+To run this command you will need to pass a PostgreSQL time interval. For example to
+delete any objects that have been deleted more than a month ago you would run:
+`nominatim admin --clean-deleted '1 month'`
-```sql
-SELECT place_force_delete(p.place_id) FROM import_polygon_delete d, placex p
-WHERE p.osm_type = d.osm_type and p.osm_id = d.osm_id
- and age(p.indexed_date) > '1 month'::interval
-```
# Database Migrations
-Since version 3.7.0 Nominatim offers automatic migrations. Please follow
+Nominatim offers automatic migrations for versions 4.3+. Please follow
the following steps:
-* stop any updates that are potentially running
-* update Nominatim to the newer version
-* go to your project directory and run `nominatim admin --migrate`
-* (optionally) restart updates
+* Stop any updates that are potentially running
+* Update the backend: `pip install -U nominatim-db`
+* Go to your project directory and run `nominatim admin --migrate`
+* Update the frontend: `pip install -U nominatim-api`
+* (optionally) Restart updates
Below you find additional migrations and hints about other structural and
breaking changes. **Please read them before running the migration.**
!!! note
- If you are migrating from a version <3.6, then you still have to follow
- the manual migration steps up to 3.6.
+ If you are migrating from a version <4.3, you need to install 4.3
+ and migrate to 4.3 first. Then you can migrate to the current
+ version. It is strongly recommended to do a reimport instead.
+
+## 4.5.0 -> 5.0.0
+
+### PHP frontend removed
+
+The PHP frontend has been completely removed. Please switch to the Python
+frontend.
+
+Without the PHP code, the `nominatim refresh --website` command is no longer
+needed. It currently emits a warning and otherwise does nothing. It will be
+removed in later versions of Nominatim. So make sure you remove it from your
+scripts.
+
+### CMake building removed
+
+Nominatim can now only be installed via pip. Please follow the installation
+instructions for the current version to change to pip.
+
+### osm2pgsql no longer vendored in
+
+Nominatim no longer ships its own version of osm2pgsql. Please install a
+stock version of osm2pgsql from your distribution. See the
+[installation instruction for osm2pgsql](https://osm2pgsql.org/doc/install.html)
+for details. A minimum version of 1.8 is required. The current stable versions
+of Ubuntu and Debian already ship with appropriate versions. For older
+installations, you may have to compile a newer osm2pgsql yourself.
+
+### Legacy tokenizer removed
+
+The `legacy` tokenizer is no longer enabled. This tokenizer has been superseded
+by the `ICU` tokenizer a long time ago. In the unlikely case that your database
+still uses the `legacy` tokenizer, you must reimport your database.
+
+### osm2pgsql style overhauled
+
+There are some fundamental changes to how customized osm2pgsql styles should
+be written. The changes are mostly backwards compatible, i.e. custom styles
+should still work with the new implementation. The only exception is a
+customization of the `process_tags()` function. This function is no longer
+considered public and neither are the helper functions used in it.
+They currently still work but will be removed at some point. If you have
+been making changes to `process_tags`, please review your style and try
+to switch to the new convenience functions.
+
+For more information on the changes, see the
+[pull request](https://github.com/osm-search/Nominatim/pull/3615)
+and read the new
+[customization documentation](https://nominatim.org/release-docs/latest/customize/Import-Styles/).
+
+## 4.4.0 -> 4.5.0
+
+### New structure for Python packages
+
+The nominatim Python package has been split into `nominatim-db` and `nominatim-api`.
+Any imports need to be adapted accordingly.
+
+If you are running the Python frontend, change the server module from
+`nominatim.server.falcon.server` to `nominatim_api.server.falcon.server`.
+
+If you are using the Nominatim library, all imports need to be changed
+from `nominatim.api.<module>` to `nominatim_api.<module>`.
+
+If you have written custom tokenizers or sanitizers, the appropriate modules
+are now found in `nominatim_db`.
+
+## 4.2.0 -> 4.3.0
+
+### New indexes for reverse lookup
+
+The reverse lookup algorithm has changed slightly to improve performance.
+This change needs a different index in the database. The required index
+will be automatically built during migration. Until the new index is available
+performance of the /reverse endpoint is significantly reduced. You should
+therefore either remove traffic from the machine before attempting a
+version update or create the index manually **before** starting the update
+using the following SQL:
+
+```sql
+CREATE INDEX IF NOT EXISTS idx_placex_geometry_reverse_lookupPlaceNode
+ ON placex USING gist (ST_Buffer(geometry, reverse_place_diameter(rank_search)))
+ WHERE rank_address between 4 and 25 AND type != 'postcode'
+ AND name is not null AND linked_place_id is null AND osm_type = 'N';
+```
## 4.0.0 -> 4.1.0
up-to-date, [Advanced installations section](Advanced-Installations.md)
contains instructions to set up and update multiple country extracts.
-#### Continuous updates
-
-This is the easiest mode. Simply run the replication command without any
-parameters:
-
- nominatim replication
-
-The update application keeps running forever and retrieves and applies
-new updates from the server as they are published.
-
-You can run this command as a simple systemd service. Create a service
-description like that in `/etc/systemd/system/nominatim-updates.service`:
-
-```
-[Unit]
-Description=Continuous updates of Nominatim
-
-[Service]
-WorkingDirectory=/srv/nominatim
-ExecStart=nominatim replication
-StandardOutput=append:/var/log/nominatim-updates.log
-StandardError=append:/var/log/nominatim-updates.error.log
-User=nominatim
-Group=nominatim
-Type=simple
-
-[Install]
-WantedBy=multi-user.target
-```
-
-Replace the `WorkingDirectory` with your project directory. Also adapt user
-and group names as required.
-
-Now activate the service and start the updates:
-
-```
-sudo systemctl daemon-reload
-sudo systemctl enable nominatim-updates
-sudo systemctl start nominatim-updates
-```
-
#### One-time mode
When the `--once` parameter is given, then Nominatim will download exactly one
until the next expected update and only then attempt to download the next batch.
The one-time mode is particularly useful if you want to run updates continuously
-but need to schedule other work in between updates. For example, the main
-service at osm.org uses it, to regularly recompute postcodes -- a process that
-must not be run while updates are in progress. Its update script
-looks like this:
+but need to schedule other work in between updates. For example, you might
+want to regularly recompute postcodes -- a process that
+must not be run while updates are in progress. An update script refreshing
+postcodes regularly might look like this:
```sh
#!/bin/bash
WantedBy=multi-user.target
```
-And then a similar service definition: `/etc/systemd/system/nominatim-updates.service`:
+`OnUnitActiveSec` defines how often the individual update command is run.
+
+Then add a service definition for the timer in `/etc/systemd/system/nominatim-updates.service`:
```
[Unit]
Description=Single updates of Nominatim
[Service]
-WorkingDirectory=/srv/nominatim
-ExecStart=nominatim replication --once
-StandardOutput=append:/var/log/nominatim-updates.log
-StandardError=append:/var/log/nominatim-updates.error.log
+WorkingDirectory=/srv/nominatim-project
+ExecStart=/srv/nominatim-venv/bin/nominatim replication --once
+StandardOutput=journald
+StandardError=inherit
User=nominatim
Group=nominatim
Type=simple
WantedBy=multi-user.target
```
-Replace the `WorkingDirectory` with your project directory. Also adapt user and
-group names as required. `OnUnitActiveSec` defines how often the individual
-update command is run.
+Replace the `WorkingDirectory` with your project directory. `ExecStart` points
+to the nominatim binary that was installed in your virtualenv earlier.
+Finally, you might need to adapt user and group names as required.
Now activate the service and start the updates:
sudo systemctl start nominatim-updates.timer
```
-You can stop future data updates, while allowing any current, in-progress
+You can stop future data updates while allowing any current, in-progress
update steps to finish, by running `sudo systemctl stop
nominatim-updates.timer` and waiting until `nominatim-updates.service` isn't
-running (`sudo systemctl is-active nominatim-updates.service`). Current output
-from the update can be seen like above (`systemctl status
-nominatim-updates.service`).
+running (`sudo systemctl is-active nominatim-updates.service`).
+
+To check the output from the update process, use journalctl: `journalctl -u
+nominatim-updates.service`
#### Catch-up mode
still respects the parameter `NOMINATIM_REPLICATION_MAX_DIFF`. It downloads and
applies the changes in appropriate batches until all is done.
-The catch-up mode is foremost useful to bring the database up to speed after the
+The catch-up mode is foremost useful to bring the database up to date after the
initial import. Given that the service usually is not in production at this
point, you can temporarily be a bit more generous with the batch size and
number of threads you use for the updates by running catch-up like this:
```
-cd /srv/nominatim
+cd /srv/nominatim-project
NOMINATIM_REPLICATION_MAX_DIFF=5000 nominatim replication --catch-up --threads 15
```
When running scheduled updates with catch-up, it is a good idea to choose
a replication source with an update frequency that is an order of magnitude
lower. For example, if you want to update once a day, use an hourly updated
- source. This makes sure that you don't miss an entire day of updates when
+ source. This ensures that you don't miss an entire day of updates when
the source is unexpectedly late to publish its update.
If you want to use the source with the same update frequency (e.g. a daily
updated source with daily updates), use the
- continuous update mode. It ensures to re-request the newest update until it
- is published.
+ one-time mode together with a frequently run systemd script as described above.
+ It ensures that the newest update is re-requested until it has been published.
+
+
+#### Continuous updates
+
+!!! danger
+    This mode is no longer recommended to use and will be removed in future
+ releases. systemd is much better
+ suited for running regular updates. Please refer to the setup
+ instructions for running one-time mode with systemd above.
+
+This is the easiest mode. Simply run the replication command without any
+parameters:
+
+ nominatim replication
+
+The update application keeps running forever and retrieves and applies
+new updates from the server as they are published.
Show all details about a single place saved in the database.
+This API endpoint is meant for visual inspection of the data in the database,
+mainly together with [Nominatim-UI](https://github.com/osm-search/nominatim-ui/).
+The parameters of the endpoint and the output may change occasionally between
+versions of Nominatim. Do not rely on the output in scripts or applications.
+
!!! warning
- The details page exists for debugging only. You may not use it in scripts
- or to automatically query details about a result.
+ The details endpoint at https://nominatim.openstreetmap.org
+    may not be used in scripts or bots at all.
See [Nominatim Usage Policy](https://operations.osmfoundation.org/policies/nominatim/).
-## Parameters
The details API supports the following two request formats:
changes when data gets reimported. Therefore it cannot be used as
a permanent id and shouldn't be used in bug reports.
+!!! danger "Deprecation warning"
+ The API can also be used with the URL
+ `https://nominatim.openstreetmap.org/details.php`. This is now deprecated
+ and will be removed in future versions.
-Additional optional parameters are explained below.
-
-### Output format
-* `json_callback=<string>`
+## Parameters
-Wrap JSON output in a callback function (JSONP) i.e. `<string>(<json>)`.
+This section lists additional optional parameters.
-* `pretty=[0|1]`
+### Output format
-Add indentation to make it more human-readable. (Default: 0)
+| Parameter | Value | Default |
+|-----------| ----- | ------- |
+| json_callback | function name | _unset_ |
+When set, then JSON output will be wrapped in a callback function with
+the given name. See [JSONP](https://en.wikipedia.org/wiki/JSONP) for more
+information.
### Output details
-* `addressdetails=[0|1]`
+| Parameter | Value | Default |
+|-----------| ----- | ------- |
+| addressdetails | 0 or 1 | 0 |
-Include a breakdown of the address into elements. (Default: 0)
+When set to 1, include a breakdown of the address into elements.
-* `keywords=[0|1]`
+| Parameter | Value | Default |
+|-----------| ----- | ------- |
+| keywords | 0 or 1 | 0 |
-Include a list of name keywords and address keywords (word ids). (Default: 0)
+When set to 1, include a list of name keywords and address keywords
+in the result.
-* `linkedplaces=[0|1]`
+| Parameter | Value | Default |
+|-----------| ----- | ------- |
+| linkedplaces | 0 or 1 | 1 |
-Include a details of places that are linked with this one. Places get linked
+Include details of places that are linked with this one. Places get linked
together when they are different forms of the same physical object. Nominatim
links two kinds of objects together: place nodes get linked with the
corresponding administrative boundaries. Waterway relations get linked together with their
members.
-(Default: 1)
-* `hierarchy=[0|1]`
+| Parameter | Value | Default |
+|-----------| ----- | ------- |
+| hierarchy | 0 or 1 | 0 |
+
+Include details of POIs and addresses that depend on the place. Only POIs
+that use this place to determine their address will be returned.
-Include details of places lower in the address hierarchy. (Default: 0)
+| Parameter | Value | Default |
+|-----------| ----- | ------- |
+| group_hierarchy | 0 or 1 | 0 |
-* `group_hierarchy=[0|1]`
+When set to 1, the output of the address hierarchy will be
+grouped by type.
-For JSON output will group the places by type. (Default: 0)
+| Parameter | Value | Default |
+|-----------| ----- | ------- |
+| polygon_geojson | 0 or 1 | 0 |
-* `polygon_geojson=[0|1]`
-Include geometry of result. (Default: 0)
+Include geometry of result.
### Language of results
-* `accept-language=<browser language string>`
+| Parameter | Value | Default |
+|-----------| ----- | ------- |
+| accept-language | browser language string | content of "Accept-Language" HTTP header |
-Preferred language order for showing result, overrides the value
-specified in the "Accept-Language" HTTP header.
-Either use a standard RFC2616 accept-language string or a simple
-comma-separated list of language codes.
+Preferred language order for showing search results. This may either be
+a simple comma-separated list of language codes or have the same format
+as the ["Accept-Language" HTTP header](https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Accept-Language).
## Examples
##### JSON
-[https://nominatim.openstreetmap.org/details.php?osmtype=W&osmid=38210407&format=json](https://nominatim.openstreetmap.org/details.php?osmtype=W&osmid=38210407&format=json)
+[https://nominatim.openstreetmap.org/details?osmtype=W&osmid=38210407&format=json](https://nominatim.openstreetmap.org/details?osmtype=W&osmid=38210407&format=json)
```json
That said if you installed your own Nominatim instance you can use the
`nominatim export` PHP script as basis to return such lists.
+
+#### 7. My result has a wrong postcode. Where does it come from?
+
+Most places in OSM don't have a postcode, so Nominatim tries to interpolate
+one. It first looks at all the places that make up the address of the place.
+If one of them has a postcode defined, this is the one to be used. When
+none of the address parts has a postcode either, Nominatim interpolates one
+from the surrounding objects. If the postcode for your result is wrong, then
+most of the time there is an OSM object with the wrong postcode nearby.
+
+To find the bad postcode, go to
+[https://nominatim.openstreetmap.org](https://nominatim.openstreetmap.org)
+and search for your place. When you have found it, click on the 'details' link
+under the result to go to the details page. There is a field 'Computed Postcode'
+which should display the bad postcode. Click on the 'how?' link. A small
+explanation text appears. It contains a link to a query for Overpass Turbo.
+Click on that and you get a map with all places in the area that have the bad
+postcode. If none is displayed, zoom the map out a bit and then click on 'Run'.
+
+Now go to [OpenStreetMap](https://openstreetmap.org) and fix the error you
+have just found. It will take at least a day for Nominatim to catch up with
+your data fix. Sometimes longer, depending on how much editing activity is in
+the area.
+
The lookup API allows to query the address and other details of one or
multiple OSM objects like node, way or relation.
-## Parameters
+## Endpoint
The lookup API has the following format:
prefixed with its type, one of node(N), way(W) or relation(R). Up to 50 ids
can be queried at the same time.
-Additional optional parameters are explained below.
+!!! danger "Deprecation warning"
+ The API can also be used with the URL
+ `https://nominatim.openstreetmap.org/lookup.php`. This is now deprecated
+ and will be removed in future versions.
+
+
+## Parameters
+
+This section lists additional optional parameters.
### Output format
-* `format=[xml|json|jsonv2|geojson|geocodejson]`
+| Parameter | Value | Default |
+|-----------| ----- | ------- |
+| format | one of: `xml`, `json`, `jsonv2`, `geojson`, `geocodejson` | `jsonv2` |
+
+See [Place Output Formats](Output.md) for details on each format.
+
-See [Place Output Formats](Output.md) for details on each format. (Default: xml)
+| Parameter | Value | Default |
+|-----------| ----- | ------- |
+| json_callback | function name | _unset_ |
-* `json_callback=<string>`
+When given, then JSON output will be wrapped in a callback function with
+the given name. See [JSONP](https://en.wikipedia.org/wiki/JSONP) for more
+information.
-Wrap JSON output in a callback function (JSONP) i.e. `<string>(<json>)`.
Only has an effect for JSON output formats.
+
### Output details
-* `addressdetails=[0|1]`
+| Parameter | Value | Default |
+|-----------| ----- | ------- |
+| addressdetails | 0 or 1 | 0 |
+
+When set to 1, include a breakdown of the address into elements.
+The exact content of the address breakdown depends on the output format.
-Include a breakdown of the address into elements. (Default: 0)
+!!! tip
+ If you are interested in a stable classification of address categories
+ (suburb, city, state, etc), have a look at the `geocodejson` format.
+ All other formats return classifications according to OSM tagging.
+ There is a much larger set of categories and they are not always consistent,
+ which makes them very hard to work with.
-* `extratags=[0|1]`
+| Parameter | Value | Default |
+|-----------| ----- | ------- |
+| extratags | 0 or 1 | 0 |
-Include additional information in the result if available,
-e.g. wikipedia link, opening hours. (Default: 0)
+When set to 1, the response includes any additional information in the result
+that is available in the database, e.g. wikipedia link, opening hours.
-* `namedetails=[0|1]`
+| Parameter | Value | Default |
+|-----------| ----- | ------- |
+| namedetails | 0 or 1 | 0 |
-Include a list of alternative names in the results. These may include
-language variants, references, operator and brand. (Default: 0)
+When set to 1, include a full list of names for the result. These may include
+language variants, older names, references and brand.
### Language of results
-* `accept-language=<browser language string>`
+| Parameter | Value | Default |
+|-----------| ----- | ------- |
+| accept-language | browser language string | content of "Accept-Language" HTTP header |
+
+Preferred language order for showing search results. This may either be
+a simple comma-separated list of language codes or have the same format
+as the ["Accept-Language" HTTP header](https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Accept-Language).
+
+!!! tip
+ First-time users of Nominatim tend to be confused that they get different
+ results when using Nominatim in the browser versus in a command-line tool
+ like wget or curl. The command-line tools
+ usually don't send any Accept-Language header, prompting Nominatim
+ to show results in the local language. Browsers on the contrary always
+ send the currently chosen browser language.
-Preferred language order for showing search results, overrides the value
-specified in the "Accept-Language" HTTP header.
-Either use a standard RFC2616 accept-language string or a simple
-comma-separated list of language codes.
### Polygon output
-* `polygon_geojson=1`
-* `polygon_kml=1`
-* `polygon_svg=1`
-* `polygon_text=1`
+| Parameter | Value | Default |
+|-----------| ----- | ------- |
+| polygon_geojson | 0 or 1 | 0 |
+| polygon_kml | 0 or 1 | 0 |
+| polygon_svg | 0 or 1 | 0 |
+| polygon_text | 0 or 1 | 0 |
-Output geometry of results as a GeoJSON, KML, SVG or WKT. Only one of these
-options can be used at a time. (Default: 0)
+Add the full geometry of the place to the result output. Output formats
+in GeoJSON, KML, SVG or WKT are supported. Only one of these
+options can be used at a time.
-* `polygon_threshold=0.0`
+| Parameter | Value | Default |
+|-----------| ----- | ------- |
+| polygon_threshold | floating-point number | 0.0 |
-Return a simplified version of the output geometry. The parameter is the
+When one of the polygon_* outputs is chosen, return a simplified version
+of the output geometry. The parameter describes the
tolerance in degrees with which the geometry may differ from the original
-geometry. Topology is preserved in the result. (Default: 0.0)
+geometry. Topology is preserved in the geometry.
+
### Other
-* `email=<valid email address>`
+| Parameter | Value | Default |
+|-----------| ----- | ------- |
+| email | valid email address | _unset_ |
If you are making large numbers of request please include an appropriate email
-address to identify your requests. See Nominatim's [Usage Policy](https://operations.osmfoundation.org/policies/nominatim/) for more details.
+address to identify your requests. See Nominatim's
+[Usage Policy](https://operations.osmfoundation.org/policies/nominatim/) for more details.
+
-* `debug=[0|1]`
+| Parameter | Value | Default |
+|-----------| ----- | ------- |
+| debug | 0 or 1 | 0 |
Output assorted developer debug information. Data on internals of Nominatim's
-"Search Loop" logic, and SQL queries. The output is (rough) HTML format.
-This overrides the specified machine readable format. (Default: 0)
+"search loop" logic, and SQL queries. The output is HTML format.
+This overrides the specified machine readable format.
## Examples
```
{
- "place_id": "100149",
+ "place_id": 100149,
"licence": "Data © OpenStreetMap contributors, ODbL 1.0. https://osm.org/copyright",
"osm_type": "node",
"osm_id": "107775",
* `name` - localised name of the place
* `housenumber`, `street`, `locality`, `district`, `postcode`, `city`,
`county`, `state`, `country` -
- provided when it can be determined from the address
+ provided when it can be determined from the address (only with `addressdetails=1`)
* `admin` - list of localised names of administrative boundaries (only with `addressdetails=1`)
+ * `extra` - dictionary with additional useful tags like `website` or `maxspeed`
+ (only with `extratags=1`)
+
Use `polygon_geojson` to output the full geometry of the object instead
of the centroid.
<searchresults timestamp="Sat, 11 Aug 18 11:55:35 +0000"
attribution="Data © OpenStreetMap contributors, ODbL 1.0. https://www.openstreetmap.org/copyright"
querystring="london" polygon="false" exclude_place_ids="100149"
- more_url="https://nominatim.openstreetmap.org/search.php?q=london&addressdetails=1&extratags=1&exclude_place_ids=100149&format=xml&accept-language=en-US%2Cen%3Bq%3D0.7%2Cde%3Bq%3D0.3">
+ more_url="https://nominatim.openstreetmap.org/search?q=london&addressdetails=1&extratags=1&exclude_place_ids=100149&format=xml&accept-language=en-US%2Cen%3Bq%3D0.7%2Cde%3Bq%3D0.3">
<place place_id="100149" osm_type="node" osm_id="107775" place_rank="15" address_rank="15"
boundingbox="51.3473219,51.6673219,-0.2876474,0.0323526" lat="51.5073219" lon="-0.1276474"
display_name="London, Greater London, England, SW1A 2DU, United Kingdom"
* `ref` - content of `ref` tag if it exists
* `lat`, `lon` - latitude and longitude of the centroid of the object
* `boundingbox` - comma-separated list of corner coordinates ([see notes](#boundingbox))
- * `place_rank` - class [search rank](../customize/Ranking#search-rank)
- * `address_rank` - place [address rank](../customize/Ranking#address-rank)
+ * `place_rank` - class [search rank](../customize/Ranking.md#search-rank)
+ * `address_rank` - place [address rank](../customize/Ranking.md#address-rank)
* `display_name` - full comma-separated address
* `class`, `type` - key and value of the main OSM tag
* `importance` - computed importance rank
-### Nominatim API
-
-Nominatim indexes named (or numbered) features within the OpenStreetMap (OSM) dataset and a subset of other unnamed features (pubs, hotels, churches, etc).
-
-Its API has the following endpoints for querying the data:
+This section describes the API V1 of the Nominatim web service. The
+service offers the following endpoints:
* __[/search](Search.md)__ - search OSM objects by name or type
* __[/reverse](Reverse.md)__ - search OSM object by their location
back in Nominatim in case the deletion was accidental
* __/polygons__ - list of broken polygons detected by Nominatim
* __[/details](Details.md)__ - show internal details for an object (for debugging only)
+
+
+
# Reverse Geocoding
-Reverse geocoding generates an address from a latitude and longitude.
+Reverse geocoding generates an address from a coordinate given as
+latitude and longitude.
## How it works
have a similar enough address to the coordinate you were requesting. For
example, in dense city areas it may belong to a completely different street.
-
-## Parameters
+## Endpoint
The main format of the reverse API is
projection. The API returns exactly one result or an error when the coordinate
is in an area with no OSM data coverage.
-Additional parameters are accepted as listed below.
-!!! warning "Deprecation warning"
- The reverse API used to allow address lookup for a single OSM object by
- its OSM id. This use is now deprecated. Use the [Address Lookup API](../Lookup)
- instead.
+!!! tip
+ The reverse API allows a lookup of an object by coordinate. If you want
+ to look up an object by ID, use the [Address Lookup API](Lookup.md) instead.
+
+!!! danger "Deprecation warning"
+ The API can also be used with the URL
+ `https://nominatim.openstreetmap.org/reverse.php`. This is now deprecated
+ and will be removed in future versions.
+
+
+## Parameters
+
+This section lists additional parameters to further influence the output.
### Output format
-* `format=[xml|json|jsonv2|geojson|geocodejson]`
+| Parameter | Value | Default |
+|-----------| ----- | ------- |
+| format | one of: `xml`, `json`, `jsonv2`, `geojson`, `geocodejson` | `xml` |
+
+See [Place Output Formats](Output.md) for details on each format.
+
-See [Place Output Formats](Output.md) for details on each format. (Default: xml)
+| Parameter | Value | Default |
+|-----------| ----- | ------- |
+| json_callback | function name | _unset_ |
-* `json_callback=<string>`
+When given, the JSON output will be wrapped in a callback function with
+the given name. See [JSONP](https://en.wikipedia.org/wiki/JSONP) for more
+information.
-Wrap JSON output in a callback function ([JSONP](https://en.wikipedia.org/wiki/JSONP)) i.e. `<string>(<json>)`.
Only has an effect for JSON output formats.
+
### Output details
-* `addressdetails=[0|1]`
+| Parameter | Value | Default |
+|-----------| ----- | ------- |
+| addressdetails | 0 or 1 | 1 |
+
+When set to 1, include a breakdown of the address into elements.
+The exact content of the address breakdown depends on the output format.
-Include a breakdown of the address into elements. (Default: 1)
+!!! tip
+ If you are interested in a stable classification of address categories
+ (suburb, city, state, etc), have a look at the `geocodejson` format.
+ All other formats return classifications according to OSM tagging.
+ There is a much larger set of categories and they are not always consistent,
+ which makes them very hard to work with.
-* `extratags=[0|1]`
+| Parameter | Value | Default |
+|-----------| ----- | ------- |
+| extratags | 0 or 1 | 0 |
-Include additional information in the result if available,
-e.g. wikipedia link, opening hours. (Default: 0)
+When set to 1, the response includes any additional information in the result
+that is available in the database, e.g. wikipedia link, opening hours.
-* `namedetails=[0|1]`
+| Parameter | Value | Default |
+|-----------| ----- | ------- |
+| namedetails | 0 or 1 | 0 |
-Include a list of alternative names in the results. These may include
-language variants, references, operator and brand. (Default: 0)
+When set to 1, include a full list of names for the result. These may include
+language variants, older names, references and brand.
### Language of results
-* `accept-language=<browser language string>`
+| Parameter | Value | Default |
+|-----------| ----- | ------- |
+| accept-language | browser language string | content of "Accept-Language" HTTP header |
-Preferred language order for showing search results, overrides the value
-specified in the "Accept-Language" HTTP header.
-Either use a standard RFC2616 accept-language string or a simple
-comma-separated list of language codes.
+Preferred language order for showing search results. This may either be
+a simple comma-separated list of language codes or have the same format
+as the ["Accept-Language" HTTP header](https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Accept-Language).
-### Result limitation
+!!! tip
+ First-time users of Nominatim tend to be confused that they get different
+ results when using Nominatim in the browser versus in a command-line tool
+ like wget or curl. The command-line tools
+ usually don't send any Accept-Language header, prompting Nominatim
+ to show results in the local language. Browsers on the contrary always
+ send the currently chosen browser language.
-* `zoom=[0-18]`
-Level of detail required for the address. Default: 18. This is a number that
+### Result restriction
+
+| Parameter | Value | Default |
+|-----------| ----- | ------- |
+| zoom | 0-18 | 18 |
+
+Level of detail required for the address. This is a number that
corresponds roughly to the zoom level used in XYZ tile sources in frameworks
like Leaflet.js, Openlayers etc.
In terms of address details the zoom levels are as follows:
5 | state
8 | county
10 | city
- 14 | suburb
+ 12 | town / borough
+ 13 | village / suburb
+ 14 | neighbourhood
+ 15 | any settlement
16 | major streets
17 | major and minor streets
18 | building
+| Parameter | Value | Default |
+|-----------| ----- | ------- |
+| layer | comma-separated list of: `address`, `poi`, `railway`, `natural`, `manmade` | _unset_ (no restriction) |
+
+The layer filter allows selecting places by theme.
+
+The `address` layer contains all places that make up an address:
+address points with house numbers, streets, inhabited places (suburbs, villages,
+cities, states etc.) and administrative boundaries.
+
+The `poi` layer selects all points of interest. This includes classic points
+of interest like restaurants, shops, hotels but also less obvious features
+like recycling bins, guideposts or benches.
+
+The `railway` layer includes railway infrastructure like tracks.
+Note that in Nominatim's standard configuration, only very few railway
+features are imported into the database.
+
+The `natural` layer collects features like rivers, lakes and mountains while
+the `manmade` layer functions as a catch-all for features not covered by the
+other layers.
+
+
### Polygon output
-* `polygon_geojson=1`
-* `polygon_kml=1`
-* `polygon_svg=1`
-* `polygon_text=1`
+| Parameter | Value | Default |
+|-----------| ----- | ------- |
+| polygon_geojson | 0 or 1 | 0 |
+| polygon_kml | 0 or 1 | 0 |
+| polygon_svg | 0 or 1 | 0 |
+| polygon_text | 0 or 1 | 0 |
-Output geometry of results as a GeoJSON, KML, SVG or WKT. Only one of these
-options can be used at a time. (Default: 0)
+Add the full geometry of the place to the result output. Output formats
+in GeoJSON, KML, SVG or WKT are supported. Only one of these
+options can be used at a time.
-* `polygon_threshold=0.0`
+| Parameter | Value | Default |
+|-----------| ----- | ------- |
+| polygon_threshold | floating-point number | 0.0 |
-Return a simplified version of the output geometry. The parameter is the
+When one of the polygon_* outputs is chosen, return a simplified version
+of the output geometry. The parameter describes the
tolerance in degrees with which the geometry may differ from the original
-geometry. Topology is preserved in the result. (Default: 0.0)
+geometry. Topology is preserved in the geometry.
+
### Other
-* `email=<valid email address>`
+| Parameter | Value | Default |
+|-----------| ----- | ------- |
+| email | valid email address | _unset_ |
-If you are making a large number of requests, please include an appropriate email
-address to identify your requests. See Nominatim's [Usage Policy](https://operations.osmfoundation.org/policies/nominatim/) for more details.
+If you are making large numbers of requests, please include an appropriate email
+address to identify your requests. See Nominatim's
+[Usage Policy](https://operations.osmfoundation.org/policies/nominatim/) for more details.
-* `debug=[0|1]`
+| Parameter | Value | Default |
+|-----------| ----- | ------- |
+| debug | 0 or 1 | 0 |
Output assorted developer debug information. Data on internals of Nominatim's
-"Search Loop" logic, and SQL queries. The output is (rough) HTML format.
-This overrides the specified machine readable format. (Default: 0)
+"search loop" logic, and SQL queries. The output is HTML format.
+This overrides the specified machine readable format.
## Examples
which are translated into specific OpenStreetMap (OSM) tags (e.g. Pub => `amenity=pub`).
This can be used to narrow down the kind of objects to be returned.
-!!! warning
+!!! note
Special phrases are not suitable to query all objects of a certain type in an
area. Nominatim will always just return a collection of the best matches. To
download OSM data by object type, use the [Overpass API](https://overpass-api.de/).
-## Parameters
+## Endpoint
The search API has the following format:
https://nominatim.openstreetmap.org/search?<params>
```
-The search term may be specified with two different sets of parameters:
+!!! danger "Deprecation warning"
+ The API can also be used with the URL
+ `https://nominatim.openstreetmap.org/search.php`. This is now deprecated
+ and will be removed in future versions.
+
+The query term can be given in two different forms: free-form or structured.
+
+### Free-form query
+
+| Parameter | Value |
+|-----------| ----- |
+| q | Free-form query string to search for |
-* `q=<query>`
+In this form, the query can be unstructured.
+Free-form queries are processed first left-to-right and then right-to-left if that fails. So you may search for
+[pilkington avenue, birmingham](https://nominatim.openstreetmap.org/search?q=pilkington+avenue,birmingham) as well as for
+[birmingham, pilkington avenue](https://nominatim.openstreetmap.org/search?q=birmingham,+pilkington+avenue).
+Commas are optional, but improve performance by reducing the complexity of the search.
- Free-form query string to search for.
- Free-form queries are processed first left-to-right and then right-to-left if that fails. So you may search for
- [pilkington avenue, birmingham](https://nominatim.openstreetmap.org/search?q=pilkington+avenue,birmingham) as well as for
- [birmingham, pilkington avenue](https://nominatim.openstreetmap.org/search?q=birmingham,+pilkington+avenue).
- Commas are optional, but improve performance by reducing the complexity of the search.
+The free-form may also contain special phrases to describe the type of
+place to be returned or a coordinate to search close to a position.
+### Structured query
-* `street=<housenumber> <streetname>`
-* `city=<city>`
-* `county=<county>`
-* `state=<state>`
-* `country=<country>`
-* `postalcode=<postalcode>`
+| Parameter | Value |
+|----------- | ----- |
+| amenity | name and/or type of POI |
+| street | housenumber and streetname |
+| city | city |
+| county | county |
+| state | state |
+| country | country |
+| postalcode | postal code |
- Alternative query string format split into several parameters for structured requests.
- Structured requests are faster but are less robust against alternative
- OSM tagging schemas. **Do not combine with** `q=<query>` **parameter**.
+The structured form of the search query allows looking up an address
+that is already split into its components. Each parameter represents a field
+of the address. All parameters are optional. You should only use the ones
+that are relevant for the address you want to geocode.
-Both query forms accept the additional parameters listed below.
+!!! attention
+ Cannot be combined with the `q=<query>` parameter. Newer versions of
+ the API will return an error if you do so. Older versions simply return
+ unexpected results.
+
+## Parameters
+
+The following parameters can be used to further restrict the search and
+change the output. They are usable for both forms of the search query.
### Output format
-* `format=[xml|json|jsonv2|geojson|geocodejson]`
+| Parameter | Value | Default |
+|-----------| ----- | ------- |
+| format | one of: `xml`, `json`, `jsonv2`, `geojson`, `geocodejson` | `jsonv2` |
-See [Place Output Formats](Output.md) for details on each format. (Default: jsonv2)
+See [Place Output Formats](Output.md) for details on each format.
!!! note
The Nominatim service at
has a different default behaviour for historical reasons. When the
`format` parameter is omitted, the request will be forwarded to the Web UI.
-* `json_callback=<string>`
-Wrap JSON output in a callback function ([JSONP](https://en.wikipedia.org/wiki/JSONP)) i.e. `<string>(<json>)`.
+| Parameter | Value | Default |
+|-----------| ----- | ------- |
+| json_callback | function name | _unset_ |
+
+When given, the JSON output will be wrapped in a callback function with
+the given name. See [JSONP](https://en.wikipedia.org/wiki/JSONP) for more
+information.
+
Only has an effect for JSON output formats.
+| Parameter | Value | Default |
+|-----------| ----- | ------- |
+| limit | number | 10 |
+
+Limit the maximum number of returned results. Cannot be more than 40.
+Nominatim may decide to return fewer results than given, if additional
+results do not sufficiently match the query.
+
+
### Output details
-* `addressdetails=[0|1]`
+| Parameter | Value | Default |
+|-----------| ----- | ------- |
+| addressdetails | 0 or 1 | 0 |
-Include a breakdown of the address into elements. (Default: 0)
+When set to 1, include a breakdown of the address into elements.
+The exact content of the address breakdown depends on the output format.
+!!! tip
+ If you are interested in a stable classification of address categories
+ (suburb, city, state, etc), have a look at the `geocodejson` format.
+ All other formats return classifications according to OSM tagging.
+ There is a much larger set of categories and they are not always consistent,
+ which makes them very hard to work with.
-* `extratags=[0|1]`
-Include additional information in the result if available,
-e.g. wikipedia link, opening hours. (Default: 0)
+| Parameter | Value | Default |
+|-----------| ----- | ------- |
+| extratags | 0 or 1 | 0 |
+When set to 1, the response includes any additional information in the result
+that is available in the database, e.g. wikipedia link, opening hours.
-* `namedetails=[0|1]`
-Include a list of alternative names in the results. These may include
-language variants, references, operator and brand. (Default: 0)
+| Parameter | Value | Default |
+|-----------| ----- | ------- |
+| namedetails | 0 or 1 | 0 |
+
+When set to 1, include a full list of names for the result. These may include
+language variants, older names, references and brand.
### Language of results
-* `accept-language=<browser language string>`
+| Parameter | Value | Default |
+|-----------| ----- | ------- |
+| accept-language | browser language string | content of "Accept-Language" HTTP header |
+
+Preferred language order for showing search results. This may either be
+a simple comma-separated list of language codes or have the same format
+as the ["Accept-Language" HTTP header](https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Accept-Language).
-Preferred language order for showing search results, overrides the value
-specified in the ["Accept-Language" HTTP header](https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Accept-Language).
-Either use a standard RFC2616 accept-language string or a simple
-comma-separated list of language codes.
+!!! tip
+ First-time users of Nominatim tend to be confused that they get different
+ results when using Nominatim in the browser versus in a command-line tool
+ like wget or curl. The command-line tools
+ usually don't send any Accept-Language header, prompting Nominatim
+ to show results in the local language. Browsers on the contrary always
+ send the currently chosen browser language.
-### Result limitation
+### Result restriction
-* `countrycodes=<countrycode>[,<countrycode>][,<countrycode>]...`
+There are two ways to influence the results. *Filters* exclude certain
+kinds of results completely. *Boost parameters* only change the order of the
+results and thus give a preference to some results over others.
-Limit search results to one or more countries. `<countrycode>` must be the
-[ISO 3166-1alpha2](https://en.wikipedia.org/wiki/ISO_3166-1_alpha-2) code,
-e.g. `gb` for the United Kingdom, `de` for Germany.
+| Parameter | Value | Default |
+|-----------| ----- | ------- |
+| countrycodes | comma-separated list of country codes | _unset_ |
+
+Filter that limits the search results to one or more countries.
+The country code must be the
+[ISO 3166-1alpha2](https://en.wikipedia.org/wiki/ISO_3166-1_alpha-2) code
+of the country, e.g. `gb` for the United Kingdom, `de` for Germany.
Each place in Nominatim is assigned to one country code based
on OSM country boundaries. In rare cases a place may not be in any country
-at all, for example, in international waters.
+at all, for example, when it is in international waters. These places are
+also excluded when the filter is set.
+
+!!! note
+ This parameter should not be confused with the 'country' parameter of
+ the structured query. The 'country' parameter contains a search term
+ and will be handled with some fuzziness. The `countrycodes` parameter
+ is a hard filter and as such should be preferred. Having both parameters
+ in the same query will work. If the parameters contradict each other,
+ the search will come up empty.
+
+| Parameter | Value | Default |
+|-----------| ----- | ------- |
+| layer | comma-separated list of: `address`, `poi`, `railway`, `natural`, `manmade` | _unset_ (no restriction) |
+
+The layer filter allows selecting places by theme.
-* `exclude_place_ids=<place_id,[place_id],[place_id]`
+The `address` layer contains all places that make up an address:
+address points with house numbers, streets, inhabited places (suburbs, villages,
+cities, states etc.) and administrative boundaries.
+
+The `poi` layer selects all points of interest. This includes classic POIs like
+restaurants, shops, hotels but also less obvious features like recycling bins,
+guideposts or benches.
+
+The `railway` layer includes railway infrastructure like tracks.
+Note that in Nominatim's standard configuration, only very few railway
+features are imported into the database.
+
+The `natural` layer collects features like rivers, lakes and mountains while
+the `manmade` layer functions as a catch-all for features not covered by the
+other layers.
+
+| Parameter | Value | Default |
+|-----------| ----- | ------- |
+| featureType | one of: `country`, `state`, `city`, `settlement` | _unset_ |
+
+The featureType allows a more fine-grained selection of places
+from the address layer. Results can be restricted to places that make up
+the 'state', 'country' or 'city' part of an address. A featureType of
+settlement selects any human inhabited feature from 'state' down to
+'neighbourhood'.
+
+When featureType is set, then results are automatically restricted
+to the address layer (see above).
+
+!!! tip
+ Instead of using the featureType filters `country`, `state` or `city`,
+ you can also use a structured query without the finer-grained parameters
+ amenity or street.
+
+| Parameter | Value | Default |
+|-----------| ----- | ------- |
+| exclude_place_ids | comma-separated list of place ids | _unset_ |
If you do not want certain OSM objects to appear in the search
result, give a comma separated list of the `place_id`s you want to skip.
previous query only returned a few results, then including those here would
cause the search to return other, less accurate, matches (if possible).
+| Parameter | Value | Default |
+|-----------| ----- | ------- |
+| viewbox | `<x1>,<y1>,<x2>,<y2>` | _unset_ |
-* `limit=<integer>`
-
-Limit the number of returned results. (Default: 10, Maximum: 50)
+Boost parameter which focuses the search on the given area.
+Any two corner points of the box are accepted as long as they make a proper
+box. `x` is longitude, `y` is latitude.
+| Parameter | Value | Default |
+|-----------| ----- | ------- |
+| bounded | 0 or 1 | 0 |
-* `viewbox=<x1>,<y1>,<x2>,<y2>`
+When set to 1, the 'viewbox' parameter (see above) is turned into
+a filter parameter, excluding any results outside the viewbox.
-The preferred area to find search results. Any two corner points of the box
-are accepted as long as they span a real box. `x` is longitude,
-`y` is latitude.
-
-
-* `bounded=[0|1]`
-
-When a viewbox is given, restrict the result to items contained within that
-viewbox (see above). When `viewbox` and `bounded=1` are given, an amenity
-only search is allowed. Give the special keyword for the amenity in square
+When `bounded=1` is given and the viewbox is small enough, then an amenity-only
+search is allowed. Give the special keyword for the amenity in square
brackets, e.g. `[pub]` and a selection of objects of this type is returned.
-There is no guarantee that the result is complete. (Default: 0)
+There is no guarantee that the result returns all objects in the area.
### Polygon output
-* `polygon_geojson=1`
-* `polygon_kml=1`
-* `polygon_svg=1`
-* `polygon_text=1`
+| Parameter | Value | Default |
+|-----------| ----- | ------- |
+| polygon_geojson | 0 or 1 | 0 |
+| polygon_kml | 0 or 1 | 0 |
+| polygon_svg | 0 or 1 | 0 |
+| polygon_text | 0 or 1 | 0 |
-Output geometry of results as a GeoJSON, KML, SVG or WKT. Only one of these
-options can be used at a time. (Default: 0)
+Add the full geometry of the place to the result output. Output formats
+in GeoJSON, KML, SVG or WKT are supported. Only one of these
+options can be used at a time.
-* `polygon_threshold=0.0`
+| Parameter | Value | Default |
+|-----------| ----- | ------- |
+| polygon_threshold | floating-point number | 0.0 |
-Return a simplified version of the output geometry. The parameter is the
+When one of the polygon_* outputs is chosen, return a simplified version
+of the output geometry. The parameter describes the
tolerance in degrees with which the geometry may differ from the original
-geometry. Topology is preserved in the result. (Default: 0.0)
+geometry. Topology is preserved in the geometry.
### Other
-* `email=<valid email address>`
+| Parameter | Value | Default |
+|-----------| ----- | ------- |
+| email | valid email address | _unset_ |
If you are making large numbers of request please include an appropriate email
-address to identify your requests. See Nominatim's [Usage Policy](https://operations.osmfoundation.org/policies/nominatim/) for more details.
+address to identify your requests. See Nominatim's
+[Usage Policy](https://operations.osmfoundation.org/policies/nominatim/) for more details.
-* `dedupe=[0|1]`
+| Parameter | Value | Default |
+|-----------| ----- | ------- |
+| dedupe | 0 or 1 | 1 |
Sometimes you have several objects in OSM identifying the same place or
object in reality. The simplest case is a street being split into many
different OSM ways due to different characteristics. Nominatim will
-attempt to detect such duplicates and only return one match unless
-this parameter is set to 0. (Default: 1)
+attempt to detect such duplicates and only return one match. Setting
+this parameter to 0 disables this deduplication mechanism and
+ensures that all results are returned.
-* `debug=[0|1]`
+| Parameter | Value | Default |
+|-----------| ----- | ------- |
+| debug | 0 or 1 | 0 |
Output assorted developer debug information. Data on internals of Nominatim's
-"Search Loop" logic, and SQL queries. The output is (rough) HTML format.
-This overrides the specified machine readable format. (Default: 0)
-
+"search loop" logic, and SQL queries. The output is HTML format.
+This overrides the specified machine readable format.
## Examples
-##### XML with kml polygon
+##### XML with KML polygon
-* [https://nominatim.openstreetmap.org/search?q=135+pilkington+avenue,+birmingham&format=xml&polygon_geojson=1&addressdetails=1](https://nominatim.openstreetmap.org/search?q=135+pilkington+avenue,+birmingham&format=xml&polygon_geojson=1&addressdetails=1)
+* [https://nominatim.openstreetmap.org/search?q=135+pilkington+avenue,+birmingham&format=xml&polygon_kml=1&addressdetails=1](https://nominatim.openstreetmap.org/search?q=135+pilkington+avenue,+birmingham&format=xml&polygon_kml=1&addressdetails=1)
```xml
- <searchresults timestamp="Sat, 07 Nov 09 14:42:10 +0000" querystring="135 pilkington, avenue birmingham" polygon="true">
- <place
- place_id="1620612" osm_type="node" osm_id="452010817"
- boundingbox="52.548641204834,52.5488433837891,-1.81612110137939,-1.81592094898224"
- lat="52.5487429714954" lon="-1.81602098644987"
- display_name="135, Pilkington Avenue, Wylde Green, City of Birmingham, West Midlands (county), B72, United Kingdom"
- class="place" type="house">
- <geokml>
- <Polygon>
- <outerBoundaryIs>
- <LinearRing>
- <coordinates>-1.816513,52.548756599999997 -1.816434,52.548747300000002 -1.816429,52.5487629 -1.8163717,52.548756099999999 -1.8163464,52.548834599999999 -1.8164599,52.548848100000001 -1.8164685,52.5488213 -1.8164913,52.548824000000003 -1.816513,52.548756599999997</coordinates>
- </LinearRing>
- </outerBoundaryIs>
- </Polygon>
- </geokml>
- <house_number>135</house_number>
- <road>Pilkington Avenue</road>
- <village>Wylde Green</village>
- <town>Sutton Coldfield</town>
- <city>City of Birmingham</city>
- <county>West Midlands (county)</county>
- <postcode>B72</postcode>
- <country>United Kingdom</country>
- <country_code>gb</country_code>
- </place>
- </searchresults>
+<?xml version="1.0" encoding="UTF-8" ?>
+<searchresults timestamp="Tue, 08 Aug 2023 15:45:41 +00:00"
+ attribution="Data © OpenStreetMap contributors, ODbL 1.0. http://osm.org/copyright"
+ querystring="135 pilkington avenue, birmingham"
+ more_url="https://nominatim.openstreetmap.org/search?q=135+pilkington+avenue%2C+birmingham&polygon_kml=1&addressdetails=1&limit=20&exclude_place_ids=125279639&format=xml"
+ exclude_place_ids="125279639">
+ <place place_id="125279639"
+ osm_type="way"
+ osm_id="90394480"
+ lat="52.5487921"
+ lon="-1.8164308"
+ boundingbox="52.5487473,52.5488481,-1.8165130,-1.8163464"
+ place_rank="30"
+ address_rank="30"
+ display_name="135, Pilkington Avenue, Maney, Sutton Coldfield, Wylde Green, Birmingham, West Midlands Combined Authority, England, B72 1LH, United Kingdom"
+ class="building"
+ type="residential"
+ importance="9.999999994736442e-08">
+ <geokml>
+ <Polygon>
+ <outerBoundaryIs>
+ <LinearRing>
+ <coordinates>-1.816513,52.5487566 -1.816434,52.5487473 -1.816429,52.5487629 -1.8163717,52.5487561 -1.8163464,52.5488346 -1.8164599,52.5488481 -1.8164685,52.5488213 -1.8164913,52.548824 -1.816513,52.5487566</coordinates>
+ </LinearRing>
+ </outerBoundaryIs>
+ </Polygon>
+ </geokml>
+ <house_number>135</house_number>
+ <road>Pilkington Avenue</road>
+ <hamlet>Maney</hamlet>
+ <town>Sutton Coldfield</town>
+ <village>Wylde Green</village>
+ <city>Birmingham</city>
+ <ISO3166-2-lvl8>GB-BIR</ISO3166-2-lvl8>
+ <state_district>West Midlands Combined Authority</state_district>
+ <state>England</state>
+ <ISO3166-2-lvl4>GB-ENG</ISO3166-2-lvl4>
+ <postcode>B72 1LH</postcode>
+ <country>United Kingdom</country>
+ <country_code>gb</country_code>
+ </place>
+</searchresults>
```
##### JSON with SVG polygon
-[https://nominatim.openstreetmap.org/search/Unter%20den%20Linden%201%20Berlin?format=json&addressdetails=1&limit=1&polygon_svg=1](https://nominatim.openstreetmap.org/search/Unter%20den%20Linden%201%20Berlin?format=json&addressdetails=1&limit=1&polygon_svg=1)
+[https://nominatim.openstreetmap.org/search?q=Unter%20den%20Linden%201%20Berlin&format=json&addressdetails=1&limit=1&polygon_svg=1](https://nominatim.openstreetmap.org/search?q=Unter%20den%20Linden%201%20Berlin&format=json&addressdetails=1&limit=1&polygon_svg=1)
```json
- {
- "address": {
- "city": "Berlin",
- "city_district": "Mitte",
- "construction": "Unter den Linden",
- "continent": "European Union",
- "country": "Deutschland",
- "country_code": "de",
- "house_number": "1",
- "neighbourhood": "Scheunenviertel",
- "postcode": "10117",
- "public_building": "Kommandantenhaus",
- "state": "Berlin",
- "suburb": "Mitte"
- },
- "boundingbox": [
- "52.5170783996582",
- "52.5173187255859",
- "13.3975105285645",
- "13.3981599807739"
- ],
- "class": "amenity",
- "display_name": "Kommandantenhaus, 1, Unter den Linden, Scheunenviertel, Mitte, Berlin, 10117, Deutschland, European Union",
- "importance": 0.73606775332943,
- "lat": "52.51719785",
- "licence": "Data \u00a9 OpenStreetMap contributors, ODbL 1.0. https://www.openstreetmap.org/copyright",
- "lon": "13.3978352028938",
- "osm_id": "15976890",
- "osm_type": "way",
- "place_id": "30848715",
- "svg": "M 13.397511 -52.517283599999999 L 13.397829400000001 -52.517299800000004 13.398131599999999 -52.517315099999998 13.398159400000001 -52.517112099999999 13.3975388 -52.517080700000001 Z",
- "type": "public_building"
- }
+[
+ {
+ "address": {
+ "ISO3166-2-lvl4": "DE-BE",
+ "borough": "Mitte",
+ "city": "Berlin",
+ "country": "Deutschland",
+ "country_code": "de",
+ "historic": "Kommandantenhaus",
+ "house_number": "1",
+ "neighbourhood": "Friedrichswerder",
+ "postcode": "10117",
+ "road": "Unter den Linden",
+ "suburb": "Mitte"
+ },
+ "boundingbox": [
+ "52.5170798",
+ "52.5173311",
+ "13.3975116",
+ "13.3981577"
+ ],
+ "class": "historic",
+ "display_name": "Kommandantenhaus, 1, Unter den Linden, Friedrichswerder, Mitte, Berlin, 10117, Deutschland",
+ "importance": 0.8135042058306902,
+ "lat": "52.51720765",
+ "licence": "Data © OpenStreetMap contributors, ODbL 1.0. https://osm.org/copyright",
+ "lon": "13.397834399325466",
+ "osm_id": 15976890,
+ "osm_type": "way",
+ "place_id": 108681845,
+ "svg": "M 13.3975116 -52.5172905 L 13.397549 -52.5170798 13.397715 -52.5170906 13.3977122 -52.5171064 13.3977392 -52.5171086 13.3977417 -52.5170924 13.3979655 -52.5171069 13.3979623 -52.5171233 13.3979893 -52.5171248 13.3979922 -52.5171093 13.3981577 -52.5171203 13.398121 -52.5173311 13.3978115 -52.5173103 Z",
+ "type": "house"
+ }
+]
```
##### JSON with address details
-[https://nominatim.openstreetmap.org/?addressdetails=1&q=bakery+in+berlin+wedding&format=json&limit=1](https://nominatim.openstreetmap.org/?addressdetails=1&q=bakery+in+berlin+wedding&format=json&limit=1)
+[https://nominatim.openstreetmap.org/search?addressdetails=1&q=bakery+in+berlin+wedding&format=jsonv2&limit=1](https://nominatim.openstreetmap.org/search?addressdetails=1&q=bakery+in+berlin+wedding&format=jsonv2&limit=1)
```json
- {
- "address": {
- "bakery": "B\u00e4cker Kamps",
- "city_district": "Mitte",
- "continent": "European Union",
- "country": "Deutschland",
- "country_code": "de",
- "footway": "Bahnsteig U6",
- "neighbourhood": "Sprengelkiez",
- "postcode": "13353",
- "state": "Berlin",
- "suburb": "Wedding"
- },
- "boundingbox": [
- "52.5460929870605",
- "52.5460968017578",
- "13.3591794967651",
- "13.3591804504395"
- ],
- "class": "shop",
- "display_name": "B\u00e4cker Kamps, Bahnsteig U6, Sprengelkiez, Wedding, Mitte, Berlin, 13353, Deutschland, European Union",
- "icon": "https://nominatim.openstreetmap.org/images/mapicons/shopping_bakery.p.20.png",
- "importance": 0.201,
- "lat": "52.5460941",
- "licence": "Data \u00a9 OpenStreetMap contributors, ODbL 1.0. https://www.openstreetmap.org/copyright",
- "lon": "13.35918",
- "osm_id": "317179427",
- "osm_type": "node",
- "place_id": "1453068",
- "type": "bakery"
- }
+[
+ {
+ "address": {
+ "ISO3166-2-lvl4": "DE-BE",
+ "borough": "Mitte",
+ "city": "Berlin",
+ "country": "Deutschland",
+ "country_code": "de",
+ "neighbourhood": "Sprengelkiez",
+ "postcode": "13347",
+ "road": "Lindower Straße",
+ "shop": "Ditsch",
+ "suburb": "Wedding"
+ },
+ "addresstype": "shop",
+ "boundingbox": [
+ "52.5427201",
+ "52.5427654",
+ "13.3668619",
+ "13.3669442"
+ ],
+ "category": "shop",
+ "display_name": "Ditsch, Lindower Straße, Sprengelkiez, Wedding, Mitte, Berlin, 13347, Deutschland",
+ "importance": 9.99999999995449e-06,
+ "lat": "52.54274275",
+ "licence": "Data © OpenStreetMap contributors, ODbL 1.0. http://osm.org/copyright",
+ "lon": "13.36690305710228",
+ "name": "Ditsch",
+ "osm_id": 437595031,
+ "osm_type": "way",
+ "place_id": 204751033,
+ "place_rank": 30,
+ "type": "bakery"
+ }
+]
```
##### GeoJSON
# Status
-Useful for checking if the service and database is running. The JSON output also shows
+Report on the state of the service and database. Useful for checking if the
+service is up and running. The JSON output also reports
when the database was last updated.
+## Endpoint
+
+The status API has the following format:
+
+```
+https://nominatim.openstreetmap.org/status
+```
+
+!!! danger "Deprecation warning"
+ The API can also be used with the URL
+ `https://nominatim.openstreetmap.org/status.php`. This is now deprecated
+ and will be removed in future versions.
+
+
## Parameters
-* `format=[text|json]` (defaults to 'text')
+The status endpoint takes a single optional parameter:
+
+| Parameter | Value | Default |
+|-----------| ----- | ------- |
+| format | one of: `text`, `json` | 'text' |
+
+Selects the output format. See below.
## Output
#### Text format
-```
- https://nominatim.openstreetmap.org/status.php
-```
-
-will return HTTP status code 200 and print `OK`.
+When everything is okay, a status code 200 is returned and a simple message: `OK`
-On error it will return HTTP status code 500 and print a message, e.g.
+On error it will return HTTP status code 500 and print a detailed error message, e.g.
`ERROR: Database connection failed`.
#### JSON format
-```
- https://nominatim.openstreetmap.org/status.php?format=json
-```
+Always returns an HTTP code 200 when the status call could be executed.
-will return HTTP code 200 and a structure
+On success a JSON dictionary with the following structure is returned:
```json
{
the API. The `database_version` field contains the version of the data format
in the database.
-On error will also return HTTP status code 200 and a structure with error
-code and message, e.g.
+On error, it will return a shorter JSON dictionary with the error message
+and status only, e.g.
```json
{
"message": "Database connection failed"
}
```
-
-Possible status codes are
-
- | | message | notes |
- |-----|----------------------|---------------------------------------------------|
- | 700 | "No database" | connection failed |
- | 701 | "Module failed" | database could not load nominatim.so |
- | 702 | "Module call failed" | nominatim.so loaded but calling a function failed |
- | 703 | "Query failed" | test query against a database table failed |
- | 704 | "No value" | test query worked but returned no results |
+++ /dev/null
-#!/bin/sh
-#
-# Extract markdown-formatted documentation from a source file
-#
-# Usage: bash2md.sh <infile> <outfile>
-
-sed '/^#!/d;s:^#\( \|$\)::;s/.*#DOCS://' $1 > $2
-## Configuring the Import
+# Configuring the Import of OSM data
+
+In the very first step of a Nominatim import, OSM data is loaded into the
+database. Nominatim uses [osm2pgsql](https://osm2pgsql.org) for this task.
+It comes with a [flex style](https://osm2pgsql.org/doc/manual.html#the-flex-output)
+specifically tailored to filter and convert OSM data into Nominatim's
+internal data representation. Nominatim ships with a few preset
+configurations for this import, each resulting in a geocoding database of
+different detail. The
+[Import section](../admin/Import.md#filtering-imported-data) explains
+these default configurations in detail.
+
+If you want to have more control over which OSM data is added to the database,
+you can also create your own custom style. Create a new lua style file, put it
+into your project directory and then set `NOMINATIM_IMPORT_STYLE` to the name
+of the file. Custom style files can be used to modify the existing preset
+configurations or to implement your own configuration from scratch.
+
+The remainder of the page describes how the flex style works and how to
+customize it.
+
+## The `flex-base` lua module
+
+The core of Nominatim's flex import configuration is the `flex-base` module.
+It defines the table layout used by Nominatim and provides standard
+implementations for the import callbacks that help with customizing
+how OSM tags are used by Nominatim.
+
+Every custom style must include this module to make sure that the correct
+tables are created. Thus start your custom style as follows:
+
+``` lua
+local flex = require('flex-base')
+```
-Which OSM objects are added to the database and which of the tags are used
-can be configured via the import style configuration file. This
-is a JSON file which contains a list of rules which are matched against every
-tag of every object and then assign the tag its specific role.
+### Using preset configurations
-The style to use is given by the `NOMINATIM_IMPORT_STYLE` configuration
-option. There are a number of default styles, which are explained in detail
-in the [Import section](../admin/Import.md#filtering-imported-data). These
-standard styles may be referenced by their name.
+If you want to start with one of the existing presets, then you can import
+its settings using the `import_topic()` function:
-You can also create your own custom style. Put the style file into your
-project directory and then set `NOMINATIM_IMPORT_STYLE` to the name of the file.
-It is always recommended to start with one of the standard styles and customize
-those. You find the standard styles under the name `import-<stylename>.style`
-in the standard Nominatim configuration path (usually `/etc/nominatim` or
-`/usr/local/etc/nominatim`).
+```
+local flex = require('flex-base')
-The remainder of the page describes the format of the file.
+flex.import_topic('streets')
+```
-### Configuration Rules
+The `import_topic` function takes an optional second configuration
+parameter. The available options are explained in the
+[themepark section](#using-osm2pgsql-themepark).
+
+!!! note
+ You can also directly import the preset style files, e.g.
+ `local flex = require('import-street')`. It is not possible to
+ set extra configuration this way.
+
+### How processing works
+
+When Nominatim processes an OSM object, it looks for four kinds of tags:
+The _main tags_ classify what kind of place the OSM object represents. One
+OSM object can have more than one main tag. In such case one database entry
+is created for each main tag. _Name tags_ represent searchable names of the
+place. _Address tags_ are used to compute the address hierarchy of the place.
+Address tags are used for searching and for creating a display name of the place.
+_Extra tags_ are any tags that are not directly related to search but
+contain interesting additional information.
+
+!!! danger
+ Some tags in the extratags category are used by Nominatim to better
+ classify the place. You want to make sure these are always present
+ in custom styles.
+
+Configuring the style means deciding which key and/or key/value is used
+in which category.
+
+## Changing the recognized tags
+
+The flex style offers a number of functions to set the classification of
+each OSM tag. Most of these functions can also take a preset string instead
+of a tag description. These presets describe common configurations that
+are also used in the definition of the predefined styles. This section
+lists the configuration functions and the accepted presets.
+
+#### Key match lists
+
+Some of the following functions take _key match lists_. These lists can
+contain three kinds of strings to match against tag keys:
+A string that ends in an asterisk `*` is a prefix match and accordingly matches
+against any key that starts with the given string (minus the `*`).
+A suffix match can be defined similarly with a string that starts with a `*`.
+Any other string is matched exactly against tag keys.
+
+### Main tags
+
+`set/modify_main_tags()` allow you to define which tags are used as main tags. They
+take a Lua table parameter which defines, for keys and key/value
+combinations, how they are classified.
+
+The following classifications are recognized:
+
+| classification | meaning |
+| :-------------- | :------ |
+| always | Unconditionally use this tag as a main tag. |
+| named | Consider as main tag, when the object has a primary name (see [names](#name-tags) below) |
+| named_with_key | Consider as main tag, when the object has a primary name with a domain prefix. For example, if the main tag is `bridge=yes`, then it will only be added as an extra entry, if there is a tag `bridge:name[:XXX]` for the same object. If this property is set, all names that are not domain-specific are ignored. |
+| fallback | Consider as main tag only when no other main tag was found. Fallback always implies `named`, i.e. fallbacks are only tried for objects with primary names. |
+| delete | Completely ignore the tag in any further processing |
+| extra | Move the tag to extratags and then ignore it for further processing |
+| `<function>`| Advanced handling, see [below](#advanced-main-tag-handling) |
+
+Each key in the table parameter defines an OSM tag key. The value may
+be directly a classification as described above. Then the tag will
+be considered a main tag for any possible value that is not further defined.
+To further restrict which values are acceptable, give a table with the
+permitted values and their kind of main tag. If the table contains a simple
+value without key, then this is used as default for values that are not listed.
+
+`set_main_tags()` will completely replace the current main tag configuration
+with the new configuration. `modify_main_tags()` will merge the new
+configuration with the existing one. Otherwise, the two functions do exactly
+the same.
+
+!!! example
+ ``` lua
+ local flex = require('import-full')
+
+ flex.set_main_tags{
+ boundary = {administrative = 'named'},
+ highway = {'always', street_lamp = 'named', no = 'delete'},
+ landuse = 'fallback'
+ }
+ ```
+
+ In this example an object with a `boundary` tag will only be included
+ when it has a value of `administrative`. Objects with `highway` tags are
+ always included with two exceptions: the troll tag `highway=no` is
+ deleted on the spot. And when the value is `street_lamp` then the object
+ must have a name, too. Finally, if a `landuse` tag is present then
+ it will be used independently of the concrete value when neither boundary
+ nor highway tags were found and the object is named.
-A single rule looks like this:
+##### Presets
-```json
-{
- "keys" : ["key1", "key2", ...],
- "values" : {
- "value1" : "prop",
- "value2" : "prop1,prop2"
- }
-}
-```
+| Name | Description |
+| :----- | :---------- |
+| admin | Basic tag set collecting places and administrative boundaries. This set is needed also to ensure proper address computation and should therefore always be present. You can disable selected place types like `place=locality` after adding this set, if they are not relevant for your use case. |
+| all_boundaries | Extends the set of recognized boundaries and places to all available ones. |
+| natural | Tags for natural features like rivers and mountain peaks. |
+| street/default | Tags for streets. Major streets are always included, minor ones only when they have a name. |
+| street/car | Tags for all streets that can be used by a motor vehicle. |
+| street/all | Includes all highway features named and unnamed. |
+| poi/delete | Adds most POI features with and without name. Some frequent but very domain-specific values are excluded by deleting them. |
+| poi/extra | Like 'poi/delete' but excluded values are moved to extratags. |
+
+
+##### Advanced main tag handling
+
+The groups described above are in fact only a preset for a filtering function
+that is used to make the final decision how a pre-selected main tag is entered
+into Nominatim's internal table. To further customize handling you may also
+supply your own filtering function.
+
+The function takes up to three parameters: a Place object of the object
+being processed, the key of the main tag and the value of the main tag.
+The function may return one of three values:
+
+* `nil` or `false` causes the entry to be ignored
+* the Place object causes the place to be added as is
+* `Place.copy(names=..., address=..., extratags=...)` causes the
+  place to be entered into the database but with name/address/extratags
+ set to the given different values.
+
+The Place object has some read-only values that can be used to determine
+the handling:
+
+* **object** is the original OSM object data handed in by osm2pgsql
+* **admin_level** is the content of the admin_level tag, parsed into an integer and normalized to a value between 0 and 15
+* **has_name** is a boolean indicating if the object has a primary name tag
+* **names** is a table with the collected list of name tags
+* **address** is a table with the collected list of address tags
+* **extratags** is a table with the collected list of additional tags to save
+
+!!! example
+ ``` lua
+ local flex = require('flex-base')
+
+    flex.import_topic('street')
+
+ local function no_sidewalks(place, k, v)
+ if place.object.tags.footway == 'sidewalk' then
+ return false
+ end
+
+ -- default behaviour is to have all footways
+ return place
+ end
+
+    flex.modify_main_tags{highway = {footway = no_sidewalks}}
+ ```
+ This script adds a custom handler for `highway=footway`. It only includes
+ them in the database, when the object doesn't have a tag `footway=sidewalk`
+ indicating that it is just part of a larger street which should already
+ be indexed. Note that it is not necessary to check the key and value
+ of the main tag because the function is only used for the specific
+ main tag.
+
+
+### Ignored tags
+
+The function `ignore_keys()` sets the `delete` classification for keys.
+This function takes a _key match list_ so that it is possible to exclude
+groups of keys.
+
+Note that full matches always take precedence over suffix matches, which
+in turn take precedence over prefix matches.
+
+!!! example
+ ``` lua
+ local flex = require('flex-base')
+
+    flex.import_topic('admin')
+ flex.ignore_keys{'old_name', 'old_name:*'}
+ ```
+
+ This example uses the `admin` preset with the exception that names
+    that are no longer in current use are ignored.
+
+##### Presets
+
+| Name | Description |
+| :----- | :---------- |
+| metatags | Tags with meta information about the OSM tag like source, notes and import sources. |
+| name | Non-names that actually describe properties or name parts. These names can throw off search and should always be removed. |
+| address | Extra `addr:*` tags that are not useful for Nominatim. |
+
+
+### Tags for `extratags`
-A rule first defines a list of keys to apply the rule to. This is always a list
-of strings. The string may have four forms. An empty string matches against
-any key. A string that ends in an asterisk `*` is a prefix match and accordingly
-matches against any key that starts with the given string (minus the `*`). A
-suffix match can be defined similarly with a string that starts with a `*`. Any
-other string constitutes an exact match.
+The function `add_for_extratags()` sets the `extra` classification for keys.
+This function takes a
+_key match list_ so that it is possible to move groups of keys to extratags.
-The second part of the rules defines a list of values and the properties that
-apply to a successful match. Value strings may be either empty, which
-means that they match any value, or describe an exact match. Prefix
-or suffix matching of values is not possible.
+Note that full matches always take precedence over suffix matches, which
+in turn take precedence over prefix matches.
-For a rule to match, it has to find a valid combination of keys and values. The
-resulting property is that of the matched values.
+!!! example
+ ``` lua
+ local flex = require('flex-base')
-The rules in a configuration file are processed sequentially and the first
-match for each tag wins.
+    flex.import_topic('street')
+ flex.add_for_extratags{'surface', 'access', 'vehicle', 'maxspeed'}
+ ```
-A rule where key and value are the empty string is special. This defines the
-fallback when none of the rules match. The fallback is always used as a last
-resort when nothing else matches, no matter where the rule appears in the file.
-Defining multiple fallback rules is not allowed. What happens in this case,
-is undefined.
+ This example uses the `street` preset but adds a couple of tags that
+ are of interest about the condition of the street.
-### Tag Properties
+##### Presets
-One or more of the following properties may be given for each tag:
+| Name | Description |
+| :----- | :---------- |
+| required | Tags that Nominatim will use for various computations when present in extratags. Always include these. |
-* `main`
+In addition, all [presets from ignored tags](#presets_1) are accepted.
- A principal tag. A new row will be added for the object with key and value
- as `class` and `type`.
+### General pre-filtering
-* `with_name`
+_(deprecated)_ `set_prefilters()` allows you to set the `delete` and `extra`
+classification for main tags.
- When the tag is a principal tag (`main` property set): only really add a new
- row, if there is any name tag found (a reference tag is not sufficient, see
- below).
+This function removes all previously set main tags with `delete` and `extra`
+classification and then adds the newly defined tags.
-* `with_name_key`
+`set_prefilters()` takes a table with four optional fields:
- When the tag is a principal tag (`main` property set): only really add a new
- row, if there is also a name tag that matches the key of the principal tag.
- For example, if the main tag is `bridge=yes`, then it will only be added as
- an extra row, if there is a tag `bridge:name[:XXX]` for the same object.
- If this property is set, all other names that are not domain-specific are
- ignored.
+* __delete_keys__ is a _key match list_ for tags that should be deleted
+* __delete_tags__ contains a table of tag keys pointing to a list of tag
+ values. Tags with matching key/value pairs are deleted.
+* __extra_keys__ is a _key match list_ for tags which should be saved into
+ extratags
+* __extra_tags__ contains a table of tag keys pointing to a list of tag
+ values. Tags with matching key/value pairs are moved to extratags.
-* `fallback`
+!!! danger "Deprecation warning"
+ Use of this function should be replaced with `modify_main_tags()` to
+ set the data from `delete_tags` and `extra_tags`, with `ignore_keys()`
+ for the `delete_keys` parameter and with `add_for_extratags()` for the
+ `extra_keys` parameter.
- When the tag is a principal tag (`main` property set): only really add a new
- row, when no other principal tags for this object have been found. Only one
- fallback tag can win for an object.
+### Name tags
-* `operator`
+`set/modify_name_tags()` allow you to define the tags used for naming places. Name tags
+can only be selected by their keys. The import script distinguishes
+between primary and auxiliary names. A primary name is the given name of
+a place. Having a primary name makes a place _named_. This is important
+for main tags that are only included when a name is present. Auxiliary names
+are identifiers like references. They may be searched for but should not
+be included on their own.
- When the tag is a principal tag (`main` property set): also include the
- `operator` tag in the list of names. This is a special construct for an
- out-dated tagging practise in OSM. Fuel stations and chain restaurants
- in particular used to have the name of the chain tagged as `operator`.
- These days the chain can be more commonly found in the `brand` tag but
- there is still enough old data around to warrant this special case.
+The functions take a table with two optional fields `main` and `extra`.
+They take _key match lists_ for primary and auxiliary names respectively.
+A third field `house` can contain tags for names that appear in place of
+house numbers in addresses. This field can only contain complete key names.
+'house tags' are special in that they cause the OSM object to be added to
+the database independently of the presence of other main tags.
-* `name`
+`set_name_tags()` overwrites the current configuration, while
+`modify_name_tags()` replaces the fields that are given. (Be aware that
+the fields are replaced as a whole. `main = {'foo_name'}` will cause
+`foo_name` to become the only recognized primary name. Any previously
+defined primary names are forgotten.)
- Add tag to the list of names.
+!!! example
+ ``` lua
+ local flex = require('flex-base')
-* `ref`
+ flex.set_main_tags{highway = {traffic_light = 'named'}}
+ flex.set_name_tags{main = {'name', 'name:*'},
+ extra = {'ref'}
+ }
+ ```
- Add tag to the list of names as a reference. At the moment this only means
- that the object is not considered to be named for `with_name`.
+ This example creates a search index over traffic lights but will
+ only include those that have a common name and not those which just
+ have some reference ID from the city.
-* `address`
+##### Presets
- Add tag to the list of address tags. If the tag starts with `addr:` or
- `is_in:`, then this prefix is cut off before adding it to the list.
+| Name | Description |
+| :----- | :---------- |
+| core | Basic set of recognized names for all places. |
+| address | Additional names useful when indexing full addresses. |
+| poi | Extended set of recognized names for pois. Use on top of the core set. |
-* `postcode`
+### Address tags
- Add the value as a postcode to the address tags. If multiple tags are
- candidate for postcodes, one wins out and the others are dropped.
+`set/modify_address_tags()` defines the tags that will be used to build
+up the address of an object. Address tags can only be chosen by their key.
+
+The functions take a table with arbitrary fields, each defining
+a key list or _key match list_. Some fields have a special meaning:
+
+| Field | Type | Description |
+| :---------| :-------- | :-----------|
+| main | key list | Tags that make a full address object out of the OSM object. This is usually the house number or variants thereof. If a main address tag appears, then the object will always be included, if necessary with a fallback of `place=house`. If the key has a prefix of `addr:` or `is_in:` this will be stripped. |
+| extra | key match list | Supplementary tags for addresses, tags like `addr:street`, `addr:city` etc. If the key has a prefix of `addr:` or `is_in:` this will be stripped. |
+| interpolation | key list | Tags that identify address interpolation lines. |
+| country | key match list | Tags that may contain the country the place is in. The first found value with a two-letter code will be accepted, all other values are discarded. |
+| _other_ | key match list | Summary field. If a key matches the key match list, then its value will be added to the address tags with the name of the field as key. If multiple tags match, then an arbitrary one wins. |
+
+`set_address_tags()` overwrites the current configuration, while
+`modify_address_tags()` replaces the fields that are given. (Be aware that
+the fields are replaced as a whole.)
+
+!!! example
+ ``` lua
+ local flex = require('import-full')
+
+ flex.set_address_tags{
+ main = {'addr:housenumber'},
+ extra = {'addr:*'},
+ postcode = {'postal_code', 'postcode', 'addr:postcode'},
+ country = {'country_code', 'ISO3166-1'}
+ }
+ ```
+
+ In this example all tags which begin with `addr:` will be saved in
+ the address tag list. If one of the tags is `addr:housenumber`, the
+ object will fall back to be entered as a `place=house` in the database
+    unless there is another interesting main tag to be found.
+
+ Tags with keys `country_code` and `ISO3166-1` are saved with their
+ value under `country` in the address tag list. The same thing happens
+ to postcodes, they will always be saved under the key `postcode` thus
+ normalizing the multitude of keys that are used in the OSM database.
+
+##### Presets
+
+| Name | Description |
+| :----- | :---------- |
+| core | Basic set of tags needed to recognize address relationship for any place. Always include this. |
+| houses | Additional set of tags needed to recognize proper addresses |
+
+### Handling of unclassified tags
+
+`set_unused_handling()` defines what to do with tags that remain after all tags
+have been classified using the functions above. There are two ways in
+which the function can be used:
+
+`set_unused_handling(delete_keys = ..., delete_tags = ...)` deletes all
+keys that match the descriptions in the parameters and moves all remaining
+tags into the extratags list.
+
+`set_unused_handling(extra_keys = ..., extra_tags = ...)` moves all tags
+matching the parameters into the extratags list and then deletes the remaining
+tags. For the format of the parameters see the description in `set_prefilters()`
+above.
+
+When no special handling is set, then unused tags will be discarded with one
+exception: place tags are kept in extratags for administrative boundaries.
+When using a custom setting, you should also make sure that the place tag
+is added for extratags.
+
+!!! example
+ ``` lua
+ local flex = require('import-full')
+
+ flex.set_address_tags{
+ main = {'addr:housenumber'},
+ extra = {'addr:*', 'tiger:county'}
+ }
+ flex.set_unused_handling{delete_keys = {'tiger:*'}}
+ ```
+
+ In this example all remaining tags except those beginning with `tiger:`
+ are moved to the extratags list. Note that it is not possible to
+ already delete the tiger tags with `set_prefilters()` because that
+ would remove tiger:county before the address tags are processed.
-* `country`
+## Customizing osm2pgsql callbacks
- Add the value as a country code to the address tags. The value must be a
- two letter country code, otherwise it is ignored. If there are multiple
- tags that match, then one wins out and the others are dropped.
+osm2pgsql expects the flex style to implement three callbacks, one process
+function per OSM type. If you want to implement special handling for
+certain OSM types, you can override the default implementations provided
+by the flex-base module.
-* `house`
+### Enabling additional relation types
- If no principle tags can be found for the object, still add the object with
- `class`=`place` and `type`=`house`. Use this for address nodes that have no
- other function.
+OSM relations can represent very diverse
+[types of real-world objects](https://wiki.openstreetmap.org/wiki/Key:type). To
+be able to process them correctly, Nominatim needs to understand how to
+create a geometry for each type. By default, the script knows how to
+process relations of type `multipolygon`, `boundary` and `waterway`. All
+other relation types are ignored.
-* `interpolation`
+To add other types of relations, set `RELATION_TYPES` for
+the type to the kind of geometry that should be created. The following
+kinds of geometries can be used:
- Add this object as an address interpolation (appears as `class`=`place` and
- `type`=`houses` in the database).
+* __relation_as_multipolygon__ creates a (Multi)Polygon from the ways in
+ the relation. If the ways do not form a valid area, then the object is
+ silently discarded.
+* __relation_as_multiline__ creates a (Multi)LineString from the ways in
+  the relation. Ways are combined as much as possible without any regard
+ to their order in the relation.
-* `extra`
+!!! Example
+ ``` lua
+ local flex = require('import-full')
- Add tag to the list of extra tags.
+ flex.RELATION_TYPES['site'] = flex.relation_as_multipolygon
+ ```
-* `skip`
+ With this line relations of `type=site` will be included in the index
+ according to main tags found. This only works when the site relation
+ resolves to a valid area. Nodes in the site relation are not part of the
+ geometry.
- Skip the tag completely. Useful when a custom default fallback is defined
- or to define exceptions to rules.
-A rule can define as many of these properties for one match as it likes. For
-example, if the property is `"main,extra"` then the tag will open a new row
-but also have the tag appear in the list of extra tags.
+### Adding additional logic to processing functions
-### Changing the Style of Existing Databases
+The default processing functions are also exported by the flex-base module
+as `process_node`, `process_way` and `process_relation`. These can be used
+to implement your own processing functions with some additional processing
+logic.
+
+!!! Example
+ ``` lua
+ local flex = require('import-full')
+
+ function osm2pgsql.process_relation(object)
+ if object.tags.boundary ~= 'administrative' or object.tags.admin_level ~= '2' then
+ flex.process_relation(object)
+ end
+ end
+ ```
+
+ This example discards all country-level boundaries and uses standard
+ handling for everything else. This can be useful if you want to use
+ your own custom country boundaries.
+
+
+### Customizing the main processing function
+
+!!! danger "Deprecation Warning"
+ The style used to allow overwriting the internal processing function
+ `process_tags()`. While this is currently still possible, it is no longer
+ encouraged and may stop working in future versions. The internal
+ `Place` class should now be considered read-only.
+
+
+## Using osm2pgsql-themepark
+
+The Nominatim osm2pgsql style is designed so that it can also be used as
+a theme for [osm2pgsql-themepark](https://osm2pgsql.org/themepark/). This
+makes it easy to combine Nominatim with other projects like
+[openstreetmap-carto](https://github.com/gravitystorm/openstreetmap-carto)
+in the same database.
+
+To set up one of the preset styles, simply include a topic with the same name:
+
+```
+local themepark = require('themepark')
+themepark:add_topic('nominatim/address')
+```
-There is normally no issue changing the style of a database that is already
+Themepark topics offer two configuration options:
+
+* **street_theme** allows you to choose one of the sub topics for streets:
+ * _default_ - include all major streets and named minor paths
+ * _car_ - include all streets physically usable by cars
+ * _all_ - include all major streets and minor paths
+* **with_extratags**, when set to a truthy value, then tags that are
+ not specifically used for address or naming are added to the
+ extratags column
+
+The customization functions described in the
+[Changing recognized tags](#changing-the-recognized-tags) section
+are available from the theme. To access the theme you need to explicitly initialize it.
+
+!!! Example
+ ``` lua
+ local themepark = require('themepark')
+
+ themepark:add_topic('nominatim/full', {with_extratags = true})
+
+ local flex = themepark:init_theme('nominatim')
+
+    flex.modify_main_tags{amenity = {
+        waste_basket = 'delete'}
+    }
+ ```
+ This example uses the full Nominatim configuration but disables
+ importing waste baskets.
+
+You may also write a new configuration from scratch. Simply omit including
+a Nominatim topic and only call the required customization functions.
+
+Customizing the osm2pgsql processing functions as explained
+[above](#adding-additional-logic-to-processing-functions) is not possible
+when running under themepark. Instead include other topics that make the
+necessary modifications or add an additional processor before including
+the Nominatim topic.
+
+!!! Example
+ ``` lua
+ local themepark = require('themepark')
+
+ local function discard_country_boundaries(object)
+ if object.tags.boundary == 'administrative' and object.tags.admin_level == '2' then
+ return 'stop'
+ end
+ end
+
+ themepark:add_proc('relation', discard_country_boundaries)
+ -- Order matters here. The topic needs to be added after the custom callback.
+ themepark:add_topic('nominatim/full', {with_extratags = true})
+ ```
+ Discarding country-level boundaries when running under themepark.
+
+## osm2pgsql gazetteer output
+
+Nominatim still allows you to configure the gazetteer output to remain
+backwards compatible with older imports. It will be automatically used
+when the style file name ends in `.style`. For documentation of the
+old import style, please refer to the documentation of older releases
+of Nominatim. Do not use the gazetteer output for new imports. There is no
+guarantee that new versions of Nominatim are fully compatible with the
+gazetteer output.
+
+## Changing the style of existing databases
+
+There is usually no issue changing the style of a database that is already
imported and now kept up-to-date with change files. Just be aware that any
change in the style applies to updates only. If you want to change the data
that is already in the database, then a reimport is necessary.
The main value for importance is derived from page ranking values for Wikipedia
pages for a place. For places that do not have their own
Wikipedia page, a formula is used that derives a static importance from the
-places [search rank](../customize/Ranking#search-rank).
+place's [search rank](../customize/Ranking.md#search-rank).
In a second step, a secondary importance value is added which is meant to
represent how well-known the general area is where the place is located. It
nominatim.org has preprocessed importance tables for the
[primary Wikipedia rankings](https://nominatim.org/data/wikimedia-importance.sql.gz)
-and for a secondary importance based on the number of tile views on openstreetmap.org.
+and for [secondary importance](https://nominatim.org/data/wikimedia-secondary-importance.sql.gz)
+based on Wikipedia importance of the administrative areas.
+
+The source code for creating these files is available in the Github projects
+[osm-search/wikipedia-wikidata](https://github.com/osm-search/wikipedia-wikidata)
+and
+[osm-search/secondary-importance](https://github.com/osm-search/secondary-importance).
### Customizing secondary importance
CREATE INDEX ON secondary_importance USING gist(ST_ConvexHull(gist))
```
-The following raster2pgsql command will create a table that conforms to
-the requirements:
+The following raster2pgsql command will create a table from a tiff file
+that conforms to the requirements:
```
raster2pgsql -I -C -Y -d -t 128x128 input.tiff public.secondary_importance
can be set in your local `.env` configuration
* [Import styles](Import-Styles.md) explains how to write your own import style
in order to control what kind of OSM data will be imported
+* [API Result Formatting](Result-Formatting.md) shows how to change the
+ output of the Nominatim API
* [Place ranking](Ranking.md) describes the configuration around classifing
places in terms of their importance and their role in an address
* [Tokenizers](Tokenizers.md) describes the configuration of the module
--- /dev/null
+# Changing the Appearance of Results in the Server API
+
+The Nominatim Server API offers a number of formatting options that
+present search results in [different output formats](../api/Output.md).
+These results only contain a subset of all the information that Nominatim
+has about the result. This page explains how to adapt the result output
+or add additional result formatting.
+
+## Defining custom result formatting
+
+To change the result output, you need to place a file `api/v1/format.py`
+into your project directory. This file needs to define a single variable
+`dispatch` containing a [FormatDispatcher](#formatdispatcher). This class
+serves to collect the functions for formatting the different result types
+and offers helper functions to apply the formatters.
+
+There are two ways to define the `dispatch` variable. If you want to reuse
+the default output formatting and just make some changes or add an additional
+format type, then import the dispatch object from the default API:
+
+``` python
+from nominatim_api.v1.format import dispatch as dispatch
+```
+
+If you prefer to define a completely new result output, then you can
+create an empty dispatcher object:
+
+``` python
+from nominatim_api import FormatDispatcher
+
+dispatch = FormatDispatcher()
+```
+
+## The formatting function
+
+The dispatcher organises the formatting functions by format and result type.
+The format corresponds to the `format` parameter of the API. It can contain
+one of the predefined format names or you can invent your own new format.
+
+API calls return data classes or an array of a data class which represent
+the result. You need to make sure there are formatters defined for the
+following result types:
+
+* StatusResult (single object, returned by `/status`)
+* DetailedResult (single object, returned by `/details`)
+* SearchResults (list of objects, returned by `/search`)
+* ReverseResults (list of objects, returned by `/reverse` and `/lookup`)
+* RawDataList (simple object, returned by `/deletable` and `/polygons`)
+
+A formatter function has the following signature:
+
+``` python
+def format_func(result: ResultType, options: Mapping[str, Any]) -> str
+```
+
+The options dictionary contains additional information about the original
+query. See the [reference below](#options-for-different-result-types)
+about the possible options.
+
+To set the result formatter for a certain result type and format, you need
+to write the format function and decorate it with the
+[`format_func`](#nominatim_api.FormatDispatcher.format_func)
+decorator.
+
+For example, let us extend the result for the status call in text format
+and add the server URL. Such a formatter would look like this:
+
+``` python
+from nominatim_api import StatusResult
+
+@dispatch.format_func(StatusResult, 'text')
+def _format_status_text(result, _):
+ header = 'Status for server nominatim.openstreetmap.org'
+ if result.status:
+ return f"{header}\n\nERROR: {result.message}"
+
+ return f"{header}\n\nOK"
+```
+
+If your dispatcher is derived from the default one, then this definition
+will overwrite the original formatter function. This way it is possible
+to customize the output of selected results.
+
+## Adding new formats
+
+You may also define a completely different output format. This is as simple
+as adding formatting functions for all result types using the custom
+format name:
+
+``` python
+from nominatim_api import StatusResult
+
+@dispatch.format_func(StatusResult, 'chatty')
+def _format_status_text(result, _):
+ if result.status:
+ return f"The server is currently not running. {result.message}"
+
+ return "Good news! The server is running just fine."
+```
+
+That's all. Nominatim will automatically pick up the new format name and
+will allow the user to use it. There is no need to implement formatter
+functions for all the result types, when you invent a new one. The
+available formats will be determined for each API endpoint separately.
+To find out which formats are available, you can use the `--list-formats`
+option of the CLI tool:
+
+```
+me@machine:planet-project$ nominatim status --list-formats
+2024-08-16 19:54:00: Using project directory: /home/nominatim/planet-project
+text
+json
+chatty
+debug
+me@machine:planet-project$
+```
+
+The `debug` format listed in the last line will always appear. It is a
+special format that enables debug output via the command line (the same
+as the `debug=1` parameter enables for the server API). To not clash
+with this built-in function, you shouldn't name your own format 'debug'.
+
+### Content type of new formats
+
+All responses will be returned with the content type application/json by
+default. If your format produces a different content type, you need
+to configure the content type with the `set_content_type()` function.
+
+For example, the 'chatty' format above returns just simple text. So the
+content type should be set up as:
+
+``` python
+from nominatim_api.server.content_types import CONTENT_TEXT
+
+dispatch.set_content_type('chatty', CONTENT_TEXT)
+```
+
+The `content_types` module used above provides constants for the most
+frequent content types. You can set the content type to an arbitrary string,
+if the content type you need is not available.
+
+## Formatting error messages
+
+Any exception thrown during processing of a request is given to
+a special error formatting function. It takes the requested content type,
+the status code and the error message. It should return the error message
+in a form appropriate for the given content type.
+
+You can overwrite the default formatting function with the decorator
+`error_format_func`:
+
+``` python
+import nominatim_api.server.content_types as ct
+
+@dispatch.error_format_func
+def _format_error(content_type: str, msg: str, status: int) -> str:
+ if content_type == ct.CONTENT_XML:
+ return f"""<?xml version="1.0" encoding="UTF-8" ?>
+ <message>{msg}</message>
+ """
+ if content_type == ct.CONTENT_JSON:
+ return f'"{msg}"'
+
+ return f"ERROR: {msg}"
+```
+
+
+## Debugging custom formatters
+
+The easiest way to try out your custom formatter is by using the Nominatim
+CLI commands. Custom formats can be chosen with the `--format` parameter:
+
+```
+me@machine:planet-project$ nominatim status --format chatty
+2024-08-16 19:54:00: Using project directory: /home/nominatim/planet-project
+Good news! The server is running just fine.
+me@machine:planet-project$
+```
+
+They will also emit full error messages when there is a problem with the
+code you need to debug.
+
+!!! danger
+ In some cases, when you make an error with your import statement, the
+ CLI will not give you an error but instead tell you, that the API
+ commands are no longer available:
+
+ me@machine: nominatim status
+ usage: nominatim [-h] [--version] {import,freeze,replication,special-phrases,add-data,index,refresh,admin} ...
+ nominatim: error: argument subcommand: invalid choice: 'status'
+
+ This happens because the CLI tool is meant to still work when the
+ nominatim-api package is not installed. Import errors involving
+ `nominatim_api` are interpreted as "package not installed".
+
+ Use the help command to find out which is the offending import that
+ could not be found:
+
+ me@machine: nominatim -h
+ ... [other help text] ...
+ Nominatim API package not found (was looking for module: nominatim_api.xxx).
+
+## Reference
+
+### FormatDispatcher
+
+::: nominatim_api.FormatDispatcher
+ options:
+ heading_level: 6
+ group_by_category: False
+
+### JsonWriter
+
+::: nominatim_api.utils.json_writer.JsonWriter
+ options:
+ heading_level: 6
+ group_by_category: False
+
+### Options for different result types
+
+This section lists the options that may be handed in with the different result
+types in the v1 version of the Nominatim API.
+
+#### StatusResult
+
+_None._
+
+#### DetailedResult
+
+| Option | Description |
+|-----------------|-------------|
+| locales | [Locale](../library/Result-Handling.md#locale) object for the requested language(s) |
+| group_hierarchy | Setting of [group_hierarchy](../api/Details.md#output-details) parameter |
+| icon_base_url | (optional) URL pointing to icons as set in [NOMINATIM_MAPICON_URL](Settings.md#nominatim_mapicon_url) |
+
+#### SearchResults
+
+| Option | Description |
+|-----------------|-------------|
+| query | Original query string |
+| more_url | URL for requesting additional results for the same query |
+| exclude_place_ids | List of place IDs already returned |
+| viewbox | Setting of [viewbox](../api/Search.md#result-restriction) parameter |
+| extratags | Setting of [extratags](../api/Search.md#output-details) parameter |
+| namedetails | Setting of [namedetails](../api/Search.md#output-details) parameter |
+| addressdetails | Setting of [addressdetails](../api/Search.md#output-details) parameter |
+
+#### ReverseResults
+
+| Option | Description |
+|-----------------|-------------|
+| query | Original query string |
+| extratags | Setting of [extratags](../api/Search.md#output-details) parameter |
+| namedetails | Setting of [namedetails](../api/Search.md#output-details) parameter |
+| addressdetails | Setting of [addressdetails](../api/Search.md#output-details) parameter |
+
+#### RawDataList
+
+_None._
--- /dev/null
+A Nominatim database can be converted into an SQLite database and used as
+a read-only source for geocoding queries. This section describes how to
+create and use an SQLite database.
+
+!!! danger
+ This feature is in an experimental state at the moment. Use at your own
+ risk.
+
+## Installing prerequisites
+
+To use a SQLite database, you need to install:
+
+* SQLite (>= 3.30)
+* Spatialite (> 5.0.0)
+* aiosqlite
+
+On Ubuntu/Debian, you can run:
+
+ sudo apt install sqlite3 libsqlite3-mod-spatialite libspatialite7
+
+Install the aiosqlite Python package in your virtual environment:
+
+ /srv/nominatim-venv/bin/pip install aiosqlite
+
+## Creating a new SQLite database
+
+Nominatim cannot import directly into an SQLite database. Instead you have to
+first create a geocoding database in PostgreSQL by running a
+[regular Nominatim import](../admin/Import.md).
+
+Once this is done, the database can be converted to SQLite with
+
+ nominatim convert -o mydb.sqlite
+
+This will create a database where all geocoding functions are available.
+Depending on what functions you need, the database can be made smaller:
+
+* `--without-reverse` omits indexes only needed for reverse geocoding
+* `--without-search` omits tables and indexes used for forward search
+* `--without-details` leaves out extra information only available in the
+ details API
+
+## Using an SQLite database
+
+Once you have created the database, you can use it by simply pointing the
+database DSN to the SQLite file:
+
+ NOMINATIM_DATABASE_DSN=sqlite:dbname=mydb.sqlite
+
+Please note that SQLite support is only available for the Python frontend. To
+use the test server with an SQLite database, you therefore need to switch
+the frontend engine:
+
+ nominatim serve --engine falcon
+
+You need to install falcon or starlette for this, depending on which engine
+you choose.
+
+The CLI query commands and the library interface already use the new Python
+frontend and therefore work right out of the box.
| **After Changes:** | cannot be changed after import |
Defines the name of the database user that will run search queries. Usually
-this is the user under which the webserver is executed. When running Nominatim
-via php-fpm, you can also define a separate query user. The Postgres user
+this is the user under which the webserver is executed. The Postgres user
needs to be set up before starting the import.
Nominatim grants minimal rights to this user to all tables that are needed
for running geocoding queries.
-#### NOMINATIM_DATABASE_MODULE_PATH
-
-| Summary | |
-| -------------- | --------------------------------------------------- |
-| **Description:** | Directory where to find the PostgreSQL server module |
-| **Format:** | path |
-| **Default:** | _empty_ (use `<project_directory>/module`) |
-| **After Changes:** | run `nominatim refresh --functions` |
-| **Comment:** | Legacy tokenizer only |
-
-Defines the directory in which the PostgreSQL server module `nominatim.so`
-is stored. The directory and module must be accessible by the PostgreSQL
-server.
-
-For information on how to use this setting when working with external databases,
-see [Advanced Installations](../admin/Advanced-Installations.md).
-
-The option is only used by the Legacy tokenizer and ignored otherwise.
-
-
#### NOMINATIM_TOKENIZER
| Summary | |
| -------------- | --------------------------------------------------- |
| **Description:** | Tokenizer used for normalizing and parsing queries and names |
| **Format:** | string |
-| **Default:** | legacy |
+| **Default:** | icu |
| **After Changes:** | cannot be changed after import |
Sets the tokenizer type to use for the import. For more information on
If a relative path is given, then the file is searched first relative to the
project directory and then in the global settings directory.
-#### NOMINATIM_MAX_WORD_FREQUENCY
-
-| Summary | |
-| -------------- | --------------------------------------------------- |
-| **Description:** | Number of occurrences before a word is considered frequent |
-| **Format:** | int |
-| **Default:** | 50000 |
-| **After Changes:** | cannot be changed after import |
-| **Comment:** | Legacy tokenizer only |
-
-The word frequency count is used by the Legacy tokenizer to automatically
-identify _stop words_. Any partial term that occurs more often then what
-is defined in this setting, is effectively ignored during search.
-
#### NOMINATIM_LIMIT_REINDEXING
Currently only affects the initial import of country names and special phrases.
-#### NOMINATIM_TERM_NORMALIZATION
-
-| Summary | |
-| -------------- | --------------------------------------------------- |
-| **Description:** | Rules for normalizing terms for comparisons |
-| **Format:** | string: semicolon-separated list of ICU rules |
-| **Default:** | :: NFD (); [[:Nonspacing Mark:] [:Cf:]] >; :: lower (); [[:Punctuation:][:Space:]]+ > ' '; :: NFC (); |
-| **Comment:** | Legacy tokenizer only |
-
-[Special phrases](Special-Phrases.md) have stricter matching requirements than
-normal search terms. They must appear exactly in the query after this term
-normalization has been applied.
-
-Only has an effect on the Legacy tokenizer. For the ICU tokenizer the rules
-defined in the
-[normalization section](Tokenizers.md#normalization-and-transliteration)
-will be used.
-
-
#### NOMINATIM_USE_US_TIGER_DATA
| Summary | |
NOMINATIM_TABLESPACE_OSM_DATA
: Raw OSM data cache used for import and updates.
-NOMINATIM_TABLESPACE_OSM_DATA
+NOMINATIM_TABLESPACE_OSM_INDEX
: Indexes on the raw OSM data cache.
NOMINATIM_TABLESPACE_PLACE_DATA
used.
-#### NOMINATIM_SEARCH_BATCH_MODE
-
-| Summary | |
-| -------------- | --------------------------------------------------- |
-| **Description:** | Enable a special batch query mode |
-| **Format:** | boolean |
-| **Default:** | no |
-| **After Changes:** | run `nominatim refresh --website` |
-
-This feature is currently undocumented and potentially broken.
-
-
-#### NOMINATIM_SEARCH_NAME_ONLY_THRESHOLD
-
-| Summary | |
-| -------------- | --------------------------------------------------- |
-| **Description:** | Threshold for switching the search index lookup strategy |
-| **Format:** | integer |
-| **Default:** | 500 |
-| **After Changes:** | run `nominatim refresh --website` |
-
-This setting defines the threshold over which a name is no longer considered
-as rare. When searching for places with rare names, only the name is used
-for place lookups. Otherwise the name and any address information is used.
-
-This setting only has an effect after `nominatim refresh --word-counts` has
-been called to compute the word frequencies.
-
-
#### NOMINATIM_LOOKUP_MAX_COUNT
| Summary | |
Setting this parameter to 0 disables polygon output completely.
+
+#### NOMINATIM_SEARCH_WITHIN_COUNTRIES
+
+| Summary | |
+| -------------- | --------------------------------------------------- |
+| **Description:** | Disable search for elements that are not in the country grid |
+| **Format:** | boolean |
+| **Default:** | no |
+| **After Changes:** | run `nominatim refresh --website` |
+
+Enable this to restrict search results to elements within countries.
+
+When enabled and a point cannot be matched against the static grid of
+countries, the result is not returned even if a geometry of a region
+is found. "Unable to geocode" is returned instead.
+
+
+#### NOMINATIM_SERVE_LEGACY_URLS
+
+| Summary | |
+| -------------- | --------------------------------------------------- |
+| **Description:** | Enable serving via URLs with a .php suffix |
+| **Format:** | boolean |
+| **Default:** | yes |
+| **Comment:** | Python frontend only |
+
+When enabled, endpoints are reachable as `/<name>` as well as `/<name>.php`.
+This can be useful when you want to be backwards-compatible with previous
+versions of Nominatim.
+
+
+#### NOMINATIM_API_POOL_SIZE
+
+| Summary | |
+| -------------- | --------------------------------------------------- |
+| **Description:** | Number of parallel database connections per worker |
+| **Format:** | number |
+| **Default:** | 10 |
+| **Comment:** | Python frontend only |
+
+Sets the maximum number of database connections available for a single instance
+of Nominatim. When configuring the maximum number of connections that your
+PostgreSQL database can handle, you need at least
+`NOMINATIM_API_POOL_SIZE` * `<number of configured workers>` connections.
+For configuring the number of workers, refer to the section about
+[Deploying the Python frontend](../admin/Deployment-Python.md).
+
+#### NOMINATIM_QUERY_TIMEOUT
+
+| Summary | |
+| -------------- | --------------------------------------------------- |
+| **Description:** | Timeout for SQL queries to the database |
+| **Format:** | number (seconds) |
+| **Default:** | 10 |
+| **Comment:** | Python frontend only |
+
+When this timeout is set, then all SQL queries that run longer than the
+specified number of seconds will be cancelled and the user receives a
+timeout exception. Users of the API see a 503 HTTP error.
+
+The timeout does not apply when using the
+[low-level DB access](../library/Low-Level-DB-Access.md)
+of the library. A timeout can be manually set, if required.
+
+
+#### NOMINATIM_REQUEST_TIMEOUT
+
+| Summary | |
+| -------------- | --------------------------------------------------- |
+| **Description:** | Timeout for search queries |
+| **Format:** | number (seconds) |
+| **Default:** | 60 |
+| **Comment:** | Python frontend only |
+
+When this timeout is set, a search query will finish sending queries
+to the database after the timeout has passed and immediately return the
+results gathered so far.
+
+Note that under high load you may observe that users receive different results
+than usual without seeing an error. This may cause some confusion.
+
### Logging Settings
#### NOMINATIM_LOG_DB
<request time> <execution time in s> <number of results> <type> "<query string>"
Request time is the time when the request was started. The execution time is
-given in ms and corresponds to the time the query took executing in PHP.
+given in seconds and includes the entire time the query was queued and executed
+in the frontend.
type contains the name of the endpoint used.
Can be used as the same time as NOMINATIM_LOG_DB.
+
+#### NOMINATIM_DEBUG_SQL
+
+| Summary | |
+| -------------- | --------------------------------------------------- |
+| **Description:** | Enable printing of raw SQL by SQLAlchemy |
+| **Format:** | boolean |
+| **Default:** | no |
+| **Comment:** | **For developers only.** |
+
+This setting enables
+[SQL debugging](https://docs.sqlalchemy.org/en/20/core/engines.html#dbengine-logging)
+by SQLAlchemy. This can be helpful when debugging some bugs with internal
+query handling. It should only be used together with the CLI query functions.
+Enabling it for server mode may have unintended consequences. Use the `debug`
+parameter instead, which prints information on how the search is executed
+including SQL statements.
## Importing custom special phrases
-But, it is also possible to import some phrases from a csv file.
-To do so, you have access to the following command:
+Special phrases may also be imported from any custom CSV file. The file needs
+to have a header line, use comma as delimiter and define the following
+columns:
+
+ * **phrase**: the keyword to look for
+ * **class**: key of the main tag of the place to find
+   (see [Import styles](Import-Styles.md#how-processing-works))
+ * **type**: value of the main tag
+ * **operator**: type of special phrase, may be one of:
+ * *in*: place is within the place defined by the search term (e.g. "_Hotels in_ Berlin")
+ * *near*: place is near the place defined by the search term (e.g. "_bus stops near_ Big Ben")
+ * *named*: special phrase is a classifier (e.g. "_hotel_ California")
+ * *-*: unspecified, can be any of the above
+
+If the file contains any other columns, then they are silently ignored.
+
+To import the CSV file, use the following command:
```sh
nominatim special-phrases --import-from-csv <csv file>
```
Note that the two previous import commands will update the phrases from your database.
-This means that if you import some phrases from a csv file, only the phrases
-present in the csv file will be kept into the database. All other phrases will
+This means that if you import some phrases from a CSV file, only the phrases
+present in the CSV file will be kept in the database. All other phrases will
be removed.
If you want to only add new phrases and not update the other ones you can add
nominatim special-phrases --import-from-csv <csv file> --no-replace
```
-This will add the phrases present in the csv file into the database without
+This will add the phrases present in the CSV file into the database without
removing the other ones.
nominatim add-data --tiger-data tiger-nominatim-preprocessed-latest.csv.tar.gz
- 3. Enable use of the Tiger data in your `.env` by adding:
+ 3. Enable use of the Tiger data in your existing `.env` file by adding:
echo NOMINATIM_USE_US_TIGER_DATA=yes >> .env
4. Apply the new settings:
- nominatim refresh --functions
+ nominatim refresh --functions --website
See the [TIGER-data project](https://github.com/osm-search/TIGER-data) for more
to OSM objects and the terms of an incoming query in order to make sure, they
can be matched appropriately.
-Nominatim offers different tokenizer modules, which behave differently and have
-different configuration options. This sections describes the tokenizers and how
-they can be configured.
+Nominatim currently offers only one tokenizer module, the ICU tokenizer. This section
+describes the tokenizer and how it can be configured.
!!! important
- The use of a tokenizer is tied to a database installation. You need to choose
+ The selection of tokenizer is tied to a database installation. You need to choose
and configure the tokenizer before starting the initial import. Once the import
is done, you cannot switch to another tokenizer anymore. Reconfiguring the
chosen tokenizer is very limited as well. See the comments in each tokenizer
section.
-## Legacy tokenizer
-
-The legacy tokenizer implements the analysis algorithms of older Nominatim
-versions. It uses a special Postgresql module to normalize names and queries.
-This tokenizer is automatically installed and used when upgrading an older
-database. It should not be used for new installations anymore.
-
-### Compiling the PostgreSQL module
-
-The tokeinzer needs a special C module for PostgreSQL which is not compiled
-by default. If you need the legacy tokenizer, compile Nominatim as follows:
-
-```
-mkdir build
-cd build
-cmake -DBUILD_MODULE=on
-make
-```
-
-### Enabling the tokenizer
-
-To enable the tokenizer add the following line to your project configuration:
-
-```
-NOMINATIM_TOKENIZER=legacy
-```
-
-The Postgresql module for the tokenizer is available in the `module` directory
-and also installed with the remainder of the software under
-`lib/nominatim/module/nominatim.so`. You can specify a custom location for
-the module with
-
-```
-NOMINATIM_DATABASE_MODULE_PATH=<path to directory where nominatim.so resides>
-```
-
-This is in particular useful when the database runs on a different server.
-See [Advanced installations](../admin/Advanced-Installations.md#importing-nominatim-to-an-external-postgresql-database) for details.
-
-There are no other configuration options for the legacy tokenizer. All
-normalization functions are hard-coded.
-
## ICU tokenizer
The ICU tokenizer uses the [ICU library](http://site.icu-project.org/) to
See the [Token analysis](#token-analysis) section below for more
information.
-During query time, only normalization and transliteration are relevant.
-An incoming query is first split into name chunks (this usually means splitting
-the string at the commas) and the each part is normalised and transliterated.
-The result is used to look up places in the search index.
+During query time, the tokenizer is responsible for processing incoming
+queries. This happens in two stages:
+
+1. During **query preprocessing** the incoming text is split into name
+ chunks and normalised. This usually means applying the same normalisation
+ as during the import process but may involve other processing like,
+ for example, word break detection.
+2. The **token analysis** step breaks down the query parts into tokens,
+ looks them up in the database and assigns them possible functions and
+ probabilities.
+
+Query processing can be further customized while the rest of the analysis
+is hard-coded.
### Configuration
Here is an example configuration file:
``` yaml
+query-preprocessing:
+ - normalize
normalization:
- ":: lower ()"
- - "ß > 'ss'" # German szet is unimbigiously equal to double ss
+ - "ß > 'ss'" # German szet is unambiguously equal to double ss
transliteration:
- !include /etc/nominatim/icu-rules/extended-unicode-to-asccii.yaml
- ":: Ascii ()"
The configuration file contains four sections:
`normalization`, `transliteration`, `sanitizers` and `token-analysis`.
+#### Query preprocessing
+
+The section for `query-preprocessing` defines an ordered list of functions
+that are applied to the query before the token analysis.
+
+The following is a list of preprocessors that are shipped with Nominatim.
+
+##### normalize
+
+::: nominatim_api.query_preprocessing.normalize
+ options:
+ members: False
+ heading_level: 6
+ docstring_section_style: spacy
+
+
#### Normalization and Transliteration
The normalization and transliteration sections each define a set of
ICU rules that are applied to the names.
-The **normalisation** rules are applied after sanitation. They should remove
+The **normalization** rules are applied after sanitation. They should remove
any information that is not relevant for search at all. Usual rules to be
applied here are: lower-casing, removing of special characters, cleanup of
spaces.
##### split-name-list
-::: nominatim.tokenizer.sanitizers.split_name_list
- selection:
+::: nominatim_db.tokenizer.sanitizers.split_name_list
+ options:
members: False
- rendering:
heading_level: 6
+ docstring_section_style: spacy
##### strip-brace-terms
-::: nominatim.tokenizer.sanitizers.strip_brace_terms
- selection:
+::: nominatim_db.tokenizer.sanitizers.strip_brace_terms
+ options:
members: False
- rendering:
heading_level: 6
+ docstring_section_style: spacy
##### tag-analyzer-by-language
-::: nominatim.tokenizer.sanitizers.tag_analyzer_by_language
- selection:
+::: nominatim_db.tokenizer.sanitizers.tag_analyzer_by_language
+ options:
members: False
- rendering:
heading_level: 6
+ docstring_section_style: spacy
##### clean-housenumbers
-::: nominatim.tokenizer.sanitizers.clean_housenumbers
- selection:
+::: nominatim_db.tokenizer.sanitizers.clean_housenumbers
+ options:
members: False
- rendering:
heading_level: 6
+ docstring_section_style: spacy
##### clean-postcodes
-::: nominatim.tokenizer.sanitizers.clean_postcodes
- selection:
+::: nominatim_db.tokenizer.sanitizers.clean_postcodes
+ options:
+ members: False
+ heading_level: 6
+ docstring_section_style: spacy
+
+##### clean-tiger-tags
+
+::: nominatim_db.tokenizer.sanitizers.clean_tiger_tags
+ options:
+ members: False
+ heading_level: 6
+ docstring_section_style: spacy
+
+##### delete-tags
+
+::: nominatim_db.tokenizer.sanitizers.delete_tags
+ options:
members: False
- rendering:
heading_level: 6
+ docstring_section_style: spacy
+
+##### tag-japanese
+::: nominatim_db.tokenizer.sanitizers.tag_japanese
+ options:
+ members: False
+ heading_level: 6
+ docstring_section_style: spacy
#### Token Analysis
##### Postcode token analyzer
The analyzer `postcodes` is pupose-made to analyze postcodes. It supports
-a 'lookup' varaint of the token, which produces variants with optional
+a 'lookup' variant of the token, which produces variants with optional
spaces. Use together with the clean-postcodes sanitizer.
The analyzer cannot be customized.
and how to run tests.
!!! Important
- This guide assumes that you develop under the latest version of Ubuntu. You
- can of course also use your favourite distribution. You just might have to
- adapt the commands below slightly, in particular the commands for installing
- additional software.
+ This guide assumes you develop under the latest version of Debian/Ubuntu.
+ You can of course also use your favourite distribution. You just might have
+ to adapt the commands below slightly, in particular the commands for
+ installing additional software.
## Installing Nominatim
The first step is to install Nominatim itself. Please follow the installation
instructions in the [Admin section](../admin/Installation.md). You don't need
-to set up a webserver for development, the webserver that is included with PHP
-is sufficient.
+to set up a webserver for development, the webserver that can be started
+via `nominatim serve` is sufficient.
-If you want to run Nominatim in a VM via Vagrant, use the default `ubuntu` setup.
+If you want to run Nominatim in a VM via Vagrant, use the default `ubuntu24` setup.
Vagrant's libvirt provider runs out-of-the-box under Ubuntu. You also need to
install an NFS daemon to enable directory sharing between host and guest. The
following packages should get you started:
## Prerequisites for testing and documentation
The Nominatim test suite consists of behavioural tests (using behave) and
-unit tests (using PHPUnit for PHP code and pytest for Python code).
-It has the following additional requirements:
+unit tests (using pytest). It has the following additional requirements:
* [behave test framework](https://behave.readthedocs.io) >= 1.2.6
-* [phpunit](https://phpunit.de) (9.5 is known to work)
-* [PHP CodeSniffer](https://github.com/squizlabs/PHP_CodeSniffer)
-* [Pylint](https://pylint.org/) (CI always runs the latest version from pip)
+* [flake8](https://flake8.pycqa.org/en/stable/) (CI always runs the latest version from pip)
* [mypy](http://mypy-lang.org/) (plus typing information for external libs)
* [Python Typing Extensions](https://github.com/python/typing_extensions) (for Python < 3.9)
* [pytest](https://pytest.org)
+* [pytest-asyncio](https://pytest-asyncio.readthedocs.io)
+
+For testing the Python search frontend, you need to install extra dependencies
+depending on your choice of webserver framework:
+
+* [httpx](https://www.python-httpx.org/) (Starlette only)
+* [asgi-lifespan](https://github.com/florimondmanca/asgi-lifespan) (Starlette only)
The documentation is built with mkdocs:
* [mkdocs](https://www.mkdocs.org/) >= 1.1.2
-* [mkdocstrings](https://mkdocstrings.github.io/) >= 0.16
-* [mkdocstrings-python-legacy](https://mkdocstrings.github.io/python-legacy/)
+* [mkdocstrings](https://mkdocstrings.github.io/) >= 0.25
+* [mkdocs-material](https://squidfunk.github.io/mkdocs-material/)
+* [mkdocs-gen-files](https://oprypin.github.io/mkdocs-gen-files/)
-### Installing prerequisites on Ubuntu/Debian
+Please be aware that tests always run against the globally installed
+osm2pgsql, so you need to have this set up. If you want to test against
+the vendored version of osm2pgsql, you need to set the PATH accordingly.
-Some of the Python packages require the newest version which is not yet
-available with the current distributions. Therefore it is recommended to
-install pip to get the newest versions.
+### Installing prerequisites on Ubuntu/Debian
-To install all necessary packages run:
+The Python tools should always be run with the most recent version.
+The easiest way to handle these Python dependencies is to run your
+development from within a virtual environment.
```sh
-sudo apt install php-cgi phpunit php-codesniffer \
- python3-pip python3-setuptools python3-dev
-
-pip3 install --user behave mkdocs mkdocstrings pytest pylint \
- mypy types-PyYAML types-jinja2 types-psycopg2 types-psutil
+sudo apt install libsqlite3-mod-spatialite osm2pgsql \
+ postgresql-postgis postgresql-postgis-scripts \
+ pkg-config libicu-dev virtualenv
```
-The `mkdocs` executable will be located in `.local/bin`. You may have to add
-this directory to your path, for example by running:
+To set up the virtual environment with all necessary packages run:
-```
-echo 'export PATH=~/.local/bin:$PATH' > ~/.profile
+```sh
+virtualenv ~/nominatim-dev-venv
+~/nominatim-dev-venv/bin/pip install\
+ psutil psycopg[binary] PyICU SQLAlchemy \
+ python-dotenv jinja2 pyYAML datrie behave \
+ mkdocs mkdocstrings mkdocs-gen-files pytest pytest-asyncio flake8 \
+ types-jinja2 types-markupsafe types-psutil types-psycopg2 \
+ types-pygments types-pyyaml types-requests types-ujson \
+ types-urllib3 typing-extensions unicorn falcon starlette \
+ uvicorn mypy osmium aiosqlite
```
-If your distribution does not have PHPUnit 7.3+, you can install it (as well
-as CodeSniffer) via composer:
+Now enter the virtual environment whenever you want to develop:
+```sh
+. ~/nominatim-dev-venv/bin/activate
```
-sudo apt-get install composer
-composer global require "squizlabs/php_codesniffer=*"
-composer global require "phpunit/phpunit=8.*"
-```
-The binaries are found in `.config/composer/vendor/bin`. You need to add this
-to your PATH as well:
+### Running Nominatim during development
+
+The source code for Nominatim can be found in the `src` directory and can
+be run in-place. The source directory features a special script
+`nominatim-cli.py` which does the same as the installed 'nominatim' binary
+but executes against the code in the source tree. For example:
```
-echo 'export PATH=~/.config/composer/vendor/bin:$PATH' > ~/.profile
+me@machine:~$ cd Nominatim
+me@machine:~Nominatim$ ./nominatim-cli.py --version
+Nominatim version 4.4.99-1
```
+Make sure you have activated the virtual environment holding all
+necessary dependencies.
## Executing Tests
All tests are located in the `/test` directory.
-To run all tests just go to the build directory and run make:
+To run all tests, run make from the source root:
```sh
-cd build
-make test
+make tests
```
+There are also make targets for executing only parts of the test suite.
+For example to run linting only use:
+
+```sh
+make lint
+```
+
+The possible testing targets are: mypy, lint, pytest, bdd.
+
For more information about the structure of the tests and how to change and
extend the test suite, see the [Testing chapter](Testing.md).
framework. The master branch is automatically deployed every night on
[https://nominatim.org/release-docs/develop/](https://nominatim.org/release-docs/develop/)
-To build the documentation, go to the build directory and run
+To build the documentation run
```
make doc
-INFO - Cleaning site directory
-INFO - Building documentation to directory: /home/vagrant/build/site-html
```
-This runs `mkdocs build` plus extra transformation of some files and adds
-symlinks (see `CMakeLists.txt` for the exact steps).
-Now you can start webserver for local testing
+For local testing, you can start a webserver:
```
build> make serve-doc
to your host:
```
-build> PYTHONPATH=$SRCDIR mkdocs serve --dev-addr 0.0.0.0:8088
+build> mkdocs serve --dev-addr 0.0.0.0:8088
[server:296] Serving on http://0.0.0.0:8088
[handlers:62] Start watching changes
```
implemented, it is not guaranteed to be stable at the moment.
-## Using non-standard sanitizers and token analyzers
+## Using non-standard modules
-Sanitizer names (in the `step` property) and token analysis names (in the
-`analyzer`) may refer to externally supplied modules. There are two ways
+Sanitizer names (in the `step` property), token analysis names (in the
+`analyzer`) and query preprocessor names (in the `step` property)
+may refer to externally supplied modules. There are two ways
to include external modules: through a library or from the project directory.
To include a module from a library, use the absolute import path as name and
somewhere in your project directory and then use the relative path to the
file. Include the whole name of the file including the `.py` ending.
+## Custom query preprocessors
+
+A query preprocessor must export a single factory function `create` with
+the following signature:
+
+``` python
+create(config: QueryConfig) -> Callable[[list[Phrase]], list[Phrase]]
+```
+
+The function receives the custom configuration for the preprocessor and
+returns a callable (function or class) with the actual preprocessing
+code. When a query comes in, then the callable gets a list of phrases
+and needs to return the transformed list of phrases. The list and phrases
+may be changed in place or a completely new list may be generated.
+
+The `QueryConfig` is a simple dictionary which contains all configuration
+options given in the yaml configuration of the ICU tokenizer. It is up to
+the function to interpret the values.
+
+A `nominatim_api.search.Phrase` describes a part of the query that contains one or more independent
+search terms. Breaking a query into phrases helps reduce the number of
+possible tokens Nominatim has to take into account. However a phrase break
+is definitive: a multi-term search word cannot go over a phrase break.
+A Phrase object has two fields:
+
+ * `ptype` further refines the type of phrase (see list below)
+ * `text` contains the query text for the phrase
+
+The order of phrases matters to Nominatim when doing further processing.
+Thus, while you may split or join phrases, you should not reorder them
+unless you really know what you are doing.
+
+Phrase types (`nominatim_api.search.PhraseType`) can further help narrowing
+down how the tokens in the phrase are interpreted. The following phrase types
+are known:
+
+::: nominatim_api.search.PhraseType
+ options:
+ heading_level: 6
+
+
## Custom sanitizer modules
A sanitizer module must export a single factory function `create` with the
### Sanitizer configuration
-::: nominatim.tokenizer.sanitizers.config.SanitizerConfig
- rendering:
- show_source: no
+::: nominatim_db.tokenizer.sanitizers.config.SanitizerConfig
+ options:
heading_level: 6
### The main filter function of the sanitizer
The filter function receives a single object of type `ProcessInfo`
which has with three members:
- * `place`: read-only information about the place being processed.
+ * `place: PlaceInfo`: read-only information about the place being processed.
See PlaceInfo below.
- * `names`: The current list of names for the place. Each name is a
- PlaceName object.
- * `address`: The current list of address names for the place. Each name
- is a PlaceName object.
+ * `names: List[PlaceName]`: The current list of names for the place.
+ * `address: List[PlaceName]`: The current list of address names for the place.
While the `place` member is provided for information only, the `names` and
`address` lists are meant to be manipulated by the sanitizer. It may add and
#### PlaceInfo - information about the place
-::: nominatim.data.place_info.PlaceInfo
- rendering:
- show_source: no
+::: nominatim_db.data.place_info.PlaceInfo
+ options:
heading_level: 6
#### PlaceName - extended naming information
-::: nominatim.data.place_name.PlaceName
- rendering:
- show_source: no
+::: nominatim_db.data.place_name.PlaceName
+ options:
heading_level: 6
The following sanitizer removes the directional prefixes from street names
in the US:
-``` python
-import re
-
-def _filter_function(obj):
- if obj.place.country_code == 'us' \
- and obj.place.rank_address >= 26 and obj.place.rank_address <= 27:
- for name in obj.names:
- name.name = re.sub(r'^(north|south|west|east) ',
- '',
- name.name,
- flags=re.IGNORECASE)
-
-def create(config):
- return _filter_function
-```
+!!! example
+ ``` python
+ import re
+
+ def _filter_function(obj):
+ if obj.place.country_code == 'us' \
+ and obj.place.rank_address >= 26 and obj.place.rank_address <= 27:
+ for name in obj.names:
+ name.name = re.sub(r'^(north|south|west|east) ',
+ '',
+ name.name,
+ flags=re.IGNORECASE)
+
+ def create(config):
+ return _filter_function
+ ```
This is the most simple form of a sanitizer module. If defines a single
filter function and implements the required `create()` function by returning
!!! warning
This example is just a simplified show case on how to create a sanitizer.
- It is not really read for real-world use: while the sanitizer would
- correcly transform `West 5th Street` into `5th Street`. it would also
+ It is not really meant for real-world use: while the sanitizer would
+    correctly transform `West 5th Street` into `5th Street`, it would also
shorten a simple `North Street` to `Street`.
For more sanitizer examples, have a look at the sanitizers provided by Nominatim.
They can be found in the directory
-[`nominatim/tokenizer/sanitizers`](https://github.com/osm-search/Nominatim/tree/master/nominatim/tokenizer/sanitizers).
+[`src/nominatim_db/tokenizer/sanitizers`](https://github.com/osm-search/Nominatim/tree/master/src/nominatim_db/tokenizer/sanitizers).
## Custom token analysis module
-::: nominatim.tokenizer.token_analysis.base.AnalysisModule
- rendering:
- show_source: no
+::: nominatim_db.tokenizer.token_analysis.base.AnalysisModule
+ options:
heading_level: 6
-::: nominatim.tokenizer.token_analysis.base.Analyzer
- rendering:
- show_source: no
+::: nominatim_db.tokenizer.token_analysis.base.Analyzer
+ options:
heading_level: 6
### Example: Creating acronym variants for long names
There are two kind of tests in this test suite. There are functional tests
which test the API interface using a BDD test framework and there are unit
-tests for specific PHP functions.
+tests for the Python code.
-This test directory is sturctured as follows:
+This test directory is structured as follows:
```
-+- bdd Functional API tests
| +- db Tests for internal data processing on import and update
| +- api Tests for API endpoints (search, reverse, etc.)
|
- +- php PHP unit tests
+- python Python unit tests
+- testdb Base data for generating API test database
+- testdata Additional test data used by unit tests
```
-## PHP Unit Tests (`test/php`)
-
-Unit tests for PHP code can be found in the `php/` directory. They test selected
-PHP functions. Very low coverage.
-
-To execute the test suite run
-
- cd test/php
- UNIT_TEST_DSN='pgsql:dbname=nominatim_unit_tests' phpunit ../
-
-It will read phpunit.xml which points to the library, test path, bootstrap
-strip and sets other parameters.
-
-It will use (and destroy) a local database 'nominatim_unit_tests'. You can set
-a different connection string with e.g. UNIT_TEST_DSN='pgsql:dbname=foo_unit_tests'.
-
## Python Unit Tests (`test/python`)
Unit tests for Python code can be found in the `python/` directory. The goal is
The tests can be configured with a set of environment variables (`behave -D key=val`):
- * `BUILDDIR` - build directory of Nominatim installation to test
* `TEMPLATE_DB` - name of template database used as a skeleton for
the test databases (db tests)
* `TEST_DB` - name of test database (db tests)
* `API_TEST_DB` - name of the database containing the API test data (api tests)
* `API_TEST_FILE` - OSM file to be imported into the API test database (api tests)
+ * `API_ENGINE` - webframe to use for running search queries, same values as
+ `nominatim serve --engine` parameter
* `DB_HOST` - (optional) hostname of database host
* `DB_PORT` - (optional) port of database on host
* `DB_USER` - (optional) username of database login
* `DB_PASS` - (optional) password for database login
- * `SERVER_MODULE_PATH` - (optional) path on the Postgres server to Nominatim
- module shared library file
* `REMOVE_TEMPLATE` - if true, the template and API database will not be reused
during the next run. Reusing the base templates speeds
up tests considerably but might lead to outdated errors
* extract of Autauga country, Alabama, US (for tests against Tiger data)
* additional data from `test/testdb/additional_api_test.data.osm`
-API tests should only be testing the functionality of the website PHP code.
+API tests should only be testing the functionality of the website frontend code.
Most tests should be formulated as BDD DB creation tests (see below) instead.
-#### Code Coverage
-
-The API tests also support code coverage tests. You need to install
-[PHP_CodeCoverage](https://github.com/sebastianbergmann/php-code-coverage).
-On Debian/Ubuntu run:
-
- apt-get install php-codecoverage php-xdebug
-
-Then run the API tests as follows:
-
- behave api -DPHPCOV=<coverage output dir>
-
-The output directory must be an absolute path. To generate reports, you can use
-the [phpcov](https://github.com/sebastianbergmann/phpcov) tool:
-
- phpcov merge --html=<report output dir> <coverage output dir>
-
### DB Creation Tests (`test/bdd/db`)
These tests check the import and update of the Nominatim database. They do not
These tests check that data is imported correctly into the place table. They
use the same template database as the DB Creation tests, so the same remarks apply.
-
-Note that most testing of the gazetteer output of osm2pgsql is done in the tests
-of osm2pgsql itself. The BDD tests are just there to ensure compatibility of
-the osm2pgsql and Nominatim code.
### Directory Structure
-Nominatim expects two files for a tokenizer:
+Nominatim expects two files containing the Python part of the implementation:
-* `nominatim/tokenizer/<NAME>_tokenizer.py` containing the Python part of the
- implementation
-* `lib-php/tokenizer/<NAME>_tokenizer.php` with the PHP part of the
- implementation
+ * `src/nominatim_db/tokenizer/<NAME>_tokenizer.py` contains the tokenizer
+ code used during import and
+ * `src/nominatim_api/search/<NAME>_tokenizer.py` has the code used during
+ query time.
-where `<NAME>` is a unique name for the tokenizer consisting of only lower-case
+`<NAME>` is a unique name for the tokenizer consisting of only lower-case
letters, digits and underscore. A tokenizer also needs to install some SQL
functions. By convention, these should be placed in `lib-sql/tokenizer`.
If the tokenizer has a default configuration file, this should be saved in
-the `settings/<NAME>_tokenizer.<SUFFIX>`.
+`settings/<NAME>_tokenizer.<SUFFIX>`.
### Configuration and Persistence
time. If they are needed for the runtime then they must be saved into the
`nominatim_properties` table and later loaded from there.
-### The Python module
+### The Python modules
-The Python module is expect to export a single factory function:
+#### `src/nominatim_db/tokenizer/`
+
+The import Python module is expected to export a single factory function:
```python
def create(dsn: str, data_dir: Path) -> AbstractTokenizer
database-specific data. The function must return the instance of the tokenizer
class as defined below.
+#### `src/nominatim_api/search/`
+
+The query-time Python module must also export a factory function:
+
+``` python
+def create_query_analyzer(conn: SearchConnection) -> AbstractQueryAnalyzer
+```
+
+The `conn` parameter contains the current search connection. See the
+[library documentation](../library/Low-Level-DB-Access.md#searchconnection-class)
+for details on the class. The function must return the instance of the tokenizer
+class as defined below.
+
+
### Python Tokenizer Class
-All tokenizers must inherit from `nominatim.tokenizer.base.AbstractTokenizer`
+All tokenizers must inherit from `nominatim_db.tokenizer.base.AbstractTokenizer`
and implement the abstract functions defined there.
-::: nominatim.tokenizer.base.AbstractTokenizer
- rendering:
- heading_level: 4
+::: nominatim_db.tokenizer.base.AbstractTokenizer
+ options:
+ heading_level: 6
### Python Analyzer Class
-::: nominatim.tokenizer.base.AbstractAnalyzer
- rendering:
- heading_level: 4
+::: nominatim_db.tokenizer.base.AbstractAnalyzer
+ options:
+ heading_level: 6
+
+
+### Python Query Analyzer Class
+
+::: nominatim_api.search.query_analyzer_factory.AbstractQueryAnalyzer
+ options:
+ heading_level: 6
### PL/pgSQL Functions
be listed with a semicolon as delimiter. Must be NULL when the place has no
house numbers.
+```sql
+FUNCTION token_is_street_address(info JSONB) RETURNS BOOLEAN
+```
+
+Return true if this is an object that should be parented against a street.
+Only relevant for objects with address rank 30.
+
+```sql
+FUNCTION token_has_addr_street(info JSONB) RETURNS BOOLEAN
+```
+
+Return true if there are street names to match against for finding the
+parent of the object.
+
+
+```sql
+FUNCTION token_has_addr_place(info JSONB) RETURNS BOOLEAN
+```
+
+Return true if there are place names to match against for finding the
+parent of the object.
+
```sql
FUNCTION token_matches_street(info JSONB, street_tokens INTEGER[]) RETURNS BOOLEAN
```
replaces the content of the `token_info` column with the returned value before
the trigger stores the information in the database. May return NULL if no
information should be stored permanently.
-
-### PHP Tokenizer class
-
-The PHP tokenizer class is instantiated once per request and responsible for
-analyzing the incoming query. Multiple requests may be in flight in
-parallel.
-
-The class is expected to be found under the
-name of `\Nominatim\Tokenizer`. To find the class the PHP code includes the file
-`tokenizer/tokenizer.php` in the project directory. This file must be created
-when the tokenizer is first set up on import. The file should initialize any
-configuration variables by setting PHP constants and then require the file
-with the actual implementation of the tokenizer.
-
-The tokenizer class must implement the following functions:
-
-```php
-public function __construct(object &$oDB)
-```
-
-The constructor of the class receives a database connection that can be used
-to query persistent data in the database.
-
-```php
-public function checkStatus()
-```
-
-Check that the tokenizer can access its persistent data structures. If there
-is an issue, throw an `\Exception`.
-
-```php
-public function normalizeString(string $sTerm) : string
-```
-
-Normalize string to a form to be used for comparisons when reordering results.
-Nominatim reweighs results how well the final display string matches the actual
-query. Before comparing result and query, names and query are normalised against
-this function. The tokenizer can thus remove all properties that should not be
-taken into account for reweighing, e.g. special characters or case.
-
-```php
-public function tokensForSpecialTerm(string $sTerm) : array
-```
-
-Return the list of special term tokens that match the given term.
-
-```php
-public function extractTokensFromPhrases(array &$aPhrases) : TokenList
-```
-
-Parse the given phrases, splitting them into word lists and retrieve the
-matching tokens.
-
-The phrase array may take on two forms. In unstructured searches (using `q=`
-parameter) the search query is split at the commas and the elements are
-put into a sorted list. For structured searches the phrase array is an
-associative array where the key designates the type of the term (street, city,
-county etc.) The tokenizer may ignore the phrase type at this stage in parsing.
-Matching phrase type and appropriate search token type will be done later
-when the SearchDescription is built.
-
-For each phrase in the list of phrases, the function must analyse the phrase
-string and then call `setWordSets()` to communicate the result of the analysis.
-A word set is a list of strings, where each string refers to a search token.
-A phrase may have multiple interpretations. Therefore a list of word sets is
-usually attached to the phrase. The search tokens themselves are returned
-by the function in an associative array, where the key corresponds to the
-strings given in the word sets. The value is a list of search tokens. Thus
-a single string in the list of word sets may refer to multiple search tokens.
-
The __search frontend__ implements the actual API. It takes search
and reverse geocoding queries from the user, looks up the data and
-returns the results in the requested format. This part is written in PHP
-and can be found in the `lib/` and `website/` directories.
+returns the results in the requested format. This part is located in the
+`nominatim-api` package. The source code can be found in `src/nominatim_api`.
**with same name**;
kill
else (no)
- :add addr:place to adress;
+ :add addr:place to address;
:**Use closest place**\n**rank 16 to 25**;
kill
endif
-<?xml version="1.0" encoding="UTF-8" standalone="no"?><svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" contentScriptType="application/ecmascript" contentStyleType="text/css" height="275px" preserveAspectRatio="none" style="width:785px;height:275px;background:#FFFFFF;" version="1.1" viewBox="0 0 785 275" width="785px" zoomAndPan="magnify"><defs><filter height="300%" id="f1b513ppngo123" width="300%" x="-1" y="-1"><feGaussianBlur result="blurOut" stdDeviation="2.0"/><feColorMatrix in="blurOut" result="blurOut2" type="matrix" values="0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 .4 0"/><feOffset dx="4.0" dy="4.0" in="blurOut2" result="blurOut3"/><feBlend in="SourceGraphic" in2="blurOut3" mode="normal"/></filter></defs><g><ellipse cx="379.5" cy="20" fill="#000000" filter="url(#f1b513ppngo123)" rx="10" ry="10" style="stroke:none;stroke-width:1.0;"/><polygon fill="#F8F8F8" filter="url(#f1b513ppngo123)" points="118,50,218,50,230,62,218,74,118,74,106,62,118,50" style="stroke:#383838;stroke-width:1.5;"/><text fill="#000000" font-family="sans-serif" font-size="11" lengthAdjust="spacing" textLength="20" x="172" y="84.2104">yes</text><text fill="#000000" font-family="sans-serif" font-size="11" lengthAdjust="spacing" textLength="100" x="118" y="65.8081">has 'addr:street'?</text><polygon fill="#F8F8F8" filter="url(#f1b513ppngo123)" points="108,105.7104,228,105.7104,240,118.5151,228,131.3198,108,131.3198,96,118.5151,108,105.7104" style="stroke:#383838;stroke-width:1.5;"/><text fill="#000000" font-family="sans-serif" font-size="11" lengthAdjust="spacing" textLength="120" x="108" y="115.9209">street with that name</text><text fill="#000000" font-family="sans-serif" font-size="11" lengthAdjust="spacing" textLength="45" x="111" y="128.7256">nearby?</text><text fill="#000000" font-family="sans-serif" font-size="11" lengthAdjust="spacing" textLength="20" x="76" y="115.9209">yes</text><text fill="#000000" font-family="sans-serif" font-size="11" 
lengthAdjust="spacing" textLength="14" x="240" y="115.9209">no</text><rect fill="#F8F8F8" filter="url(#f1b513ppngo123)" height="47.9375" rx="12.5" ry="12.5" style="stroke:#383838;stroke-width:1.5;" width="150" x="11" y="141.3198"/><text fill="#000000" font-family="sans-serif" font-size="12" font-weight="bold" lengthAdjust="spacing" textLength="130" x="21" y="162.4585">Use closest street</text><text fill="#000000" font-family="sans-serif" font-size="12" lengthAdjust="spacing" textLength="0" x="25" y="176.4272"/><text fill="#000000" font-family="sans-serif" font-size="12" font-weight="bold" lengthAdjust="spacing" textLength="116" x="25" y="176.4272">with same name</text><rect fill="#F8F8F8" filter="url(#f1b513ppngo123)" height="47.9375" rx="12.5" ry="12.5" style="stroke:#383838;stroke-width:1.5;" width="106" x="197" y="141.3198"/><text fill="#000000" font-family="sans-serif" font-size="12" font-weight="bold" lengthAdjust="spacing" textLength="82" x="211" y="162.4585">Use closest</text><text fill="#000000" font-family="sans-serif" font-size="12" font-weight="bold" lengthAdjust="spacing" textLength="44" x="207" y="176.4272">street</text><polygon fill="#F8F8F8" filter="url(#f1b513ppngo123)" points="427.75,50,523.75,50,535.75,62,523.75,74,427.75,74,415.75,62,427.75,50" style="stroke:#383838;stroke-width:1.5;"/><text fill="#000000" font-family="sans-serif" font-size="11" lengthAdjust="spacing" textLength="20" x="479.75" y="84.2104">yes</text><text fill="#000000" font-family="sans-serif" font-size="11" lengthAdjust="spacing" textLength="96" x="427.75" y="65.8081">has 'addr:place'?</text><text fill="#000000" font-family="sans-serif" font-size="11" lengthAdjust="spacing" textLength="56" x="535.75" y="59.4058">otherwise</text><polygon fill="#F8F8F8" filter="url(#f1b513ppngo123)" points="417.75,105.7104,533.75,105.7104,545.75,118.5151,533.75,131.3198,417.75,131.3198,405.75,118.5151,417.75,105.7104" style="stroke:#383838;stroke-width:1.5;"/><text fill="#000000" 
font-family="sans-serif" font-size="11" lengthAdjust="spacing" textLength="116" x="417.75" y="115.9209">place with that name</text><text fill="#000000" font-family="sans-serif" font-size="11" lengthAdjust="spacing" textLength="45" x="420.75" y="128.7256">nearby?</text><text fill="#000000" font-family="sans-serif" font-size="11" lengthAdjust="spacing" textLength="20" x="385.75" y="115.9209">yes</text><text fill="#000000" font-family="sans-serif" font-size="11" lengthAdjust="spacing" textLength="14" x="545.75" y="115.9209">no</text><rect fill="#F8F8F8" filter="url(#f1b513ppngo123)" height="47.9375" rx="12.5" ry="12.5" style="stroke:#383838;stroke-width:1.5;" width="144" x="313" y="141.3198"/><text fill="#000000" font-family="sans-serif" font-size="12" font-weight="bold" lengthAdjust="spacing" textLength="124" x="323" y="162.4585">Use closest place</text><text fill="#000000" font-family="sans-serif" font-size="12" lengthAdjust="spacing" textLength="0" x="327" y="176.4272"/><text fill="#000000" font-family="sans-serif" font-size="12" font-weight="bold" lengthAdjust="spacing" textLength="116" x="327" y="176.4272">with same name</text><rect fill="#F8F8F8" filter="url(#f1b513ppngo123)" height="33.9688" rx="12.5" ry="12.5" style="stroke:#383838;stroke-width:1.5;" width="179" x="477" y="141.3198"/><text fill="#000000" font-family="sans-serif" font-size="12" lengthAdjust="spacing" textLength="159" x="487" y="162.4585">add addr:place to adress</text><rect fill="#F8F8F8" filter="url(#f1b513ppngo123)" height="47.9375" rx="12.5" ry="12.5" style="stroke:#383838;stroke-width:1.5;" width="144" x="494.5" y="210.2886"/><text fill="#000000" font-family="sans-serif" font-size="12" font-weight="bold" lengthAdjust="spacing" textLength="124" x="504.5" y="231.4272">Use closest place</text><text fill="#000000" font-family="sans-serif" font-size="12" font-weight="bold" lengthAdjust="spacing" textLength="91" x="504.5" y="245.396">rank 16 to 25</text><rect fill="#F8F8F8" 
filter="url(#f1b513ppngo123)" height="47.9375" rx="12.5" ry="12.5" style="stroke:#383838;stroke-width:1.5;" width="102" x="666" y="157.5972"/><text fill="#000000" font-family="sans-serif" font-size="12" font-weight="bold" lengthAdjust="spacing" textLength="82" x="676" y="178.7358">Use closest</text><text fill="#000000" font-family="sans-serif" font-size="12" font-weight="bold" lengthAdjust="spacing" textLength="44" x="676" y="192.7046">street</text><line style="stroke:#383838;stroke-width:1.5;" x1="96" x2="86" y1="118.5151" y2="118.5151"/><line style="stroke:#383838;stroke-width:1.5;" x1="86" x2="86" y1="118.5151" y2="141.3198"/><polygon fill="#383838" points="82,131.3198,86,141.3198,90,131.3198,86,135.3198" style="stroke:#383838;stroke-width:1.0;"/><line style="stroke:#383838;stroke-width:1.5;" x1="240" x2="250" y1="118.5151" y2="118.5151"/><line style="stroke:#383838;stroke-width:1.5;" x1="250" x2="250" y1="118.5151" y2="141.3198"/><polygon fill="#383838" points="246,131.3198,250,141.3198,254,131.3198,250,135.3198" style="stroke:#383838;stroke-width:1.0;"/><line style="stroke:#383838;stroke-width:1.5;" x1="566.5" x2="566.5" y1="175.2886" y2="210.2886"/><polygon fill="#383838" points="562.5,200.2886,566.5,210.2886,570.5,200.2886,566.5,204.2886" style="stroke:#383838;stroke-width:1.0;"/><line style="stroke:#383838;stroke-width:1.5;" x1="405.75" x2="385" y1="118.5151" y2="118.5151"/><line style="stroke:#383838;stroke-width:1.5;" x1="385" x2="385" y1="118.5151" y2="141.3198"/><polygon fill="#383838" points="381,131.3198,385,141.3198,389,131.3198,385,135.3198" style="stroke:#383838;stroke-width:1.0;"/><line style="stroke:#383838;stroke-width:1.5;" x1="545.75" x2="566.5" y1="118.5151" y2="118.5151"/><line style="stroke:#383838;stroke-width:1.5;" x1="566.5" x2="566.5" y1="118.5151" y2="141.3198"/><polygon fill="#383838" points="562.5,131.3198,566.5,141.3198,570.5,131.3198,566.5,135.3198" style="stroke:#383838;stroke-width:1.0;"/><line 
style="stroke:#383838;stroke-width:1.5;" x1="168" x2="168" y1="74" y2="105.7104"/><polygon fill="#383838" points="164,95.7104,168,105.7104,172,95.7104,168,99.7104" style="stroke:#383838;stroke-width:1.0;"/><line style="stroke:#383838;stroke-width:1.5;" x1="475.75" x2="475.75" y1="74" y2="105.7104"/><polygon fill="#383838" points="471.75,95.7104,475.75,105.7104,479.75,95.7104,475.75,99.7104" style="stroke:#383838;stroke-width:1.0;"/><line style="stroke:#383838;stroke-width:1.5;" x1="230" x2="415.75" y1="62" y2="62"/><polygon fill="#383838" points="405.75,58,415.75,62,405.75,66,409.75,62" style="stroke:#383838;stroke-width:1.0;"/><line style="stroke:#383838;stroke-width:1.5;" x1="379.5" x2="379.5" y1="30" y2="35"/><line style="stroke:#383838;stroke-width:1.5;" x1="379.5" x2="168" y1="35" y2="35"/><line style="stroke:#383838;stroke-width:1.5;" x1="168" x2="168" y1="35" y2="50"/><polygon fill="#383838" points="164,40,168,50,172,40,168,44" style="stroke:#383838;stroke-width:1.0;"/><line style="stroke:#383838;stroke-width:1.5;" x1="535.75" x2="717" y1="62" y2="62"/><line style="stroke:#383838;stroke-width:1.5;" x1="717" x2="717" y1="62" y2="157.5972"/><polygon fill="#383838" points="713,147.5972,717,157.5972,721,147.5972,717,151.5972" style="stroke:#383838;stroke-width:1.0;"/><!--MD5=[e03d31a5684b671bb715075c57004ccb]
+<?xml version="1.0" encoding="UTF-8" standalone="no"?><svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" contentScriptType="application/ecmascript" contentStyleType="text/css" height="275px" preserveAspectRatio="none" style="width:785px;height:275px;background:#FFFFFF;" version="1.1" viewBox="0 0 785 275" width="785px" zoomAndPan="magnify"><defs><filter height="300%" id="f1b513ppngo123" width="300%" x="-1" y="-1"><feGaussianBlur result="blurOut" stdDeviation="2.0"/><feColorMatrix in="blurOut" result="blurOut2" type="matrix" values="0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 .4 0"/><feOffset dx="4.0" dy="4.0" in="blurOut2" result="blurOut3"/><feBlend in="SourceGraphic" in2="blurOut3" mode="normal"/></filter></defs><g><ellipse cx="379.5" cy="20" fill="#000000" filter="url(#f1b513ppngo123)" rx="10" ry="10" style="stroke:none;stroke-width:1.0;"/><polygon fill="#F8F8F8" filter="url(#f1b513ppngo123)" points="118,50,218,50,230,62,218,74,118,74,106,62,118,50" style="stroke:#383838;stroke-width:1.5;"/><text fill="#000000" font-family="sans-serif" font-size="11" lengthAdjust="spacing" textLength="20" x="172" y="84.2104">yes</text><text fill="#000000" font-family="sans-serif" font-size="11" lengthAdjust="spacing" textLength="100" x="118" y="65.8081">has 'addr:street'?</text><polygon fill="#F8F8F8" filter="url(#f1b513ppngo123)" points="108,105.7104,228,105.7104,240,118.5151,228,131.3198,108,131.3198,96,118.5151,108,105.7104" style="stroke:#383838;stroke-width:1.5;"/><text fill="#000000" font-family="sans-serif" font-size="11" lengthAdjust="spacing" textLength="120" x="108" y="115.9209">street with that name</text><text fill="#000000" font-family="sans-serif" font-size="11" lengthAdjust="spacing" textLength="45" x="111" y="128.7256">nearby?</text><text fill="#000000" font-family="sans-serif" font-size="11" lengthAdjust="spacing" textLength="20" x="76" y="115.9209">yes</text><text fill="#000000" font-family="sans-serif" font-size="11" 
lengthAdjust="spacing" textLength="14" x="240" y="115.9209">no</text><rect fill="#F8F8F8" filter="url(#f1b513ppngo123)" height="47.9375" rx="12.5" ry="12.5" style="stroke:#383838;stroke-width:1.5;" width="150" x="11" y="141.3198"/><text fill="#000000" font-family="sans-serif" font-size="12" font-weight="bold" lengthAdjust="spacing" textLength="130" x="21" y="162.4585">Use closest street</text><text fill="#000000" font-family="sans-serif" font-size="12" lengthAdjust="spacing" textLength="0" x="25" y="176.4272"/><text fill="#000000" font-family="sans-serif" font-size="12" font-weight="bold" lengthAdjust="spacing" textLength="116" x="25" y="176.4272">with same name</text><rect fill="#F8F8F8" filter="url(#f1b513ppngo123)" height="47.9375" rx="12.5" ry="12.5" style="stroke:#383838;stroke-width:1.5;" width="106" x="197" y="141.3198"/><text fill="#000000" font-family="sans-serif" font-size="12" font-weight="bold" lengthAdjust="spacing" textLength="82" x="211" y="162.4585">Use closest</text><text fill="#000000" font-family="sans-serif" font-size="12" font-weight="bold" lengthAdjust="spacing" textLength="44" x="207" y="176.4272">street</text><polygon fill="#F8F8F8" filter="url(#f1b513ppngo123)" points="427.75,50,523.75,50,535.75,62,523.75,74,427.75,74,415.75,62,427.75,50" style="stroke:#383838;stroke-width:1.5;"/><text fill="#000000" font-family="sans-serif" font-size="11" lengthAdjust="spacing" textLength="20" x="479.75" y="84.2104">yes</text><text fill="#000000" font-family="sans-serif" font-size="11" lengthAdjust="spacing" textLength="96" x="427.75" y="65.8081">has 'addr:place'?</text><text fill="#000000" font-family="sans-serif" font-size="11" lengthAdjust="spacing" textLength="56" x="535.75" y="59.4058">otherwise</text><polygon fill="#F8F8F8" filter="url(#f1b513ppngo123)" points="417.75,105.7104,533.75,105.7104,545.75,118.5151,533.75,131.3198,417.75,131.3198,405.75,118.5151,417.75,105.7104" style="stroke:#383838;stroke-width:1.5;"/><text fill="#000000" 
font-family="sans-serif" font-size="11" lengthAdjust="spacing" textLength="116" x="417.75" y="115.9209">place with that name</text><text fill="#000000" font-family="sans-serif" font-size="11" lengthAdjust="spacing" textLength="45" x="420.75" y="128.7256">nearby?</text><text fill="#000000" font-family="sans-serif" font-size="11" lengthAdjust="spacing" textLength="20" x="385.75" y="115.9209">yes</text><text fill="#000000" font-family="sans-serif" font-size="11" lengthAdjust="spacing" textLength="14" x="545.75" y="115.9209">no</text><rect fill="#F8F8F8" filter="url(#f1b513ppngo123)" height="47.9375" rx="12.5" ry="12.5" style="stroke:#383838;stroke-width:1.5;" width="144" x="313" y="141.3198"/><text fill="#000000" font-family="sans-serif" font-size="12" font-weight="bold" lengthAdjust="spacing" textLength="124" x="323" y="162.4585">Use closest place</text><text fill="#000000" font-family="sans-serif" font-size="12" lengthAdjust="spacing" textLength="0" x="327" y="176.4272"/><text fill="#000000" font-family="sans-serif" font-size="12" font-weight="bold" lengthAdjust="spacing" textLength="116" x="327" y="176.4272">with same name</text><rect fill="#F8F8F8" filter="url(#f1b513ppngo123)" height="33.9688" rx="12.5" ry="12.5" style="stroke:#383838;stroke-width:1.5;" width="179" x="477" y="141.3198"/><text fill="#000000" font-family="sans-serif" font-size="12" lengthAdjust="spacing" textLength="159" x="487" y="162.4585">add addr:place to address</text><rect fill="#F8F8F8" filter="url(#f1b513ppngo123)" height="47.9375" rx="12.5" ry="12.5" style="stroke:#383838;stroke-width:1.5;" width="144" x="494.5" y="210.2886"/><text fill="#000000" font-family="sans-serif" font-size="12" font-weight="bold" lengthAdjust="spacing" textLength="124" x="504.5" y="231.4272">Use closest place</text><text fill="#000000" font-family="sans-serif" font-size="12" font-weight="bold" lengthAdjust="spacing" textLength="91" x="504.5" y="245.396">rank 16 to 25</text><rect fill="#F8F8F8" 
filter="url(#f1b513ppngo123)" height="47.9375" rx="12.5" ry="12.5" style="stroke:#383838;stroke-width:1.5;" width="102" x="666" y="157.5972"/><text fill="#000000" font-family="sans-serif" font-size="12" font-weight="bold" lengthAdjust="spacing" textLength="82" x="676" y="178.7358">Use closest</text><text fill="#000000" font-family="sans-serif" font-size="12" font-weight="bold" lengthAdjust="spacing" textLength="44" x="676" y="192.7046">street</text><line style="stroke:#383838;stroke-width:1.5;" x1="96" x2="86" y1="118.5151" y2="118.5151"/><line style="stroke:#383838;stroke-width:1.5;" x1="86" x2="86" y1="118.5151" y2="141.3198"/><polygon fill="#383838" points="82,131.3198,86,141.3198,90,131.3198,86,135.3198" style="stroke:#383838;stroke-width:1.0;"/><line style="stroke:#383838;stroke-width:1.5;" x1="240" x2="250" y1="118.5151" y2="118.5151"/><line style="stroke:#383838;stroke-width:1.5;" x1="250" x2="250" y1="118.5151" y2="141.3198"/><polygon fill="#383838" points="246,131.3198,250,141.3198,254,131.3198,250,135.3198" style="stroke:#383838;stroke-width:1.0;"/><line style="stroke:#383838;stroke-width:1.5;" x1="566.5" x2="566.5" y1="175.2886" y2="210.2886"/><polygon fill="#383838" points="562.5,200.2886,566.5,210.2886,570.5,200.2886,566.5,204.2886" style="stroke:#383838;stroke-width:1.0;"/><line style="stroke:#383838;stroke-width:1.5;" x1="405.75" x2="385" y1="118.5151" y2="118.5151"/><line style="stroke:#383838;stroke-width:1.5;" x1="385" x2="385" y1="118.5151" y2="141.3198"/><polygon fill="#383838" points="381,131.3198,385,141.3198,389,131.3198,385,135.3198" style="stroke:#383838;stroke-width:1.0;"/><line style="stroke:#383838;stroke-width:1.5;" x1="545.75" x2="566.5" y1="118.5151" y2="118.5151"/><line style="stroke:#383838;stroke-width:1.5;" x1="566.5" x2="566.5" y1="118.5151" y2="141.3198"/><polygon fill="#383838" points="562.5,131.3198,566.5,141.3198,570.5,131.3198,566.5,135.3198" style="stroke:#383838;stroke-width:1.0;"/><line 
style="stroke:#383838;stroke-width:1.5;" x1="168" x2="168" y1="74" y2="105.7104"/><polygon fill="#383838" points="164,95.7104,168,105.7104,172,95.7104,168,99.7104" style="stroke:#383838;stroke-width:1.0;"/><line style="stroke:#383838;stroke-width:1.5;" x1="475.75" x2="475.75" y1="74" y2="105.7104"/><polygon fill="#383838" points="471.75,95.7104,475.75,105.7104,479.75,95.7104,475.75,99.7104" style="stroke:#383838;stroke-width:1.0;"/><line style="stroke:#383838;stroke-width:1.5;" x1="230" x2="415.75" y1="62" y2="62"/><polygon fill="#383838" points="405.75,58,415.75,62,405.75,66,409.75,62" style="stroke:#383838;stroke-width:1.0;"/><line style="stroke:#383838;stroke-width:1.5;" x1="379.5" x2="379.5" y1="30" y2="35"/><line style="stroke:#383838;stroke-width:1.5;" x1="379.5" x2="168" y1="35" y2="35"/><line style="stroke:#383838;stroke-width:1.5;" x1="168" x2="168" y1="35" y2="50"/><polygon fill="#383838" points="164,40,168,50,172,40,168,44" style="stroke:#383838;stroke-width:1.0;"/><line style="stroke:#383838;stroke-width:1.5;" x1="535.75" x2="717" y1="62" y2="62"/><line style="stroke:#383838;stroke-width:1.5;" x1="717" x2="717" y1="62" y2="157.5972"/><polygon fill="#383838" points="713,147.5972,717,157.5972,721,147.5972,717,151.5972" style="stroke:#383838;stroke-width:1.0;"/><!--MD5=[e03d31a5684b671bb715075c57004ccb]\r
@startuml\r
skinparam monochrome true\r
\r
**with same name**;\r
kill\r
else (no)\r
- :add addr:place to adress;\r
+ :add addr:place to address;\r
:**Use closest place**\n**rank 16 to 25**;\r
kill\r
endif\r
\r
\r
@enduml\r
-
-PlantUML version 1.2021.12(Tue Oct 05 18:01:58 CEST 2021)
-(GPL source distribution)
-Java Runtime: OpenJDK Runtime Environment
-JVM: OpenJDK 64-Bit Server VM
-Default Encoding: UTF-8
-Language: en
-Country: US
+\r
+PlantUML version 1.2021.12(Tue Oct 05 18:01:58 CEST 2021)\r
+(GPL source distribution)\r
+Java Runtime: OpenJDK Runtime Environment\r
+JVM: OpenJDK 64-Bit Server VM\r
+Default Encoding: UTF-8\r
+Language: en\r
+Country: US\r
--></g></svg>
\ No newline at end of file
display: none!important
}
+.md-content {
+ max-width: 800px
+}
+
table {
margin-bottom: 12pt
}
.doc-object h6 {
margin-bottom: 0.8em;
- font-size: 120%;
+ font-size: 130%;
}
.doc-object {
margin-bottom: 1.3em;
}
+
+.doc-children .doc-contents {
+ margin-left: 3em;
+}
+
+.md-footer__inner {
+ display: none;
+}
+
+.headerlink {
+ filter: grayscale(100%);
+ font-size: 80%;
+}
-Nominatim (from the Latin, 'by name') is a tool to search OSM data by name and address and to generate synthetic addresses of OSM points (reverse geocoding).
+Nominatim (from the Latin, 'by name') is a tool to search OSM data by name and
+address and to generate synthetic addresses of OSM points (reverse geocoding).
+It also has a limited capability to search features by their type
+(pubs, hotels, churches, etc.).
-This guide comes in four parts:
+This guide comes in five parts:
* __[API reference](api/Overview.md)__ for users of Nominatim
* __[Administration Guide](admin/Installation.md)__ for those who want
to install their own Nominatim server
* __[Customization Guide](customize/Overview.md)__ for those who want to
adapt their own installation to their special requirements
+ * __[Library Guide](library/Getting-Started.md)__ for Python developers who
+ want to use Nominatim as a library in their project
* __[Developer's Guide](develop/overview.md)__ for developers of the software
--- /dev/null
+# Configuration
+
+When using Nominatim through the library, it can be configured in exactly
+the same way as when running as a service. You may instantiate the library
+against the [project directory](../admin/Import.md#creating-the-project-directory)
+of your Nominatim installation. It contains all files belonging to the
+Nominatim instance. This may include an `.env` file with configuration options.
+Setting configuration parameters via environment variables works as well.
+As an alternative to using the operating system's environment, a set of
+configuration parameters may also be passed to the Nominatim API object.
+
+Configuration options are resolved in the following order:
+
+* from the OS environment (or the dictionary given in `environ`,
+  see NominatimAPI.md#nominatim.api.core.NominatimAPI.__init__)
+* from the .env file in the project directory of the installation
+* from the default installation in the configuration directory
+
+For more information on configuration via dotenv and a list of possible
+configuration parameters, see the [Configuration page](../customize/Settings.md).
+
+
+## `Configuration` class
+
+::: nominatim_api.Configuration
+ options:
+ members:
+ - get_bool
+ - get_int
+ - get_str_list
+ - get_path
+ heading_level: 6
+ show_signature_annotations: True
--- /dev/null
+# Getting Started
+
+The Nominatim search frontend is implemented as a Python library and can as
+such directly be used in Python scripts and applications. You don't need to
+set up a web frontend and access it through HTTP calls. The library gives
+direct access to the Nominatim database through similar search functions as
+offered by the web API. In addition, it will give you a more complete and
+detailed view on the search objects stored in the database.
+
+!!! warning
+
+ The Nominatim library is used for accessing a local Nominatim database.
+ It is not meant to be used against web services of Nominatim like the
+ one on https://nominatim.openstreetmap.org. If you need a Python library
+ to access these web services, have a look at
+ [GeoPy](https://geopy.readthedocs.io). Don't forget to consult the
+ usage policy of the service you want to use before accessing such
+ a web service.
+
+## Installation
+
+To use the Nominatim library, you need access to a local Nominatim database.
+Follow the [installation](../admin/Installation.md) and
+[import](../admin/Import.md) instructions to set up your database.
+
+The Nominatim frontend library is contained in the Python package `nominatim-api`.
+You can install the latest released version directly from pip:
+
+ pip install nominatim-api
+
+To install the package from the source tree directly, run:
+
+ pip install packaging/nominatim-api
+
+Usually you would want to run this in a virtual environment.
+
+## A simple search example
+
+To query the Nominatim database you need to first set up a connection. This
+is done by creating a Nominatim API object. This object exposes all the
+search functions of Nominatim that are also known from its web API.
+
+This code snippet implements a simple search for the town of 'Brugge':
+
+!!! example
+ === "NominatimAPIAsync"
+ ``` python
+ import asyncio
+
+ import nominatim_api as napi
+
+ async def search(query):
+ async with napi.NominatimAPIAsync() as api:
+ return await api.search(query)
+
+ results = asyncio.run(search('Brugge'))
+ if not results:
+ print('Cannot find Brugge')
+ else:
+ print(f'Found a place at {results[0].centroid.x},{results[0].centroid.y}')
+ ```
+
+ === "NominatimAPI"
+ ``` python
+ import nominatim_api as napi
+
+ with napi.NominatimAPI() as api:
+ results = api.search('Brugge')
+
+ if not results:
+ print('Cannot find Brugge')
+ else:
+ print(f'Found a place at {results[0].centroid.x},{results[0].centroid.y}')
+ ```
+
+The Nominatim library is designed around
+[asyncio](https://docs.python.org/3/library/asyncio.html). `NominatimAPIAsync`
+provides you with an interface of coroutines.
+If you have many requests to make, coroutines can speed up your applications
+significantly.
+
+For smaller scripts there is also a synchronous wrapper around the API. By
+using `NominatimAPI`, you get exactly the same interface using classic functions.
+
+The examples in this chapter will always showcase both
+implementations. The documentation itself will usually refer only to
+'Nominatim API class' when both flavours are meant. If a functionality is
+available only for the synchronous or asynchronous version, this will be
+explicitly mentioned.
+
+## Defining which database to use
+
+The [Configuration](../admin/Import.md#configuration-setup-in-env)
+section explains how Nominatim is configured using the
+[dotenv](https://github.com/theskumar/python-dotenv) library.
+The same configuration mechanism is used with the
+Nominatim API library. You should therefore be sure you are familiar with
+the section.
+
+There are three different ways in which configuration options can be set for
+a 'Nominatim API class'. When you have set up your Nominatim database, you
+have normally created a [project directory](../admin/Import.md#creating-the-project-directory)
+which stores the various configuration and customization files that Nominatim
+needs. You may pass the location of the project directory to your
+'Nominatim API class' constructor and it will read the .env file in the
+directory and set the configuration accordingly. Here is the simple search
+example, using the configuration from a pre-defined project directory in
+`/srv/nominatim-project`:
+
+!!! example
+ === "NominatimAPIAsync"
+ ``` python
+ import asyncio
+
+ import nominatim_api as napi
+
+ async def search(query):
+ async with napi.NominatimAPIAsync('/srv/nominatim-project') as api:
+ return await api.search(query)
+
+ results = asyncio.run(search('Brugge'))
+ if not results:
+ print('Cannot find Brugge')
+ else:
+ print(f'Found a place at {results[0].centroid.x},{results[0].centroid.y}')
+ ```
+
+ === "NominatimAPI"
+ ``` python
+ import nominatim_api as napi
+
+ with napi.NominatimAPI('/srv/nominatim-project') as api:
+ results = api.search('Brugge')
+
+ if not results:
+ print('Cannot find Brugge')
+ else:
+ print(f'Found a place at {results[0].centroid.x},{results[0].centroid.y}')
+ ```
+
+
+You may also configure Nominatim by setting environment variables.
+Normally Nominatim will check the operating system environment. Let's
+say you want to look up 'Brugge' in the special database named 'belgium' instead of the
+standard 'nominatim' database. You can run the example script above like this:
+
+```
+NOMINATIM_DATABASE_DSN=pgsql:dbname=belgium python3 example.py
+```
+
+The third option to configure the library is to hand in the configuration
+parameters into the 'Nominatim API class'. Changing the database would look
+like this:
+
+!!! example
+ === "NominatimAPIAsync"
+ ``` python
+ import asyncio
+ import nominatim_api as napi
+
+ config_params = {
+ 'NOMINATIM_DATABASE_DSN': 'pgsql:dbname=belgium'
+ }
+
+ async def search(query):
+ async with napi.NominatimAPIAsync(environ=config_params) as api:
+ return await api.search(query)
+
+ results = asyncio.run(search('Brugge'))
+ ```
+
+ === "NominatimAPI"
+ ``` python
+ import nominatim_api as napi
+
+ config_params = {
+ 'NOMINATIM_DATABASE_DSN': 'pgsql:dbname=belgium'
+ }
+
+ with napi.NominatimAPI(environ=config_params) as api:
+ results = api.search('Brugge')
+ ```
+
+When the `environ` parameter is given, then only configuration variables
+from this dictionary will be used. The operating system's environment
+variables will be ignored.
+
+## Presenting results to humans
+
+All search functions return full result objects from the database. Such a
+result object contains lots of details: names, address information, OSM tags etc.
+This gives you lots of flexibility in what to do with the results.
+
+One of the most common things to get is some kind of human-readable label
+that describes the result in a compact form. Usually this would be the name
+of the object and some parts of the address to explain where in the world
+it is. To create such a label, you need two things:
+
+* the address details of the place
+* all names for the label adapted to the language you wish to use for display
+
+Again searching for 'Brugge', this time with a nicely formatted result:
+
+!!! example
+ === "NominatimAPIAsync"
+ ``` python
+ import asyncio
+
+ import nominatim_api as napi
+
+ async def search(query):
+ async with napi.NominatimAPIAsync() as api:
+ return await api.search(query, address_details=True)
+
+ results = asyncio.run(search('Brugge'))
+
+ locale = napi.Locales(['fr', 'en'])
+ for i, result in enumerate(results):
+ address_parts = result.address_rows.localize(locale)
+ print(f"{i + 1}. {', '.join(address_parts)}")
+ ```
+
+ === "NominatimAPI"
+ ``` python
+ import nominatim_api as napi
+
+ with napi.NominatimAPI() as api:
+ results = api.search('Brugge', address_details=True)
+
+ locale = napi.Locales(['fr', 'en'])
+ for i, result in enumerate(results):
+ address_parts = result.address_rows.localize(locale)
+ print(f"{i + 1}. {', '.join(address_parts)}")
+ ```
+
+To request information about the address of a result, add the optional
+parameter 'address_details' to your search:
+
+``` python
+>>> results = api.search('Brugge', address_details=True)
+```
+
+An additional field `address_rows` will be set in the results that are returned.
+It contains a list of all places that make up the address of the place. For
+simplicity, this includes name and house number of the place itself. With
+the names in this list it is possible to create a human-readable description
+of the result. To do that, you first need to decide in which language the
+results should be presented. As with the names in the result itself, the
places in `address_rows` contain all possible name translations for each row.
+
The library has a helper class `Locales` which helps with extracting a name of a
+place in the preferred language. It takes a single parameter with a list
+of language codes in the order of preference. So
+
+``` python
locale = napi.Locales(['fr', 'en'])
+```
+
+creates a helper class that returns the name preferably in French. If that is
+not possible, it tries English and eventually falls back to the default `name`
+or `ref`.
+
The `Locales` object can be applied to a name dictionary to return the best-matching
+name out of it:
+
+``` python
+>>> print(locale.display_name(results[0].names))
'Bruges'
+```
+
The `address_rows` field has a helper function to apply the function to all
+its members and save the result in the `local_name` field. It also returns
+all the localized names as a convenient simple list. This list can be used
+to create a human-readable output:
+
+``` python
+>>> address_parts = results[0].address_rows.localize(locale)
+>>> print(', '.join(address_parts))
+Bruges, Flandre-Occidentale, Flandre, Belgique
+```
+
+This is a fairly simple way to create a human-readable description. The
+place information in `address_rows` contains further information about each
+place. For example, which OSM `admin_level` was used, what category the place
+belongs to or what rank Nominatim has assigned. Use this to adapt the output
+to local address formats.
+
+For more information on address rows, see
+[detailed address description](Result-Handling.md#detailed-address-description).
--- /dev/null
+# Input Parameter Types
+
+This page describes in more detail some of the input parameter types used
+in the query functions of the API object.
+
+## Place identification
+
+The [details](NominatimAPI.md#nominatim_api.NominatimAPI.details) and
+[lookup](NominatimAPI.md#nominatim_api.NominatimAPI.lookup) functions
+require references to places in the database. Below the possible
+types for place identification are listed. All types are dataclasses.
+
+### PlaceID
+
+::: nominatim_api.PlaceID
+ options:
+ heading_level: 6
+
+### OsmID
+
+::: nominatim_api.OsmID
+ options:
+ heading_level: 6
+
+## Geometry types
+
+::: nominatim_api.GeometryFormat
+ options:
+ heading_level: 6
+ members_order: source
+
+## Geometry input
+
+### Point
+
+::: nominatim_api.Point
+ options:
+ heading_level: 6
+ show_signature_annotations: True
+
+### Bbox
+
+::: nominatim_api.Bbox
+ options:
+ heading_level: 6
+ show_signature_annotations: True
+ members_order: source
+ group_by_category: False
+
+## Layers
+
Layers allow restricting the search result to thematic groups. This is
+orthogonal to restriction by address ranks, which groups places by their
+geographic extent.
+
+
+::: nominatim_api.DataLayer
+ options:
+ heading_level: 6
+ members_order: source
--- /dev/null
+# Low-level connections
+
The `NominatimAPIAsync` class allows direct access to the underlying
+database connection to explore the raw data. Nominatim uses
+[SQLAlchemy](https://docs.sqlalchemy.org/) for building queries. Please
+refer to the documentation of the library to understand how to write SQL.
+
+To get access to a search connection, use the `begin()` function of your
+API object. This returns a `SearchConnection` object described below
+wrapped in a context manager. Its
+`t` property has definitions for all Nominatim search tables. For an
+overview of available tables, refer to the
[Development Layout](../develop/Database-Layout.md) in the development
+chapter. Note that only tables that are needed for search are accessible
+as SQLAlchemy tables.
+
+!!! warning
+ The database layout is not part of the API definition and may change
+ without notice. If you play with the low-level access functions, you
+ need to be prepared for such changes.
+
+Here is a simple example, which prints how many places are available in
+the placex table:
+
+```
+import asyncio
+import sqlalchemy as sa
+from nominatim_api import NominatimAPIAsync
+
+async def print_table_size():
+ api = NominatimAPIAsync()
+
+ async with api.begin() as conn:
+ cnt = await conn.scalar(sa.select(sa.func.count()).select_from(conn.t.placex))
+ print(f'placex table has {cnt} rows.')
+
+asyncio.run(print_table_size())
+```
+
+!!! warning
+ Low-level connections may only be used to read data from the database.
    Do not use them to add or modify data or you might break Nominatim's
+ normal functions.
+
+## SearchConnection class
+
+::: nominatim_api.SearchConnection
+ options:
+ members:
+ - scalar
+ - execute
+ - get_class_table
+ - get_db_property
+ - get_property
+ heading_level: 6
--- /dev/null
+# The Nominatim API classes
+
+The API classes are the core object of the search library. Always instantiate
+one of these classes first. The API classes are **not threadsafe**. You need
+to instantiate a separate instance for each thread.
+
+### NominatimAPI
+
+::: nominatim_api.NominatimAPI
+ options:
+ members:
+ - __init__
+ - config
+ - close
+ - status
+ - details
+ - lookup
+ - reverse
+ - search
+ - search_address
+ - search_category
+ heading_level: 6
+ group_by_category: False
+
+
+### NominatimAPIAsync
+
+::: nominatim_api.NominatimAPIAsync
+ options:
+ members:
+ - __init__
+ - setup_database
+ - close
+ - begin
+ heading_level: 6
+ group_by_category: False
--- /dev/null
+# Result handling
+
+The search functions of the Nominatim API always return a result object
+with the raw information about the place that is available in the
+database. This section discusses data types used in the results and utility
+functions that allow further processing of the results.
+
+## Result fields
+
+### Sources
+
+Nominatim takes the result data from multiple sources. The `source_table` field
in the result describes from which source the result was retrieved.
+
+::: nominatim_api.SourceTable
+ options:
+ heading_level: 6
+ members_order: source
+
+### Detailed address description
+
+When the `address_details` parameter is set, then functions return not
only information about the result place but also about the places that
+make up the address. This information is almost always required when you
+want to present the user with a human-readable description of the result.
+See also [Localization](#localization) below.
+
The address details are available in the `address_rows` field as an ordered
+list of `AddressLine` objects with the country information last. The list also
+contains the result place itself and some artificial entries, for example,
+for the house number or the country code. This makes processing and creating
+a full address easier.
+
+::: nominatim_api.AddressLine
+ options:
+ heading_level: 6
+ members_order: source
+
+### Detailed search terms
+
+The `details` function can return detailed information about which search terms
+may be used to find a place, when the `keywords` parameter is set. Search
+terms are split into terms for the name of the place and search terms for
+its address.
+
+::: nominatim_api.WordInfo
+ options:
+ heading_level: 6
+
+## Localization
+
+Results are always returned with the full list of available names.
+
+### Locale
+
+::: nominatim_api.Locales
+ options:
+ heading_level: 6
--- /dev/null
+# SPDX-License-Identifier: GPL-3.0-or-later
+#
+# This file is part of Nominatim. (https://nominatim.org)
+#
+# Copyright (C) 2024 by the Nominatim developer community.
+from pathlib import Path
+
+import mkdocs_gen_files
+
# Directory with the vagrant provisioning scripts, resolved relative to this file.
VAGRANT_PATH = Path(__file__, '..', '..', 'vagrant').resolve()

# Turn each installation shell script into a markdown page under admin/.
# Comment lines in the scripts double as the documentation text; all other
# lines are copied through verbatim.
for infile in VAGRANT_PATH.glob('Install-on-*.sh'):
    outfile = f"admin/{infile.stem}.md"
    # Page title derived from the file name, e.g. "Install on ..." .
    title = infile.stem.replace('-', ' ')

    with mkdocs_gen_files.open(outfile, "w") as outfd, infile.open() as infd:
        print("#", title, file=outfd)
        has_empty = False  # True while the previously seen line was blank
        for line in infd:
            line = line.rstrip()
            docpos = line.find('#DOCS:')
            if docpos >= 0:
                # Explicit documentation marker: keep only the text after it.
                line = line[docpos + 6:]
            elif line == '#' or line.startswith('#!'):
                # Bare comment markers and shebang lines become blank lines.
                line = ''
            elif line.startswith('# '):
                # Plain shell comments turn into markdown prose.
                line = line[2:]
            # Collapse runs of blank lines into a single blank line.
            if line or not has_empty:
                print(line, file=outfd)
            has_empty = not bool(line)

    mkdocs_gen_files.set_edit_path(outfile, "docs/mk_install_instructions.py")
--- /dev/null
+-- This is just an alias for the Nominatim themepark theme module
+local flex = require('themes/nominatim/init')
+
-- Load and run one of the topic presets shipped with the Nominatim theme.
-- name: topic name (file themes/nominatim/topics/<name>.lua)
-- cfg: optional configuration table handed through to the topic script
function flex.load_topic(name, cfg)
    local base_dir = debug.getinfo(1, "S").source:sub(2):match("(.*/)")
    local topic_file = base_dir .. 'themes/nominatim/topics/' .. name .. '.lua'

    -- loadfile() returns nil when the file cannot be opened or compiled.
    -- (The original checked topic_file for nil, but a string concatenation
    -- can never be nil, so a missing topic crashed with an unhelpful
    -- "attempt to call a nil value" instead of this error message.)
    local topic = loadfile(topic_file)
    if topic == nil then
        error('Cannot find topic: ' .. name)
    end

    topic(nil, flex, cfg or {})
end

return flex
--- /dev/null
+-- This is just an alias for the Nominatim themepark address topic
local flex = require('flex-base')

-- Activate the 'address' topic preset from the themepark theme.
flex.load_topic('address')

return flex
--- /dev/null
+-- This is just an alias for the Nominatim themepark admin topic
local flex = require('flex-base')

-- Activate the 'admin' topic preset from the themepark theme.
flex.load_topic('admin')

return flex
--- /dev/null
+-- This is just an alias for the Nominatim themepark full topic
local flex = require('flex-base')

-- Activate the 'full' topic preset with extratags collection enabled.
flex.load_topic('full', {with_extratags = true})

return flex
--- /dev/null
+-- This is just an alias for the Nominatim themepark full topic
local flex = require('flex-base')

-- Activate the 'full' topic preset from the themepark theme.
flex.load_topic('full')

return flex
--- /dev/null
+-- This is just an alias for the Nominatim themepark street topic
local flex = require('flex-base')

-- Activate the 'street' topic preset from the themepark theme.
flex.load_topic('street')

return flex
--- /dev/null
+-- Prints taginfo project description in the standard output
+--
+
-- create fake "osm2pgsql" table for flex-base, originally created by the main C++ program
osm2pgsql = {}
function osm2pgsql.define_table(...) end

-- provide path to flex-style lua file
package.path = arg[0]:match("(.*/)") .. "?.lua;" .. package.path
-- Load the import style named on the command line (default: extratags).
local flex = require('import-' .. (arg[1] or 'extratags'))
local json = require ('dkjson')

-- Descriptions per name group as returned by the style's name filter.
-- Index 1 is the default used for groups without an explicit entry.
local NAME_DESCRIPTIONS = {
    'Searchable auxiliary name of the place',
    main = 'Searchable primary name of the place',
    house = 'House name part of an address, searchable'
}
-- Descriptions per address group; same structure as NAME_DESCRIPTIONS.
local ADDRESS_DESCRIPTIONS = {
    'Used to determine the address of a place',
    main = 'Primary key for an address point',
    postcode = 'Used to determine the postcode of a place',
    country = 'Used to determine country of a place (only if written as two-letter code)',
    interpolation = 'Primary key for an address interpolation line'
}
+
+------------ helper functions ---------------------
-- Sets the key order for the resulting JSON table
-- (dkjson honours the __jsonorder metafield when serialising).
-- Renamed the parameter from 'table' to 'tbl' so it no longer shadows
-- Lua's standard 'table' library.
local function set_keyorder(tbl, order)
    setmetatable(tbl, {
        __jsonorder = order
    })
end
+
-- Build a taginfo entry describing a whole key.
local function get_key_description(key, description)
    local entry = {key = key, description = description}
    set_keyorder(entry, {'key', 'description'})
    return entry
end
+
-- Build a taginfo entry describing one specific key/value combination.
local function get_key_value_description(key, value, description)
    local entry = {}
    entry.key = key
    entry.value = value
    entry.description = description
    set_keyorder(entry, {'key', 'value', 'description'})
    return entry
end
+
-- Append a key description for every non-wildcard key in the grouped key
-- table 'data'. Group-specific descriptions fall back to entry 1.
local function group_table_to_keys(tags, data, descriptions)
    for group, keys in pairs(data) do
        local description = descriptions[group] or descriptions[1]
        for _, key in pairs(keys) do
            local is_wildcard = key:sub(1, 1) == '*' or key:sub(-1) == '*'
            if not is_wildcard then
                tags[#tags + 1] = get_key_description(key, description)
            end
        end
    end
end
+
-- Prints the collected tags in the required format in JSON
local function print_taginfo()
    local taginfo = flex.get_taginfo()
    local tags = {}

    -- Main tags: taginfo.main maps key -> {value or 1 -> group/function}.
    for k, values in pairs(taginfo.main) do
        if values[1] == nil or values[1] == 'delete' or values[1] == 'extra' then
            -- No generic handler for the key: list each explicit value.
            for v, group in pairs(values) do
                if type(v) == 'string' and group ~= 'delete' and group ~= 'extra' then
                    local text = 'POI/feature in the search database'
                    if type(group) ~= 'function' then
                        text = 'Fallback ' .. text
                    end
                    table.insert(tags, get_key_value_description(k, v, text))
                end
            end
        elseif type(values[1]) == 'function' or values[1] == 'fallback' then
            -- Generic handler for the key: describe the key as a whole and
            -- list values that are explicitly excluded from it.
            local desc = 'POI/feature in the search database'
            if values[1] == 'fallback' then
                desc = 'Fallback ' .. desc
            end
            local excp = {}
            for v, group in pairs(values) do
                if group == 'delete' or group == 'extra' then
                    table.insert(excp, v)
                end
            end
            if next(excp) ~= nil then
                desc = desc .. string.format(' (except for values: %s)',
                                             table.concat(excp, ', '))
            end
            table.insert(tags, get_key_description(k, desc))
        end
    end

    -- Name and address keys with their group descriptions.
    group_table_to_keys(tags, taginfo.name, NAME_DESCRIPTIONS)
    group_table_to_keys(tags, taginfo.address, ADDRESS_DESCRIPTIONS)

    -- Project header as required by the taginfo project file format.
    local format = {
        data_format = 1,
        data_url = 'https://nominatim.openstreetmap.org/taginfo.json',
        project = {
            name = 'Nominatim',
            description = 'OSM search engine.',
            project_url = 'https://nominatim.openstreetmap.org',
            doc_url = 'https://nominatim.org/release-docs/develop/',
            contact_name = 'Sarah Hoffmann',
            contact_email = 'lonvia@denofr.de'
        }
    }
    format.tags = tags

    set_keyorder(format, {'data_format', 'data_url', 'project', 'tags'})
    set_keyorder(format.project, {'name', 'description', 'project_url', 'doc_url',
                    'contact_name', 'contact_email'})

    print(json.encode(format))
end
+
-- Entry point: write the taginfo project description to stdout.
print_taginfo()
--- /dev/null
+-- Nominatim themepark theme.
+--
+-- The Nominatim theme creates a fixed set of import tables for use with
+-- Nominatim. Creation and object processing are directly controlled by
+-- the theme. Topics provide preset configurations. You should add exactly
+-- one topic to your project.
+--
+-- The theme also exports a number of functions that can be used to configure
+-- its behaviour. These may be directly called in the style file after
+-- importing the theme:
+--
+-- local nominatim = themepark:init_theme('nominatim')
+-- nominatim.set_main_tags{boundary = 'always'}
+--
+-- This allows to write your own configuration from scratch. You can also
+-- use it to customize topics. In that case, first add the topic, then
+-- change the configuration:
+--
+-- themepark:add_topic('nominatim/full')
+-- local nominatim = themepark:init_theme('nominatim')
+-- nominatim.ignore_tags{'amenity'}
+
local module = {}

-- Main tag classification: key -> {tag value or 1 -> group or transform}.
local MAIN_KEYS = {admin_level = {'delete'}}
-- Cheap pre-filters on key prefixes/suffixes, filled by add_pre_filter().
local PRE_FILTER = {prefix = {}, suffix = {}}
-- Name tag configuration (see modify_name_tags()).
local NAMES = {}
-- Classifier function built from NAMES via tag_group().
local NAME_FILTER = nil
-- Address tag configuration (see modify_address_tags()).
local ADDRESS_TAGS = {}
-- Classifier function built from ADDRESS_TAGS via tag_group().
local ADDRESS_FILTER = nil
-- Function deciding which tags end up in extratags; defaults set below.
local EXTRATAGS_FILTER
-- When true, places with only a postcode still get a fallback row.
local POSTCODE_FALLBACK = true

-- This file can also be directly require'd instead of running it under
-- the themepark framework. In that case the first parameter is usually
-- the module name. Let's check for that, so that further down we can call
-- the low-level osm2pgsql functions instead of themepark functions.
local themepark = ...
if type(themepark) ~= 'table' then
    themepark = nil
end
+
-- The single place table.
local place_table_definition = {
    name = "place",
    ids = { type = 'any', id_column = 'osm_id', type_column = 'osm_type' },
    columns = {
        { column = 'class', type = 'text', not_null = true },
        { column = 'type', type = 'text', not_null = true },
        { column = 'admin_level', type = 'smallint' },
        { column = 'name', type = 'hstore' },
        { column = 'address', type = 'hstore' },
        { column = 'extratags', type = 'hstore' },
        { column = 'geometry', type = 'geometry', projection = 'WGS84', not_null = true },
    },
    -- Optional tablespaces, taken from the usual Nominatim environment variables.
    data_tablespace = os.getenv("NOMINATIM_TABLESPACE_PLACE_DATA"),
    index_tablespace = os.getenv("NOMINATIM_TABLESPACE_PLACE_INDEX"),
    indexes = {}
}

-- Writes one row into the place table; wired up below depending on
-- whether we run under themepark or plain osm2pgsql.
local insert_row
-- Directory of this file, used to locate the presets definition.
local script_path = debug.getinfo(1, "S").source:match("@?(.*/)")
local PRESETS = loadfile(script_path .. 'presets.lua')()

if themepark then
    -- Under themepark, table management is delegated to the framework.
    themepark:add_table(place_table_definition)
    insert_row = function(columns)
        themepark:insert('place', columns, {}, {})
    end
else
    -- Plain osm2pgsql: define and write the table directly.
    local place_table = osm2pgsql.define_table(place_table_definition)
    insert_row = function(columns)
        place_table:insert(columns)
    end
end
+
------------ Geometry functions for relations ---------------------

-- Build a multipolygon geometry from a relation.
function module.relation_as_multipolygon(o)
    return o:as_multipolygon()
end

-- Build a merged multiline geometry from a relation.
function module.relation_as_multiline(o)
    return o:as_multilinestring():line_merge()
end


-- Geometry builder per relation type; relations with other types are
-- ignored (see module.process_relation).
module.RELATION_TYPES = {
    multipolygon = module.relation_as_multipolygon,
    boundary = module.relation_as_multipolygon,
    waterway = module.relation_as_multiline
}

--------- Built-in place transformation functions --------------------------

-- A transform decides if and how a (place, main tag) combination is
-- written out. It is either one of the marker strings below or a
-- function(place, k, v) returning a Place to write or nil to drop it.
local PlaceTransform = {}

-- Special transform meanings which are interpreted elsewhere
PlaceTransform.fallback = 'fallback'
PlaceTransform.delete = 'delete'
PlaceTransform.extra = 'extra'

-- always: unconditionally use that place
function PlaceTransform.always(place)
    return place
end

-- never: unconditionally drop the place
function PlaceTransform.never()
    return nil
end

-- named: use the place if it has a fully-qualified name
function PlaceTransform.named(place)
    if place.has_name then
        return place
    end
end
+
-- named_with_key: use place if there is a name with the main key prefix.
-- Matching names ('<k>:name' and '<k>:name:<lang>') are kept with the
-- '<k>:' prefix stripped off.
function PlaceTransform.named_with_key(place, k)
    local prefix = k .. ':name'
    local plen = #prefix
    local matched = {}

    for tag, value in pairs(place.intags) do
        if tag:sub(1, plen) == prefix
           and (#tag == plen or tag:sub(plen + 1, plen + 1) == ':') then
            matched[tag:sub(#k + 2)] = value
        end
    end

    if next(matched) ~= nil then
        return place:clone{names=matched}
    end
end
+
-- Special transform used with address fallbacks: ignore all names
-- except for those marked as being part of the address.
local function address_fallback(place)
    if NAMES.house == nil or next(place.names) == nil then
        return place
    end

    local house_names = {}
    for key, value in pairs(place.names) do
        if NAME_FILTER(key, value) == 'house' then
            house_names[key] = value
        end
    end
    return place:clone{names=house_names}
end
+
+--------- Built-in extratags transformation functions ---------------
+
-- Default extratags handling: for boundaries that also carry a place
-- tag, copy the place tag into extratags. Nominatim needs this.
local function default_extratags_filter(p, k)
    if k == 'boundary' and p.intags.place ~= nil then
        local combined = { place = p.intags.place }
        for key, value in pairs(p.extratags) do
            combined[key] = value
        end
        return combined
    end

    -- All other objects keep their extratags unchanged.
    return p.extratags
end
EXTRATAGS_FILTER = default_extratags_filter
+
+----------------- other helper functions -----------------------------
+
-- Classify a tag with the configured pre-filters.
-- Returns the matching group ('delete', 'extra', ...) or nil.
-- Precedence: exact key/value match, then suffix, then prefix patterns.
local function lookup_prefilter_classification(k, v)
    -- 1. exact matches (value-specific entry wins over the generic one)
    local key_entry = MAIN_KEYS[k]
    if key_entry then
        local match = key_entry[v] or key_entry[1]
        if match ~= nil then
            return match
        end
    end
    -- 2. suffix wildcards, indexed by pattern length
    for plen, plist in pairs(PRE_FILTER.suffix) do
        if #k >= plen then
            local found = plist[k:sub(-plen)]
            if found ~= nil then
                return found
            end
        end
    end
    -- 3. prefix wildcards, indexed by pattern length
    for plen, plist in pairs(PRE_FILTER.prefix) do
        if #k >= plen then
            local found = plist[k:sub(1, plen)]
            if found ~= nil then
                return found
            end
        end
    end
end
+
+
-- Register the given keys (plain list) and tags (key -> value list) in
-- MAIN_KEYS under the given group. Wildcard keys are skipped; they are
-- handled by the prefix/suffix pre-filters instead.
local function merge_filters_into_main(group, keys, tags)
    -- Fetch (or create) the classification table for a key.
    local function main_entry(key)
        local entry = MAIN_KEYS[key]
        if entry == nil then
            entry = {}
            MAIN_KEYS[key] = entry
        end
        return entry
    end

    for _, key in pairs(keys or {}) do
        -- ignore suffix and prefix matches
        if key:sub(1, 1) ~= '*' and key:sub(#key, #key) ~= '*' then
            main_entry(key)[1] = group
        end
    end

    for key, values in pairs(tags or {}) do
        local entry = main_entry(key)
        for _, v in pairs(values) do
            entry[v] = group
        end
    end
end
+
+
-- Remove all classifications belonging to the given group from MAIN_KEYS.
-- Keys whose classification table becomes empty are dropped entirely.
local function remove_group_from_main(group)
    for key, values in pairs(MAIN_KEYS) do
        for ttype, tgroup in pairs(values) do
            if tgroup == group then
                -- Delete by the table key. The original indexed with the
                -- value ('values[ttype] = nil' where ttype was the group
                -- name), so matched entries were never actually removed.
                values[ttype] = nil
            end
        end
        if next(values) == nil then
            MAIN_KEYS[key] = nil
        end
    end
end
+
+
-- Register wildcard keys ('*suffix' or 'prefix*') from the given
-- group -> key-list table in PRE_FILTER. Plain keys are ignored here;
-- they go into MAIN_KEYS via merge_filters_into_main().
local function add_pre_filter(data)
    for group, keys in pairs(data) do
        for _, key in pairs(keys) do
            local core_len = #key - 1
            if key:sub(1, 1) == '*' then
                -- a lone '*' has an empty pattern and is skipped
                if core_len > 0 then
                    local slot = PRE_FILTER.suffix[core_len] or {}
                    slot[key:sub(2)] = group
                    PRE_FILTER.suffix[core_len] = slot
                end
            elseif key:sub(-1) == '*' then
                local slot = PRE_FILTER.prefix[core_len] or {}
                slot[key:sub(1, core_len)] = group
                PRE_FILTER.prefix[core_len] = slot
            end
        end
    end
end
+
------------- Place class ------------------------------------------

-- Wrapper around one OSM object while its tags are being processed.
local Place = {}
Place.__index = Place

-- Create a new Place for the given osm2pgsql object. geom_func is called
-- lazily to create the output geometry (see Place:write_row).
function Place.new(object, geom_func)
    local self = setmetatable({}, Place)
    self.object = object
    self.geom_func = geom_func

    -- Normalise admin_level to an integer in (0, 15]; missing,
    -- non-numeric, fractional or out-of-range values become 15.
    self.admin_level = tonumber(self.object.tags.admin_level or 15) or 15
    -- NOTE(review): the nil comparison below can never be true because of
    -- the 'or 15' defaults in the line above.
    if self.admin_level == nil
       or self.admin_level <= 0 or self.admin_level > 15
       or math.floor(self.admin_level) ~= self.admin_level then
        self.admin_level = 15
    end

    self.num_entries = 0   -- number of rows already written for this object
    self.has_name = false  -- true when a searchable primary name was found
    self.names = {}        -- collected name tags
    self.address = {}      -- collected address tags
    self.extratags = {}    -- tags only kept as auxiliary information

    self.intags = {}       -- remaining tags, still to be processed

    -- Classify every tag with the pre-filters right away so that objects
    -- without any interesting tags can be skipped early.
    local has_main_tags = false
    for k, v in pairs(self.object.tags) do
        local group = lookup_prefilter_classification(k, v)
        if group == 'extra' then
            self.extratags[k] = v
        elseif group ~= 'delete' then
            self.intags[k] = v
            if group ~= nil then
                has_main_tags = true
            end
        end
    end

    if not has_main_tags then
        -- no interesting tags, don't bother processing
        self.intags = {}
    end

    return self
end
+
-- Remove or demote tags according to the given matchers.
-- data.delete(k, v): tag is dropped entirely.
-- data.extra(k, v): tag is moved into extratags.
-- A tag matching both is deleted (delete wins).
function Place:clean(data)
    local is_deleted = data.delete
    local is_extra = data.extra
    for key, value in pairs(self.intags) do
        if is_deleted ~= nil and is_deleted(key, value) then
            self.intags[key] = nil
        elseif is_extra ~= nil and is_extra(key, value) then
            self.extratags[key] = value
            self.intags[key] = nil
        end
    end
end
+
-- Drop all remaining tags for which data.match(k, v) returns true.
function Place:delete(data)
    local matcher = data.match
    if matcher == nil then
        return
    end

    for key, value in pairs(self.intags) do
        if matcher(key, value) then
            self.intags[key] = nil
        end
    end
end
+
-- Move all tags matching data.match(k, v) into extratags.
-- Returns the number of tags moved.
function Place:grab_extratags(data)
    local matcher = data.match
    local moved = 0

    if matcher ~= nil then
        for key, value in pairs(self.intags) do
            if matcher(key, value) then
                self.intags[key] = nil
                self.extratags[key] = value
                moved = moved + 1
            end
        end
    end

    return moved
end
+
-- Strip a leading 'addr:' or 'is_in:' from the given key, if present.
local function strip_address_prefix(key)
    local stripped = key:match('^addr:(.*)$') or key:match('^is_in:(.*)$')
    return stripped or key
end
+
+
-- Move all tags classified as address parts from intags into self.address.
-- data.groups is a classifier function(k, v) returning a group or nil.
-- Returns the number of tags classified as 'main' (i.e. those that make
-- the place searchable by address).
function Place:grab_address_parts(data)
    local count = 0

    if data.groups ~= nil then
        for k, v in pairs(self.intags) do
            local atype = data.groups(k, v)

            if atype ~= nil then
                if atype == 'main' then
                    -- Primary address tag: marks the place as searchable.
                    self.has_name = true
                    self.address[strip_address_prefix(k)] = v
                    count = count + 1
                elseif atype == 'extra' then
                    self.address[strip_address_prefix(k)] = v
                else
                    -- Other groups (e.g. postcode, country) are stored
                    -- under the group name itself.
                    self.address[atype] = v
                end
                self.intags[k] = nil
            end
        end
    end

    return count
end


-- Move all tags classified as names from intags into self.names.
-- data.groups is a classifier function(k, v) returning a group or nil.
-- Returns a fallback row descriptor {class, type, transform} when a
-- house-name tag was found, nil otherwise.
function Place:grab_name_parts(data)
    local fallback = nil

    if data.groups ~= nil then
        for k, v in pairs(self.intags) do
            local atype = data.groups(k, v)

            if atype ~= nil then
                self.names[k] = v
                self.intags[k] = nil
                if atype == 'main' then
                    self.has_name = true
                elseif atype == 'house' then
                    self.has_name = true
                    fallback = {'place', 'house', address_fallback}
                end
            end
        end
    end

    return fallback
end
+
+
-- Run the transform mfunc for main tag (k, v) and write the resulting
-- place, if any. When v is nil, the value is taken from intags.
-- Returns the number of rows written (0 or 1).
function Place:write_place(k, v, mfunc)
    v = v or self.intags[k]
    if v == nil then
        return 0
    end

    local place = mfunc(self, k, v)
    if not place then
        return 0
    end

    local written = place:write_row(k, v)
    self.num_entries = self.num_entries + written
    return written
end
+
-- Write one row with class k and type v for this place.
-- Returns the number of rows written (0 when no geometry could be built).
function Place:write_row(k, v)
    -- Create the output geometry lazily and cache it for further rows.
    if self.geometry == nil then
        self.geometry = self.geom_func(self.object)
    end
    if self.geometry == nil or self.geometry:is_null() then
        return 0
    end

    local extratags = EXTRATAGS_FILTER(self, k, v)
    -- Normalise empty extratags to nil.
    if not (extratags and next(extratags)) then
        extratags = nil
    end

    insert_row{
        class = k,
        type = v,
        admin_level = self.admin_level,
        name = next(self.names) and self.names,        -- nil when empty
        address = next(self.address) and self.address, -- nil when empty
        extratags = extratags,
        geometry = self.geometry
    }

    return 1
end
+
+
-- Create a shallow copy of this place; any field given in data
-- overrides the corresponding field of the original.
function Place:clone(data)
    local copy = setmetatable({}, Place)
    copy.object = self.object
    copy.geom_func = self.geom_func
    copy.geometry = data.geometry or self.geometry
    copy.admin_level = data.admin_level or self.admin_level
    copy.intags = data.intags or self.intags
    copy.names = data.names or self.names
    copy.address = data.address or self.address
    copy.extratags = data.extratags or self.extratags

    return copy
end
+
+
-- Build a matcher function(k, v) -> bool from a filter description.
-- data.keys may contain literal keys and '*suffix'/'prefix*' patterns;
-- data.tags maps keys to lists of accepted values. Returns nil when the
-- description is empty.
function module.tag_match(data)
    if data == nil or next(data) == nil then
        return nil
    end

    local fullmatches = {}
    local key_prefixes = {}
    local key_suffixes = {}

    -- Sort keys into full matches and prefix/suffix tables indexed by
    -- pattern length for cheap lookups in the returned closure.
    if data.keys ~= nil then
        for _, key in pairs(data.keys) do
            if key:sub(1, 1) == '*' then
                if #key > 1 then
                    if key_suffixes[#key - 1] == nil then
                        key_suffixes[#key - 1] = {}
                    end
                    key_suffixes[#key - 1][key:sub(2)] = true
                end
            elseif key:sub(#key, #key) == '*' then
                if key_prefixes[#key - 1] == nil then
                    key_prefixes[#key - 1] = {}
                end
                key_prefixes[#key - 1][key:sub(1, #key - 1)] = true
            else
                fullmatches[key] = true
            end
        end
    end

    -- Value restrictions; a plain key match from above takes precedence.
    if data.tags ~= nil then
        for k, vlist in pairs(data.tags) do
            if fullmatches[k] == nil then
                fullmatches[k] = {}
                for _, v in pairs(vlist) do
                    fullmatches[k][v] = true
                end
            end
        end
    end

    return function (k, v)
        -- fullmatches[k] is either 'true' (any value) or a value set
        if fullmatches[k] ~= nil and (fullmatches[k] == true or fullmatches[k][v] ~= nil) then
            return true
        end

        for slen, slist in pairs(key_suffixes) do
            if #k >= slen and slist[k:sub(-slen)] ~= nil then
                return true
            end
        end

        for slen, slist in pairs(key_prefixes) do
            if #k >= slen and slist[k:sub(1, slen)] ~= nil then
                return true
            end
        end

        return false
    end
end


-- Build a classifier function(k) -> group from a table mapping group
-- names to key lists ('*suffix'/'prefix*' patterns allowed).
-- Returns nil when the description is empty.
function module.tag_group(data)
    if data == nil or next(data) == nil then
        return nil
    end

    local fullmatches = {}
    local key_prefixes = {}
    local key_suffixes = {}

    -- Same bucketing strategy as in tag_match, but the stored value is
    -- the group name instead of 'true'.
    for group, tags in pairs(data) do
        for _, key in pairs(tags) do
            if key:sub(1, 1) == '*' then
                if #key > 1 then
                    if key_suffixes[#key - 1] == nil then
                        key_suffixes[#key - 1] = {}
                    end
                    key_suffixes[#key - 1][key:sub(2)] = group
                end
            elseif key:sub(#key, #key) == '*' then
                if key_prefixes[#key - 1] == nil then
                    key_prefixes[#key - 1] = {}
                end
                key_prefixes[#key - 1][key:sub(1, #key - 1)] = group
            else
                fullmatches[key] = group
            end
        end
    end

    return function (k)
        local val = fullmatches[k]
        if val ~= nil then
            return val
        end

        for slen, slist in pairs(key_suffixes) do
            if #k >= slen then
                val = slist[k:sub(-slen)]
                if val ~= nil then
                    return val
                end
            end
        end

        for slen, slist in pairs(key_prefixes) do
            if #k >= slen then
                val = slist[k:sub(1, slen)]
                if val ~= nil then
                    return val
                end
            end
        end
    end
end

-- Returns prefix part of the keys, and reject suffix matching keys
-- NOTE(review): for 'prefix*' patterns this cuts TWO characters, i.e. it
-- also drops the character before the '*' (e.g. 'name:*' -> 'name');
-- confirm this is what the callers expect.
local function process_key(key)
    if key:sub(1, 1) == '*' then
        return nil
    end
    if key:sub(#key, #key) == '*' then
        return key:sub(1, #key - 2)
    end
    return key
end
+
+-- Process functions for all data types
-- Handler for nodes: the geometry is simply the node's point.
function module.process_node(object)
    module.process_tags(Place.new(object, function (o)
        return o:as_point()
    end))
end
+
-- Handler for ways: closed ways become polygons, other ways become
-- linestrings. Linestrings longer than 30 are dropped (units depend on
-- the osm2pgsql geometry implementation — presumably degrees; confirm).
function module.process_way(object)

    local function geom_func(o)
        local geom = o:as_polygon()
        if not geom:is_null() then
            return geom
        end

        geom = o:as_linestring()
        if geom:is_null() or geom:length() > 30 then
            return nil
        end
        return geom
    end

    module.process_tags(Place.new(object, geom_func))
end
+
-- Handler for relations: only relation types with a registered geometry
-- builder (see module.RELATION_TYPES) are processed.
function module.process_relation(object)
    local geom_func = module.RELATION_TYPES[object.tags.type]
    if geom_func == nil then
        return
    end

    module.process_tags(Place.new(object, geom_func))
end
+
-- The process functions are used by default by osm2pgsql.
if themepark then
    themepark:add_proc('node', module.process_node)
    themepark:add_proc('way', module.process_way)
    themepark:add_proc('relation', module.process_relation)
else
    osm2pgsql.process_node = module.process_node
    osm2pgsql.process_way = module.process_way
    osm2pgsql.process_relation = module.process_relation
end

-- Central tag processing: sort the tags of the given Place into names,
-- address parts and main tags and write one row per recognised main tag
-- (or a single fallback row when only auxiliary tags are present).
function module.process_tags(o)
    if next(o.intags) == nil then
        return -- shortcut when pre-filtering has removed all tags
    end

    -- Exception for boundary/place double tagging
    if o.intags.boundary == 'administrative' then
        -- Move the place tag to extratags unless its value starts with
        -- 'isl' (e.g. island, islet), so it does not produce its own row.
        o:grab_extratags{match = function (k, v)
            return k == 'place' and v:sub(1,3) ~= 'isl'
        end}
    end

    -- name keys
    local fallback = o:grab_name_parts{groups=NAME_FILTER}

    -- address keys
    if o:grab_address_parts{groups=ADDRESS_FILTER} > 0 and fallback == nil then
        fallback = {'place', 'house', address_fallback}
    end
    -- Country codes that are not two letters are thrown away.
    if o.address.country ~= nil and #o.address.country ~= 2 then
        o.address['country'] = nil
    end
    if POSTCODE_FALLBACK and fallback == nil and o.address.postcode ~= nil then
        fallback = {'place', 'postcode', PlaceTransform.always}
    end

    -- Address interpolations are written out immediately as place=houses.
    if o.address.interpolation ~= nil then
        o:write_place('place', 'houses', PlaceTransform.always)
        return
    end

    -- collect main keys
    for k, v in pairs(o.intags) do
        local ktable = MAIN_KEYS[k]
        if ktable then
            -- A value-specific entry wins over the generic one at index 1.
            local ktype = ktable[v] or ktable[1]
            if type(ktype) == 'function' then
                o:write_place(k, v, ktype)
            elseif ktype == 'fallback' and o.has_name then
                fallback = {k, v, PlaceTransform.named}
            end
        end
    end

    -- The fallback row is only written when nothing else was.
    if fallback ~= nil and o.num_entries == 0 then
        o:write_place(fallback[1], fallback[2], fallback[3])
    end
end
+
+--------- Convenience functions for simple style configuration -----------------
+
-- Replace the 'delete' and 'extra' pre-filters in one go.
-- data may contain delete_keys/delete_tags and extra_keys/extra_tags.
function module.set_prefilters(data)
    remove_group_from_main('delete')
    merge_filters_into_main('delete', data.delete_keys, data.delete_tags)

    remove_group_from_main('extra')
    merge_filters_into_main('extra', data.extra_keys, data.extra_tags)

    -- Wildcard filters are rebuilt from scratch from the key lists.
    PRE_FILTER = {prefix = {}, suffix = {}}
    add_pre_filter{delete = data.delete_keys, extra = data.extra_keys}
end
+
+
-- Mark the given keys (or a named preset of keys) for deletion before
-- any further processing.
function module.ignore_keys(data)
    if type(data) == 'string' then
        local preset_name = data
        data = PRESETS.IGNORE_KEYS[preset_name]
        if data == nil then
            error('Unknown preset for ignored keys: ' .. preset_name)
        end
    end

    merge_filters_into_main('delete', data)
    add_pre_filter{delete = data}
end
+
+
-- Mark the given keys (or a named preset of keys) to be moved into
-- extratags instead of being processed further.
function module.add_for_extratags(data)
    if type(data) == 'string' then
        local preset_name = data
        data = PRESETS.EXTRATAGS[preset_name] or PRESETS.IGNORE_KEYS[preset_name]
        if data == nil then
            error('Unknown preset for extratags: ' .. preset_name)
        end
    end

    merge_filters_into_main('extra', data)
    add_pre_filter{extra = data}
end
+
+
-- Replace the main tag definitions with the given ones. Only the
-- fallback/function classifications are cleared; pre-filter markers like
-- 'delete' and 'extra' are kept.
function module.set_main_tags(data)
    for key, values in pairs(MAIN_KEYS) do
        for ttype, group in pairs(values) do
            if group == 'fallback' or type(group) == 'function' then
                -- Delete by the table key. The original indexed with the
                -- value ('values[ttype] = nil' where ttype was the
                -- transform itself), so nothing was actually removed.
                values[ttype] = nil
            end
        end
        if next(values) == nil then
            MAIN_KEYS[key] = nil
        end
    end
    module.modify_main_tags(data)
end
+
+
+function module.modify_main_tags(data)
+ if type(data) == 'string' then
+ local preset = data
+ if data:sub(1, 7) == 'street/' then
+ data = PRESETS.MAIN_TAGS_STREETS[data:sub(8)]
+ elseif data:sub(1, 4) == 'poi/' then
+ data = PRESETS.MAIN_TAGS_POIS(data:sub(5))
+ else
+ data = PRESETS.MAIN_TAGS[data]
+ end
+ if data == nil then
+ error('Unknown preset for main tags: ' .. preset)
+ end
+ end
+
+ for k, v in pairs(data) do
+ if MAIN_KEYS[k] == nil then
+ MAIN_KEYS[k] = {}
+ end
+ if type(v) == 'function' then
+ MAIN_KEYS[k][1] = v
+ elseif type(v) == 'string' then
+ MAIN_KEYS[k][1] = PlaceTransform[v]
+ elseif type(v) == 'table' then
+ for subk, subv in pairs(v) do
+ if type(subv) == 'function' then
+ MAIN_KEYS[k][subk] = subv
+ else
+ MAIN_KEYS[k][subk] = PlaceTransform[subv]
+ end
+ end
+ end
+ end
+end
+
+
+-- Add or override name-tag groups. `data` maps group names to lists of
+-- key match patterns (an empty list removes the group), or is a preset
+-- name from PRESETS.NAME_TAGS. Rebuilds the name filter afterwards.
+function module.modify_name_tags(data)
+    if type(data) == 'string' then
+        local preset_name = data
+        data = PRESETS.NAME_TAGS[preset_name]
+        if data == nil then
+            error('Unknown preset for name keys: ' .. preset_name)
+        end
+    end
+
+    for group, patterns in pairs(data) do
+        if next(patterns) == nil then
+            NAMES[group] = nil
+        else
+            NAMES[group] = patterns
+        end
+    end
+    NAME_FILTER = module.tag_group(NAMES)
+    -- House names feed the fallback:name group in the main keys.
+    remove_group_from_main('fallback:name')
+    if data.house ~= nil then
+        merge_filters_into_main('fallback:name', data.house)
+    end
+end
+
+
+-- Replace the name-tag configuration wholesale: clears NAMES, then
+-- delegates to modify_name_tags for preset resolution and filter rebuild.
+function module.set_name_tags(data)
+    NAMES = {}
+    module.modify_name_tags(data)
+end
+
+
+-- Replace the address-tag configuration wholesale.
+-- NOTE(review): this function is defined a second time further down in
+-- this file; in Lua the later definition wins, so this one is dead code.
+function module.set_address_tags(data)
+    ADDRESS_TAGS = {}
+    module.modify_address_tags(data)
+end
+
+
+-- Add or override address-tag groups. `data` maps group names to lists
+-- of key match patterns (empty list removes the group) or is a preset
+-- name from PRESETS.ADDRESS_TAGS. The pseudo-group 'postcode_fallback'
+-- carries a boolean instead of a pattern list.
+function module.modify_address_tags(data)
+    if type(data) == 'string' then
+        local preset_name = data
+        data = PRESETS.ADDRESS_TAGS[preset_name]
+        if data == nil then
+            error('Unknown preset for address keys: ' .. preset_name)
+        end
+    end
+
+    for group, patterns in pairs(data) do
+        -- Check the pseudo-group first: its value may not be a table.
+        if group == 'postcode_fallback' then
+            POSTCODE_FALLBACK = patterns
+        elseif next(patterns) == nil then
+            ADDRESS_TAGS[group] = nil
+        else
+            ADDRESS_TAGS[group] = patterns
+        end
+    end
+
+    ADDRESS_FILTER = module.tag_group(ADDRESS_TAGS)
+
+    -- Rebuild the main-key fallback groups derived from address keys.
+    remove_group_from_main('fallback:address')
+    merge_filters_into_main('fallback:address', data.main)
+    merge_filters_into_main('fallback:address', data.interpolation)
+    remove_group_from_main('fallback:postcode')
+    if POSTCODE_FALLBACK then
+        merge_filters_into_main('fallback:postcode', data.postcode)
+    end
+end
+
+
+-- Replace the address-tag configuration wholesale.
+-- Fixed: reset ADDRESS_TAGS -- the table actually read by
+-- modify_address_tags and tag_group -- instead of the otherwise unused
+-- global ADDRESS_TAGS_SOURCE. With the old name, this (last-wins)
+-- definition never cleared the previous configuration.
+function module.set_address_tags(data)
+    ADDRESS_TAGS = {}
+    module.modify_address_tags(data)
+end
+
+
+-- Enable or disable the postcode fallback: when enabled, objects with
+-- only a postcode still get a main entry via the fallback:postcode group.
+function module.set_postcode_fallback(enable)
+    local changed = POSTCODE_FALLBACK ~= enable
+    if changed then
+        remove_group_from_main('fallback:postcode')
+        if enable then
+            merge_filters_into_main('fallback:postcode', ADDRESS_TAGS.postcode)
+        end
+    end
+    POSTCODE_FALLBACK = enable
+end
+
+
+-- Configure what happens to tags not consumed as main/name/address tags.
+-- `data` may be a custom filter function, nil (restore the default), or
+-- a table with either extra_keys/extra_tags (keep only matches) or
+-- delete_keys/delete_tags (keep everything except matches) -- not both.
+function module.set_unused_handling(data)
+    if type(data) == 'function' then
+        EXTRATAGS_FILTER = data
+        return
+    end
+    if data == nil then
+        EXTRATAGS_FILTER = default_extratags_filter
+        return
+    end
+
+    -- Build a collector that copies unconsumed tags (except the current
+    -- main key `k`) into extratags. `negate` inverts the matcher so the
+    -- same closure serves both the keep- and the delete-style filter.
+    local function make_collector(matcher, negate)
+        return function (p, k)
+            local extra = {}
+            for kin, vin in pairs(p.intags) do
+                if kin ~= k then
+                    local matched = matcher(kin, vin)
+                    if negate then
+                        matched = not matched
+                    end
+                    if matched then
+                        extra[kin] = vin
+                    end
+                end
+            end
+            if next(extra) == nil then
+                return p.extratags
+            end
+            -- Previously collected extratags win over unconsumed input tags.
+            for kextra, vextra in pairs(p.extratags) do
+                extra[kextra] = vextra
+            end
+            return extra
+        end
+    end
+
+    if data.extra_keys == nil and data.extra_tags == nil then
+        local delfilter = module.tag_match{keys = data.delete_keys, tags = data.delete_tags}
+        EXTRATAGS_FILTER = make_collector(delfilter, true)
+    elseif data.delete_keys == nil and data.delete_tags == nil then
+        local incfilter = module.tag_match{keys = data.extra_keys, tags = data.extra_tags}
+        EXTRATAGS_FILTER = make_collector(incfilter, false)
+    else
+        error("unused handler can have only 'extra_keys' or 'delete_keys' set.")
+    end
+end
+
+-- Configure which relation types are turned into geometries.
+-- `data` maps a relation 'type' tag value to 'multipolygon' or
+-- 'multiline'; any other value drops that relation type.
+function module.set_relation_types(data)
+    module.RELATION_TYPES = {}
+    -- Fixed: the generic for needs an iterator; 'for k, v in data do'
+    -- raises "attempt to call a table value" at runtime.
+    for k, v in pairs(data) do
+        if v == 'multipolygon' then
+            module.RELATION_TYPES[k] = module.relation_as_multipolygon
+        elseif v == 'multiline' then
+            module.RELATION_TYPES[k] = module.relation_as_multiline
+        end
+    end
+end
+
+
+-- Export the configured main, name and address tag sets
+-- (used for taginfo generation).
+function module.get_taginfo()
+    return {main = MAIN_KEYS, name = NAMES, address = ADDRESS_TAGS}
+end
+
+return module
--- /dev/null
+-- Defines defaults used in the topic definitions.
+
+local module = {}
+
+-- Helper functions
+
+-- Merge the pattern lists of group2 into group1: lists missing from
+-- group1 are adopted as-is, lists present in both are appended to.
+-- Returns the modified group1.
+local function group_merge(group1, group2)
+    for name, values in pairs(group2) do
+        local target = group1[name]
+        if target == nil then
+            group1[name] = values
+        else
+            for _, v in pairs(values) do
+                target[#target + 1] = v
+            end
+        end
+    end
+
+    return group1
+end
+
+-- Customized main tag filter functions
+
+local EXCLUDED_FOOTWAYS = { sidewalk = 1, crossing = 1, link = 1, traffic_aisle }
+
+-- Keep a footway only when it is named and its footway value is not in
+-- the excluded set; returns the place on success, false otherwise.
+local function filter_footways(place)
+    if not place.has_name then
+        return false
+    end
+    local footway = place.object.tags.footway
+    if footway ~= nil and EXCLUDED_FOOTWAYS[footway] == 1 then
+        return false
+    end
+    return place
+end
+
+-- Build a filter accepting a place only when tag `key` has exactly
+-- `value` (and, with `named`, only when the place carries a name).
+local function include_when_tag_present(key, value, named)
+    if named then
+        return function(place)
+            return (place.has_name and place.intags[key] == value) and place or false
+        end
+    end
+    return function(place)
+        return (place.intags[key] == value) and place or false
+    end
+end
+
+-- Build a filter rejecting a place whenever tag `key` is present
+-- (and, with `named`, also rejecting unnamed places).
+local function exclude_when_key_present(key, named)
+    if named then
+        return function(place)
+            return (place.has_name and place.intags[key] == nil) and place or false
+        end
+    end
+    return function(place)
+        return (place.intags[key] == nil) and place or false
+    end
+end
+
+-- Turn a lock on a waterway into a place named after lock_name/lock_ref;
+-- anything without a waterway tag or a lock_name is discarded.
+local function lock_transform(place)
+    local tags = place.object.tags
+    if tags.waterway == nil then
+        return false
+    end
+    local name = tags.lock_name
+    if name == nil then
+        return false
+    end
+    return place:clone{names={name=name, ref=tags.lock_ref}}
+end
+
+-- Main tag definition
+
+-- Presets for main tags: tags that give an object a search entry of its
+-- own. Values are processing-type names ('always', 'named', 'fallback',
+-- 'delete', ...) or custom filter functions.
+module.MAIN_TAGS = {}
+
+-- Administrative profile: boundaries and place nodes, with common
+-- landuse values available as a fallback.
+module.MAIN_TAGS.admin = {
+    boundary = {administrative = 'named'},
+    landuse = {residential = 'fallback',
+               farm = 'fallback',
+               farmyard = 'fallback',
+               industrial = 'fallback',
+               commercial = 'fallback',
+               allotments = 'fallback',
+               retail = 'fallback'},
+    place = {county = 'always',
+             district = 'always',
+             municipality = 'always',
+             city = 'always',
+             town = 'always',
+             borough = 'always',
+             village = 'always',
+             suburb = 'always',
+             hamlet = 'always',
+             croft = 'always',
+             subdivision = 'always',
+             allotments = 'always',
+             neighbourhood = 'always',
+             quarter = 'always',
+             isolated_dwelling = 'always',
+             farm = 'always',
+             city_block = 'always',
+             locality = 'always'}
+}
+
+-- Boundary-centric profile. The positional (array) entry appears to act
+-- as the catch-all for values not listed explicitly -- it is stored at
+-- index [1] by modify_main_tags.
+module.MAIN_TAGS.all_boundaries = {
+    boundary = {'named',
+                place = 'delete',
+                land_area = 'delete',
+                postal_code = 'always'},
+    landuse = 'fallback',
+    place = 'always'
+}
+
+-- Natural-feature profile: waterways, natural features and mountain
+-- passes. Custom filters handle the natural=water / water=* overlap so
+-- a lake is indexed only once.
+module.MAIN_TAGS.natural = {
+    waterway = {'named',
+                riverbank = 'delete'},
+    natural = {'named',
+               yes = 'delete',
+               no = 'delete',
+               coastline = 'delete',
+               saddle = 'fallback',
+               water = exclude_when_key_present('water', true)},
+    mountain_pass = {'always',
+                     no = 'delete'},
+    water = {include_when_tag_present('natural', 'water', true),
+             river = 'never',
+             stream = 'never',
+             canal = 'never',
+             ditch = 'never',
+             drain = 'never',
+             fish_pass = 'never',
+             yes = 'delete',
+             intermittent = 'delete',
+             tidal = 'delete'
+            }
+}
+
+-- POI preset as a function: `group` decides the fate of uninteresting
+-- values -- 'delete' drops them, 'extra' moves them to extratags.
+-- Defaults to 'delete'.
+module.MAIN_TAGS_POIS = function (group)
+    group = group or 'delete'
+    return {
+    aerialway = {'always',
+                 no = group,
+                 pylon = group},
+    aeroway = {'always',
+               no = group},
+    amenity = {'always',
+               no = group,
+               parking_space = group,
+               parking_entrance = group,
+               waste_disposal = group,
+               hunting_stand = group},
+    building = {'fallback',
+                no = group},
+    bridge = {'named_with_key',
+              no = group},
+    club = {'always',
+            no = group},
+    craft = {'always',
+             no = group},
+    emergency = {'always',
+                 no = group,
+                 yes = group,
+                 fire_hydrant = group},
+    healthcare = {'fallback',
+                  yes = group,
+                  no = group},
+    highway = {'always',
+               no = group,
+               turning_circle = group,
+               mini_roundabout = group,
+               noexit = group,
+               crossing = group,
+               give_way = group,
+               stop = group,
+               turning_loop = group,
+               passing_place = group,
+               street_lamp = 'named',
+               traffic_signals = 'named'},
+    historic = {'always',
+                yes = group,
+                no = group},
+    information = {include_when_tag_present('tourism', 'information'),
+                   yes = 'delete',
+                   route_marker = 'never',
+                   trail_blaze = 'never'},
+    junction = {'fallback',
+                no = group},
+    leisure = {'always',
+               nature_reserve = 'fallback',
+               swimming_pool = 'named',
+               no = group},
+    lock = {yes = lock_transform},
+    man_made = {pier = 'always',
+                tower = 'always',
+                bridge = 'always',
+                works = 'named',
+                water_tower = 'always',
+                dyke = 'named',
+                adit = 'named',
+                lighthouse = 'always',
+                watermill = 'always',
+                tunnel = 'always'},
+    military = {'always',
+                yes = group,
+                no = group},
+    office = {'always',
+              no = group},
+    railway = {'named',
+               rail = group,
+               no = group,
+               abandoned = group,
+               disused = group,
+               razed = group,
+               level_crossing = group,
+               switch = group,
+               signal = group,
+               buffer_stop = group},
+    shop = {'always',
+            no = group},
+    tourism = {'always',
+               no = group,
+               yes = group,
+               information = exclude_when_key_present('information')},
+    tunnel = {'named_with_key',
+              no = group}
+} end
+
+-- Street presets, selected via the 'street/<name>' preset syntax.
+module.MAIN_TAGS_STREETS = {}
+
+-- 'default': major roads always indexed, minor ways only when named
+-- (footways additionally filtered through filter_footways).
+module.MAIN_TAGS_STREETS.default = {
+    place = {square = 'always'},
+    highway = {motorway = 'always',
+               trunk = 'always',
+               primary = 'always',
+               secondary = 'always',
+               tertiary = 'always',
+               unclassified = 'always',
+               residential = 'always',
+               road = 'always',
+               living_street = 'always',
+               pedestrian = 'always',
+               service = 'named',
+               cycleway = 'named',
+               path = 'named',
+               footway = filter_footways,
+               steps = 'named',
+               bridleway = 'named',
+               track = 'named',
+               motorway_link = 'named',
+               trunk_link = 'named',
+               primary_link = 'named',
+               secondary_link = 'named',
+               tertiary_link = 'named'}
+}
+
+-- 'car': only ways usable by car, named or not.
+module.MAIN_TAGS_STREETS.car = {
+    place = {square = 'always'},
+    highway = {motorway = 'always',
+               trunk = 'always',
+               primary = 'always',
+               secondary = 'always',
+               tertiary = 'always',
+               unclassified = 'always',
+               residential = 'always',
+               road = 'always',
+               living_street = 'always',
+               service = 'always',
+               track = 'always',
+               motorway_link = 'always',
+               trunk_link = 'always',
+               primary_link = 'always',
+               secondary_link = 'always',
+               tertiary_link = 'always'}
+}
+
+-- 'all': every listed highway type, named or not.
+module.MAIN_TAGS_STREETS.all = {
+    place = {square = 'always'},
+    highway = {motorway = 'always',
+               trunk = 'always',
+               primary = 'always',
+               secondary = 'always',
+               tertiary = 'always',
+               unclassified = 'always',
+               residential = 'always',
+               road = 'always',
+               living_street = 'always',
+               pedestrian = 'always',
+               service = 'always',
+               cycleway = 'always',
+               path = 'always',
+               footway = 'always',
+               steps = 'always',
+               bridleway = 'always',
+               track = 'always',
+               motorway_link = 'always',
+               trunk_link = 'always',
+               primary_link = 'always',
+               secondary_link = 'always',
+               tertiary_link = 'always'}
+}
+
+
+-- name tags
+
+-- Presets for name tags, grouped by role ('main', 'extra', 'house');
+-- '*' is a wildcard in key patterns. The groups' exact semantics are
+-- applied by the consuming name filter (defined elsewhere).
+module.NAME_TAGS = {}
+
+module.NAME_TAGS.core = {main = {'name', 'name:*',
+                                 'int_name', 'int_name:*',
+                                 'nat_name', 'nat_name:*',
+                                 'reg_name', 'reg_name:*',
+                                 'loc_name', 'loc_name:*',
+                                 'old_name', 'old_name:*',
+                                 'alt_name', 'alt_name:*', 'alt_name_*',
+                                 'official_name', 'official_name:*',
+                                 'place_name', 'place_name:*',
+                                 'short_name', 'short_name:*'},
+                         extra = {'ref', 'int_ref', 'nat_ref', 'reg_ref',
+                                  'loc_ref', 'old_ref', 'ISO3166-2'}
+                        }
+-- House names (used as the fallback:name group by modify_name_tags).
+module.NAME_TAGS.address = {house = {'addr:housename'}}
+-- POI profile: core names plus brand and airport codes.
+module.NAME_TAGS.poi = group_merge({main = {'brand'},
+                                    extra = {'iata', 'icao'}},
+                                   module.NAME_TAGS.core)
+
+-- Address tagging
+
+-- Presets for address tags, grouped into 'main' (house numbers),
+-- 'interpolation', 'extra', 'postcode' and 'country' keys.
+module.ADDRESS_TAGS = {}
+
+module.ADDRESS_TAGS.core = { extra = {'addr:*', 'is_in:*', 'tiger:county'},
+                             postcode = {'postal_code', 'postcode', 'addr:postcode',
+                                         'tiger:zip_left', 'tiger:zip_right'},
+                             country = {'country_code', 'ISO3166-1',
+                                        'addr:country_code', 'is_in:country_code',
+                                        'addr:country', 'is_in:country'}
+                           }
+
+-- House-number handling, enabling address interpolation lines.
+module.ADDRESS_TAGS.houses = { main = {'addr:housenumber',
+                                       'addr:conscriptionnumber',
+                                       'addr:streetnumber'},
+                               interpolation = {'addr:interpolation'}
+                             }
+
+-- Ignored tags (prefiltered away)
+
+-- Presets of keys dropped by the prefilter before any other processing.
+module.IGNORE_KEYS = {}
+
+-- Editing metadata and import artefacts.
+module.IGNORE_KEYS.metatags = {'note', 'note:*', 'source', 'source:*', '*source',
+                               'attribution', 'comment', 'fixme', 'created_by',
+                               'tiger:cfcc', 'tiger:reviewed', 'nysgissam:*',
+                               'NHD:*', 'nhd:*', 'gnis:*', 'geobase:*', 'yh:*',
+                               'osak:*', 'naptan:*', 'CLC:*', 'import', 'it:fvg:*',
+                               'lacounty:*', 'ref:linz:*',
+                               'ref:bygningsnr', 'ref:ruian:*', 'building:ruian:type',
+                               'type',
+                               'is_in:postcode'}
+-- Auxiliary name keys that are not names themselves.
+module.IGNORE_KEYS.name = {'*:prefix', '*:suffix', 'name:prefix:*', 'name:suffix:*',
+                           'name:etymology', 'name:etymology:*',
+                           'name:signed', 'name:botanical'}
+-- Auxiliary address keys not used for address lookup.
+module.IGNORE_KEYS.address = {'addr:street:*', 'addr:city:*', 'addr:district:*',
+                              'addr:province:*', 'addr:subdistrict:*', 'addr:place:*',
+                              'addr:TW:dataset'}
+
+-- Extra tags (moved into the extratags column by the prefilter)
+
+-- Presets of keys always moved into extratags when present.
+module.EXTRATAGS = {}
+
+module.EXTRATAGS.required = {'wikipedia', 'wikipedia:*', 'wikidata', 'capital'}
+
+return module
--- /dev/null
+-- Theme script: administrative boundaries plus streets and full
+-- address (house number) support.
+-- Script arguments: (unused, flex-base module, user configuration).
+local _, flex, cfg = ...
+
+-- Boundaries/places, the configured street theme and postal-code areas.
+flex.set_main_tags('admin')
+flex.modify_main_tags('street/' .. (cfg.street_theme or 'default'))
+flex.modify_main_tags{boundary = {postal_code = 'always'}}
+
+flex.set_name_tags('core')
+flex.modify_name_tags('address')
+
+-- Full address handling including house numbers and interpolations.
+flex.set_address_tags('core')
+flex.modify_address_tags('houses')
+
+flex.ignore_keys('metatags')
+flex.add_for_extratags('required')
+
+-- With extratags enabled, keep unused tags (minus TIGER noise);
+-- otherwise also drop auxiliary name and address keys.
+if cfg.with_extratags then
+    flex.set_unused_handling{delete_keys = {'tiger:*'}}
+    flex.add_for_extratags('name')
+    flex.add_for_extratags('address')
+else
+    flex.ignore_keys('name')
+    flex.ignore_keys('address')
+end
--- /dev/null
+-- Theme script: administrative boundaries only -- no streets, no house
+-- numbers, postcode fallback disabled.
+-- Script arguments: (unused, flex-base module, user configuration).
+local _, flex, cfg = ...
+
+flex.set_main_tags('admin')
+
+flex.set_name_tags('core')
+
+flex.set_address_tags('core')
+flex.set_postcode_fallback(false)
+
+flex.ignore_keys('metatags')
+flex.add_for_extratags('required')
+
+-- With extratags enabled, keep unused tags (minus TIGER noise);
+-- otherwise also drop auxiliary name and address keys.
+if cfg.with_extratags then
+    flex.set_unused_handling{delete_keys = {'tiger:*'}}
+    flex.add_for_extratags('name')
+    flex.add_for_extratags('address')
+else
+    flex.ignore_keys('name')
+    flex.ignore_keys('address')
+end
--- /dev/null
+-- Theme script: full import -- all boundaries, natural features,
+-- streets and POIs, with full address support.
+-- Script arguments: (unused, flex-base module, user configuration).
+local _, flex, cfg = ...
+
+-- POIs of marginal interest go to extratags when enabled, otherwise
+-- they are deleted outright.
+local group
+if cfg.with_extratags then
+    group = 'extra'
+else
+    group = 'delete'
+end
+
+flex.set_main_tags('all_boundaries')
+flex.modify_main_tags('natural')
+flex.modify_main_tags('street/' .. (cfg.street_theme or 'default'))
+flex.modify_main_tags('poi/' .. group)
+
+flex.set_name_tags('core')
+flex.modify_name_tags('address')
+flex.modify_name_tags('poi')
+
+flex.set_address_tags('core')
+flex.modify_address_tags('houses')
+
+flex.ignore_keys('metatags')
+flex.add_for_extratags('required')
+
+-- With extratags enabled, keep unused tags (minus TIGER noise);
+-- otherwise also drop auxiliary name and address keys.
+if cfg.with_extratags then
+    flex.set_unused_handling{delete_keys = {'tiger:*'}}
+    flex.add_for_extratags('name')
+    flex.add_for_extratags('address')
+else
+    flex.ignore_keys('name')
+    flex.ignore_keys('address')
+end
--- /dev/null
+-- Theme script: administrative boundaries and streets without house
+-- numbers; postcode fallback disabled.
+-- Script arguments: (unused, flex-base module, user configuration).
+local _, flex, cfg = ...
+
+flex.set_main_tags('admin')
+flex.modify_main_tags('street/' .. (cfg.street_theme or 'default'))
+flex.modify_main_tags{boundary = {postal_code = 'always'}}
+
+flex.set_name_tags('core')
+
+flex.set_address_tags('core')
+flex.set_postcode_fallback(false)
+
+flex.ignore_keys('metatags')
+flex.add_for_extratags('required')
+
+-- With extratags enabled, keep unused tags (minus TIGER noise);
+-- otherwise also drop auxiliary name and address keys.
+if cfg.with_extratags then
+    flex.set_unused_handling{delete_keys = {'tiger:*'}}
+    flex.add_for_extratags('name')
+    flex.add_for_extratags('address')
+else
+    flex.ignore_keys('name')
+    flex.ignore_keys('address')
+end
+++ /dev/null
-<?php
-/**
- * SPDX-License-Identifier: GPL-2.0-only
- *
- * This file is part of Nominatim. (https://nominatim.org)
- *
- * Copyright (C) 2022 by the Nominatim developer community.
- * For a full list of authors see the git log.
- */
-
-namespace Nominatim;
-
-require_once(CONST_LibDir.'/ClassTypes.php');
-
-/**
- * Detailed list of address parts for a single result
- */
-class AddressDetails
-{
- private $iPlaceID;
- private $aAddressLines;
-
- public function __construct(&$oDB, $iPlaceID, $sHousenumber, $mLangPref)
- {
- $this->iPlaceID = $iPlaceID;
-
- if (is_array($mLangPref)) {
- $mLangPref = $oDB->getArraySQL($oDB->getDBQuotedList($mLangPref));
- }
-
- if (!isset($sHousenumber)) {
- $sHousenumber = -1;
- }
-
- $sSQL = 'SELECT *,';
- $sSQL .= ' get_name_by_language(name,'.$mLangPref.') as localname';
- $sSQL .= ' FROM get_addressdata('.$iPlaceID.','.$sHousenumber.')';
- $sSQL .= ' ORDER BY rank_address DESC, isaddress DESC';
-
- $this->aAddressLines = $oDB->getAll($sSQL);
- }
-
- private static function isAddress($aLine)
- {
- return $aLine['isaddress'] || $aLine['type'] == 'country_code';
- }
-
- public function getAddressDetails($bAll = false)
- {
- if ($bAll) {
- return $this->aAddressLines;
- }
-
- return array_filter($this->aAddressLines, array(__CLASS__, 'isAddress'));
- }
-
- public function getLocaleAddress()
- {
- $aParts = array();
- $sPrevResult = '';
-
- foreach ($this->aAddressLines as $aLine) {
- if ($aLine['isaddress'] && $sPrevResult != $aLine['localname']) {
- $sPrevResult = $aLine['localname'];
- $aParts[] = $sPrevResult;
- }
- }
-
- return join(', ', $aParts);
- }
-
- public function getAddressNames()
- {
- $aAddress = array();
-
- foreach ($this->aAddressLines as $aLine) {
- if (!self::isAddress($aLine)) {
- continue;
- }
-
- $sTypeLabel = ClassTypes\getLabelTag($aLine);
-
- $sName = null;
- if (isset($aLine['localname']) && $aLine['localname']!=='') {
- $sName = $aLine['localname'];
- } elseif (isset($aLine['housenumber']) && $aLine['housenumber']!=='') {
- $sName = $aLine['housenumber'];
- }
-
- if (isset($sName)
- && (!isset($aAddress[$sTypeLabel])
- || $aLine['class'] == 'place')
- ) {
- $aAddress[$sTypeLabel] = $sName;
-
- if (!empty($aLine['name'])) {
- $this->addSubdivisionCode($aAddress, $aLine['admin_level'], $aLine['name']);
- }
- }
- }
-
- return $aAddress;
- }
-
- /**
- * Annotates the given json with geocodejson address information fields.
- *
- * @param array $aJson Json hash to add the fields to.
- *
- * Geocodejson has the following fields:
- * street, locality, postcode, city, district,
- * county, state, country
- *
- * Postcode and housenumber are added by type, district is not used.
- * All other fields are set according to address rank.
- */
- public function addGeocodeJsonAddressParts(&$aJson)
- {
- foreach (array_reverse($this->aAddressLines) as $aLine) {
- if (!$aLine['isaddress']) {
- continue;
- }
-
- if (!isset($aLine['localname']) || $aLine['localname'] == '') {
- continue;
- }
-
- if ($aLine['type'] == 'postcode' || $aLine['type'] == 'postal_code') {
- $aJson['postcode'] = $aLine['localname'];
- continue;
- }
-
- if ($aLine['type'] == 'house_number') {
- $aJson['housenumber'] = $aLine['localname'];
- continue;
- }
-
- if ($this->iPlaceID == $aLine['place_id']) {
- continue;
- }
-
- $iRank = (int)$aLine['rank_address'];
-
- if ($iRank > 25 && $iRank < 28) {
- $aJson['street'] = $aLine['localname'];
- } elseif ($iRank >= 22 && $iRank <= 25) {
- $aJson['locality'] = $aLine['localname'];
- } elseif ($iRank >= 17 && $iRank <= 21) {
- $aJson['district'] = $aLine['localname'];
- } elseif ($iRank >= 13 && $iRank <= 16) {
- $aJson['city'] = $aLine['localname'];
- } elseif ($iRank >= 10 && $iRank <= 12) {
- $aJson['county'] = $aLine['localname'];
- } elseif ($iRank >= 5 && $iRank <= 9) {
- $aJson['state'] = $aLine['localname'];
- } elseif ($iRank == 4) {
- $aJson['country'] = $aLine['localname'];
- }
- }
- }
-
- public function getAdminLevels()
- {
- $aAddress = array();
- foreach (array_reverse($this->aAddressLines) as $aLine) {
- if (self::isAddress($aLine)
- && isset($aLine['admin_level'])
- && $aLine['admin_level'] < 15
- && !isset($aAddress['level'.$aLine['admin_level']])
- ) {
- $aAddress['level'.$aLine['admin_level']] = $aLine['localname'];
- }
- }
- return $aAddress;
- }
-
- public function debugInfo()
- {
- return $this->aAddressLines;
- }
-
- private function addSubdivisionCode(&$aAddress, $iAdminLevel, $nameDetails)
- {
- if (is_string($nameDetails)) {
- $nameDetails = json_decode('{' . str_replace('"=>"', '":"', $nameDetails) . '}', true);
- }
- if (!empty($nameDetails['ISO3166-2'])) {
- $aAddress["ISO3166-2-lvl$iAdminLevel"] = $nameDetails['ISO3166-2'];
- }
- }
-}
+++ /dev/null
-<?php
-/**
- * SPDX-License-Identifier: GPL-2.0-only
- *
- * This file is part of Nominatim. (https://nominatim.org)
- *
- * Copyright (C) 2022 by the Nominatim developer community.
- * For a full list of authors see the git log.
- */
-
-namespace Nominatim\ClassTypes;
-
-/**
- * Create a label tag for the given place that can be used as an XML name.
- *
- * @param array[] $aPlace Information about the place to label.
- *
- * A label tag groups various object types together under a common
- * label. The returned value is lower case and has no spaces
- */
-function getLabelTag($aPlace, $sCountry = null)
-{
- $iRank = (int) ($aPlace['rank_address'] ?? 30);
- $sLabel;
- if (isset($aPlace['place_type'])) {
- $sLabel = $aPlace['place_type'];
- } elseif ($aPlace['class'] == 'boundary' && $aPlace['type'] == 'administrative') {
- $sLabel = getBoundaryLabel($iRank/2, $sCountry);
- } elseif ($aPlace['type'] == 'postal_code') {
- $sLabel = 'postcode';
- } elseif ($iRank < 26) {
- $sLabel = $aPlace['type'];
- } elseif ($iRank < 28) {
- $sLabel = 'road';
- } elseif ($aPlace['class'] == 'place'
- && ($aPlace['type'] == 'house_number' ||
- $aPlace['type'] == 'house_name' ||
- $aPlace['type'] == 'country_code')
- ) {
- $sLabel = $aPlace['type'];
- } else {
- $sLabel = $aPlace['class'];
- }
-
- return strtolower(str_replace(' ', '_', $sLabel));
-}
-
-/**
- * Create a label for the given place.
- *
- * @param array[] $aPlace Information about the place to label.
- */
-function getLabel($aPlace, $sCountry = null)
-{
- if (isset($aPlace['place_type'])) {
- return ucwords(str_replace('_', ' ', $aPlace['place_type']));
- }
-
- if ($aPlace['class'] == 'boundary' && $aPlace['type'] == 'administrative') {
- return getBoundaryLabel(($aPlace['rank_address'] ?? 30)/2, $sCountry ?? null);
- }
-
- // Return a label only for 'important' class/type combinations
- if (getImportance($aPlace) !== null) {
- return ucwords(str_replace('_', ' ', $aPlace['type']));
- }
-
- return null;
-}
-
-
-/**
- * Return a simple label for an administrative boundary for the given country.
- *
- * @param int $iAdminLevel Content of admin_level tag.
- * @param string $sCountry Country code of the country where the object is
- * in. May be null, in which case a world-wide
- * fallback is used.
- * @param string $sFallback String to return if no explicit string is listed.
- *
- * @return string
- */
-function getBoundaryLabel($iAdminLevel, $sCountry, $sFallback = 'Administrative')
-{
- static $aBoundaryList = array (
- 'default' => array (
- 1 => 'Continent',
- 2 => 'Country',
- 3 => 'Region',
- 4 => 'State',
- 5 => 'State District',
- 6 => 'County',
- 7 => 'Municipality',
- 8 => 'City',
- 9 => 'City District',
- 10 => 'Suburb',
- 11 => 'Neighbourhood',
- 12 => 'City Block'
- ),
- 'no' => array (
- 3 => 'State',
- 4 => 'County'
- ),
- 'se' => array (
- 3 => 'State',
- 4 => 'County'
- )
- );
-
- if (isset($aBoundaryList[$sCountry])
- && isset($aBoundaryList[$sCountry][$iAdminLevel])
- ) {
- return $aBoundaryList[$sCountry][$iAdminLevel];
- }
-
- return $aBoundaryList['default'][$iAdminLevel] ?? $sFallback;
-}
-
-/**
- * Return an estimated radius of how far the object node extends.
- *
- * @param array[] $aPlace Information about the place. This must be a node
- * feature.
- *
- * @return float The radius around the feature in degrees.
- */
-function getDefRadius($aPlace)
-{
- $aSpecialRadius = array(
- 'place:continent' => 25,
- 'place:country' => 7,
- 'place:state' => 2.6,
- 'place:province' => 2.6,
- 'place:region' => 1.0,
- 'place:county' => 0.7,
- 'place:city' => 0.16,
- 'place:municipality' => 0.16,
- 'place:island' => 0.32,
- 'place:postcode' => 0.16,
- 'place:town' => 0.04,
- 'place:village' => 0.02,
- 'place:hamlet' => 0.02,
- 'place:district' => 0.02,
- 'place:borough' => 0.02,
- 'place:suburb' => 0.02,
- 'place:locality' => 0.01,
- 'place:neighbourhood'=> 0.01,
- 'place:quarter' => 0.01,
- 'place:city_block' => 0.01,
- 'landuse:farm' => 0.01,
- 'place:farm' => 0.01,
- 'place:airport' => 0.015,
- 'aeroway:aerodrome' => 0.015,
- 'railway:station' => 0.005
- );
-
- $sClassPlace = $aPlace['class'].':'.$aPlace['type'];
-
- return $aSpecialRadius[$sClassPlace] ?? 0.00005;
-}
-
-/**
- * Get the icon to use with the given object.
- */
-function getIcon($aPlace)
-{
- $aIcons = array(
- 'boundary:administrative' => 'poi_boundary_administrative',
- 'place:city' => 'poi_place_city',
- 'place:town' => 'poi_place_town',
- 'place:village' => 'poi_place_village',
- 'place:hamlet' => 'poi_place_village',
- 'place:suburb' => 'poi_place_village',
- 'place:locality' => 'poi_place_village',
- 'place:airport' => 'transport_airport2',
- 'aeroway:aerodrome' => 'transport_airport2',
- 'railway:station' => 'transport_train_station2',
- 'amenity:place_of_worship' => 'place_of_worship_unknown3',
- 'amenity:pub' => 'food_pub',
- 'amenity:bar' => 'food_bar',
- 'amenity:university' => 'education_university',
- 'tourism:museum' => 'tourist_museum',
- 'amenity:arts_centre' => 'tourist_art_gallery2',
- 'tourism:zoo' => 'tourist_zoo',
- 'tourism:theme_park' => 'poi_point_of_interest',
- 'tourism:attraction' => 'poi_point_of_interest',
- 'leisure:golf_course' => 'sport_golf',
- 'historic:castle' => 'tourist_castle',
- 'amenity:hospital' => 'health_hospital',
- 'amenity:school' => 'education_school',
- 'amenity:theatre' => 'tourist_theatre',
- 'amenity:library' => 'amenity_library',
- 'amenity:fire_station' => 'amenity_firestation3',
- 'amenity:police' => 'amenity_police2',
- 'amenity:bank' => 'money_bank2',
- 'amenity:post_office' => 'amenity_post_office',
- 'tourism:hotel' => 'accommodation_hotel2',
- 'amenity:cinema' => 'tourist_cinema',
- 'tourism:artwork' => 'tourist_art_gallery2',
- 'historic:archaeological_site' => 'tourist_archaeological2',
- 'amenity:doctors' => 'health_doctors',
- 'leisure:sports_centre' => 'sport_leisure_centre',
- 'leisure:swimming_pool' => 'sport_swimming_outdoor',
- 'shop:supermarket' => 'shopping_supermarket',
- 'shop:convenience' => 'shopping_convenience',
- 'amenity:restaurant' => 'food_restaurant',
- 'amenity:fast_food' => 'food_fastfood',
- 'amenity:cafe' => 'food_cafe',
- 'tourism:guest_house' => 'accommodation_bed_and_breakfast',
- 'amenity:pharmacy' => 'health_pharmacy_dispensing',
- 'amenity:fuel' => 'transport_fuel',
- 'natural:peak' => 'poi_peak',
- 'natural:wood' => 'landuse_coniferous_and_deciduous',
- 'shop:bicycle' => 'shopping_bicycle',
- 'shop:clothes' => 'shopping_clothes',
- 'shop:hairdresser' => 'shopping_hairdresser',
- 'shop:doityourself' => 'shopping_diy',
- 'shop:estate_agent' => 'shopping_estateagent2',
- 'shop:car' => 'shopping_car',
- 'shop:garden_centre' => 'shopping_garden_centre',
- 'shop:car_repair' => 'shopping_car_repair',
- 'shop:bakery' => 'shopping_bakery',
- 'shop:butcher' => 'shopping_butcher',
- 'shop:apparel' => 'shopping_clothes',
- 'shop:laundry' => 'shopping_laundrette',
- 'shop:beverages' => 'shopping_alcohol',
- 'shop:alcohol' => 'shopping_alcohol',
- 'shop:optician' => 'health_opticians',
- 'shop:chemist' => 'health_pharmacy',
- 'shop:gallery' => 'tourist_art_gallery2',
- 'shop:jewelry' => 'shopping_jewelry',
- 'tourism:information' => 'amenity_information',
- 'historic:ruins' => 'tourist_ruin',
- 'amenity:college' => 'education_school',
- 'historic:monument' => 'tourist_monument',
- 'historic:memorial' => 'tourist_monument',
- 'historic:mine' => 'poi_mine',
- 'tourism:caravan_site' => 'accommodation_caravan_park',
- 'amenity:bus_station' => 'transport_bus_station',
- 'amenity:atm' => 'money_atm2',
- 'tourism:viewpoint' => 'tourist_view_point',
- 'tourism:guesthouse' => 'accommodation_bed_and_breakfast',
- 'railway:tram' => 'transport_tram_stop',
- 'amenity:courthouse' => 'amenity_court',
- 'amenity:recycling' => 'amenity_recycling',
- 'amenity:dentist' => 'health_dentist',
- 'natural:beach' => 'tourist_beach',
- 'railway:tram_stop' => 'transport_tram_stop',
- 'amenity:prison' => 'amenity_prison',
- 'highway:bus_stop' => 'transport_bus_stop2'
- );
-
- $sClassPlace = $aPlace['class'].':'.$aPlace['type'];
-
- return $aIcons[$sClassPlace] ?? null;
-}
-
-/**
- * Get an icon for the given object with its full URL.
- */
-function getIconFile($aPlace)
-{
- if (CONST_MapIcon_URL === false) {
- return null;
- }
-
- $sIcon = getIcon($aPlace);
-
- if (!isset($sIcon)) {
- return null;
- }
-
- return CONST_MapIcon_URL.'/'.$sIcon.'.p.20.png';
-}
-
-/**
- * Return a class importance value for the given place.
- *
- * @param array[] $aPlace Information about the place.
- *
- * @return int An importance value. The lower the value, the more
- * important the class.
- */
-function getImportance($aPlace)
-{
- static $aWithImportance = null;
-
- if ($aWithImportance === null) {
- $aWithImportance = array_flip(array(
- 'boundary:administrative',
- 'place:country',
- 'place:state',
- 'place:province',
- 'place:county',
- 'place:city',
- 'place:region',
- 'place:island',
- 'place:town',
- 'place:village',
- 'place:hamlet',
- 'place:suburb',
- 'place:locality',
- 'landuse:farm',
- 'place:farm',
- 'highway:motorway_junction',
- 'highway:motorway',
- 'highway:trunk',
- 'highway:primary',
- 'highway:secondary',
- 'highway:tertiary',
- 'highway:residential',
- 'highway:unclassified',
- 'highway:living_street',
- 'highway:service',
- 'highway:track',
- 'highway:road',
- 'highway:byway',
- 'highway:bridleway',
- 'highway:cycleway',
- 'highway:pedestrian',
- 'highway:footway',
- 'highway:steps',
- 'highway:motorway_link',
- 'highway:trunk_link',
- 'highway:primary_link',
- 'landuse:industrial',
- 'landuse:residential',
- 'landuse:retail',
- 'landuse:commercial',
- 'place:airport',
- 'aeroway:aerodrome',
- 'railway:station',
- 'amenity:place_of_worship',
- 'amenity:pub',
- 'amenity:bar',
- 'amenity:university',
- 'tourism:museum',
- 'amenity:arts_centre',
- 'tourism:zoo',
- 'tourism:theme_park',
- 'tourism:attraction',
- 'leisure:golf_course',
- 'historic:castle',
- 'amenity:hospital',
- 'amenity:school',
- 'amenity:theatre',
- 'amenity:public_building',
- 'amenity:library',
- 'amenity:townhall',
- 'amenity:community_centre',
- 'amenity:fire_station',
- 'amenity:police',
- 'amenity:bank',
- 'amenity:post_office',
- 'leisure:park',
- 'amenity:park',
- 'landuse:park',
- 'landuse:recreation_ground',
- 'tourism:hotel',
- 'tourism:motel',
- 'amenity:cinema',
- 'tourism:artwork',
- 'historic:archaeological_site',
- 'amenity:doctors',
- 'leisure:sports_centre',
- 'leisure:swimming_pool',
- 'shop:supermarket',
- 'shop:convenience',
- 'amenity:restaurant',
- 'amenity:fast_food',
- 'amenity:cafe',
- 'tourism:guest_house',
- 'amenity:pharmacy',
- 'amenity:fuel',
- 'natural:peak',
- 'waterway:waterfall',
- 'natural:wood',
- 'natural:water',
- 'landuse:forest',
- 'landuse:cemetery',
- 'landuse:allotments',
- 'landuse:farmyard',
- 'railway:rail',
- 'waterway:canal',
- 'waterway:river',
- 'waterway:stream',
- 'shop:bicycle',
- 'shop:clothes',
- 'shop:hairdresser',
- 'shop:doityourself',
- 'shop:estate_agent',
- 'shop:car',
- 'shop:garden_centre',
- 'shop:car_repair',
- 'shop:newsagent',
- 'shop:bakery',
- 'shop:furniture',
- 'shop:butcher',
- 'shop:apparel',
- 'shop:electronics',
- 'shop:department_store',
- 'shop:books',
- 'shop:yes',
- 'shop:outdoor',
- 'shop:mall',
- 'shop:florist',
- 'shop:charity',
- 'shop:hardware',
- 'shop:laundry',
- 'shop:shoes',
- 'shop:beverages',
- 'shop:dry_cleaning',
- 'shop:carpet',
- 'shop:computer',
- 'shop:alcohol',
- 'shop:optician',
- 'shop:chemist',
- 'shop:gallery',
- 'shop:mobile_phone',
- 'shop:sports',
- 'shop:jewelry',
- 'shop:pet',
- 'shop:beauty',
- 'shop:stationery',
- 'shop:shopping_centre',
- 'shop:general',
- 'shop:electrical',
- 'shop:toys',
- 'shop:jeweller',
- 'shop:betting',
- 'shop:household',
- 'shop:travel_agency',
- 'shop:hifi',
- 'amenity:shop',
- 'tourism:information',
- 'place:house',
- 'place:house_name',
- 'place:house_number',
- 'place:country_code',
- 'leisure:pitch',
- 'highway:unsurfaced',
- 'historic:ruins',
- 'amenity:college',
- 'historic:monument',
- 'railway:subway',
- 'historic:memorial',
- 'leisure:nature_reserve',
- 'leisure:common',
- 'waterway:lock_gate',
- 'natural:fell',
- 'amenity:nightclub',
- 'highway:path',
- 'leisure:garden',
- 'landuse:reservoir',
- 'leisure:playground',
- 'leisure:stadium',
- 'historic:mine',
- 'natural:cliff',
- 'tourism:caravan_site',
- 'amenity:bus_station',
- 'amenity:kindergarten',
- 'highway:construction',
- 'amenity:atm',
- 'amenity:emergency_phone',
- 'waterway:lock',
- 'waterway:riverbank',
- 'natural:coastline',
- 'tourism:viewpoint',
- 'tourism:hostel',
- 'tourism:bed_and_breakfast',
- 'railway:halt',
- 'railway:platform',
- 'railway:tram',
- 'amenity:courthouse',
- 'amenity:recycling',
- 'amenity:dentist',
- 'natural:beach',
- 'place:moor',
- 'amenity:grave_yard',
- 'waterway:drain',
- 'landuse:grass',
- 'landuse:village_green',
- 'natural:bay',
- 'railway:tram_stop',
- 'leisure:marina',
- 'highway:stile',
- 'natural:moor',
- 'railway:light_rail',
- 'railway:narrow_gauge',
- 'natural:land',
- 'amenity:village_hall',
- 'waterway:dock',
- 'amenity:veterinary',
- 'landuse:brownfield',
- 'leisure:track',
- 'railway:historic_station',
- 'landuse:construction',
- 'amenity:prison',
- 'landuse:quarry',
- 'amenity:telephone',
- 'highway:traffic_signals',
- 'natural:heath',
- 'historic:house',
- 'amenity:social_club',
- 'landuse:military',
- 'amenity:health_centre',
- 'historic:building',
- 'amenity:clinic',
- 'highway:services',
- 'amenity:ferry_terminal',
- 'natural:marsh',
- 'natural:hill',
- 'highway:raceway',
- 'amenity:taxi',
- 'amenity:take_away',
- 'amenity:car_rental',
- 'place:islet',
- 'amenity:nursery',
- 'amenity:nursing_home',
- 'amenity:toilets',
- 'amenity:hall',
- 'waterway:boatyard',
- 'highway:mini_roundabout',
- 'historic:manor',
- 'tourism:chalet',
- 'amenity:bicycle_parking',
- 'amenity:hotel',
- 'waterway:weir',
- 'natural:wetland',
- 'natural:cave_entrance',
- 'amenity:crematorium',
- 'tourism:picnic_site',
- 'landuse:wood',
- 'landuse:basin',
- 'natural:tree',
- 'leisure:slipway',
- 'landuse:meadow',
- 'landuse:piste',
- 'amenity:care_home',
- 'amenity:club',
- 'amenity:medical_centre',
- 'historic:roman_road',
- 'historic:fort',
- 'railway:subway_entrance',
- 'historic:yes',
- 'highway:gate',
- 'leisure:fishing',
- 'historic:museum',
- 'amenity:car_wash',
- 'railway:level_crossing',
- 'leisure:bird_hide',
- 'natural:headland',
- 'tourism:apartments',
- 'amenity:shopping',
- 'natural:scrub',
- 'natural:fen',
- 'building:yes',
- 'mountain_pass:yes',
- 'amenity:parking',
- 'highway:bus_stop',
- 'place:postcode',
- 'amenity:post_box',
- 'place:houses',
- 'railway:preserved',
- 'waterway:derelict_canal',
- 'amenity:dead_pub',
- 'railway:disused_station',
- 'railway:abandoned',
- 'railway:disused'
- ));
- }
-
- $sClassPlace = $aPlace['class'].':'.$aPlace['type'];
-
- return $aWithImportance[$sClassPlace] ?? null;
-}
+++ /dev/null
-<?php
-/**
- * SPDX-License-Identifier: GPL-2.0-only
- *
- * This file is part of Nominatim. (https://nominatim.org)
- *
- * Copyright (C) 2022 by the Nominatim developer community.
- * For a full list of authors see the git log.
- */
-
-namespace Nominatim;
-
-require_once(CONST_LibDir.'/DatabaseError.php');
-
-/**
- * Uses PDO to access the database specified in the CONST_Database_DSN
- * setting.
- */
-class DB
-{
- protected $connection;
-
- public function __construct($sDSN = null)
- {
- $this->sDSN = $sDSN ?? getSetting('DATABASE_DSN');
- }
-
- public function connect($bNew = false, $bPersistent = true)
- {
- if (isset($this->connection) && !$bNew) {
- return true;
- }
- $aConnOptions = array(
- \PDO::ATTR_ERRMODE => \PDO::ERRMODE_EXCEPTION,
- \PDO::ATTR_DEFAULT_FETCH_MODE => \PDO::FETCH_ASSOC,
- \PDO::ATTR_PERSISTENT => $bPersistent
- );
-
- // https://secure.php.net/manual/en/ref.pdo-pgsql.connection.php
- try {
- $conn = new \PDO($this->sDSN, null, null, $aConnOptions);
- } catch (\PDOException $e) {
- $sMsg = 'Failed to establish database connection:' . $e->getMessage();
- throw new \Nominatim\DatabaseError($sMsg, 500, null, $e->getMessage());
- }
-
- $conn->exec("SET DateStyle TO 'sql,european'");
- $conn->exec("SET client_encoding TO 'utf-8'");
- // Disable JIT and parallel workers. They interfere badly with search SQL.
- $conn->exec("UPDATE pg_settings SET setting = -1 WHERE name = 'jit_above_cost'");
- $conn->exec("UPDATE pg_settings SET setting = 0 WHERE name = 'max_parallel_workers_per_gather'");
- $iMaxExecution = ini_get('max_execution_time');
- if ($iMaxExecution > 0) {
- $conn->setAttribute(\PDO::ATTR_TIMEOUT, $iMaxExecution); // seconds
- }
-
- $this->connection = $conn;
- return true;
- }
-
- // returns the number of rows that were modified or deleted by the SQL
- // statement. If no rows were affected returns 0.
- public function exec($sSQL, $aInputVars = null, $sErrMessage = 'Database query failed')
- {
- $val = null;
- try {
- if (isset($aInputVars)) {
- $stmt = $this->connection->prepare($sSQL);
- $stmt->execute($aInputVars);
- } else {
- $val = $this->connection->exec($sSQL);
- }
- } catch (\PDOException $e) {
- throw new \Nominatim\DatabaseError($sErrMessage, 500, null, $e, $sSQL);
- }
- return $val;
- }
-
- /**
- * Executes query. Returns first row as array.
- * Returns false if no result found.
- *
- * @param string $sSQL
- *
- * @return array[]
- */
- public function getRow($sSQL, $aInputVars = null, $sErrMessage = 'Database query failed')
- {
- try {
- $stmt = $this->getQueryStatement($sSQL, $aInputVars, $sErrMessage);
- $row = $stmt->fetch();
- } catch (\PDOException $e) {
- throw new \Nominatim\DatabaseError($sErrMessage, 500, null, $e, $sSQL);
- }
- return $row;
- }
-
- /**
- * Executes query. Returns first value of first result.
- * Returns false if no results found.
- *
- * @param string $sSQL
- *
- * @return array[]
- */
- public function getOne($sSQL, $aInputVars = null, $sErrMessage = 'Database query failed')
- {
- try {
- $stmt = $this->getQueryStatement($sSQL, $aInputVars, $sErrMessage);
- $row = $stmt->fetch(\PDO::FETCH_NUM);
- if ($row === false) {
- return false;
- }
- } catch (\PDOException $e) {
- throw new \Nominatim\DatabaseError($sErrMessage, 500, null, $e, $sSQL);
- }
- return $row[0];
- }
-
- /**
- * Executes query. Returns array of results (arrays).
- * Returns empty array if no results found.
- *
- * @param string $sSQL
- *
- * @return array[]
- */
- public function getAll($sSQL, $aInputVars = null, $sErrMessage = 'Database query failed')
- {
- try {
- $stmt = $this->getQueryStatement($sSQL, $aInputVars, $sErrMessage);
- $rows = $stmt->fetchAll();
- } catch (\PDOException $e) {
- throw new \Nominatim\DatabaseError($sErrMessage, 500, null, $e, $sSQL);
- }
- return $rows;
- }
-
- /**
- * Executes query. Returns array of the first value of each result.
- * Returns empty array if no results found.
- *
- * @param string $sSQL
- *
- * @return array[]
- */
- public function getCol($sSQL, $aInputVars = null, $sErrMessage = 'Database query failed')
- {
- $aVals = array();
- try {
- $stmt = $this->getQueryStatement($sSQL, $aInputVars, $sErrMessage);
-
- while (($val = $stmt->fetchColumn(0)) !== false) { // returns first column or false
- $aVals[] = $val;
- }
- } catch (\PDOException $e) {
- throw new \Nominatim\DatabaseError($sErrMessage, 500, null, $e, $sSQL);
- }
- return $aVals;
- }
-
- /**
- * Executes query. Returns associate array mapping first value to second value of each result.
- * Returns empty array if no results found.
- *
- * @param string $sSQL
- *
- * @return array[]
- */
- public function getAssoc($sSQL, $aInputVars = null, $sErrMessage = 'Database query failed')
- {
- try {
- $stmt = $this->getQueryStatement($sSQL, $aInputVars, $sErrMessage);
-
- $aList = array();
- while ($aRow = $stmt->fetch(\PDO::FETCH_NUM)) {
- $aList[$aRow[0]] = $aRow[1];
- }
- } catch (\PDOException $e) {
- throw new \Nominatim\DatabaseError($sErrMessage, 500, null, $e, $sSQL);
- }
- return $aList;
- }
-
- /**
- * Executes query. Returns a PDO statement to iterate over.
- *
- * @param string $sSQL
- *
- * @return PDOStatement
- */
- public function getQueryStatement($sSQL, $aInputVars = null, $sErrMessage = 'Database query failed')
- {
- try {
- if (isset($aInputVars)) {
- $stmt = $this->connection->prepare($sSQL);
- $stmt->execute($aInputVars);
- } else {
- $stmt = $this->connection->query($sSQL);
- }
- } catch (\PDOException $e) {
- throw new \Nominatim\DatabaseError($sErrMessage, 500, null, $e, $sSQL);
- }
- return $stmt;
- }
-
- /**
- * St. John's Way => 'St. John\'s Way'
- *
- * @param string $sVal Text to be quoted.
- *
- * @return string
- */
- public function getDBQuoted($sVal)
- {
- return $this->connection->quote($sVal);
- }
-
- /**
- * Like getDBQuoted, but takes an array.
- *
- * @param array $aVals List of text to be quoted.
- *
- * @return array[]
- */
- public function getDBQuotedList($aVals)
- {
- return array_map(function ($sVal) {
- return $this->getDBQuoted($sVal);
- }, $aVals);
- }
-
- /**
- * [1,2,'b'] => 'ARRAY[1,2,'b']''
- *
- * @param array $aVals List of text to be quoted.
- *
- * @return string
- */
- public function getArraySQL($a)
- {
- return 'ARRAY['.join(',', $a).']';
- }
-
- /**
- * Check if a table exists in the database. Returns true if it does.
- *
- * @param string $sTableName
- *
- * @return boolean
- */
- public function tableExists($sTableName)
- {
- $sSQL = 'SELECT count(*) FROM pg_tables WHERE tablename = :tablename';
- return ($this->getOne($sSQL, array(':tablename' => $sTableName)) == 1);
- }
-
- /**
- * Deletes a table. Returns true if deleted or didn't exist.
- *
- * @param string $sTableName
- *
- * @return boolean
- */
- public function deleteTable($sTableName)
- {
- return $this->exec('DROP TABLE IF EXISTS '.$sTableName.' CASCADE') == 0;
- }
-
- /**
- * Tries to connect to the database but on failure doesn't throw an exception.
- *
- * @return boolean
- */
- public function checkConnection()
- {
- $bExists = true;
- try {
- $this->connect(true);
- } catch (\Nominatim\DatabaseError $e) {
- $bExists = false;
- }
- return $bExists;
- }
-
- /**
- * e.g. 9.6, 10, 11.2
- *
- * @return float
- */
- public function getPostgresVersion()
- {
- $sVersionString = $this->getOne('SHOW server_version_num');
- preg_match('#([0-9]?[0-9])([0-9][0-9])[0-9][0-9]#', $sVersionString, $aMatches);
- return (float) ($aMatches[1].'.'.$aMatches[2]);
- }
-
- /**
- * e.g. 2, 2.2
- *
- * @return float
- */
- public function getPostgisVersion()
- {
- $sVersionString = $this->getOne('select postgis_lib_version()');
- preg_match('#^([0-9]+)[.]([0-9]+)[.]#', $sVersionString, $aMatches);
- return (float) ($aMatches[1].'.'.$aMatches[2]);
- }
-
- /**
- * Returns an associate array of postgresql database connection settings. Keys can
- * be 'database', 'hostspec', 'port', 'username', 'password'.
- * Returns empty array on failure, thus check if at least 'database' is set.
- *
- * @return array[]
- */
- public static function parseDSN($sDSN)
- {
- // https://secure.php.net/manual/en/ref.pdo-pgsql.connection.php
- $aInfo = array();
- if (preg_match('/^pgsql:(.+)$/', $sDSN, $aMatches)) {
- foreach (explode(';', $aMatches[1]) as $sKeyVal) {
- list($sKey, $sVal) = explode('=', $sKeyVal, 2);
- if ($sKey == 'host') {
- $sKey = 'hostspec';
- } elseif ($sKey == 'dbname') {
- $sKey = 'database';
- } elseif ($sKey == 'user') {
- $sKey = 'username';
- }
- $aInfo[$sKey] = $sVal;
- }
- }
- return $aInfo;
- }
-
- /**
- * Takes an array of settings and return the DNS string. Key names can be
- * 'database', 'hostspec', 'port', 'username', 'password' but aliases
- * 'dbname', 'host' and 'user' are also supported.
- *
- * @return string
- *
- */
- public static function generateDSN($aInfo)
- {
- $sDSN = sprintf(
- 'pgsql:host=%s;port=%s;dbname=%s;user=%s;password=%s;',
- $aInfo['host'] ?? $aInfo['hostspec'] ?? '',
- $aInfo['port'] ?? '',
- $aInfo['dbname'] ?? $aInfo['database'] ?? '',
- $aInfo['user'] ?? '',
- $aInfo['password'] ?? ''
- );
- $sDSN = preg_replace('/\b\w+=;/', '', $sDSN);
- $sDSN = preg_replace('/;\Z/', '', $sDSN);
-
- return $sDSN;
- }
-}
+++ /dev/null
-<?php
-/**
- * SPDX-License-Identifier: GPL-2.0-only
- *
- * This file is part of Nominatim. (https://nominatim.org)
- *
- * Copyright (C) 2022 by the Nominatim developer community.
- * For a full list of authors see the git log.
- */
-
-namespace Nominatim;
-
-class DatabaseError extends \Exception
-{
-
- public function __construct($message, $code, $previous, $oPDOErr, $sSql = null)
- {
- parent::__construct($message, $code, $previous);
- // https://secure.php.net/manual/en/class.pdoexception.php
- $this->oPDOErr = $oPDOErr;
- $this->sSql = $sSql;
- }
-
- public function __toString()
- {
- return __CLASS__ . ": [{$this->code}]: {$this->message}\n";
- }
-
- public function getSqlError()
- {
- return $this->oPDOErr->getMessage();
- }
-
- public function getSqlDebugDump()
- {
- if (CONST_Debug) {
- return var_export($this->oPDOErr, true);
- } else {
- return $this->sSql;
- }
- }
-}
+++ /dev/null
-<?php
-/**
- * SPDX-License-Identifier: GPL-2.0-only
- *
- * This file is part of Nominatim. (https://nominatim.org)
- *
- * Copyright (C) 2022 by the Nominatim developer community.
- * For a full list of authors see the git log.
- */
-
-namespace Nominatim;
-
-class Debug
-{
- public static function newFunction($sHeading)
- {
- echo "<pre><h2>Debug output for $sHeading</h2></pre>\n";
- }
-
- public static function newSection($sHeading)
- {
- echo "<hr><pre><h3>$sHeading</h3></pre>\n";
- }
-
- public static function printVar($sHeading, $mVar)
- {
- echo '<pre><b>'.$sHeading. ':</b> ';
- Debug::outputVar($mVar, str_repeat(' ', strlen($sHeading) + 3));
- echo "</pre>\n";
- }
-
- public static function fmtArrayVals($aArr)
- {
- return array('__debug_format' => 'array_vals', 'data' => $aArr);
- }
-
- public static function printDebugArray($sHeading, $oVar)
- {
-
- if ($oVar === null) {
- Debug::printVar($sHeading, 'null');
- } else {
- Debug::printVar($sHeading, $oVar->debugInfo());
- }
- }
-
- public static function printDebugTable($sHeading, $aVar)
- {
- echo '<b>'.$sHeading.":</b>\n";
- echo "<table border='1'>\n";
- if (!empty($aVar)) {
- echo " <tr>\n";
- $aKeys = array();
- $aInfo = reset($aVar);
- if (!is_array($aInfo)) {
- $aInfo = $aInfo->debugInfo();
- }
- foreach ($aInfo as $sKey => $mVal) {
- echo ' <th><small>'.$sKey.'</small></th>'."\n";
- $aKeys[] = $sKey;
- }
- echo " </tr>\n";
- foreach ($aVar as $oRow) {
- $aInfo = $oRow;
- if (!is_array($oRow)) {
- $aInfo = $oRow->debugInfo();
- }
- echo " <tr>\n";
- foreach ($aKeys as $sKey) {
- echo ' <td><pre>';
- if (isset($aInfo[$sKey])) {
- Debug::outputVar($aInfo[$sKey], '');
- }
- echo '</pre></td>'."\n";
- }
- echo " </tr>\n";
- }
- }
- echo "</table>\n";
- }
-
- public static function printGroupedSearch($aSearches, $aWordsIDs)
- {
- echo '<table border="1">';
- echo '<tr><th>rank</th><th>Name Tokens</th><th>Name Not</th>';
- echo '<th>Address Tokens</th><th>Address Not</th>';
- echo '<th>country</th><th>operator</th>';
- echo '<th>class</th><th>type</th><th>postcode</th><th>housenumber</th></tr>';
- foreach ($aSearches as $aRankedSet) {
- foreach ($aRankedSet as $aRow) {
- $aRow->dumpAsHtmlTableRow($aWordsIDs);
- }
- }
- echo '</table>';
- }
-
- public static function printGroupTable($sHeading, $aVar)
- {
- echo '<b>'.$sHeading.":</b>\n";
- echo "<table border='1'>\n";
- if (!empty($aVar)) {
- echo " <tr>\n";
- echo ' <th><small>Group</small></th>'."\n";
- $aKeys = array();
- $aInfo = reset($aVar)[0];
- if (!is_array($aInfo)) {
- $aInfo = $aInfo->debugInfo();
- }
- foreach ($aInfo as $sKey => $mVal) {
- echo ' <th><small>'.$sKey.'</small></th>'."\n";
- $aKeys[] = $sKey;
- }
- echo " </tr>\n";
- foreach ($aVar as $sGrpKey => $aGroup) {
- foreach ($aGroup as $oRow) {
- $aInfo = $oRow;
- if (!is_array($oRow)) {
- $aInfo = $oRow->debugInfo();
- }
- echo " <tr>\n";
- echo ' <td><pre>'.$sGrpKey.'</pre></td>'."\n";
- foreach ($aKeys as $sKey) {
- echo ' <td><pre>';
- if (!empty($aInfo[$sKey])) {
- Debug::outputVar($aInfo[$sKey], '');
- }
- echo '</pre></td>'."\n";
- }
- echo " </tr>\n";
- }
- }
- }
- echo "</table>\n";
- }
-
- public static function printSQL($sSQL)
- {
- echo '<p><tt><font color="#aaa">'.$sSQL.'</font></tt></p>'."\n";
- }
-
- private static function outputVar($mVar, $sPreNL)
- {
- if (is_array($mVar) && !isset($mVar['__debug_format'])) {
- $sPre = '';
- foreach ($mVar as $mKey => $aValue) {
- echo $sPre;
- $iKeyLen = Debug::outputSimpleVar($mKey);
- echo ' => ';
- Debug::outputVar(
- $aValue,
- $sPreNL.str_repeat(' ', $iKeyLen + 4)
- );
- $sPre = "\n".$sPreNL;
- }
- } elseif (is_array($mVar) && isset($mVar['__debug_format'])) {
- if (!empty($mVar['data'])) {
- $sPre = '';
- foreach ($mVar['data'] as $mValue) {
- echo $sPre;
- Debug::outputSimpleVar($mValue);
- $sPre = ', ';
- }
- }
- } elseif (is_object($mVar) && method_exists($mVar, 'debugInfo')) {
- Debug::outputVar($mVar->debugInfo(), $sPreNL);
- } elseif (is_a($mVar, 'stdClass')) {
- Debug::outputVar(json_decode(json_encode($mVar), true), $sPreNL);
- } else {
- Debug::outputSimpleVar($mVar);
- }
- }
-
- private static function outputSimpleVar($mVar)
- {
- if (is_bool($mVar)) {
- echo '<i>'.($mVar ? 'True' : 'False').'</i>';
- return $mVar ? 4 : 5;
- }
-
- if (is_string($mVar)) {
- echo "'$mVar'";
- return strlen($mVar) + 2;
- }
-
- echo (string)$mVar;
- return strlen((string)$mVar);
- }
-}
+++ /dev/null
-<?php
-/**
- * SPDX-License-Identifier: GPL-2.0-only
- *
- * This file is part of Nominatim. (https://nominatim.org)
- *
- * Copyright (C) 2022 by the Nominatim developer community.
- * For a full list of authors see the git log.
- */
-
-namespace Nominatim;
-
-class Debug
-{
- public static function __callStatic($name, $arguments)
- {
- // nothing
- }
-}
+++ /dev/null
-<?php
-/**
- * SPDX-License-Identifier: GPL-2.0-only
- *
- * This file is part of Nominatim. (https://nominatim.org)
- *
- * Copyright (C) 2022 by the Nominatim developer community.
- * For a full list of authors see the git log.
- */
-
-namespace Nominatim;
-
-require_once(CONST_LibDir.'/PlaceLookup.php');
-require_once(CONST_LibDir.'/Phrase.php');
-require_once(CONST_LibDir.'/ReverseGeocode.php');
-require_once(CONST_LibDir.'/SearchDescription.php');
-require_once(CONST_LibDir.'/SearchContext.php');
-require_once(CONST_LibDir.'/SearchPosition.php');
-require_once(CONST_LibDir.'/TokenList.php');
-require_once(CONST_TokenizerDir.'/tokenizer.php');
-
-class Geocode
-{
- protected $oDB;
-
- protected $oPlaceLookup;
- protected $oTokenizer;
-
- protected $aLangPrefOrder = array();
-
- protected $aExcludePlaceIDs = array();
-
- protected $iLimit = 20;
- protected $iFinalLimit = 10;
- protected $iOffset = 0;
- protected $bFallback = false;
-
- protected $aCountryCodes = false;
-
- protected $bBoundedSearch = false;
- protected $aViewBox = false;
- protected $aRoutePoints = false;
- protected $aRouteWidth = false;
-
- protected $iMaxRank = 20;
- protected $iMinAddressRank = 0;
- protected $iMaxAddressRank = 30;
- protected $aAddressRankList = array();
-
- protected $sAllowedTypesSQLList = false;
-
- protected $sQuery = false;
- protected $aStructuredQuery = false;
-
-
- public function __construct(&$oDB)
- {
- $this->oDB =& $oDB;
- $this->oPlaceLookup = new PlaceLookup($this->oDB);
- $this->oTokenizer = new \Nominatim\Tokenizer($this->oDB);
- }
-
- public function setLanguagePreference($aLangPref)
- {
- $this->aLangPrefOrder = $aLangPref;
- }
-
- public function getMoreUrlParams()
- {
- if ($this->aStructuredQuery) {
- $aParams = $this->aStructuredQuery;
- } else {
- $aParams = array('q' => $this->sQuery);
- }
-
- $aParams = array_merge($aParams, $this->oPlaceLookup->getMoreUrlParams());
-
- if ($this->aExcludePlaceIDs) {
- $aParams['exclude_place_ids'] = implode(',', $this->aExcludePlaceIDs);
- }
-
- if ($this->bBoundedSearch) {
- $aParams['bounded'] = '1';
- }
-
- if ($this->aCountryCodes) {
- $aParams['countrycodes'] = implode(',', $this->aCountryCodes);
- }
-
- if ($this->aViewBox) {
- $aParams['viewbox'] = join(',', $this->aViewBox);
- }
-
- return $aParams;
- }
-
- public function setLimit($iLimit = 10)
- {
- if ($iLimit > 50) {
- $iLimit = 50;
- } elseif ($iLimit < 1) {
- $iLimit = 1;
- }
-
- $this->iFinalLimit = $iLimit;
- $this->iLimit = $iLimit + max($iLimit, 10);
- }
-
- public function setFeatureType($sFeatureType)
- {
- switch ($sFeatureType) {
- case 'country':
- $this->setRankRange(4, 4);
- break;
- case 'state':
- $this->setRankRange(8, 8);
- break;
- case 'city':
- $this->setRankRange(14, 16);
- break;
- case 'settlement':
- $this->setRankRange(8, 20);
- break;
- }
- }
-
- public function setRankRange($iMin, $iMax)
- {
- $this->iMinAddressRank = $iMin;
- $this->iMaxAddressRank = $iMax;
- }
-
- public function setViewbox($aViewbox)
- {
- $aBox = array_map('floatval', $aViewbox);
-
- $this->aViewBox[0] = max(-180.0, min($aBox[0], $aBox[2]));
- $this->aViewBox[1] = max(-90.0, min($aBox[1], $aBox[3]));
- $this->aViewBox[2] = min(180.0, max($aBox[0], $aBox[2]));
- $this->aViewBox[3] = min(90.0, max($aBox[1], $aBox[3]));
-
- if ($this->aViewBox[2] - $this->aViewBox[0] < 0.000000001
- || $this->aViewBox[3] - $this->aViewBox[1] < 0.000000001
- ) {
- userError("Bad parameter 'viewbox'. Not a box.");
- }
- }
-
- private function viewboxImportanceFactor($fX, $fY)
- {
- if (!$this->aViewBox) {
- return 1;
- }
-
- $fWidth = ($this->aViewBox[2] - $this->aViewBox[0])/2;
- $fHeight = ($this->aViewBox[3] - $this->aViewBox[1])/2;
-
- $fXDist = abs($fX - ($this->aViewBox[0] + $this->aViewBox[2])/2);
- $fYDist = abs($fY - ($this->aViewBox[1] + $this->aViewBox[3])/2);
-
- if ($fXDist <= $fWidth && $fYDist <= $fHeight) {
- return 1;
- }
-
- if ($fXDist <= $fWidth * 3 && $fYDist <= 3 * $fHeight) {
- return 0.5;
- }
-
- return 0.25;
- }
-
- public function setQuery($sQueryString)
- {
- $this->sQuery = $sQueryString;
- $this->aStructuredQuery = false;
- }
-
- public function getQueryString()
- {
- return $this->sQuery;
- }
-
-
- public function loadParamArray($oParams, $sForceGeometryType = null)
- {
- $this->bBoundedSearch = $oParams->getBool('bounded', $this->bBoundedSearch);
-
- $this->setLimit($oParams->getInt('limit', $this->iFinalLimit));
- $this->iOffset = $oParams->getInt('offset', $this->iOffset);
-
- $this->bFallback = $oParams->getBool('fallback', $this->bFallback);
-
- // List of excluded Place IDs - used for more accurate pageing
- $sExcluded = $oParams->getStringList('exclude_place_ids');
- if ($sExcluded) {
- foreach ($sExcluded as $iExcludedPlaceID) {
- $iExcludedPlaceID = (int)$iExcludedPlaceID;
- if ($iExcludedPlaceID) {
- $aExcludePlaceIDs[$iExcludedPlaceID] = $iExcludedPlaceID;
- }
- }
-
- if (isset($aExcludePlaceIDs)) {
- $this->aExcludePlaceIDs = $aExcludePlaceIDs;
- }
- }
-
- // Only certain ranks of feature
- $sFeatureType = $oParams->getString('featureType');
- if (!$sFeatureType) {
- $sFeatureType = $oParams->getString('featuretype');
- }
- if ($sFeatureType) {
- $this->setFeatureType($sFeatureType);
- }
-
- // Country code list
- $sCountries = $oParams->getStringList('countrycodes');
- if ($sCountries) {
- foreach ($sCountries as $sCountryCode) {
- if (preg_match('/^[a-zA-Z][a-zA-Z]$/', $sCountryCode)) {
- $aCountries[] = strtolower($sCountryCode);
- }
- }
- if (isset($aCountries)) {
- $this->aCountryCodes = $aCountries;
- }
- }
-
- $aViewbox = $oParams->getStringList('viewboxlbrt');
- if ($aViewbox) {
- if (count($aViewbox) != 4) {
- userError("Bad parameter 'viewboxlbrt'. Expected 4 coordinates.");
- }
- $this->setViewbox($aViewbox);
- } else {
- $aViewbox = $oParams->getStringList('viewbox');
- if ($aViewbox) {
- if (count($aViewbox) != 4) {
- userError("Bad parameter 'viewbox'. Expected 4 coordinates.");
- }
- $this->setViewBox($aViewbox);
- } else {
- $aRoute = $oParams->getStringList('route');
- $fRouteWidth = $oParams->getFloat('routewidth');
- if ($aRoute && $fRouteWidth) {
- $this->aRoutePoints = $aRoute;
- $this->aRouteWidth = $fRouteWidth;
- }
- }
- }
-
- $this->oPlaceLookup->loadParamArray($oParams, $sForceGeometryType);
- $this->oPlaceLookup->setIncludeAddressDetails($oParams->getBool('addressdetails', false));
- }
-
- public function setQueryFromParams($oParams)
- {
- // Search query
- $sQuery = $oParams->getString('q');
- if (!$sQuery) {
- $this->setStructuredQuery(
- $oParams->getString('amenity'),
- $oParams->getString('street'),
- $oParams->getString('city'),
- $oParams->getString('county'),
- $oParams->getString('state'),
- $oParams->getString('country'),
- $oParams->getString('postalcode')
- );
- } else {
- $this->setQuery($sQuery);
- }
- }
-
- public function loadStructuredAddressElement($sValue, $sKey, $iNewMinAddressRank, $iNewMaxAddressRank, $aItemListValues)
- {
- $sValue = trim($sValue);
- if (!$sValue) {
- return false;
- }
- $this->aStructuredQuery[$sKey] = $sValue;
- if ($this->iMinAddressRank == 0 && $this->iMaxAddressRank == 30) {
- $this->iMinAddressRank = $iNewMinAddressRank;
- $this->iMaxAddressRank = $iNewMaxAddressRank;
- }
- if ($aItemListValues) {
- $this->aAddressRankList = array_merge($this->aAddressRankList, $aItemListValues);
- }
- return true;
- }
-
- public function setStructuredQuery($sAmenity = false, $sStreet = false, $sCity = false, $sCounty = false, $sState = false, $sCountry = false, $sPostalCode = false)
- {
- $this->sQuery = false;
-
- // Reset
- $this->iMinAddressRank = 0;
- $this->iMaxAddressRank = 30;
- $this->aAddressRankList = array();
-
- $this->aStructuredQuery = array();
- $this->sAllowedTypesSQLList = false;
-
- $this->loadStructuredAddressElement($sAmenity, 'amenity', 26, 30, false);
- $this->loadStructuredAddressElement($sStreet, 'street', 26, 30, false);
- $this->loadStructuredAddressElement($sCity, 'city', 14, 24, false);
- $this->loadStructuredAddressElement($sCounty, 'county', 9, 13, false);
- $this->loadStructuredAddressElement($sState, 'state', 8, 8, false);
- $this->loadStructuredAddressElement($sPostalCode, 'postalcode', 5, 11, array(5, 11));
- $this->loadStructuredAddressElement($sCountry, 'country', 4, 4, false);
-
- if (!empty($this->aStructuredQuery)) {
- $this->sQuery = join(', ', $this->aStructuredQuery);
- if ($this->iMaxAddressRank < 30) {
- $this->sAllowedTypesSQLList = '(\'place\',\'boundary\')';
- }
- }
- }
-
- public function fallbackStructuredQuery()
- {
- $aParams = $this->aStructuredQuery;
-
- if (!$aParams || count($aParams) == 1) {
- return false;
- }
-
- $aOrderToFallback = array('postalcode', 'street', 'city', 'county', 'state');
-
- foreach ($aOrderToFallback as $sType) {
- if (isset($aParams[$sType])) {
- unset($aParams[$sType]);
- $this->setStructuredQuery(@$aParams['amenity'], @$aParams['street'], @$aParams['city'], @$aParams['county'], @$aParams['state'], @$aParams['country'], @$aParams['postalcode']);
- return true;
- }
- }
-
- return false;
- }
-
- public function getGroupedSearches($aSearches, $aPhrases, $oValidTokens)
- {
- /*
- Calculate all searches using oValidTokens i.e.
- 'Wodsworth Road, Sheffield' =>
-
- Phrase Wordset
- 0 0 (wodsworth road)
- 0 1 (wodsworth)(road)
- 1 0 (sheffield)
-
- Score how good the search is so they can be ordered
- */
- foreach ($aPhrases as $iPhrase => $oPhrase) {
- $aNewPhraseSearches = array();
- $oPosition = new SearchPosition(
- $oPhrase->getPhraseType(),
- $iPhrase,
- count($aPhrases)
- );
-
- foreach ($oPhrase->getWordSets() as $aWordset) {
- $aWordsetSearches = $aSearches;
-
- // Add all words from this wordset
- foreach ($aWordset as $iToken => $sToken) {
- $aNewWordsetSearches = array();
- $oPosition->setTokenPosition($iToken, count($aWordset));
-
- foreach ($aWordsetSearches as $oCurrentSearch) {
- foreach ($oValidTokens->get($sToken) as $oSearchTerm) {
- if ($oSearchTerm->isExtendable($oCurrentSearch, $oPosition)) {
- $aNewSearches = $oSearchTerm->extendSearch(
- $oCurrentSearch,
- $oPosition
- );
-
- foreach ($aNewSearches as $oSearch) {
- if ($oSearch->getRank() < $this->iMaxRank) {
- $aNewWordsetSearches[] = $oSearch;
- }
- }
- }
- }
- }
- // Sort and cut
- usort($aNewWordsetSearches, array('Nominatim\SearchDescription', 'bySearchRank'));
- $aWordsetSearches = array_slice($aNewWordsetSearches, 0, 50);
- }
-
- $aNewPhraseSearches = array_merge($aNewPhraseSearches, $aNewWordsetSearches);
- usort($aNewPhraseSearches, array('Nominatim\SearchDescription', 'bySearchRank'));
-
- $aSearchHash = array();
- foreach ($aNewPhraseSearches as $iSearch => $aSearch) {
- $sHash = serialize($aSearch);
- if (isset($aSearchHash[$sHash])) {
- unset($aNewPhraseSearches[$iSearch]);
- } else {
- $aSearchHash[$sHash] = 1;
- }
- }
-
- $aNewPhraseSearches = array_slice($aNewPhraseSearches, 0, 50);
- }
-
- // Re-group the searches by their score, junk anything over 20 as just not worth trying
- $aGroupedSearches = array();
- foreach ($aNewPhraseSearches as $aSearch) {
- $iRank = $aSearch->getRank();
- if ($iRank < $this->iMaxRank) {
- if (!isset($aGroupedSearches[$iRank])) {
- $aGroupedSearches[$iRank] = array();
- }
- $aGroupedSearches[$iRank][] = $aSearch;
- }
- }
- ksort($aGroupedSearches);
-
- $iSearchCount = 0;
- $aSearches = array();
- foreach ($aGroupedSearches as $aNewSearches) {
- $iSearchCount += count($aNewSearches);
- $aSearches = array_merge($aSearches, $aNewSearches);
- if ($iSearchCount > 50) {
- break;
- }
- }
- }
-
- // Revisit searches, drop bad searches and give penalty to unlikely combinations.
- $aGroupedSearches = array();
- foreach ($aSearches as $oSearch) {
- if (!$oSearch->isValidSearch()) {
- continue;
- }
-
- $iRank = $oSearch->getRank();
- if (!isset($aGroupedSearches[$iRank])) {
- $aGroupedSearches[$iRank] = array();
- }
- $aGroupedSearches[$iRank][] = $oSearch;
- }
- ksort($aGroupedSearches);
-
- return $aGroupedSearches;
- }
-
- /* Perform the actual query lookup.
-
- Returns an ordered list of results, each with the following fields:
- osm_type: type of corresponding OSM object
- N - node
- W - way
- R - relation
- P - postcode (internally computed)
- osm_id: id of corresponding OSM object
- class: general object class (corresponds to tag key of primary OSM tag)
- type: subclass of object (corresponds to tag value of primary OSM tag)
- admin_level: see https://wiki.openstreetmap.org/wiki/Admin_level
- rank_search: rank in search hierarchy
- (see also https://wiki.openstreetmap.org/wiki/Nominatim/Development_overview#Country_to_street_level)
- rank_address: rank in address hierarchy (determines orer in address)
- place_id: internal key (may differ between different instances)
- country_code: ISO country code
- langaddress: localized full address
- placename: localized name of object
- ref: content of ref tag (if available)
- lon: longitude
- lat: latitude
- importance: importance of place based on Wikipedia link count
- addressimportance: cumulated importance of address elements
- extra_place: type of place (for admin boundaries, if there is a place tag)
- aBoundingBox: bounding Box
- label: short description of the object class/type (English only)
- name: full name (currently the same as langaddress)
- foundorder: secondary ordering for places with same importance
- */
-
-
- public function lookup()
- {
- Debug::newFunction('Geocode::lookup');
- if (!$this->sQuery && !$this->aStructuredQuery) {
- return array();
- }
-
- Debug::printDebugArray('Geocode', $this);
-
- $oCtx = new SearchContext();
-
- if ($this->aRoutePoints) {
- $oCtx->setViewboxFromRoute(
- $this->oDB,
- $this->aRoutePoints,
- $this->aRouteWidth,
- $this->bBoundedSearch
- );
- } elseif ($this->aViewBox) {
- $oCtx->setViewboxFromBox($this->aViewBox, $this->bBoundedSearch);
- }
- if ($this->aExcludePlaceIDs) {
- $oCtx->setExcludeList($this->aExcludePlaceIDs);
- }
- if ($this->aCountryCodes) {
- $oCtx->setCountryList($this->aCountryCodes);
- }
-
- Debug::newSection('Query Preprocessing');
-
- $sQuery = $this->sQuery;
- if (!preg_match('//u', $sQuery)) {
- userError('Query string is not UTF-8 encoded.');
- }
-
- // Do we have anything that looks like a lat/lon pair?
- $sQuery = $oCtx->setNearPointFromQuery($sQuery);
-
- if ($sQuery || $this->aStructuredQuery) {
- // Start with a single blank search
- $aSearches = array(new SearchDescription($oCtx));
-
- if ($sQuery) {
- $sQuery = $aSearches[0]->extractKeyValuePairs($sQuery);
- }
-
- $sSpecialTerm = '';
- if ($sQuery) {
- preg_match_all(
- '/\\[([\\w ]*)\\]/u',
- $sQuery,
- $aSpecialTermsRaw,
- PREG_SET_ORDER
- );
- if (!empty($aSpecialTermsRaw)) {
- Debug::printVar('Special terms', $aSpecialTermsRaw);
- }
-
- foreach ($aSpecialTermsRaw as $aSpecialTerm) {
- $sQuery = str_replace($aSpecialTerm[0], ' ', $sQuery);
- if (!$sSpecialTerm) {
- $sSpecialTerm = $aSpecialTerm[1];
- }
- }
- }
- if (!$sSpecialTerm && $this->aStructuredQuery
- && isset($this->aStructuredQuery['amenity'])) {
- $sSpecialTerm = $this->aStructuredQuery['amenity'];
- unset($this->aStructuredQuery['amenity']);
- }
-
- if ($sSpecialTerm && !$aSearches[0]->hasOperator()) {
- $aTokens = $this->oTokenizer->tokensForSpecialTerm($sSpecialTerm);
-
- if (!empty($aTokens)) {
- $aNewSearches = array();
- $oPosition = new SearchPosition('', 0, 1);
- $oPosition->setTokenPosition(0, 1);
-
- foreach ($aSearches as $oSearch) {
- foreach ($aTokens as $oToken) {
- $aNewSearches = array_merge(
- $aNewSearches,
- $oToken->extendSearch($oSearch, $oPosition)
- );
- }
- }
- $aSearches = $aNewSearches;
- }
- }
-
- // Split query into phrases
- // Commas are used to reduce the search space by indicating where phrases split
- $aPhrases = array();
- if ($this->aStructuredQuery) {
- foreach ($this->aStructuredQuery as $iPhrase => $sPhrase) {
- $aPhrases[] = new Phrase($sPhrase, $iPhrase);
- }
- } else {
- foreach (explode(',', $sQuery) as $sPhrase) {
- $aPhrases[] = new Phrase($sPhrase, '');
- }
- }
-
- Debug::printDebugArray('Search context', $oCtx);
- Debug::printDebugArray('Base search', empty($aSearches) ? null : $aSearches[0]);
-
- Debug::newSection('Tokenization');
- $oValidTokens = $this->oTokenizer->extractTokensFromPhrases($aPhrases);
-
- if ($oValidTokens->count() > 0) {
- $oCtx->setFullNameWords($oValidTokens->getFullWordIDs());
-
- $aPhrases = array_filter($aPhrases, function ($oPhrase) {
- return $oPhrase->getWordSets() !== null;
- });
-
- // Any words that have failed completely?
- // TODO: suggestions
-
- Debug::printGroupTable('Valid Tokens', $oValidTokens->debugInfo());
- Debug::printDebugTable('Phrases', $aPhrases);
-
- Debug::newSection('Search candidates');
-
- $aGroupedSearches = $this->getGroupedSearches($aSearches, $aPhrases, $oValidTokens);
-
- if (!$this->aStructuredQuery) {
- // Reverse phrase array and also reverse the order of the wordsets in
- // the first and final phrase. Don't bother about phrases in the middle
- // because order in the address doesn't matter.
- $aPhrases = array_reverse($aPhrases);
- $aPhrases[0]->invertWordSets();
- if (count($aPhrases) > 1) {
- $aPhrases[count($aPhrases)-1]->invertWordSets();
- }
- $aReverseGroupedSearches = $this->getGroupedSearches($aSearches, $aPhrases, $oValidTokens);
-
- foreach ($aReverseGroupedSearches as $aSearches) {
- foreach ($aSearches as $aSearch) {
- if (!isset($aGroupedSearches[$aSearch->getRank()])) {
- $aGroupedSearches[$aSearch->getRank()] = array();
- }
- $aGroupedSearches[$aSearch->getRank()][] = $aSearch;
- }
- }
-
- ksort($aGroupedSearches);
- }
- } else {
- // Re-group the searches by their score, junk anything over 20 as just not worth trying
- $aGroupedSearches = array();
- foreach ($aSearches as $aSearch) {
- if ($aSearch->getRank() < $this->iMaxRank) {
- if (!isset($aGroupedSearches[$aSearch->getRank()])) {
- $aGroupedSearches[$aSearch->getRank()] = array();
- }
- $aGroupedSearches[$aSearch->getRank()][] = $aSearch;
- }
- }
- ksort($aGroupedSearches);
- }
-
- // Filter out duplicate searches
- $aSearchHash = array();
- foreach ($aGroupedSearches as $iGroup => $aSearches) {
- foreach ($aSearches as $iSearch => $aSearch) {
- $sHash = serialize($aSearch);
- if (isset($aSearchHash[$sHash])) {
- unset($aGroupedSearches[$iGroup][$iSearch]);
- if (empty($aGroupedSearches[$iGroup])) {
- unset($aGroupedSearches[$iGroup]);
- }
- } else {
- $aSearchHash[$sHash] = 1;
- }
- }
- }
-
- Debug::printGroupedSearch(
- $aGroupedSearches,
- $oValidTokens->debugTokenByWordIdList()
- );
-
- // Start the search process
- $iGroupLoop = 0;
- $iQueryLoop = 0;
- $aNextResults = array();
- foreach ($aGroupedSearches as $iGroupedRank => $aSearches) {
- $iGroupLoop++;
- $aResults = $aNextResults;
- foreach ($aSearches as $oSearch) {
- $iQueryLoop++;
-
- Debug::newSection("Search Loop, group $iGroupLoop, loop $iQueryLoop");
- Debug::printGroupedSearch(
- array($iGroupedRank => array($oSearch)),
- $oValidTokens->debugTokenByWordIdList()
- );
-
- $aNewResults = $oSearch->query(
- $this->oDB,
- $this->iMinAddressRank,
- $this->iMaxAddressRank,
- $this->iLimit
- );
-
- // The same result may appear in different rounds, only
- // use the one with minimal rank.
- foreach ($aNewResults as $iPlace => $oRes) {
- if (!isset($aResults[$iPlace])
- || $aResults[$iPlace]->iResultRank > $oRes->iResultRank) {
- $aResults[$iPlace] = $oRes;
- }
- }
-
- if ($iQueryLoop > 20) {
- break;
- }
- }
-
- if (!empty($aResults)) {
- $aSplitResults = Result::splitResults($aResults);
- Debug::printVar('Split results', $aSplitResults);
- if ($iGroupLoop <= 4
- && reset($aSplitResults['head'])->iResultRank > 0
- && $iGroupedRank !== array_key_last($aGroupedSearches)) {
- // Haven't found an exact match for the query yet.
- // Therefore add result from the next group level.
- $aNextResults = $aSplitResults['head'];
- foreach ($aNextResults as $oRes) {
- $oRes->iResultRank--;
- }
- foreach ($aSplitResults['tail'] as $oRes) {
- $oRes->iResultRank--;
- $aNextResults[$oRes->iId] = $oRes;
- }
- $aResults = array();
- } else {
- $aResults = $aSplitResults['head'];
- }
- }
-
- if (!empty($aResults) && ($this->iMinAddressRank != 0 || $this->iMaxAddressRank != 30)) {
- // Need to verify passes rank limits before dropping out of the loop (yuk!)
- // reduces the number of place ids, like a filter
- // rank_address is 30 for interpolated housenumbers
- $aFilterSql = array();
- $sPlaceIds = Result::joinIdsByTable($aResults, Result::TABLE_PLACEX);
- if ($sPlaceIds) {
- $sSQL = 'SELECT place_id FROM placex ';
- $sSQL .= 'WHERE place_id in ('.$sPlaceIds.') ';
- $sSQL .= ' AND (';
- $sSQL .= " placex.rank_address between $this->iMinAddressRank and $this->iMaxAddressRank ";
- $sSQL .= " OR placex.rank_search between $this->iMinAddressRank and $this->iMaxAddressRank ";
- if ($this->aAddressRankList) {
- $sSQL .= ' OR placex.rank_address in ('.join(',', $this->aAddressRankList).')';
- }
- $sSQL .= ')';
- $aFilterSql[] = $sSQL;
- }
- $sPlaceIds = Result::joinIdsByTable($aResults, Result::TABLE_POSTCODE);
- if ($sPlaceIds) {
- $sSQL = ' SELECT place_id FROM location_postcode lp ';
- $sSQL .= 'WHERE place_id in ('.$sPlaceIds.') ';
- $sSQL .= " AND (lp.rank_address between $this->iMinAddressRank and $this->iMaxAddressRank ";
- if ($this->aAddressRankList) {
- $sSQL .= ' OR lp.rank_address in ('.join(',', $this->aAddressRankList).')';
- }
- $sSQL .= ') ';
- $aFilterSql[] = $sSQL;
- }
-
- $aFilteredIDs = array();
- if ($aFilterSql) {
- $sSQL = join(' UNION ', $aFilterSql);
- Debug::printSQL($sSQL);
- $aFilteredIDs = $this->oDB->getCol($sSQL);
- }
-
- $tempIDs = array();
- foreach ($aResults as $oResult) {
- if (($this->iMaxAddressRank == 30 &&
- ($oResult->iTable == Result::TABLE_OSMLINE
- || $oResult->iTable == Result::TABLE_TIGER))
- || in_array($oResult->iId, $aFilteredIDs)
- ) {
- $tempIDs[$oResult->iId] = $oResult;
- }
- }
- $aResults = $tempIDs;
- }
-
- if (!empty($aResults) || $iGroupLoop > 4 || $iQueryLoop > 30) {
- break;
- }
- }
- } else {
- // Just interpret as a reverse geocode
- $oReverse = new ReverseGeocode($this->oDB);
- $oReverse->setZoom(18);
-
- $oLookup = $oReverse->lookupPoint($oCtx->sqlNear, false);
-
- Debug::printVar('Reverse search', $oLookup);
-
- if ($oLookup) {
- $aResults = array($oLookup->iId => $oLookup);
- }
- }
-
- // No results? Done
- if (empty($aResults)) {
- if ($this->bFallback && $this->fallbackStructuredQuery()) {
- return $this->lookup();
- }
-
- return array();
- }
-
- if ($this->aAddressRankList) {
- $this->oPlaceLookup->setAddressRankList($this->aAddressRankList);
- }
- $this->oPlaceLookup->setAllowedTypesSQLList($this->sAllowedTypesSQLList);
- $this->oPlaceLookup->setLanguagePreference($this->aLangPrefOrder);
- if ($oCtx->hasNearPoint()) {
- $this->oPlaceLookup->setAnchorSql($oCtx->sqlNear);
- }
-
- $aSearchResults = $this->oPlaceLookup->lookup($aResults);
-
- $aRecheckWords = preg_split('/\b[\s,\\-]*/u', $sQuery);
- foreach ($aRecheckWords as $i => $sWord) {
- if (!preg_match('/[\pL\pN]/', $sWord)) {
- unset($aRecheckWords[$i]);
- }
- }
-
- Debug::printVar('Recheck words', $aRecheckWords);
-
- foreach ($aSearchResults as $iIdx => $aResult) {
- $fRadius = ClassTypes\getDefRadius($aResult);
-
- $aOutlineResult = $this->oPlaceLookup->getOutlines($aResult['place_id'], $aResult['lon'], $aResult['lat'], $fRadius);
- if ($aOutlineResult) {
- $aResult = array_merge($aResult, $aOutlineResult);
- }
-
- // Is there an icon set for this type of result?
- $sIcon = ClassTypes\getIconFile($aResult);
- if (isset($sIcon)) {
- $aResult['icon'] = $sIcon;
- }
-
- $sLabel = ClassTypes\getLabel($aResult);
- if (isset($sLabel)) {
- $aResult['label'] = $sLabel;
- }
- $aResult['name'] = $aResult['langaddress'];
-
- if ($oCtx->hasNearPoint()) {
- $aResult['importance'] = 0.001;
- $aResult['foundorder'] = $aResult['addressimportance'];
- } else {
- if ($aResult['importance'] == 0) {
- $aResult['importance'] = 0.0001;
- }
- $aResult['importance'] *= $this->viewboxImportanceFactor(
- $aResult['lon'],
- $aResult['lat']
- );
-
- // secondary ordering (for results with same importance (the smaller the better):
- // - approximate importance of address parts
- if (isset($aResult['addressimportance']) && $aResult['addressimportance']) {
- $aResult['foundorder'] = -$aResult['addressimportance']/10;
- } else {
- $aResult['foundorder'] = -$aResult['importance'];
- }
- // - number of exact matches from the query
- $aResult['foundorder'] -= $aResults[$aResult['place_id']]->iExactMatches;
- // - importance of the class/type
- $iClassImportance = ClassTypes\getImportance($aResult);
- if (isset($iClassImportance)) {
- $aResult['foundorder'] += 0.0001 * $iClassImportance;
- } else {
- $aResult['foundorder'] += 0.01;
- }
- // - rank
- $aResult['foundorder'] -= 0.00001 * (30 - $aResult['rank_search']);
-
- // Adjust importance for the number of exact string matches in the result
- $iCountWords = 0;
- $sAddress = $aResult['langaddress'];
- foreach ($aRecheckWords as $i => $sWord) {
- if (stripos($sAddress, $sWord)!==false) {
- $iCountWords++;
- if (preg_match('/(^|,)\s*'.preg_quote($sWord, '/').'\s*(,|$)/', $sAddress)) {
- $iCountWords += 0.1;
- }
- }
- }
-
- // 0.1 is a completely arbitrary number but something in the range 0.1 to 0.5 would seem right
- $aResult['importance'] = $aResult['importance'] + ($iCountWords*0.1);
- }
- $aSearchResults[$iIdx] = $aResult;
- }
- uasort($aSearchResults, 'byImportance');
- Debug::printVar('Pre-filter results', $aSearchResults);
-
- $aOSMIDDone = array();
- $aClassTypeNameDone = array();
- $aToFilter = $aSearchResults;
- $aSearchResults = array();
-
- foreach ($aToFilter as $aResult) {
- $this->aExcludePlaceIDs[$aResult['place_id']] = $aResult['place_id'];
- if (!$this->oPlaceLookup->doDeDupe() || (!isset($aOSMIDDone[$aResult['osm_type'].$aResult['osm_id']])
- && !isset($aClassTypeNameDone[$aResult['osm_type'].$aResult['class'].$aResult['type'].$aResult['name'].$aResult['admin_level']]))
- ) {
- $aOSMIDDone[$aResult['osm_type'].$aResult['osm_id']] = true;
- $aClassTypeNameDone[$aResult['osm_type'].$aResult['class'].$aResult['type'].$aResult['name'].$aResult['admin_level']] = true;
- $aSearchResults[] = $aResult;
- }
-
- // Absolute limit on number of results
- if (count($aSearchResults) >= $this->iFinalLimit) {
- break;
- }
- }
-
- Debug::printVar('Post-filter results', $aSearchResults);
- return $aSearchResults;
- } // end lookup()
-
- public function debugInfo()
- {
- return array(
- 'Query' => $this->sQuery,
- 'Structured query' => $this->aStructuredQuery,
- 'Name keys' => Debug::fmtArrayVals($this->aLangPrefOrder),
- 'Excluded place IDs' => Debug::fmtArrayVals($this->aExcludePlaceIDs),
- 'Limit (for searches)' => $this->iLimit,
- 'Limit (for results)'=> $this->iFinalLimit,
- 'Country codes' => Debug::fmtArrayVals($this->aCountryCodes),
- 'Bounded search' => $this->bBoundedSearch,
- 'Viewbox' => Debug::fmtArrayVals($this->aViewBox),
- 'Route points' => Debug::fmtArrayVals($this->aRoutePoints),
- 'Route width' => $this->aRouteWidth,
- 'Max rank' => $this->iMaxRank,
- 'Min address rank' => $this->iMinAddressRank,
- 'Max address rank' => $this->iMaxAddressRank,
- 'Address rank list' => Debug::fmtArrayVals($this->aAddressRankList)
- );
- }
-} // end class
+++ /dev/null
-<?php
-/**
- * SPDX-License-Identifier: GPL-2.0-only
- *
- * This file is part of Nominatim. (https://nominatim.org)
- *
- * Copyright (C) 2022 by the Nominatim developer community.
- * For a full list of authors see the git log.
- */
-
-namespace Nominatim;
-
-class ParameterParser
-{
- private $aParams;
-
-
- public function __construct($aParams = null)
- {
- $this->aParams = ($aParams === null) ? $_GET : $aParams;
- }
-
- public function getBool($sName, $bDefault = false)
- {
- if (!isset($this->aParams[$sName])
- || !is_string($this->aParams[$sName])
- || strlen($this->aParams[$sName]) == 0
- ) {
- return $bDefault;
- }
-
- return (bool) $this->aParams[$sName];
- }
-
- public function getInt($sName, $bDefault = false)
- {
- if (!isset($this->aParams[$sName]) || is_array($this->aParams[$sName])) {
- return $bDefault;
- }
-
- if (!preg_match('/^[+-]?[0-9]+$/', $this->aParams[$sName])) {
- userError("Integer number expected for parameter '$sName'");
- }
-
- return (int) $this->aParams[$sName];
- }
-
- public function getFloat($sName, $bDefault = false)
- {
- if (!isset($this->aParams[$sName]) || is_array($this->aParams[$sName])) {
- return $bDefault;
- }
-
- if (!preg_match('/^[+-]?[0-9]*\.?[0-9]+$/', $this->aParams[$sName])) {
- userError("Floating-point number expected for parameter '$sName'");
- }
-
- return (float) $this->aParams[$sName];
- }
-
- public function getString($sName, $bDefault = false)
- {
- if (!isset($this->aParams[$sName])
- || !is_string($this->aParams[$sName])
- || strlen($this->aParams[$sName]) == 0
- ) {
- return $bDefault;
- }
-
- return $this->aParams[$sName];
- }
-
- public function getSet($sName, $aValues, $sDefault = false)
- {
- if (!isset($this->aParams[$sName])
- || !is_string($this->aParams[$sName])
- || strlen($this->aParams[$sName]) == 0
- ) {
- return $sDefault;
- }
-
- if (!in_array($this->aParams[$sName], $aValues, true)) {
- userError("Parameter '$sName' must be one of: ".join(', ', $aValues));
- }
-
- return $this->aParams[$sName];
- }
-
- public function getStringList($sName, $aDefault = false)
- {
- $sValue = $this->getString($sName);
-
- if ($sValue) {
- // removes all NULL, FALSE and Empty Strings but leaves 0 (zero) values
- return array_values(array_filter(explode(',', $sValue), 'strlen'));
- }
-
- return $aDefault;
- }
-
- public function getPreferredLanguages($sFallback = null)
- {
- if ($sFallback === null && isset($_SERVER['HTTP_ACCEPT_LANGUAGE'])) {
- $sFallback = $_SERVER['HTTP_ACCEPT_LANGUAGE'];
- }
-
- $aLanguages = array();
- $sLangString = $this->getString('accept-language', $sFallback);
-
- if ($sLangString
- && preg_match_all('/(([a-z]{1,8})([-_][a-z]{1,8})?)\s*(;\s*q\s*=\s*(1|0\.[0-9]+))?/i', $sLangString, $aLanguagesParse, PREG_SET_ORDER)
- ) {
- foreach ($aLanguagesParse as $iLang => $aLanguage) {
- $aLanguages[$aLanguage[1]] = isset($aLanguage[5])?(float)$aLanguage[5]:1 - ($iLang/100);
- if (!isset($aLanguages[$aLanguage[2]])) {
- $aLanguages[$aLanguage[2]] = $aLanguages[$aLanguage[1]]/10;
- }
- }
- arsort($aLanguages);
- }
- if (empty($aLanguages) && CONST_Default_Language) {
- $aLanguages[CONST_Default_Language] = 1;
- }
-
- foreach ($aLanguages as $sLanguage => $fLanguagePref) {
- $this->addNameTag($aLangPrefOrder, 'name:'.$sLanguage);
- }
- $this->addNameTag($aLangPrefOrder, 'name');
- $this->addNameTag($aLangPrefOrder, 'brand');
- foreach ($aLanguages as $sLanguage => $fLanguagePref) {
- $this->addNameTag($aLangPrefOrder, 'official_name:'.$sLanguage);
- $this->addNameTag($aLangPrefOrder, 'short_name:'.$sLanguage);
- }
- $this->addNameTag($aLangPrefOrder, 'official_name');
- $this->addNameTag($aLangPrefOrder, 'short_name');
- $this->addNameTag($aLangPrefOrder, 'ref');
- $this->addNameTag($aLangPrefOrder, 'type');
- return $aLangPrefOrder;
- }
-
- private function addNameTag(&$aLangPrefOrder, $sTag)
- {
- $aLangPrefOrder[$sTag] = $sTag;
- $aLangPrefOrder['_place_'.$sTag] = '_place_'.$sTag;
- }
-
- public function hasSetAny($aParamNames)
- {
- foreach ($aParamNames as $sName) {
- if ($this->getBool($sName)) {
- return true;
- }
- }
-
- return false;
- }
-}
+++ /dev/null
-<?php
-/**
- * SPDX-License-Identifier: GPL-2.0-only
- *
- * This file is part of Nominatim. (https://nominatim.org)
- *
- * Copyright (C) 2022 by the Nominatim developer community.
- * For a full list of authors see the git log.
- */
-
-namespace Nominatim;
-
-/**
- * Segment of a query string.
- *
- * The parts of a query strings are usually separated by commas.
- */
-class Phrase
-{
- // Complete phrase as a string (guaranteed to have no leading or trailing
- // spaces).
- private $sPhrase;
- // Element type for structured searches.
- private $sPhraseType;
- // Possible segmentations of the phrase.
- private $aWordSets;
-
- public function __construct($sPhrase, $sPhraseType)
- {
- $this->sPhrase = trim($sPhrase);
- $this->sPhraseType = $sPhraseType;
- }
-
- /**
- * Get the original phrase of the string.
- */
- public function getPhrase()
- {
- return $this->sPhrase;
- }
-
- /**
- * Return the element type of the phrase.
- *
- * @return string Pharse type if the phrase comes from a structured query
- * or empty string otherwise.
- */
- public function getPhraseType()
- {
- return $this->sPhraseType;
- }
-
- public function setWordSets($aWordSets)
- {
- $this->aWordSets = $aWordSets;
- }
-
- /**
- * Return the array of possible segmentations of the phrase.
- *
- * @return string[][] Array of segmentations, each consisting of an
- * array of terms.
- */
- public function getWordSets()
- {
- return $this->aWordSets;
- }
-
- /**
- * Invert the set of possible segmentations.
- *
- * @return void
- */
- public function invertWordSets()
- {
- foreach ($this->aWordSets as $i => $aSet) {
- $this->aWordSets[$i] = array_reverse($aSet);
- }
- }
-
- public function debugInfo()
- {
- return array(
- 'Type' => $this->sPhraseType,
- 'Phrase' => $this->sPhrase,
- 'WordSets' => $this->aWordSets
- );
- }
-}
+++ /dev/null
-<?php
-/**
- * SPDX-License-Identifier: GPL-2.0-only
- *
- * This file is part of Nominatim. (https://nominatim.org)
- *
- * Copyright (C) 2022 by the Nominatim developer community.
- * For a full list of authors see the git log.
- */
-
-namespace Nominatim;
-
-require_once(CONST_LibDir.'/AddressDetails.php');
-require_once(CONST_LibDir.'/Result.php');
-
-class PlaceLookup
-{
- protected $oDB;
-
- protected $aLangPrefOrderSql = "''";
-
- protected $bAddressDetails = false;
- protected $bExtraTags = false;
- protected $bNameDetails = false;
-
- protected $bIncludePolygonAsText = false;
- protected $bIncludePolygonAsGeoJSON = false;
- protected $bIncludePolygonAsKML = false;
- protected $bIncludePolygonAsSVG = false;
- protected $fPolygonSimplificationThreshold = 0.0;
-
- protected $sAnchorSql = null;
- protected $sAddressRankListSql = null;
- protected $sAllowedTypesSQLList = null;
- protected $bDeDupe = true;
-
-
- public function __construct(&$oDB)
- {
- $this->oDB =& $oDB;
- }
-
- public function doDeDupe()
- {
- return $this->bDeDupe;
- }
-
- public function setIncludeAddressDetails($b)
- {
- $this->bAddressDetails = $b;
- }
-
- public function loadParamArray($oParams, $sGeomType = null)
- {
- $aLangs = $oParams->getPreferredLanguages();
- $this->aLangPrefOrderSql =
- 'ARRAY['.join(',', $this->oDB->getDBQuotedList($aLangs)).']';
-
- $this->bExtraTags = $oParams->getBool('extratags', false);
- $this->bNameDetails = $oParams->getBool('namedetails', false);
-
- $this->bDeDupe = $oParams->getBool('dedupe', $this->bDeDupe);
-
- if ($sGeomType === null || $sGeomType == 'geojson') {
- $this->bIncludePolygonAsGeoJSON = $oParams->getBool('polygon_geojson');
- }
-
- if ($oParams->getString('format', '') !== 'geojson') {
- if ($sGeomType === null || $sGeomType == 'text') {
- $this->bIncludePolygonAsText = $oParams->getBool('polygon_text');
- }
- if ($sGeomType === null || $sGeomType == 'kml') {
- $this->bIncludePolygonAsKML = $oParams->getBool('polygon_kml');
- }
- if ($sGeomType === null || $sGeomType == 'svg') {
- $this->bIncludePolygonAsSVG = $oParams->getBool('polygon_svg');
- }
- }
- $this->fPolygonSimplificationThreshold
- = $oParams->getFloat('polygon_threshold', 0.0);
-
- $iWantedTypes =
- ($this->bIncludePolygonAsText ? 1 : 0) +
- ($this->bIncludePolygonAsGeoJSON ? 1 : 0) +
- ($this->bIncludePolygonAsKML ? 1 : 0) +
- ($this->bIncludePolygonAsSVG ? 1 : 0);
- if ($iWantedTypes > CONST_PolygonOutput_MaximumTypes) {
- if (CONST_PolygonOutput_MaximumTypes) {
- userError('Select only '.CONST_PolygonOutput_MaximumTypes.' polgyon output option');
- } else {
- userError('Polygon output is disabled');
- }
- }
- }
-
- public function getMoreUrlParams()
- {
- $aParams = array();
-
- if ($this->bAddressDetails) {
- $aParams['addressdetails'] = '1';
- }
- if ($this->bExtraTags) {
- $aParams['extratags'] = '1';
- }
- if ($this->bNameDetails) {
- $aParams['namedetails'] = '1';
- }
-
- if ($this->bIncludePolygonAsText) {
- $aParams['polygon_text'] = '1';
- }
- if ($this->bIncludePolygonAsGeoJSON) {
- $aParams['polygon_geojson'] = '1';
- }
- if ($this->bIncludePolygonAsKML) {
- $aParams['polygon_kml'] = '1';
- }
- if ($this->bIncludePolygonAsSVG) {
- $aParams['polygon_svg'] = '1';
- }
-
- if ($this->fPolygonSimplificationThreshold > 0.0) {
- $aParams['polygon_threshold'] = $this->fPolygonSimplificationThreshold;
- }
-
- if (!$this->bDeDupe) {
- $aParams['dedupe'] = '0';
- }
-
- return $aParams;
- }
-
- public function setAnchorSql($sPoint)
- {
- $this->sAnchorSql = $sPoint;
- }
-
- public function setAddressRankList($aList)
- {
- $this->sAddressRankListSql = '('.join(',', $aList).')';
- }
-
- public function setAllowedTypesSQLList($sSql)
- {
- $this->sAllowedTypesSQLList = $sSql;
- }
-
- public function setLanguagePreference($aLangPrefOrder)
- {
- $this->aLangPrefOrderSql = $this->oDB->getArraySQL(
- $this->oDB->getDBQuotedList($aLangPrefOrder)
- );
- }
-
- private function addressImportanceSql($sGeometry, $sPlaceId)
- {
- if ($this->sAnchorSql) {
- $sSQL = 'ST_Distance('.$this->sAnchorSql.','.$sGeometry.')';
- } else {
- $sSQL = '(SELECT max(ai_p.importance * (ai_p.rank_address + 2))';
- $sSQL .= ' FROM place_addressline ai_s, placex ai_p';
- $sSQL .= ' WHERE ai_s.place_id = '.$sPlaceId;
- $sSQL .= ' AND ai_p.place_id = ai_s.address_place_id ';
- $sSQL .= ' AND ai_s.isaddress ';
- $sSQL .= ' AND ai_p.importance is not null)';
- }
-
- return $sSQL.' AS addressimportance,';
- }
-
- private function langAddressSql($sHousenumber)
- {
- if ($this->bAddressDetails) {
- return ''; // langaddress will be computed from address details
- }
-
- return 'get_address_by_language(place_id,'.$sHousenumber.','.$this->aLangPrefOrderSql.') AS langaddress,';
- }
-
- public function lookupOSMID($sType, $iID)
- {
- $sSQL = 'select place_id from placex where osm_type = :type and osm_id = :id';
- $iPlaceID = $this->oDB->getOne($sSQL, array(':type' => $sType, ':id' => $iID));
-
- if (!$iPlaceID) {
- return null;
- }
-
- $aResults = $this->lookup(array($iPlaceID => new Result($iPlaceID)));
-
- return empty($aResults) ? null : reset($aResults);
- }
-
- public function lookup($aResults, $iMinRank = 0, $iMaxRank = 30)
- {
- Debug::newFunction('Place lookup');
-
- if (empty($aResults)) {
- return array();
- }
- $aSubSelects = array();
-
- $sPlaceIDs = Result::joinIdsByTable($aResults, Result::TABLE_PLACEX);
- if ($sPlaceIDs) {
- Debug::printVar('Ids from placex', $sPlaceIDs);
- $sSQL = 'SELECT ';
- $sSQL .= ' osm_type,';
- $sSQL .= ' osm_id,';
- $sSQL .= ' class,';
- $sSQL .= ' type,';
- $sSQL .= ' admin_level,';
- $sSQL .= ' rank_search,';
- $sSQL .= ' rank_address,';
- $sSQL .= ' min(place_id) AS place_id,';
- $sSQL .= ' min(parent_place_id) AS parent_place_id,';
- $sSQL .= ' -1 as housenumber,';
- $sSQL .= ' country_code,';
- $sSQL .= $this->langAddressSql('-1');
- $sSQL .= ' get_name_by_language(name,'.$this->aLangPrefOrderSql.') AS placename,';
- $sSQL .= " get_name_by_language(name, ARRAY['ref']) AS ref,";
- if ($this->bExtraTags) {
- $sSQL .= 'hstore_to_json(extratags)::text AS extra,';
- }
- if ($this->bNameDetails) {
- $sSQL .= 'hstore_to_json(name)::text AS names,';
- }
- $sSQL .= ' avg(ST_X(centroid)) AS lon, ';
- $sSQL .= ' avg(ST_Y(centroid)) AS lat, ';
- $sSQL .= ' COALESCE(importance,0.75-(rank_search::float/40)) AS importance, ';
- $sSQL .= $this->addressImportanceSql(
- 'ST_Collect(centroid)',
- 'min(CASE WHEN placex.rank_search < 28 THEN placex.place_id ELSE placex.parent_place_id END)'
- );
- $sSQL .= " COALESCE(extratags->'place', extratags->'linked_place') AS extra_place ";
- $sSQL .= ' FROM placex';
- $sSQL .= " WHERE place_id in ($sPlaceIDs) ";
- $sSQL .= ' AND (';
- $sSQL .= " placex.rank_address between $iMinRank and $iMaxRank ";
- if (14 >= $iMinRank && 14 <= $iMaxRank) {
- $sSQL .= " OR (extratags->'place') = 'city'";
- }
- if ($this->sAddressRankListSql) {
- $sSQL .= ' OR placex.rank_address in '.$this->sAddressRankListSql;
- }
- $sSQL .= ' ) ';
- if ($this->sAllowedTypesSQLList) {
- $sSQL .= 'AND placex.class in '.$this->sAllowedTypesSQLList;
- }
- $sSQL .= ' AND linked_place_id is null ';
- $sSQL .= ' GROUP BY ';
- $sSQL .= ' osm_type, ';
- $sSQL .= ' osm_id, ';
- $sSQL .= ' class, ';
- $sSQL .= ' type, ';
- $sSQL .= ' admin_level, ';
- $sSQL .= ' rank_search, ';
- $sSQL .= ' rank_address, ';
- $sSQL .= ' housenumber,';
- $sSQL .= ' country_code, ';
- $sSQL .= ' importance, ';
- if (!$this->bDeDupe) {
- $sSQL .= 'place_id,';
- }
- if (!$this->bAddressDetails) {
- $sSQL .= 'langaddress, ';
- }
- $sSQL .= ' placename, ';
- $sSQL .= ' ref, ';
- if ($this->bExtraTags) {
- $sSQL .= 'extratags, ';
- }
- if ($this->bNameDetails) {
- $sSQL .= 'name, ';
- }
- $sSQL .= ' extra_place ';
-
- $aSubSelects[] = $sSQL;
- }
-
- // postcode table
- $sPlaceIDs = Result::joinIdsByTable($aResults, Result::TABLE_POSTCODE);
- if ($sPlaceIDs) {
- Debug::printVar('Ids from location_postcode', $sPlaceIDs);
- $sSQL = 'SELECT';
- $sSQL .= " 'P' as osm_type,";
- $sSQL .= ' (SELECT osm_id from placex p WHERE p.place_id = lp.parent_place_id) as osm_id,';
- $sSQL .= " 'place' as class, 'postcode' as type,";
- $sSQL .= ' null::smallint as admin_level, rank_search, rank_address,';
- $sSQL .= ' place_id, parent_place_id,';
- $sSQL .= ' -1 as housenumber,';
- $sSQL .= ' country_code,';
- $sSQL .= $this->langAddressSql('-1');
- $sSQL .= ' postcode as placename,';
- $sSQL .= ' postcode as ref,';
- if ($this->bExtraTags) {
- $sSQL .= 'null::text AS extra,';
- }
- if ($this->bNameDetails) {
- $sSQL .= 'null::text AS names,';
- }
- $sSQL .= ' ST_x(geometry) AS lon, ST_y(geometry) AS lat,';
- $sSQL .= ' (0.75-(rank_search::float/40)) AS importance, ';
- $sSQL .= $this->addressImportanceSql('geometry', 'lp.parent_place_id');
- $sSQL .= ' null::text AS extra_place ';
- $sSQL .= 'FROM location_postcode lp';
- $sSQL .= " WHERE place_id in ($sPlaceIDs) ";
- $sSQL .= " AND lp.rank_address between $iMinRank and $iMaxRank";
-
- $aSubSelects[] = $sSQL;
- }
-
- // All other tables are rank 30 only.
- if ($iMaxRank == 30) {
- // TIGER table
- if (CONST_Use_US_Tiger_Data) {
- $sPlaceIDs = Result::joinIdsByTable($aResults, Result::TABLE_TIGER);
- if ($sPlaceIDs) {
- Debug::printVar('Ids from Tiger table', $sPlaceIDs);
- $sHousenumbers = Result::sqlHouseNumberTable($aResults, Result::TABLE_TIGER);
- // Tiger search only if a housenumber was searched and if it was found
- // (realized through a join)
- $sSQL = ' SELECT ';
- $sSQL .= " 'T' AS osm_type, ";
- $sSQL .= ' (SELECT osm_id from placex p WHERE p.place_id=blub.parent_place_id) as osm_id, ';
- $sSQL .= " 'place' AS class, ";
- $sSQL .= " 'house' AS type, ";
- $sSQL .= ' null::smallint AS admin_level, ';
- $sSQL .= ' 30 AS rank_search, ';
- $sSQL .= ' 30 AS rank_address, ';
- $sSQL .= ' place_id, ';
- $sSQL .= ' parent_place_id, ';
- $sSQL .= ' housenumber_for_place as housenumber,';
- $sSQL .= " 'us' AS country_code, ";
- $sSQL .= $this->langAddressSql('housenumber_for_place');
- $sSQL .= ' null::text AS placename, ';
- $sSQL .= ' null::text AS ref, ';
- if ($this->bExtraTags) {
- $sSQL .= 'null::text AS extra,';
- }
- if ($this->bNameDetails) {
- $sSQL .= 'null::text AS names,';
- }
- $sSQL .= ' st_x(centroid) AS lon, ';
- $sSQL .= ' st_y(centroid) AS lat,';
- $sSQL .= ' -1.15 AS importance, ';
- $sSQL .= $this->addressImportanceSql('centroid', 'blub.parent_place_id');
- $sSQL .= ' null::text AS extra_place ';
- $sSQL .= ' FROM (';
- $sSQL .= ' SELECT place_id, '; // interpolate the Tiger housenumbers here
- $sSQL .= ' CASE WHEN startnumber != endnumber';
- $sSQL .= ' THEN ST_LineInterpolatePoint(linegeo, (housenumber_for_place-startnumber::float)/(endnumber-startnumber)::float)';
- $sSQL .= ' ELSE ST_LineInterpolatePoint(linegeo, 0.5) END AS centroid, ';
- $sSQL .= ' parent_place_id, ';
- $sSQL .= ' housenumber_for_place';
- $sSQL .= ' FROM (';
- $sSQL .= ' location_property_tiger ';
- $sSQL .= ' JOIN (values '.$sHousenumbers.') AS housenumbers(place_id, housenumber_for_place) USING(place_id)) ';
- $sSQL .= ' WHERE ';
- $sSQL .= ' housenumber_for_place >= startnumber';
- $sSQL .= ' AND housenumber_for_place <= endnumber';
- $sSQL .= ' ) AS blub'; //postgres wants an alias here
-
- $aSubSelects[] = $sSQL;
- }
- }
-
- // osmline - interpolated housenumbers
- $sPlaceIDs = Result::joinIdsByTable($aResults, Result::TABLE_OSMLINE);
- if ($sPlaceIDs) {
- Debug::printVar('Ids from interpolation', $sPlaceIDs);
- $sHousenumbers = Result::sqlHouseNumberTable($aResults, Result::TABLE_OSMLINE);
- // interpolation line search only if a housenumber was searched
- // (realized through a join)
- $sSQL = 'SELECT ';
- $sSQL .= " 'W' AS osm_type, ";
- $sSQL .= ' osm_id, ';
- $sSQL .= " 'place' AS class, ";
- $sSQL .= " 'house' AS type, ";
- $sSQL .= ' null::smallint AS admin_level, ';
- $sSQL .= ' 30 AS rank_search, ';
- $sSQL .= ' 30 AS rank_address, ';
- $sSQL .= ' place_id, ';
- $sSQL .= ' parent_place_id, ';
- $sSQL .= ' housenumber_for_place as housenumber,';
- $sSQL .= ' country_code, ';
- $sSQL .= $this->langAddressSql('housenumber_for_place');
- $sSQL .= ' null::text AS placename, ';
- $sSQL .= ' null::text AS ref, ';
- if ($this->bExtraTags) {
- $sSQL .= 'null::text AS extra, ';
- }
- if ($this->bNameDetails) {
- $sSQL .= 'null::text AS names, ';
- }
- $sSQL .= ' st_x(centroid) AS lon, ';
- $sSQL .= ' st_y(centroid) AS lat, ';
- // slightly smaller than the importance for normal houses
- $sSQL .= ' -0.1 AS importance, ';
- $sSQL .= $this->addressImportanceSql('centroid', 'blub.parent_place_id');
- $sSQL .= ' null::text AS extra_place ';
- $sSQL .= ' FROM (';
- $sSQL .= ' SELECT ';
- $sSQL .= ' osm_id, ';
- $sSQL .= ' place_id, ';
- $sSQL .= ' country_code, ';
- $sSQL .= ' CASE '; // interpolate the housenumbers here
- $sSQL .= ' WHEN startnumber != endnumber ';
- $sSQL .= ' THEN ST_LineInterpolatePoint(linegeo, (housenumber_for_place-startnumber::float)/(endnumber-startnumber)::float) ';
- $sSQL .= ' ELSE linegeo ';
- $sSQL .= ' END as centroid, ';
- $sSQL .= ' parent_place_id, ';
- $sSQL .= ' housenumber_for_place ';
- $sSQL .= ' FROM (';
- $sSQL .= ' location_property_osmline ';
- $sSQL .= ' JOIN (values '.$sHousenumbers.') AS housenumbers(place_id, housenumber_for_place) USING(place_id)';
- $sSQL .= ' ) ';
- $sSQL .= ' WHERE housenumber_for_place >= 0 ';
- $sSQL .= ' ) as blub'; //postgres wants an alias here
-
- $aSubSelects[] = $sSQL;
- }
- }
-
- if (empty($aSubSelects)) {
- return array();
- }
-
- $sSQL = join(' UNION ', $aSubSelects);
- Debug::printSQL($sSQL);
- $aPlaces = $this->oDB->getAll($sSQL, null, 'Could not lookup place');
-
- foreach ($aPlaces as &$aPlace) {
- $aPlace['importance'] = (float) $aPlace['importance'];
- if ($this->bAddressDetails) {
- // to get addressdetails for tiger data, the housenumber is needed
- $aPlace['address'] = new AddressDetails(
- $this->oDB,
- $aPlace['place_id'],
- $aPlace['housenumber'],
- $this->aLangPrefOrderSql
- );
- $aPlace['langaddress'] = $aPlace['address']->getLocaleAddress();
- }
-
- if ($this->bExtraTags) {
- if ($aPlace['extra']) {
- $aPlace['sExtraTags'] = json_decode($aPlace['extra'], true);
- } else {
- $aPlace['sExtraTags'] = (object) array();
- }
- }
-
- if ($this->bNameDetails) {
- $aPlace['sNameDetails'] = $this->extractNames($aPlace['names']);
- }
-
- $aPlace['addresstype'] = ClassTypes\getLabelTag(
- $aPlace,
- $aPlace['country_code']
- );
-
- $aResults[$aPlace['place_id']] = $aPlace;
- }
-
- $aResults = array_filter(
- $aResults,
- function ($v) {
- return !($v instanceof Result);
- }
- );
-
- Debug::printVar('Places', $aResults);
-
- return $aResults;
- }
-
-
- private function extractNames($sNames)
- {
- if (!$sNames) {
- return (object) array();
- }
-
- $aFullNames = json_decode($sNames, true);
- $aNames = array();
-
- foreach ($aFullNames as $sKey => $sValue) {
- if (strpos($sKey, '_place_') === 0) {
- $sSubKey = substr($sKey, 7);
- if (array_key_exists($sSubKey, $aFullNames)) {
- $aNames[$sKey] = $sValue;
- } else {
- $aNames[$sSubKey] = $sValue;
- }
- } else {
- $aNames[$sKey] = $sValue;
- }
- }
-
- return $aNames;
- }
-
-
- /* returns an array which will contain the keys
- * aBoundingBox
- * and may also contain one or more of the keys
- * asgeojson
- * askml
- * assvg
- * astext
- * lat
- * lon
- */
- public function getOutlines($iPlaceID, $fLon = null, $fLat = null, $fRadius = null, $fLonReverse = null, $fLatReverse = null)
- {
-
- $aOutlineResult = array();
- if (!$iPlaceID) {
- return $aOutlineResult;
- }
-
- // Get the bounding box and outline polygon
- $sSQL = 'select place_id,0 as numfeatures,st_area(geometry) as area,';
- if ($fLonReverse != null && $fLatReverse != null) {
- $sSQL .= ' ST_Y(closest_point) as centrelat,';
- $sSQL .= ' ST_X(closest_point) as centrelon,';
- } else {
- $sSQL .= ' ST_Y(centroid) as centrelat, ST_X(centroid) as centrelon,';
- }
- $sSQL .= ' ST_YMin(geometry) as minlat,ST_YMax(geometry) as maxlat,';
- $sSQL .= ' ST_XMin(geometry) as minlon,ST_XMax(geometry) as maxlon';
- if ($this->bIncludePolygonAsGeoJSON) {
- $sSQL .= ',ST_AsGeoJSON(geometry) as asgeojson';
- }
- if ($this->bIncludePolygonAsKML) {
- $sSQL .= ',ST_AsKML(geometry) as askml';
- }
- if ($this->bIncludePolygonAsSVG) {
- $sSQL .= ',ST_AsSVG(geometry) as assvg';
- }
- if ($this->bIncludePolygonAsText) {
- $sSQL .= ',ST_AsText(geometry) as astext';
- }
- if ($fLonReverse != null && $fLatReverse != null) {
- $sFrom = ' from (SELECT * , CASE WHEN (class = \'highway\') AND (ST_GeometryType(geometry) = \'ST_LineString\') THEN ';
- $sFrom .=' ST_ClosestPoint(geometry, ST_SetSRID(ST_Point('.$fLatReverse.','.$fLonReverse.'),4326))';
- $sFrom .=' ELSE centroid END AS closest_point';
- $sFrom .= ' from placex where place_id = '.$iPlaceID.') as plx';
- } else {
- $sFrom = ' from placex where place_id = '.$iPlaceID;
- }
- if ($this->fPolygonSimplificationThreshold > 0) {
- $sSQL .= ' from (select place_id,centroid,ST_SimplifyPreserveTopology(geometry,'.$this->fPolygonSimplificationThreshold.') as geometry'.$sFrom.') as plx';
- } else {
- $sSQL .= $sFrom;
- }
-
- $aPointPolygon = $this->oDB->getRow($sSQL, null, 'Could not get outline');
-
- if ($aPointPolygon && $aPointPolygon['place_id']) {
- if ($aPointPolygon['centrelon'] !== null && $aPointPolygon['centrelat'] !== null) {
- $aOutlineResult['lat'] = $aPointPolygon['centrelat'];
- $aOutlineResult['lon'] = $aPointPolygon['centrelon'];
- }
-
- if ($this->bIncludePolygonAsGeoJSON) {
- $aOutlineResult['asgeojson'] = $aPointPolygon['asgeojson'];
- }
- if ($this->bIncludePolygonAsKML) {
- $aOutlineResult['askml'] = $aPointPolygon['askml'];
- }
- if ($this->bIncludePolygonAsSVG) {
- $aOutlineResult['assvg'] = $aPointPolygon['assvg'];
- }
- if ($this->bIncludePolygonAsText) {
- $aOutlineResult['astext'] = $aPointPolygon['astext'];
- }
-
- if (abs($aPointPolygon['minlat'] - $aPointPolygon['maxlat']) < 0.0000001) {
- $aPointPolygon['minlat'] = $aPointPolygon['minlat'] - $fRadius;
- $aPointPolygon['maxlat'] = $aPointPolygon['maxlat'] + $fRadius;
- }
-
- if (abs($aPointPolygon['minlon'] - $aPointPolygon['maxlon']) < 0.0000001) {
- $aPointPolygon['minlon'] = $aPointPolygon['minlon'] - $fRadius;
- $aPointPolygon['maxlon'] = $aPointPolygon['maxlon'] + $fRadius;
- }
-
- $aOutlineResult['aBoundingBox'] = array(
- (string)$aPointPolygon['minlat'],
- (string)$aPointPolygon['maxlat'],
- (string)$aPointPolygon['minlon'],
- (string)$aPointPolygon['maxlon']
- );
- }
-
- // as a fallback we generate a bounding box without knowing the size of the geometry
- if ((!isset($aOutlineResult['aBoundingBox'])) && isset($fLon)) {
- $aBounds = array(
- 'minlat' => $fLat - $fRadius,
- 'maxlat' => $fLat + $fRadius,
- 'minlon' => $fLon - $fRadius,
- 'maxlon' => $fLon + $fRadius
- );
-
- $aOutlineResult['aBoundingBox'] = array(
- (string)$aBounds['minlat'],
- (string)$aBounds['maxlat'],
- (string)$aBounds['minlon'],
- (string)$aBounds['maxlon']
- );
- }
- return $aOutlineResult;
- }
-}
+++ /dev/null
-<?php
-/**
- * SPDX-License-Identifier: GPL-2.0-only
- *
- * This file is part of Nominatim. (https://nominatim.org)
- *
- * Copyright (C) 2022 by the Nominatim developer community.
- * For a full list of authors see the git log.
- */
-
-namespace Nominatim;
-
-/**
- * A single result of a search operation or a reverse lookup.
- *
- * This object only contains the id of the result. It does not yet
- * have any details needed to format the output document.
- */
-class Result
-{
- const TABLE_PLACEX = 0;
- const TABLE_POSTCODE = 1;
- const TABLE_OSMLINE = 2;
- const TABLE_TIGER = 3;
-
- /// Database table that contains the result.
- public $iTable;
- /// Id of the result.
- public $iId;
- /// House number (only for interpolation results).
- public $iHouseNumber = -1;
- /// Number of exact matches in address (address searches only).
- public $iExactMatches = 0;
- /// Subranking within the results (the higher the worse).
- public $iResultRank = 0;
- /// Address rank of the result.
- public $iAddressRank;
-
- public function debugInfo()
- {
- return array(
- 'Table' => $this->iTable,
- 'ID' => $this->iId,
- 'House number' => $this->iHouseNumber,
- 'Exact Matches' => $this->iExactMatches,
- 'Result rank' => $this->iResultRank
- );
- }
-
-
- public function __construct($sId, $iTable = Result::TABLE_PLACEX)
- {
- $this->iTable = $iTable;
- $this->iId = (int) $sId;
- }
-
- public static function joinIdsByTable($aResults, $iTable)
- {
- return join(',', array_keys(array_filter(
- $aResults,
- function ($aValue) use ($iTable) {
- return $aValue->iTable == $iTable;
- }
- )));
- }
-
- public static function joinIdsByTableMinRank($aResults, $iTable, $iMinAddressRank)
- {
- return join(',', array_keys(array_filter(
- $aResults,
- function ($aValue) use ($iTable, $iMinAddressRank) {
- return $aValue->iTable == $iTable && $aValue->iAddressRank >= $iMinAddressRank;
- }
- )));
- }
-
- public static function joinIdsByTableMaxRank($aResults, $iTable, $iMaxAddressRank)
- {
- return join(',', array_keys(array_filter(
- $aResults,
- function ($aValue) use ($iTable, $iMaxAddressRank) {
- return $aValue->iTable == $iTable && $aValue->iAddressRank <= $iMaxAddressRank;
- }
- )));
- }
-
- public static function sqlHouseNumberTable($aResults, $iTable)
- {
- $sHousenumbers = '';
- $sSep = '';
- foreach ($aResults as $oResult) {
- if ($oResult->iTable == $iTable) {
- $sHousenumbers .= $sSep.'('.$oResult->iId.',';
- $sHousenumbers .= $oResult->iHouseNumber.')';
- $sSep = ',';
- }
- }
-
- return $sHousenumbers;
- }
-
- /**
- * Split a result array into highest ranked result and the rest
- *
- * @param object[] $aResults List of results to split.
- *
- * @return array[]
- */
- public static function splitResults($aResults)
- {
- $aHead = array();
- $aTail = array();
- $iMinRank = 10000;
-
- foreach ($aResults as $oRes) {
- if ($oRes->iResultRank < $iMinRank) {
- $aTail += $aHead;
- $aHead = array($oRes->iId => $oRes);
- $iMinRank = $oRes->iResultRank;
- } elseif ($oRes->iResultRank == $iMinRank) {
- $aHead[$oRes->iId] = $oRes;
- } else {
- $aTail[$oRes->iId] = $oRes;
- }
- }
-
- return array('head' => $aHead, 'tail' => $aTail);
- }
-}
+++ /dev/null
-<?php
-/**
- * SPDX-License-Identifier: GPL-2.0-only
- *
- * This file is part of Nominatim. (https://nominatim.org)
- *
- * Copyright (C) 2022 by the Nominatim developer community.
- * For a full list of authors see the git log.
- */
-
-namespace Nominatim;
-
-require_once(CONST_LibDir.'/Result.php');
-
-class ReverseGeocode
-{
- protected $oDB;
- protected $iMaxRank = 28;
-
-
- public function __construct(&$oDB)
- {
- $this->oDB =& $oDB;
- }
-
-
- public function setZoom($iZoom)
- {
- // Zoom to rank, this could probably be calculated but a lookup gives fine control
- $aZoomRank = array(
- 0 => 2, // Continent / Sea
- 1 => 2,
- 2 => 2,
- 3 => 4, // Country
- 4 => 4,
- 5 => 8, // State
- 6 => 10, // Region
- 7 => 10,
- 8 => 12, // County
- 9 => 12,
- 10 => 17, // City
- 11 => 17,
- 12 => 18, // Town / Village
- 13 => 18,
- 14 => 22, // Suburb
- 15 => 22,
- 16 => 26, // major street
- 17 => 27, // minor street
- 18 => 30, // or >, Building
- 19 => 30, // or >, Building
- );
- $this->iMaxRank = (isset($iZoom) && isset($aZoomRank[$iZoom]))?$aZoomRank[$iZoom]:28;
- }
-
- /**
- * Find the closest interpolation with the given search diameter.
- *
- * @param string $sPointSQL Reverse geocoding point as SQL
- * @param float $fSearchDiam Search diameter
- *
- * @return Record of the interpolation or null.
- */
- protected function lookupInterpolation($sPointSQL, $fSearchDiam)
- {
- Debug::newFunction('lookupInterpolation');
- $sSQL = 'SELECT place_id, parent_place_id, 30 as rank_search,';
- $sSQL .= ' (CASE WHEN endnumber != startnumber';
- $sSQL .= ' THEN (endnumber - startnumber) * ST_LineLocatePoint(linegeo,'.$sPointSQL.')';
- $sSQL .= ' ELSE startnumber END) as fhnr,';
- $sSQL .= ' startnumber, endnumber, step,';
- $sSQL .= ' ST_Distance(linegeo,'.$sPointSQL.') as distance';
- $sSQL .= ' FROM location_property_osmline';
- $sSQL .= ' WHERE ST_DWithin('.$sPointSQL.', linegeo, '.$fSearchDiam.')';
- $sSQL .= ' and indexed_status = 0 and startnumber is not NULL ';
- $sSQL .= ' ORDER BY distance ASC limit 1';
- Debug::printSQL($sSQL);
-
- return $this->oDB->getRow(
- $sSQL,
- null,
- 'Could not determine closest housenumber on an osm interpolation line.'
- );
- }
-
- protected function lookupLargeArea($sPointSQL, $iMaxRank)
- {
- if ($iMaxRank > 4) {
- $aPlace = $this->lookupPolygon($sPointSQL, $iMaxRank);
- if ($aPlace) {
- return new Result($aPlace['place_id']);
- }
- }
-
- // If no polygon which contains the searchpoint is found,
- // searches in the country_osm_grid table for a polygon.
- return $this->lookupInCountry($sPointSQL, $iMaxRank);
- }
-
- protected function lookupInCountry($sPointSQL, $iMaxRank)
- {
- Debug::newFunction('lookupInCountry');
- // searches for polygon in table country_osm_grid which contains the searchpoint
- // and searches for the nearest place node to the searchpoint in this polygon
- $sSQL = 'SELECT country_code FROM country_osm_grid';
- $sSQL .= ' WHERE ST_CONTAINS(geometry, '.$sPointSQL.') LIMIT 1';
- Debug::printSQL($sSQL);
-
- $sCountryCode = $this->oDB->getOne(
- $sSQL,
- null,
- 'Could not determine country polygon containing the point.'
- );
- Debug::printVar('Country code', $sCountryCode);
-
- if ($sCountryCode) {
- if ($iMaxRank > 4) {
- // look for place nodes with the given country code
- $sSQL = 'SELECT place_id FROM';
- $sSQL .= ' (SELECT place_id, rank_search,';
- $sSQL .= ' ST_distance('.$sPointSQL.', geometry) as distance';
- $sSQL .= ' FROM placex';
- $sSQL .= ' WHERE osm_type = \'N\'';
- $sSQL .= ' AND country_code = \''.$sCountryCode.'\'';
- $sSQL .= ' AND rank_search < 26 '; // needed to select right index
- $sSQL .= ' AND rank_search between 5 and ' .min(25, $iMaxRank);
- $sSQL .= ' AND class = \'place\' AND type != \'postcode\'';
- $sSQL .= ' AND name IS NOT NULL ';
- $sSQL .= ' and indexed_status = 0 and linked_place_id is null';
- $sSQL .= ' AND ST_DWithin('.$sPointSQL.', geometry, 1.8)) p ';
- $sSQL .= 'WHERE distance <= reverse_place_diameter(rank_search)';
- $sSQL .= ' ORDER BY rank_search DESC, distance ASC';
- $sSQL .= ' LIMIT 1';
- Debug::printSQL($sSQL);
-
- $aPlace = $this->oDB->getRow($sSQL, null, 'Could not determine place node.');
- Debug::printVar('Country node', $aPlace);
-
- if ($aPlace) {
- return new Result($aPlace['place_id']);
- }
- }
-
- // still nothing, then return the country object
- $sSQL = 'SELECT place_id, ST_distance('.$sPointSQL.', centroid) as distance';
- $sSQL .= ' FROM placex';
- $sSQL .= ' WHERE country_code = \''.$sCountryCode.'\'';
- $sSQL .= ' AND rank_search = 4 AND rank_address = 4';
- $sSQL .= ' AND class in (\'boundary\', \'place\')';
- $sSQL .= ' AND linked_place_id is null';
- $sSQL .= ' ORDER BY distance ASC';
- Debug::printSQL($sSQL);
-
- $aPlace = $this->oDB->getRow($sSQL, null, 'Could not determine place node.');
- Debug::printVar('Country place', $aPlace);
- if ($aPlace) {
- return new Result($aPlace['place_id']);
- }
- }
-
- return null;
- }
-
- /**
- * Search for areas or nodes for areas or nodes between state and suburb level.
- *
- * @param string $sPointSQL Search point as SQL string.
- * @param int $iMaxRank Maximum address rank of the feature.
- *
- * @return Record of the found feature or null.
- *
- * Searches first for polygon that contains the search point.
- * If such a polygon is found, place nodes with a higher rank are
- * searched inside the polygon.
- */
- protected function lookupPolygon($sPointSQL, $iMaxRank)
- {
- Debug::newFunction('lookupPolygon');
- // polygon search begins at suburb-level
- if ($iMaxRank > 25) {
- $iMaxRank = 25;
- }
- // no polygon search over country-level
- if ($iMaxRank < 5) {
- $iMaxRank = 5;
- }
- // search for polygon
- $sSQL = 'SELECT place_id, parent_place_id, rank_address, rank_search FROM';
- $sSQL .= '(select place_id, parent_place_id, rank_address, rank_search, country_code, geometry';
- $sSQL .= ' FROM placex';
- $sSQL .= ' WHERE ST_GeometryType(geometry) in (\'ST_Polygon\', \'ST_MultiPolygon\')';
- $sSQL .= ' AND rank_address Between 5 AND ' .$iMaxRank;
- $sSQL .= ' AND geometry && '.$sPointSQL;
- $sSQL .= ' AND type != \'postcode\' ';
- $sSQL .= ' AND name is not null';
- $sSQL .= ' AND indexed_status = 0 and linked_place_id is null';
- $sSQL .= ' ORDER BY rank_address DESC LIMIT 50 ) as a';
- $sSQL .= ' WHERE ST_CONTAINS(geometry, '.$sPointSQL.' )';
- $sSQL .= ' ORDER BY rank_address DESC LIMIT 1';
- Debug::printSQL($sSQL);
-
- $aPoly = $this->oDB->getRow($sSQL, null, 'Could not determine polygon containing the point.');
- Debug::printVar('Polygon result', $aPoly);
-
- if ($aPoly) {
- // if a polygon is found, search for placenodes begins ...
- $iRankAddress = $aPoly['rank_address'];
- $iRankSearch = $aPoly['rank_search'];
- $iPlaceID = $aPoly['place_id'];
-
- if ($iRankAddress != $iMaxRank) {
- $sSQL = 'SELECT place_id FROM ';
- $sSQL .= '(SELECT place_id, rank_search, country_code, geometry,';
- $sSQL .= ' ST_distance('.$sPointSQL.', geometry) as distance';
- $sSQL .= ' FROM placex';
- $sSQL .= ' WHERE osm_type = \'N\'';
- // using rank_search because of a better differentiation
- // for place nodes at rank_address 16
- $sSQL .= ' AND rank_search > '.$iRankSearch;
- $sSQL .= ' AND rank_search <= '.$iMaxRank;
- $sSQL .= ' AND rank_search < 26 '; // needed to select right index
- $sSQL .= ' AND rank_address > 0';
- $sSQL .= ' AND class = \'place\'';
- $sSQL .= ' AND type != \'postcode\'';
- $sSQL .= ' AND name IS NOT NULL ';
- $sSQL .= ' AND indexed_status = 0 AND linked_place_id is null';
- $sSQL .= ' AND ST_DWithin('.$sPointSQL.', geometry, reverse_place_diameter('.$iRankSearch.'::smallint))';
- $sSQL .= ' ORDER BY distance ASC,';
- $sSQL .= ' rank_address DESC';
- $sSQL .= ' limit 500) as a';
- $sSQL .= ' WHERE ST_CONTAINS((SELECT geometry FROM placex WHERE place_id = '.$iPlaceID.'), geometry )';
- $sSQL .= ' AND distance <= reverse_place_diameter(rank_search)';
- $sSQL .= ' ORDER BY distance ASC, rank_search DESC';
- $sSQL .= ' LIMIT 1';
- Debug::printSQL($sSQL);
-
- $aPlaceNode = $this->oDB->getRow($sSQL, null, 'Could not determine place node.');
- Debug::printVar('Nearest place node', $aPlaceNode);
- if ($aPlaceNode) {
- return $aPlaceNode;
- }
- }
- }
- return $aPoly;
- }
-
-
- public function lookup($fLat, $fLon, $bDoInterpolation = true)
- {
- return $this->lookupPoint(
- 'ST_SetSRID(ST_Point('.$fLon.','.$fLat.'),4326)',
- $bDoInterpolation
- );
- }
-
- public function lookupPoint($sPointSQL, $bDoInterpolation = true)
- {
- Debug::newFunction('lookupPoint');
- // Find the nearest point
- $fSearchDiam = 0.006;
- $oResult = null;
- $aPlace = null;
-
- // for POI or street level
- if ($this->iMaxRank >= 26) {
- // starts if the search is on POI or street level,
- // searches for the nearest POI or street,
- // if a street is found and a POI is searched for,
- // the nearest POI which the found street is a parent of is chosen.
- $sSQL = 'select place_id,parent_place_id,rank_address,country_code,';
- $sSQL .= ' ST_distance('.$sPointSQL.', geometry) as distance';
- $sSQL .= ' FROM ';
- $sSQL .= ' placex';
- $sSQL .= ' WHERE ST_DWithin('.$sPointSQL.', geometry, '.$fSearchDiam.')';
- $sSQL .= ' AND';
- $sSQL .= ' rank_address between 26 and '.$this->iMaxRank;
- $sSQL .= ' and (name is not null or housenumber is not null';
- $sSQL .= ' or rank_address between 26 and 27)';
- $sSQL .= ' and (rank_address between 26 and 27';
- $sSQL .= ' or ST_GeometryType(geometry) != \'ST_LineString\')';
- $sSQL .= ' and class not in (\'boundary\')';
- $sSQL .= ' and indexed_status = 0 and linked_place_id is null';
- $sSQL .= ' and (ST_GeometryType(geometry) not in (\'ST_Polygon\',\'ST_MultiPolygon\') ';
- $sSQL .= ' OR ST_DWithin('.$sPointSQL.', centroid, '.$fSearchDiam.'))';
- $sSQL .= ' ORDER BY distance ASC limit 1';
- Debug::printSQL($sSQL);
-
- $aPlace = $this->oDB->getRow($sSQL, null, 'Could not determine closest place.');
-
- Debug::printVar('POI/street level result', $aPlace);
- if ($aPlace) {
- $iPlaceID = $aPlace['place_id'];
- $oResult = new Result($iPlaceID);
- $iRankAddress = $aPlace['rank_address'];
- }
-
- if ($aPlace) {
- // if street and maxrank > streetlevel
- if ($iRankAddress <= 27 && $this->iMaxRank > 27) {
- // find the closest object (up to a certain radius) of which the street is a parent of
- $sSQL = ' select place_id,';
- $sSQL .= ' ST_distance('.$sPointSQL.', geometry) as distance';
- $sSQL .= ' FROM ';
- $sSQL .= ' placex';
- // radius ?
- $sSQL .= ' WHERE ST_DWithin('.$sPointSQL.', geometry, 0.001)';
- $sSQL .= ' AND parent_place_id = '.$iPlaceID;
- $sSQL .= ' and rank_address > 28';
- $sSQL .= ' and ST_GeometryType(geometry) != \'ST_LineString\'';
- $sSQL .= ' and (name is not null or housenumber is not null)';
- $sSQL .= ' and class not in (\'boundary\')';
- $sSQL .= ' and indexed_status = 0 and linked_place_id is null';
- $sSQL .= ' ORDER BY distance ASC limit 1';
- Debug::printSQL($sSQL);
-
- $aStreet = $this->oDB->getRow($sSQL, null, 'Could not determine closest place.');
- Debug::printVar('Closest POI result', $aStreet);
-
- if ($aStreet) {
- $aPlace = $aStreet;
- $oResult = new Result($aStreet['place_id']);
- $iRankAddress = 30;
- }
- }
-
- // In the US we can check TIGER data for nearest housenumber
- if (CONST_Use_US_Tiger_Data
- && $iRankAddress <= 27
- && $aPlace['country_code'] == 'us'
- && $this->iMaxRank >= 28
- ) {
- $sSQL = 'SELECT place_id,parent_place_id,30 as rank_search,';
- $sSQL .= ' (endnumber - startnumber) * ST_LineLocatePoint(linegeo,'.$sPointSQL.') as fhnr,';
- $sSQL .= ' startnumber, endnumber, step,';
- $sSQL .= ' ST_Distance('.$sPointSQL.', linegeo) as distance';
- $sSQL .= ' FROM location_property_tiger WHERE parent_place_id = '.$oResult->iId;
- $sSQL .= ' AND ST_DWithin('.$sPointSQL.', linegeo, 0.001)';
- $sSQL .= ' ORDER BY distance ASC limit 1';
- Debug::printSQL($sSQL);
-
- $aPlaceTiger = $this->oDB->getRow($sSQL, null, 'Could not determine closest Tiger place.');
- Debug::printVar('Tiger house number result', $aPlaceTiger);
-
- if ($aPlaceTiger) {
- $aPlace = $aPlaceTiger;
- $oResult = new Result($aPlaceTiger['place_id'], Result::TABLE_TIGER);
- $iRndNum = max(0, round($aPlaceTiger['fhnr'] / $aPlaceTiger['step']) * $aPlaceTiger['step']);
- $oResult->iHouseNumber = $aPlaceTiger['startnumber'] + $iRndNum;
- if ($oResult->iHouseNumber > $aPlaceTiger['endnumber']) {
- $oResult->iHouseNumber = $aPlaceTiger['endnumber'];
- }
- $iRankAddress = 30;
- }
- }
- }
-
- if ($bDoInterpolation && $this->iMaxRank >= 30) {
- $fDistance = $fSearchDiam;
- if ($aPlace) {
- // We can't reliably go from the closest street to an
- // interpolation line because the closest interpolation
- // may have a different street segments as a parent.
- // Therefore allow an interpolation line to take precedence
- // even when the street is closer.
- $fDistance = $iRankAddress < 28 ? 0.001 : $aPlace['distance'];
- }
-
- $aHouse = $this->lookupInterpolation($sPointSQL, $fDistance);
- Debug::printVar('Interpolation result', $aPlace);
-
- if ($aHouse) {
- $oResult = new Result($aHouse['place_id'], Result::TABLE_OSMLINE);
- $iRndNum = max(0, round($aHouse['fhnr'] / $aHouse['step']) * $aHouse['step']);
- $oResult->iHouseNumber = $aHouse['startnumber'] + $iRndNum;
- if ($oResult->iHouseNumber > $aHouse['endnumber']) {
- $oResult->iHouseNumber = $aHouse['endnumber'];
- }
- $aPlace = $aHouse;
- }
- }
-
- if (!$aPlace) {
- // if no POI or street is found ...
- $oResult = $this->lookupLargeArea($sPointSQL, 25);
- }
- } else {
- // lower than street level ($iMaxRank < 26 )
- $oResult = $this->lookupLargeArea($sPointSQL, $this->iMaxRank);
- }
-
- Debug::printVar('Final result', $oResult);
- return $oResult;
- }
-}
+++ /dev/null
-<?php
-/**
- * SPDX-License-Identifier: GPL-2.0-only
- *
- * This file is part of Nominatim. (https://nominatim.org)
- *
- * Copyright (C) 2022 by the Nominatim developer community.
- * For a full list of authors see the git log.
- */
-
-namespace Nominatim;
-
-require_once(CONST_LibDir.'/lib.php');
-
-
-/**
- * Collection of search constraints that are independent of the
- * actual interpretation of the search query.
- *
- * The search context is shared between all SearchDescriptions. This
- * object mainly serves as context provider for the database queries.
- * Therefore most data is directly cached as SQL statements.
- */
-class SearchContext
-{
- /// Search radius around a given Near reference point.
- private $fNearRadius = false;
- /// True if search must be restricted to viewbox only.
- public $bViewboxBounded = false;
-
- /// Reference point for search (as SQL).
- public $sqlNear = '';
- /// Viewbox selected for search (as SQL).
- public $sqlViewboxSmall = '';
- /// Viewbox with a larger buffer around (as SQL).
- public $sqlViewboxLarge = '';
- /// Reference along a route (as SQL).
- public $sqlViewboxCentre = '';
- /// List of countries to restrict search to (as array).
- public $aCountryList = null;
- /// List of countries to restrict search to (as SQL).
- public $sqlCountryList = '';
- /// List of place IDs to exclude (as SQL).
- private $sqlExcludeList = '';
- /// Subset of word ids of full words in the query.
- private $aFullNameWords = array();
-
- public function setFullNameWords($aWordList)
- {
- $this->aFullNameWords = $aWordList;
- }
-
- public function getFullNameTerms()
- {
- return $this->aFullNameWords;
- }
-
- /**
- * Check if a reference point is defined.
- *
- * @return bool True if a reference point is defined.
- */
- public function hasNearPoint()
- {
- return $this->fNearRadius !== false;
- }
-
- /**
- * Get radius around reference point.
- *
- * @return float Search radius around reference point.
- */
- public function nearRadius()
- {
- return $this->fNearRadius;
- }
-
- /**
- * Set search reference point in WGS84.
- *
- * If set, then only places around this point will be taken into account.
- *
- * @param float $fLat Latitude of point.
- * @param float $fLon Longitude of point.
- * @param float $fRadius Search radius around point.
- *
- * @return void
- */
- public function setNearPoint($fLat, $fLon, $fRadius = 0.1)
- {
- $this->fNearRadius = $fRadius;
- $this->sqlNear = 'ST_SetSRID(ST_Point('.$fLon.','.$fLat.'),4326)';
- }
-
- /**
- * Check if the search is geographically restricted.
- *
- * Searches are restricted if a reference point is given or if
- * a bounded viewbox is set.
- *
- * @return bool True, if the search is geographically bounded.
- */
- public function isBoundedSearch()
- {
- return $this->hasNearPoint() || ($this->sqlViewboxSmall && $this->bViewboxBounded);
- }
-
- /**
- * Set rectangular viewbox.
- *
- * The viewbox may be bounded which means that no search results
- * must be outside the viewbox.
- *
- * @param float[4] $aViewBox Coordinates of the viewbox.
- * @param bool $bBounded True if the viewbox is bounded.
- *
- * @return void
- */
- public function setViewboxFromBox(&$aViewBox, $bBounded)
- {
- $this->bViewboxBounded = $bBounded;
- $this->sqlViewboxCentre = '';
-
- $this->sqlViewboxSmall = sprintf(
- 'ST_SetSRID(ST_MakeBox2D(ST_Point(%F,%F),ST_Point(%F,%F)),4326)',
- $aViewBox[0],
- $aViewBox[1],
- $aViewBox[2],
- $aViewBox[3]
- );
-
- $fHeight = abs($aViewBox[0] - $aViewBox[2]);
- $fWidth = abs($aViewBox[1] - $aViewBox[3]);
-
- $this->sqlViewboxLarge = sprintf(
- 'ST_SetSRID(ST_MakeBox2D(ST_Point(%F,%F),ST_Point(%F,%F)),4326)',
- max($aViewBox[0], $aViewBox[2]) + $fHeight,
- max($aViewBox[1], $aViewBox[3]) + $fWidth,
- min($aViewBox[0], $aViewBox[2]) - $fHeight,
- min($aViewBox[1], $aViewBox[3]) - $fWidth
- );
- }
-
- /**
- * Set viewbox along a route.
- *
- * The viewbox may be bounded which means that no search results
- * must be outside the viewbox.
- *
- * @param object $oDB Nominatim::DB instance to use for computing the box.
- * @param string[] $aRoutePoints List of x,y coordinates along a route.
- * @param float $fRouteWidth Buffer around the route to use.
- * @param bool $bBounded True if the viewbox bounded.
- *
- * @return void
- */
- public function setViewboxFromRoute(&$oDB, $aRoutePoints, $fRouteWidth, $bBounded)
- {
- $this->bViewboxBounded = $bBounded;
- $this->sqlViewboxCentre = "ST_SetSRID('LINESTRING(";
- $sSep = '';
- foreach ($aRoutePoints as $aPoint) {
- $fPoint = (float)$aPoint;
- $this->sqlViewboxCentre .= $sSep.$fPoint;
- $sSep = ($sSep == ' ') ? ',' : ' ';
- }
- $this->sqlViewboxCentre .= ")'::geometry,4326)";
-
- $sSQL = 'ST_BUFFER('.$this->sqlViewboxCentre.','.($fRouteWidth/69).')';
- $sGeom = $oDB->getOne('select '.$sSQL, null, 'Could not get small viewbox');
- $this->sqlViewboxSmall = "'".$sGeom."'::geometry";
-
- $sSQL = 'ST_BUFFER('.$this->sqlViewboxCentre.','.($fRouteWidth/30).')';
- $sGeom = $oDB->getOne('select '.$sSQL, null, 'Could not get large viewbox');
- $this->sqlViewboxLarge = "'".$sGeom."'::geometry";
- }
-
- /**
- * Set list of excluded place IDs.
- *
- * @param integer[] $aExcluded List of IDs.
- *
- * @return void
- */
- public function setExcludeList($aExcluded)
- {
- $this->sqlExcludeList = ' not in ('.join(',', $aExcluded).')';
- }
-
- /**
- * Set list of countries to restrict search to.
- *
- * @param string[] $aCountries List of two-letter lower-case country codes.
- *
- * @return void
- */
- public function setCountryList($aCountries)
- {
- $this->sqlCountryList = '('.join(',', array_map('addQuotes', $aCountries)).')';
- $this->aCountryList = $aCountries;
- }
-
- /**
- * Extract a reference point from a query string.
- *
- * @param string $sQuery Query to scan.
- *
- * @return string The remaining query string.
- */
- public function setNearPointFromQuery($sQuery)
- {
- $aResult = parseLatLon($sQuery);
-
- if ($aResult !== false
- && $aResult[1] <= 90.1
- && $aResult[1] >= -90.1
- && $aResult[2] <= 180.1
- && $aResult[2] >= -180.1
- ) {
- $this->setNearPoint($aResult[1], $aResult[2]);
- $sQuery = trim(str_replace($aResult[0], ' ', $sQuery));
- }
-
- return $sQuery;
- }
-
- /**
- * Get an SQL snippet for computing the distance from the reference point.
- *
- * @param string $sObj SQL variable name to compute the distance from.
- *
- * @return string An SQL string.
- */
- public function distanceSQL($sObj)
- {
- return 'ST_Distance('.$this->sqlNear.", $sObj)";
- }
-
- /**
- * Get an SQL snippet for checking if something is within range of the
- * reference point.
- *
- * @param string $sObj SQL variable name to compute if it is within range.
- *
- * @return string An SQL string.
- */
- public function withinSQL($sObj)
- {
- return sprintf('ST_DWithin(%s, %s, %F)', $sObj, $this->sqlNear, $this->fNearRadius);
- }
-
- /**
- * Get an SQL snippet of the importance factor of the viewbox.
- *
- * The importance factor is computed by checking if an object is within
- * the viewbox and/or the extended version of the viewbox.
- *
- * @param string $sObj SQL variable name of object to weight the importance
- *
- * @return string SQL snippet of the factor with a leading multiply sign.
- */
- public function viewboxImportanceSQL($sObj)
- {
- $sSQL = '';
-
- if ($this->sqlViewboxSmall) {
- $sSQL = " * CASE WHEN ST_Contains($this->sqlViewboxSmall, $sObj) THEN 1 ELSE 0.5 END";
- }
- if ($this->sqlViewboxLarge) {
- $sSQL = " * CASE WHEN ST_Contains($this->sqlViewboxLarge, $sObj) THEN 1 ELSE 0.5 END";
- }
-
- return $sSQL;
- }
-
- /**
- * SQL snippet checking if a place ID should be excluded.
- *
- * @param string $sVariable SQL variable name of place ID to check,
- * potentially prefixed with more SQL.
- *
- * @return string SQL snippet.
- */
- public function excludeSQL($sVariable)
- {
- if ($this->sqlExcludeList) {
- return $sVariable.$this->sqlExcludeList;
- }
-
- return '';
- }
-
- /**
- * Check if the given country is covered by the search context.
- *
- * @param string $sCountryCode Country code of the country to check.
- *
- * @return True, if no country code restrictions are set or the
- * country is included in the country list.
- */
- public function isCountryApplicable($sCountryCode)
- {
- return $this->aCountryList === null || in_array($sCountryCode, $this->aCountryList);
- }
-
- public function debugInfo()
- {
- return array(
- 'Near radius' => $this->fNearRadius,
- 'Near point (SQL)' => $this->sqlNear,
- 'Bounded viewbox' => $this->bViewboxBounded,
- 'Viewbox (SQL, small)' => $this->sqlViewboxSmall,
- 'Viewbox (SQL, large)' => $this->sqlViewboxLarge,
- 'Viewbox (SQL, centre)' => $this->sqlViewboxCentre,
- 'Countries (SQL)' => $this->sqlCountryList,
- 'Excluded IDs (SQL)' => $this->sqlExcludeList
- );
- }
-}
+++ /dev/null
-<?php
-/**
- * SPDX-License-Identifier: GPL-2.0-only
- *
- * This file is part of Nominatim. (https://nominatim.org)
- *
- * Copyright (C) 2022 by the Nominatim developer community.
- * For a full list of authors see the git log.
- */
-
-namespace Nominatim;
-
-require_once(CONST_LibDir.'/SpecialSearchOperator.php');
-require_once(CONST_LibDir.'/SearchContext.php');
-require_once(CONST_LibDir.'/Result.php');
-
-/**
- * Description of a single interpretation of a search query.
- */
-class SearchDescription
-{
- /// Ranking how well the description fits the query.
- private $iSearchRank = 0;
- /// Country code of country the result must belong to.
- private $sCountryCode = '';
- /// List of word ids making up the name of the object.
- private $aName = array();
- /// True if the name is rare enough to force index use on name.
- private $bRareName = false;
- /// True if the name requires to be accompanied by address terms.
- private $bNameNeedsAddress = false;
- /// List of word ids making up the address of the object.
- private $aAddress = array();
- /// List of word ids that appear in the name but should be ignored.
- private $aNameNonSearch = array();
- /// List of word ids that appear in the address but should be ignored.
- private $aAddressNonSearch = array();
- /// Kind of search for special searches, see Nominatim::Operator.
- private $iOperator = Operator::NONE;
- /// Class of special feature to search for.
- private $sClass = '';
- /// Type of special feature to search for.
- private $sType = '';
- /// Housenumber of the object.
- private $sHouseNumber = '';
- /// Postcode for the object.
- private $sPostcode = '';
- /// Global search constraints.
- private $oContext;
-
- // Temporary values used while creating the search description.
-
- /// Index of phrase currently processed.
- private $iNamePhrase = -1;
-
- /**
- * Create an empty search description.
- *
- * @param object $oContext Global context to use. Will be inherited by
- * all derived search objects.
- */
- public function __construct($oContext)
- {
- $this->oContext = $oContext;
- }
-
- /**
- * Get current search rank.
- *
- * The higher the search rank the lower the likelihood that the
- * search is a correct interpretation of the search query.
- *
- * @return integer Search rank.
- */
- public function getRank()
- {
- return $this->iSearchRank;
- }
-
- /**
- * Extract key/value pairs from a query.
- *
- * Key/value pairs are recognised if they are of the form [<key>=<value>].
- * If multiple terms of this kind are found then all terms are removed
- * but only the first is used for search.
- *
- * @param string $sQuery Original query string.
- *
- * @return string The query string with the special search patterns removed.
- */
- public function extractKeyValuePairs($sQuery)
- {
- // Search for terms of kind [<key>=<value>].
- preg_match_all(
- '/\\[([\\w_]*)=([\\w_]*)\\]/',
- $sQuery,
- $aSpecialTermsRaw,
- PREG_SET_ORDER
- );
-
- foreach ($aSpecialTermsRaw as $aTerm) {
- $sQuery = str_replace($aTerm[0], ' ', $sQuery);
- if (!$this->hasOperator()) {
- $this->setPoiSearch(Operator::TYPE, $aTerm[1], $aTerm[2]);
- }
- }
-
- return $sQuery;
- }
-
- /**
- * Check if the combination of parameters is sensible.
- *
- * @return bool True, if the search looks valid.
- */
- public function isValidSearch()
- {
- if (empty($this->aName)) {
- if ($this->sHouseNumber) {
- return false;
- }
- if (!$this->sClass && !$this->sCountryCode) {
- return false;
- }
- }
- if ($this->bNameNeedsAddress && empty($this->aAddress)) {
- return false;
- }
-
- return true;
- }
-
- /////////// Search building functions
-
- /**
- * Create a copy of this search description adding to search rank.
- *
- * @param integer $iTermCost Cost to add to the current search rank.
- *
- * @return object Cloned search description.
- */
- public function clone($iTermCost)
- {
- $oSearch = clone $this;
- $oSearch->iSearchRank += $iTermCost;
-
- return $oSearch;
- }
-
- /**
- * Check if the search currently includes a name.
- *
- * @param bool bIncludeNonNames If true stop-word tokens are taken into
- * account, too.
- *
- * @return bool True, if search has a name.
- */
- public function hasName($bIncludeNonNames = false)
- {
- return !empty($this->aName)
- || (!empty($this->aNameNonSearch) && $bIncludeNonNames);
- }
-
- /**
- * Check if the search currently includes an address term.
- *
- * @return bool True, if any address term is included, including stop-word
- * terms.
- */
- public function hasAddress()
- {
- return !empty($this->aAddress) || !empty($this->aAddressNonSearch);
- }
-
- /**
- * Check if a country restriction is currently included in the search.
- *
- * @return bool True, if a country restriction is set.
- */
- public function hasCountry()
- {
- return $this->sCountryCode !== '';
- }
-
- /**
- * Check if a postcode is currently included in the search.
- *
- * @return bool True, if a postcode is set.
- */
- public function hasPostcode()
- {
- return $this->sPostcode !== '';
- }
-
- /**
- * Check if a house number is set for the search.
- *
- * @return bool True, if a house number is set.
- */
- public function hasHousenumber()
- {
- return $this->sHouseNumber !== '';
- }
-
- /**
- * Check if a special type of place is requested.
- *
- * param integer iOperator When set, check for the particular
- * operator used for the special type.
- *
- * @return bool True, if speial type is requested or, if requested,
- * a special type with the given operator.
- */
- public function hasOperator($iOperator = null)
- {
- return $iOperator === null ? $this->iOperator != Operator::NONE : $this->iOperator == $iOperator;
- }
-
- /**
- * Add the given token to the list of terms to search for in the address.
- *
- * @param integer iID ID of term to add.
- * @param bool bSearchable Term should be used to search for result
- * (i.e. term is not a stop word).
- */
- public function addAddressToken($iId, $bSearchable = true)
- {
- if ($bSearchable) {
- $this->aAddress[$iId] = $iId;
- } else {
- $this->aAddressNonSearch[$iId] = $iId;
- }
- }
-
- /**
- * Add the given full-word token to the list of terms to search for in the
- * name.
- *
- * @param integer iId ID of term to add.
- * @param bool bRareName True if the term is infrequent enough to not
- * require other constraints for efficient search.
- */
- public function addNameToken($iId, $bRareName)
- {
- $this->aName[$iId] = $iId;
- $this->bRareName = $bRareName;
- $this->bNameNeedsAddress = false;
- }
-
- /**
- * Add the given partial token to the list of terms to search for in
- * the name.
- *
- * @param integer iID ID of term to add.
- * @param bool bSearchable Term should be used to search for result
- * (i.e. term is not a stop word).
- * @param bool bNeedsAddress True if the term is too unspecific to be used
- * in a stand-alone search without an address
- * to narrow down the search.
- * @param integer iPhraseNumber Index of phrase, where the partial term
- * appears.
- */
- public function addPartialNameToken($iId, $bSearchable, $bNeedsAddress, $iPhraseNumber)
- {
- if (empty($this->aName)) {
- $this->bNameNeedsAddress = $bNeedsAddress;
- } elseif ($bSearchable && count($this->aName) >= 2) {
- $this->bNameNeedsAddress = false;
- } else {
- $this->bNameNeedsAddress &= $bNeedsAddress;
- }
- if ($bSearchable) {
- $this->aName[$iId] = $iId;
- } else {
- $this->aNameNonSearch[$iId] = $iId;
- }
- $this->iNamePhrase = $iPhraseNumber;
- }
-
- /**
- * Set country restriction for the search.
- *
- * @param string sCountryCode Country code of country to restrict search to.
- */
- public function setCountry($sCountryCode)
- {
- $this->sCountryCode = $sCountryCode;
- $this->iNamePhrase = -1;
- }
-
- /**
- * Set postcode search constraint.
- *
- * @param string sPostcode Postcode the result should have.
- */
- public function setPostcode($sPostcode)
- {
- $this->sPostcode = $sPostcode;
- $this->iNamePhrase = -1;
- }
-
- /**
- * Make this search a search for a postcode object.
- *
- * @param integer iId Token Id for the postcode.
- * @param string sPostcode Postcode to look for.
- */
- public function setPostcodeAsName($iId, $sPostcode)
- {
- $this->iOperator = Operator::POSTCODE;
- $this->aAddress = array_merge($this->aAddress, $this->aName);
- $this->aName = array($iId => $sPostcode);
- $this->bRareName = true;
- $this->iNamePhrase = -1;
- }
-
- /**
- * Set house number search cnstraint.
- *
- * @param string sNumber House number the result should have.
- */
- public function setHousenumber($sNumber)
- {
- $this->sHouseNumber = $sNumber;
- $this->iNamePhrase = -1;
- }
-
- /**
- * Make this search a search for a house number.
- *
- * @param integer iId Token Id for the house number.
- */
- public function setHousenumberAsName($iId)
- {
- $this->aAddress = array_merge($this->aAddress, $this->aName);
- $this->bRareName = false;
- $this->bNameNeedsAddress = true;
- $this->aName = array($iId => $iId);
- $this->iNamePhrase = -1;
- }
-
- /**
- * Make this search a POI search.
- *
- * In a POI search, objects are not (only) searched by their name
- * but also by the primary OSM key/value pair (class and type in Nominatim).
- *
- * @param integer $iOperator Type of POI search
- * @param string $sClass Class (or OSM tag key) of POI.
- * @param string $sType Type (or OSM tag value) of POI.
- *
- * @return void
- */
- public function setPoiSearch($iOperator, $sClass, $sType)
- {
- $this->iOperator = $iOperator;
- $this->sClass = $sClass;
- $this->sType = $sType;
- $this->iNamePhrase = -1;
- }
-
- public function getNamePhrase()
- {
- return $this->iNamePhrase;
- }
-
- /**
- * Get the global search context.
- *
- * @return object Objects of global search constraints.
- */
- public function getContext()
- {
- return $this->oContext;
- }
-
- /////////// Query functions
-
-
- /**
- * Query database for places that match this search.
- *
- * @param object $oDB Nominatim::DB instance to use.
- * @param integer $iMinRank Minimum address rank to restrict search to.
- * @param integer $iMaxRank Maximum address rank to restrict search to.
- * @param integer $iLimit Maximum number of results.
- *
- * @return mixed[] An array with two fields: IDs contains the list of
- * matching place IDs and houseNumber the houseNumber
- * if applicable or -1 if not.
- */
- public function query(&$oDB, $iMinRank, $iMaxRank, $iLimit)
- {
- $aResults = array();
-
- if ($this->sCountryCode
- && empty($this->aName)
- && !$this->iOperator
- && !$this->sClass
- && !$this->oContext->hasNearPoint()
- ) {
- // Just looking for a country - look it up
- if (4 >= $iMinRank && 4 <= $iMaxRank) {
- $aResults = $this->queryCountry($oDB);
- }
- } elseif (empty($this->aName) && empty($this->aAddress)) {
- // Neither name nor address? Then we must be
- // looking for a POI in a geographic area.
- if ($this->oContext->isBoundedSearch()) {
- $aResults = $this->queryNearbyPoi($oDB, $iLimit);
- }
- } elseif ($this->iOperator == Operator::POSTCODE) {
- // looking for postcode
- $aResults = $this->queryPostcode($oDB, $iLimit);
- } else {
- // Ordinary search:
- // First search for places according to name and address.
- $aResults = $this->queryNamedPlace(
- $oDB,
- $iMinRank,
- $iMaxRank,
- $iLimit
- );
-
- // finally get POIs if requested
- if ($this->sClass && !empty($aResults)) {
- $aResults = $this->queryPoiByOperator($oDB, $aResults, $iLimit);
- }
- }
-
- Debug::printDebugTable('Place IDs', $aResults);
-
- if (!empty($aResults) && $this->sPostcode) {
- $sPlaceIds = Result::joinIdsByTable($aResults, Result::TABLE_PLACEX);
- if ($sPlaceIds) {
- $sSQL = 'SELECT place_id FROM placex';
- $sSQL .= ' WHERE place_id in ('.$sPlaceIds.')';
- $sSQL .= " AND postcode != '".$this->sPostcode."'";
- Debug::printSQL($sSQL);
- $aFilteredPlaceIDs = $oDB->getCol($sSQL);
- if ($aFilteredPlaceIDs) {
- foreach ($aFilteredPlaceIDs as $iPlaceId) {
- $aResults[$iPlaceId]->iResultRank++;
- }
- }
- }
- }
-
- return $aResults;
- }
-
-
- private function queryCountry(&$oDB)
- {
- $sSQL = 'SELECT place_id FROM placex ';
- $sSQL .= "WHERE country_code='".$this->sCountryCode."'";
- $sSQL .= ' AND rank_search = 4';
- if ($this->oContext->bViewboxBounded) {
- $sSQL .= ' AND ST_Intersects('.$this->oContext->sqlViewboxSmall.', geometry)';
- }
- $sSQL .= ' ORDER BY st_area(geometry) DESC LIMIT 1';
-
- Debug::printSQL($sSQL);
-
- $iPlaceId = $oDB->getOne($sSQL);
-
- $aResults = array();
- if ($iPlaceId) {
- $aResults[$iPlaceId] = new Result($iPlaceId);
- }
-
- return $aResults;
- }
-
- private function queryNearbyPoi(&$oDB, $iLimit)
- {
- if (!$this->sClass) {
- return array();
- }
-
- $aDBResults = array();
- $sPoiTable = $this->poiTable();
-
- if ($oDB->tableExists($sPoiTable)) {
- $sSQL = 'SELECT place_id FROM '.$sPoiTable.' ct';
- if ($this->oContext->sqlCountryList) {
- $sSQL .= ' JOIN placex USING (place_id)';
- }
- if ($this->oContext->hasNearPoint()) {
- $sSQL .= ' WHERE '.$this->oContext->withinSQL('ct.centroid');
- } elseif ($this->oContext->bViewboxBounded) {
- $sSQL .= ' WHERE ST_Contains('.$this->oContext->sqlViewboxSmall.', ct.centroid)';
- }
- if ($this->oContext->sqlCountryList) {
- $sSQL .= ' AND country_code in '.$this->oContext->sqlCountryList;
- }
- $sSQL .= $this->oContext->excludeSQL(' AND place_id');
- if ($this->oContext->sqlViewboxCentre) {
- $sSQL .= ' ORDER BY ST_Distance(';
- $sSQL .= $this->oContext->sqlViewboxCentre.', ct.centroid) ASC';
- } elseif ($this->oContext->hasNearPoint()) {
- $sSQL .= ' ORDER BY '.$this->oContext->distanceSQL('ct.centroid').' ASC';
- }
- $sSQL .= " LIMIT $iLimit";
- Debug::printSQL($sSQL);
- $aDBResults = $oDB->getCol($sSQL);
- }
-
- if ($this->oContext->hasNearPoint()) {
- $sSQL = 'SELECT place_id FROM placex WHERE ';
- $sSQL .= 'class = :class and type = :type';
- $sSQL .= ' AND '.$this->oContext->withinSQL('geometry');
- $sSQL .= ' AND linked_place_id is null';
- if ($this->oContext->sqlCountryList) {
- $sSQL .= ' AND country_code in '.$this->oContext->sqlCountryList;
- }
- $sSQL .= ' ORDER BY '.$this->oContext->distanceSQL('centroid').' ASC';
- $sSQL .= " LIMIT $iLimit";
- Debug::printSQL($sSQL);
- $aDBResults = $oDB->getCol(
- $sSQL,
- array(':class' => $this->sClass, ':type' => $this->sType)
- );
- }
-
- $aResults = array();
- foreach ($aDBResults as $iPlaceId) {
- $aResults[$iPlaceId] = new Result($iPlaceId);
- }
-
- return $aResults;
- }
-
- private function queryPostcode(&$oDB, $iLimit)
- {
- $sSQL = 'SELECT p.place_id FROM location_postcode p ';
-
- if (!empty($this->aAddress)) {
- $sSQL .= ', search_name s ';
- $sSQL .= 'WHERE s.place_id = p.parent_place_id ';
- $sSQL .= 'AND array_cat(s.nameaddress_vector, s.name_vector)';
- $sSQL .= ' @> '.$oDB->getArraySQL($this->aAddress).' AND ';
- } else {
- $sSQL .= 'WHERE ';
- }
-
- $sSQL .= "p.postcode = '".reset($this->aName)."'";
- $sSQL .= $this->countryCodeSQL(' AND p.country_code');
- if ($this->oContext->bViewboxBounded) {
- $sSQL .= ' AND ST_Intersects('.$this->oContext->sqlViewboxSmall.', geometry)';
- }
- $sSQL .= $this->oContext->excludeSQL(' AND p.place_id');
- $sSQL .= " LIMIT $iLimit";
-
- Debug::printSQL($sSQL);
-
- $aResults = array();
- foreach ($oDB->getCol($sSQL) as $iPlaceId) {
- $aResults[$iPlaceId] = new Result($iPlaceId, Result::TABLE_POSTCODE);
- }
-
- return $aResults;
- }
-
- private function queryNamedPlace(&$oDB, $iMinAddressRank, $iMaxAddressRank, $iLimit)
- {
- $aTerms = array();
- $aOrder = array();
-
- if (!empty($this->aName)) {
- $aTerms[] = 'name_vector @> '.$oDB->getArraySQL($this->aName);
- }
- if (!empty($this->aAddress)) {
- // For infrequent name terms disable index usage for address
- if ($this->bRareName) {
- $aTerms[] = 'array_cat(nameaddress_vector,ARRAY[]::integer[]) @> '.$oDB->getArraySQL($this->aAddress);
- } else {
- $aTerms[] = 'nameaddress_vector @> '.$oDB->getArraySQL($this->aAddress);
- }
- }
-
- $sCountryTerm = $this->countryCodeSQL('country_code');
- if ($sCountryTerm) {
- $aTerms[] = $sCountryTerm;
- }
-
- if ($this->sHouseNumber) {
- $aTerms[] = 'address_rank between 16 and 30';
- } elseif (!$this->sClass || $this->iOperator == Operator::NAME) {
- if ($iMinAddressRank > 0) {
- $aTerms[] = "((address_rank between $iMinAddressRank and $iMaxAddressRank) or (search_rank between $iMinAddressRank and $iMaxAddressRank))";
- }
- }
-
- if ($this->oContext->hasNearPoint()) {
- $aTerms[] = $this->oContext->withinSQL('centroid');
- $aOrder[] = $this->oContext->distanceSQL('centroid');
- } elseif ($this->sPostcode) {
- if (empty($this->aAddress)) {
- $aTerms[] = "EXISTS(SELECT place_id FROM location_postcode p WHERE p.postcode = '".$this->sPostcode."' AND ST_DWithin(search_name.centroid, p.geometry, 0.12))";
- } else {
- $aOrder[] = "(SELECT min(ST_Distance(search_name.centroid, p.geometry)) FROM location_postcode p WHERE p.postcode = '".$this->sPostcode."')";
- }
- }
-
- $sExcludeSQL = $this->oContext->excludeSQL('place_id');
- if ($sExcludeSQL) {
- $aTerms[] = $sExcludeSQL;
- }
-
- if ($this->oContext->bViewboxBounded) {
- $aTerms[] = 'centroid && '.$this->oContext->sqlViewboxSmall;
- }
-
- if ($this->sHouseNumber) {
- $sImportanceSQL = '- abs(26 - address_rank) + 3';
- } else {
- $sImportanceSQL = '(CASE WHEN importance = 0 OR importance IS NULL THEN 0.75001-(search_rank::float/40) ELSE importance END)';
- }
- $sImportanceSQL .= $this->oContext->viewboxImportanceSQL('centroid');
- $aOrder[] = "$sImportanceSQL DESC";
-
- $aFullNameAddress = $this->oContext->getFullNameTerms();
- if (!empty($aFullNameAddress)) {
- $sExactMatchSQL = ' ( ';
- $sExactMatchSQL .= ' SELECT count(*) FROM ( ';
- $sExactMatchSQL .= ' SELECT unnest('.$oDB->getArraySQL($aFullNameAddress).')';
- $sExactMatchSQL .= ' INTERSECT ';
- $sExactMatchSQL .= ' SELECT unnest(nameaddress_vector)';
- $sExactMatchSQL .= ' ) s';
- $sExactMatchSQL .= ') as exactmatch';
- $aOrder[] = 'exactmatch DESC';
- } else {
- $sExactMatchSQL = '0::int as exactmatch';
- }
-
- if (empty($aTerms)) {
- return array();
- }
-
- if ($this->hasHousenumber()) {
- $sHouseNumberRegex = $oDB->getDBQuoted('\\\\m'.$this->sHouseNumber.'\\\\M');
-
- // Housenumbers on streets and places.
- $sPlacexSql = 'SELECT array_agg(place_id) FROM placex';
- $sPlacexSql .= ' WHERE parent_place_id = sin.place_id AND sin.address_rank < 30';
- $sPlacexSql .= $this->oContext->excludeSQL(' AND place_id');
- $sPlacexSql .= ' and housenumber ~* E'.$sHouseNumberRegex;
-
- // Interpolations on streets and places.
- $sInterpolSql = 'null';
- $sTigerSql = 'null';
- if (preg_match('/^[0-9]+$/', $this->sHouseNumber)) {
- $sIpolHnr = 'WHERE parent_place_id = sin.place_id ';
- $sIpolHnr .= ' AND startnumber is not NULL AND sin.address_rank < 30';
- $sIpolHnr .= ' AND '.$this->sHouseNumber.' between startnumber and endnumber';
- $sIpolHnr .= ' AND ('.$this->sHouseNumber.' - startnumber) % step = 0';
-
- $sInterpolSql = 'SELECT array_agg(place_id) FROM location_property_osmline '.$sIpolHnr;
- if (CONST_Use_US_Tiger_Data) {
- $sTigerSql = 'SELECT array_agg(place_id) FROM location_property_tiger '.$sIpolHnr;
- $sTigerSql .= " and sin.country_code = 'us'";
- }
- }
-
- if ($this->sClass) {
- $iLimit = 40;
- }
-
- $sSelfHnr = 'SELECT * FROM placex WHERE place_id = search_name.place_id';
- $sSelfHnr .= ' AND housenumber ~* E'.$sHouseNumberRegex;
-
- $aTerms[] = '(address_rank < 30 or exists('.$sSelfHnr.'))';
-
-
- $sSQL = 'SELECT sin.*, ';
- $sSQL .= '('.$sPlacexSql.') as placex_hnr, ';
- $sSQL .= '('.$sInterpolSql.') as interpol_hnr, ';
- $sSQL .= '('.$sTigerSql.') as tiger_hnr ';
- $sSQL .= ' FROM (';
- $sSQL .= ' SELECT place_id, address_rank, country_code,'.$sExactMatchSQL.',';
- $sSQL .= ' CASE WHEN importance = 0 OR importance IS NULL';
- $sSQL .= ' THEN 0.75001-(search_rank::float/40) ELSE importance END as importance';
- $sSQL .= ' FROM search_name';
- $sSQL .= ' WHERE '.join(' and ', $aTerms);
- $sSQL .= ' ORDER BY '.join(', ', $aOrder);
- $sSQL .= ' LIMIT 40000';
- $sSQL .= ') as sin';
- $sSQL .= ' ORDER BY address_rank = 30 desc, placex_hnr, interpol_hnr, tiger_hnr,';
- $sSQL .= ' importance';
- $sSQL .= ' LIMIT '.$iLimit;
- } else {
- if ($this->sClass) {
- $iLimit = 40;
- }
-
- $sSQL = 'SELECT place_id, address_rank, '.$sExactMatchSQL;
- $sSQL .= ' FROM search_name';
- $sSQL .= ' WHERE '.join(' and ', $aTerms);
- $sSQL .= ' ORDER BY '.join(', ', $aOrder);
- $sSQL .= ' LIMIT '.$iLimit;
- }
-
- Debug::printSQL($sSQL);
-
- $aDBResults = $oDB->getAll($sSQL, null, 'Could not get places for search terms.');
-
- $aResults = array();
-
- foreach ($aDBResults as $aResult) {
- $oResult = new Result($aResult['place_id']);
- $oResult->iExactMatches = $aResult['exactmatch'];
- $oResult->iAddressRank = $aResult['address_rank'];
-
- $bNeedResult = true;
- if ($this->hasHousenumber() && $aResult['address_rank'] < 30) {
- if ($aResult['placex_hnr']) {
- foreach (explode(',', substr($aResult['placex_hnr'], 1, -1)) as $sPlaceID) {
- $iPlaceID = intval($sPlaceID);
- $oHnrResult = new Result($iPlaceID);
- $oHnrResult->iExactMatches = $aResult['exactmatch'];
- $oHnrResult->iAddressRank = 30;
- $aResults[$iPlaceID] = $oHnrResult;
- $bNeedResult = false;
- }
- }
- if ($aResult['interpol_hnr']) {
- foreach (explode(',', substr($aResult['interpol_hnr'], 1, -1)) as $sPlaceID) {
- $iPlaceID = intval($sPlaceID);
- $oHnrResult = new Result($iPlaceID, Result::TABLE_OSMLINE);
- $oHnrResult->iExactMatches = $aResult['exactmatch'];
- $oHnrResult->iAddressRank = 30;
- $oHnrResult->iHouseNumber = intval($this->sHouseNumber);
- $aResults[$iPlaceID] = $oHnrResult;
- $bNeedResult = false;
- }
- }
- if ($aResult['tiger_hnr']) {
- foreach (explode(',', substr($aResult['tiger_hnr'], 1, -1)) as $sPlaceID) {
- $iPlaceID = intval($sPlaceID);
- $oHnrResult = new Result($iPlaceID, Result::TABLE_TIGER);
- $oHnrResult->iExactMatches = $aResult['exactmatch'];
- $oHnrResult->iAddressRank = 30;
- $oHnrResult->iHouseNumber = intval($this->sHouseNumber);
- $aResults[$iPlaceID] = $oHnrResult;
- $bNeedResult = false;
- }
- }
-
- if ($aResult['address_rank'] < 26) {
- $oResult->iResultRank += 2;
- } else {
- $oResult->iResultRank++;
- }
- }
-
- if ($bNeedResult) {
- $aResults[$aResult['place_id']] = $oResult;
- }
- }
-
- return $aResults;
- }
-
-
- private function queryPoiByOperator(&$oDB, $aParentIDs, $iLimit)
- {
- $aResults = array();
- $sPlaceIDs = Result::joinIdsByTable($aParentIDs, Result::TABLE_PLACEX);
-
- if (!$sPlaceIDs) {
- return $aResults;
- }
-
- if ($this->iOperator == Operator::TYPE || $this->iOperator == Operator::NAME) {
- // If they were searching for a named class (i.e. 'Kings Head pub')
- // then we might have an extra match
- $sSQL = 'SELECT place_id FROM placex ';
- $sSQL .= " WHERE place_id in ($sPlaceIDs)";
- $sSQL .= " AND class='".$this->sClass."' ";
- $sSQL .= " AND type='".$this->sType."'";
- $sSQL .= ' AND linked_place_id is null';
- $sSQL .= $this->oContext->excludeSQL(' AND place_id');
- $sSQL .= ' ORDER BY rank_search ASC ';
- $sSQL .= " LIMIT $iLimit";
-
- Debug::printSQL($sSQL);
-
- foreach ($oDB->getCol($sSQL) as $iPlaceId) {
- $aResults[$iPlaceId] = new Result($iPlaceId);
- }
- }
-
- // NEAR and IN are handled the same
- if ($this->iOperator == Operator::TYPE || $this->iOperator == Operator::NEAR) {
- $sClassTable = $this->poiTable();
- $bCacheTable = $oDB->tableExists($sClassTable);
-
- $sSQL = "SELECT min(rank_search) FROM placex WHERE place_id in ($sPlaceIDs)";
- Debug::printSQL($sSQL);
- $iMaxRank = (int) $oDB->getOne($sSQL);
-
- // For state / country level searches the normal radius search doesn't work very well
- $sPlaceGeom = false;
- if ($iMaxRank < 9 && $bCacheTable) {
- // Try and get a polygon to search in instead
- $sSQL = 'SELECT geometry FROM placex';
- $sSQL .= " WHERE place_id in ($sPlaceIDs)";
- $sSQL .= " AND rank_search < $iMaxRank + 5";
- $sSQL .= ' AND ST_Area(Box2d(geometry)) < 20';
- $sSQL .= " AND ST_GeometryType(geometry) in ('ST_Polygon','ST_MultiPolygon')";
- $sSQL .= ' ORDER BY rank_search ASC ';
- $sSQL .= ' LIMIT 1';
- Debug::printSQL($sSQL);
- $sPlaceGeom = $oDB->getOne($sSQL);
- }
-
- if ($sPlaceGeom) {
- $sPlaceIDs = false;
- } else {
- $iMaxRank += 5;
- $sSQL = 'SELECT place_id FROM placex';
- $sSQL .= " WHERE place_id in ($sPlaceIDs) and rank_search < $iMaxRank";
- Debug::printSQL($sSQL);
- $aPlaceIDs = $oDB->getCol($sSQL);
- $sPlaceIDs = join(',', $aPlaceIDs);
- }
-
- if ($sPlaceIDs || $sPlaceGeom) {
- $fRange = 0.01;
- if ($bCacheTable) {
- // More efficient - can make the range bigger
- $fRange = 0.05;
-
- $sOrderBySQL = '';
- if ($this->oContext->hasNearPoint()) {
- $sOrderBySQL = $this->oContext->distanceSQL('l.centroid');
- } elseif ($sPlaceIDs) {
- $sOrderBySQL = 'ST_Distance(l.centroid, f.geometry)';
- } elseif ($sPlaceGeom) {
- $sOrderBySQL = "ST_Distance(st_centroid('".$sPlaceGeom."'), l.centroid)";
- }
-
- $sSQL = 'SELECT distinct i.place_id';
- if ($sOrderBySQL) {
- $sSQL .= ', i.order_term';
- }
- $sSQL .= ' from (SELECT l.place_id';
- if ($sOrderBySQL) {
- $sSQL .= ','.$sOrderBySQL.' as order_term';
- }
- $sSQL .= ' from '.$sClassTable.' as l';
-
- if ($sPlaceIDs) {
- $sSQL .= ',placex as f WHERE ';
- $sSQL .= "f.place_id in ($sPlaceIDs) ";
- $sSQL .= " AND ST_DWithin(l.centroid, f.centroid, $fRange)";
- } elseif ($sPlaceGeom) {
- $sSQL .= " WHERE ST_Contains('$sPlaceGeom', l.centroid)";
- }
-
- $sSQL .= $this->oContext->excludeSQL(' AND l.place_id');
- $sSQL .= 'limit 300) i ';
- if ($sOrderBySQL) {
- $sSQL .= 'order by order_term asc';
- }
- $sSQL .= " limit $iLimit";
-
- Debug::printSQL($sSQL);
-
- foreach ($oDB->getCol($sSQL) as $iPlaceId) {
- $aResults[$iPlaceId] = new Result($iPlaceId);
- }
- } else {
- if ($this->oContext->hasNearPoint()) {
- $fRange = $this->oContext->nearRadius();
- }
-
- $sOrderBySQL = '';
- if ($this->oContext->hasNearPoint()) {
- $sOrderBySQL = $this->oContext->distanceSQL('l.geometry');
- } else {
- $sOrderBySQL = 'ST_Distance(l.geometry, f.geometry)';
- }
-
- $sSQL = 'SELECT distinct l.place_id';
- if ($sOrderBySQL) {
- $sSQL .= ','.$sOrderBySQL.' as orderterm';
- }
- $sSQL .= ' FROM placex as l, placex as f';
- $sSQL .= " WHERE f.place_id in ($sPlaceIDs)";
- $sSQL .= " AND ST_DWithin(l.geometry, f.centroid, $fRange)";
- $sSQL .= " AND l.class='".$this->sClass."'";
- $sSQL .= " AND l.type='".$this->sType."'";
- $sSQL .= $this->oContext->excludeSQL(' AND l.place_id');
- if ($sOrderBySQL) {
- $sSQL .= 'ORDER BY orderterm ASC';
- }
- $sSQL .= " limit $iLimit";
-
- Debug::printSQL($sSQL);
-
- foreach ($oDB->getCol($sSQL) as $iPlaceId) {
- $aResults[$iPlaceId] = new Result($iPlaceId);
- }
- }
- }
- }
-
- return $aResults;
- }
-
- private function poiTable()
- {
- return 'place_classtype_'.$this->sClass.'_'.$this->sType;
- }
-
- private function countryCodeSQL($sVar)
- {
- if ($this->sCountryCode) {
- return $sVar.' = \''.$this->sCountryCode."'";
- }
- if ($this->oContext->sqlCountryList) {
- return $sVar.' in '.$this->oContext->sqlCountryList;
- }
-
- return '';
- }
-
- /////////// Sort functions
-
-
- public static function bySearchRank($a, $b)
- {
- if ($a->iSearchRank == $b->iSearchRank) {
- return $a->iOperator + strlen($a->sHouseNumber)
- - $b->iOperator - strlen($b->sHouseNumber);
- }
-
- return $a->iSearchRank < $b->iSearchRank ? -1 : 1;
- }
-
- //////////// Debugging functions
-
-
- public function debugInfo()
- {
- return array(
- 'Search rank' => $this->iSearchRank,
- 'Country code' => $this->sCountryCode,
- 'Name terms' => $this->aName,
- 'Name terms (stop words)' => $this->aNameNonSearch,
- 'Address terms' => $this->aAddress,
- 'Address terms (stop words)' => $this->aAddressNonSearch,
- 'Address terms (full words)' => $this->aFullNameAddress ?? '',
- 'Special search' => $this->iOperator,
- 'Class' => $this->sClass,
- 'Type' => $this->sType,
- 'House number' => $this->sHouseNumber,
- 'Postcode' => $this->sPostcode
- );
- }
-
- public function dumpAsHtmlTableRow(&$aWordIDs)
- {
- $kf = function ($k) use (&$aWordIDs) {
- return $aWordIDs[$k] ?? '['.$k.']';
- };
-
- echo '<tr>';
- echo "<td>$this->iSearchRank</td>";
- echo '<td>'.join(', ', array_map($kf, $this->aName)).'</td>';
- echo '<td>'.join(', ', array_map($kf, $this->aNameNonSearch)).'</td>';
- echo '<td>'.join(', ', array_map($kf, $this->aAddress)).'</td>';
- echo '<td>'.join(', ', array_map($kf, $this->aAddressNonSearch)).'</td>';
- echo '<td>'.$this->sCountryCode.'</td>';
- echo '<td>'.Operator::toString($this->iOperator).'</td>';
- echo '<td>'.$this->sClass.'</td>';
- echo '<td>'.$this->sType.'</td>';
- echo '<td>'.$this->sPostcode.'</td>';
- echo '<td>'.$this->sHouseNumber.'</td>';
-
- echo '</tr>';
- }
-}
+++ /dev/null
-<?php
-/**
- * SPDX-License-Identifier: GPL-2.0-only
- *
- * This file is part of Nominatim. (https://nominatim.org)
- *
- * Copyright (C) 2022 by the Nominatim developer community.
- * For a full list of authors see the git log.
- */
-
-namespace Nominatim;
-
-/**
- * Description of the position of a token within a query.
- */
-class SearchPosition
-{
- private $sPhraseType;
-
- private $iPhrase;
- private $iNumPhrases;
-
- private $iToken;
- private $iNumTokens;
-
-
- public function __construct($sPhraseType, $iPhrase, $iNumPhrases)
- {
- $this->sPhraseType = $sPhraseType;
- $this->iPhrase = $iPhrase;
- $this->iNumPhrases = $iNumPhrases;
- }
-
- public function setTokenPosition($iToken, $iNumTokens)
- {
- $this->iToken = $iToken;
- $this->iNumTokens = $iNumTokens;
- }
-
- /**
- * Check if the phrase can be of the given type.
- *
- * @param string $sType Type of phrse requested.
- *
- * @return True if the phrase is untyped or of the given type.
- */
- public function maybePhrase($sType)
- {
- return $this->sPhraseType == '' || $this->sPhraseType == $sType;
- }
-
- /**
- * Check if the phrase is exactly of the given type.
- *
- * @param string $sType Type of phrse requested.
- *
- * @return True if the phrase of the given type.
- */
- public function isPhrase($sType)
- {
- return $this->sPhraseType == $sType;
- }
-
- /**
- * Return true if the token is the very first in the query.
- */
- public function isFirstToken()
- {
- return $this->iPhrase == 0 && $this->iToken == 0;
- }
-
- /**
- * Check if the token is the final one in the query.
- */
- public function isLastToken()
- {
- return $this->iToken + 1 == $this->iNumTokens && $this->iPhrase + 1 == $this->iNumPhrases;
- }
-
- /**
- * Check if the current token is part of the first phrase in the query.
- */
- public function isFirstPhrase()
- {
- return $this->iPhrase == 0;
- }
-
- /**
- * Get the phrase position in the query.
- */
- public function getPhrase()
- {
- return $this->iPhrase;
- }
-}
+++ /dev/null
-<?php
-/**
- * SPDX-License-Identifier: GPL-2.0-only
- *
- * This file is part of Nominatim. (https://nominatim.org)
- *
- * Copyright (C) 2022 by the Nominatim developer community.
- * For a full list of authors see the git log.
- */
-
-namespace Nominatim;
-
-class Shell
-{
- public function __construct($sBaseCmd, ...$aParams)
- {
- if (!$sBaseCmd) {
- throw new \Exception('Command missing in new() call');
- }
- $this->baseCmd = $sBaseCmd;
- $this->aParams = array();
- $this->aEnv = null; // null = use the same environment as the current PHP process
-
- $this->stdoutString = null;
-
- foreach ($aParams as $sParam) {
- $this->addParams($sParam);
- }
- }
-
- public function addParams(...$aParams)
- {
- foreach ($aParams as $sParam) {
- if (isset($sParam) && $sParam !== null && $sParam !== '') {
- array_push($this->aParams, $sParam);
- }
- }
- return $this;
- }
-
- public function addEnvPair($sKey, $sVal)
- {
- if (isset($sKey) && $sKey && isset($sVal)) {
- if (!isset($this->aEnv)) {
- $this->aEnv = $_ENV;
- }
- $this->aEnv = array_merge($this->aEnv, array($sKey => $sVal), $_ENV);
- }
- return $this;
- }
-
- public function escapedCmd()
- {
- $aEscaped = array_map(function ($sParam) {
- return $this->escapeParam($sParam);
- }, array_merge(array($this->baseCmd), $this->aParams));
-
- return join(' ', $aEscaped);
- }
-
- public function run($bExitOnFail = false)
- {
- $sCmd = $this->escapedCmd();
- // $aEnv does not need escaping, proc_open seems to handle it fine
-
- $aFDs = array(
- 0 => array('pipe', 'r'),
- 1 => STDOUT,
- 2 => STDERR
- );
- $aPipes = null;
- $hProc = @proc_open($sCmd, $aFDs, $aPipes, null, $this->aEnv);
- if (!is_resource($hProc)) {
- throw new \Exception('Unable to run command: ' . $sCmd);
- }
-
- fclose($aPipes[0]); // no stdin
-
- $iStat = proc_close($hProc);
-
- if ($iStat != 0 && $bExitOnFail) {
- exit($iStat);
- }
-
- return $iStat;
- }
-
- private function escapeParam($sParam)
- {
- return (preg_match('/^-*\w+$/', $sParam)) ? $sParam : escapeshellarg($sParam);
- }
-}
+++ /dev/null
-<?php
-/**
- * SPDX-License-Identifier: GPL-2.0-only
- *
- * This file is part of Nominatim. (https://nominatim.org)
- *
- * Copyright (C) 2022 by the Nominatim developer community.
- * For a full list of authors see the git log.
- */
-
-namespace Nominatim;
-
-/**
- * A word list creator based on simple splitting by space.
- *
- * Creates possible permutations of split phrases by finding all combination
- * of splitting the phrase on space boundaries.
- */
-class SimpleWordList
-{
- const MAX_WORDSET_LEN = 20;
- const MAX_WORDSETS = 100;
-
- // The phrase as a list of simple terms (without spaces).
- private $aWords;
-
- /**
- * Create a new word list
- *
- * @param string sPhrase Phrase to create the word list from. The phrase is
- * expected to be normalised, so that there are no
- * subsequent spaces.
- */
- public function __construct($sPhrase)
- {
- if (strlen($sPhrase) > 0) {
- $this->aWords = explode(' ', $sPhrase);
- } else {
- $this->aWords = array();
- }
- }
-
- /**
- * Get all possible tokens that are present in this word list.
- *
- * @return array The list of string tokens in the word list.
- */
- public function getTokens()
- {
- $aTokens = array();
- $iNumWords = count($this->aWords);
-
- for ($i = 0; $i < $iNumWords; $i++) {
- $sPhrase = $this->aWords[$i];
- $aTokens[$sPhrase] = $sPhrase;
-
- for ($j = $i + 1; $j < $iNumWords; $j++) {
- $sPhrase .= ' '.$this->aWords[$j];
- $aTokens[$sPhrase] = $sPhrase;
- }
- }
-
- return $aTokens;
- }
-
- /**
- * Compute all possible permutations of phrase splits that result in
- * words which are in the token list.
- */
- public function getWordSets($oTokens)
- {
- $iNumWords = count($this->aWords);
-
- if ($iNumWords == 0) {
- return null;
- }
-
- // Caches the word set for the partial phrase up to word i.
- $aSetCache = array_fill(0, $iNumWords, array());
-
- // Initialise first element of cache. There can only be the word.
- if ($oTokens->containsAny($this->aWords[0])) {
- $aSetCache[0][] = array($this->aWords[0]);
- }
-
- // Now do the next elements using what we already have.
- for ($i = 1; $i < $iNumWords; $i++) {
- for ($j = $i; $j > 0; $j--) {
- $sPartial = $j == $i ? $this->aWords[$j] : $this->aWords[$j].' '.$sPartial;
- if (!empty($aSetCache[$j - 1]) && $oTokens->containsAny($sPartial)) {
- $aPartial = array($sPartial);
- foreach ($aSetCache[$j - 1] as $aSet) {
- if (count($aSet) < SimpleWordList::MAX_WORDSET_LEN) {
- $aSetCache[$i][] = array_merge($aSet, $aPartial);
- }
- }
- if (count($aSetCache[$i]) > 2 * SimpleWordList::MAX_WORDSETS) {
- usort(
- $aSetCache[$i],
- array('\Nominatim\SimpleWordList', 'cmpByArraylen')
- );
- $aSetCache[$i] = array_slice(
- $aSetCache[$i],
- 0,
- SimpleWordList::MAX_WORDSETS
- );
- }
- }
- }
-
- // finally the current full phrase
- $sPartial = $this->aWords[0].' '.$sPartial;
- if ($oTokens->containsAny($sPartial)) {
- $aSetCache[$i][] = array($sPartial);
- }
- }
-
- $aWordSets = $aSetCache[$iNumWords - 1];
- usort($aWordSets, array('\Nominatim\SimpleWordList', 'cmpByArraylen'));
- return array_slice($aWordSets, 0, SimpleWordList::MAX_WORDSETS);
- }
-
- /**
- * Custom search routine which takes two arrays. The array with the fewest
- * items wins. If same number of items then the one with the longest first
- * element wins.
- */
- public static function cmpByArraylen($aA, $aB)
- {
- $iALen = count($aA);
- $iBLen = count($aB);
-
- if ($iALen == $iBLen) {
- return strlen($aB[0]) <=> strlen($aA[0]);
- }
-
- return ($iALen < $iBLen) ? -1 : 1;
- }
-
- public function debugInfo()
- {
- return $this->aWords;
- }
-}
+++ /dev/null
-<?php
-/**
- * SPDX-License-Identifier: GPL-2.0-only
- *
- * This file is part of Nominatim. (https://nominatim.org)
- *
- * Copyright (C) 2022 by the Nominatim developer community.
- * For a full list of authors see the git log.
- */
-
-namespace Nominatim;
-
-/**
- * Operators describing special searches.
- */
-abstract class Operator
-{
- /// No operator selected.
- const NONE = 0;
- /// Search for POI of the given type.
- const TYPE = 1;
- /// Search for POIs near the given place.
- const NEAR = 2;
- /// Search for POIS in the given place.
- const IN = 3;
- /// Search for POIS named as given.
- const NAME = 4;
- /// Search for postcodes.
- const POSTCODE = 5;
-
- private static $aConstantNames = null;
-
-
- public static function toString($iOperator)
- {
- if ($iOperator == Operator::NONE) {
- return '';
- }
-
- if (Operator::$aConstantNames === null) {
- $oReflector = new \ReflectionClass('Nominatim\Operator');
- $aConstants = $oReflector->getConstants();
-
- Operator::$aConstantNames = array();
- foreach ($aConstants as $sName => $iValue) {
- Operator::$aConstantNames[$iValue] = $sName;
- }
- }
-
- return Operator::$aConstantNames[$iOperator];
- }
-}
+++ /dev/null
-<?php
-/**
- * SPDX-License-Identifier: GPL-2.0-only
- *
- * This file is part of Nominatim. (https://nominatim.org)
- *
- * Copyright (C) 2022 by the Nominatim developer community.
- * For a full list of authors see the git log.
- */
-
-namespace Nominatim;
-
-require_once(CONST_TokenizerDir.'/tokenizer.php');
-
-use Exception;
-
-class Status
-{
- protected $oDB;
-
- public function __construct(&$oDB)
- {
- $this->oDB =& $oDB;
- }
-
- public function status()
- {
- if (!$this->oDB) {
- throw new Exception('No database', 700);
- }
-
- try {
- $this->oDB->connect();
- } catch (\Nominatim\DatabaseError $e) {
- throw new Exception('Database connection failed', 700);
- }
-
- $oTokenizer = new \Nominatim\Tokenizer($this->oDB);
- $oTokenizer->checkStatus();
- }
-
- public function dataDate()
- {
- $sSQL = 'SELECT EXTRACT(EPOCH FROM lastimportdate) FROM import_status LIMIT 1';
- $iDataDateEpoch = $this->oDB->getOne($sSQL);
-
- if ($iDataDateEpoch === false) {
- throw new Exception('Import date is not available', 705);
- }
-
- return $iDataDateEpoch;
- }
-
- public function databaseVersion()
- {
- $sSQL = 'SELECT value FROM nominatim_properties WHERE property = \'database_version\'';
- return $this->oDB->getOne($sSQL);
- }
-}
+++ /dev/null
-<?php
-/**
- * SPDX-License-Identifier: GPL-2.0-only
- *
- * This file is part of Nominatim. (https://nominatim.org)
- *
- * Copyright (C) 2022 by the Nominatim developer community.
- * For a full list of authors see the git log.
- */
-
-namespace Nominatim\Token;
-
-/**
- * A country token.
- */
-class Country
-{
- /// Database word id, if available.
- private $iId;
- /// Two-letter country code (lower-cased).
- private $sCountryCode;
-
- public function __construct($iId, $sCountryCode)
- {
- $this->iId = $iId;
- $this->sCountryCode = $sCountryCode;
- }
-
- public function getId()
- {
- return $this->iId;
- }
-
- /**
- * Check if the token can be added to the given search.
- * Derive new searches by adding this token to an existing search.
- *
- * @param object $oSearch Partial search description derived so far.
- * @param object $oPosition Description of the token position within
- the query.
- *
- * @return True if the token is compatible with the search configuration
- * given the position.
- */
- public function isExtendable($oSearch, $oPosition)
- {
- return !$oSearch->hasCountry()
- && $oPosition->maybePhrase('country')
- && $oSearch->getContext()->isCountryApplicable($this->sCountryCode);
- }
-
- /**
- * Derive new searches by adding this token to an existing search.
- *
- * @param object $oSearch Partial search description derived so far.
- * @param object $oPosition Description of the token position within
- the query.
- *
- * @return SearchDescription[] List of derived search descriptions.
- */
- public function extendSearch($oSearch, $oPosition)
- {
- $oNewSearch = $oSearch->clone($oPosition->isLastToken() ? 1 : 6);
- $oNewSearch->setCountry($this->sCountryCode);
-
- return array($oNewSearch);
- }
-
- public function debugInfo()
- {
- return array(
- 'ID' => $this->iId,
- 'Type' => 'country',
- 'Info' => $this->sCountryCode
- );
- }
-
- public function debugCode()
- {
- return 'C';
- }
-}
+++ /dev/null
-<?php
-/**
- * SPDX-License-Identifier: GPL-2.0-only
- *
- * This file is part of Nominatim. (https://nominatim.org)
- *
- * Copyright (C) 2022 by the Nominatim developer community.
- * For a full list of authors see the git log.
- */
-
-namespace Nominatim\Token;
-
-/**
- * A house number token.
- */
-class HouseNumber
-{
- /// Database word id, if available.
- private $iId;
- /// Normalized house number.
- private $sToken;
-
- public function __construct($iId, $sToken)
- {
- $this->iId = $iId;
- $this->sToken = $sToken;
- }
-
- public function getId()
- {
- return $this->iId;
- }
-
- /**
- * Check if the token can be added to the given search.
- * Derive new searches by adding this token to an existing search.
- *
- * @param object $oSearch Partial search description derived so far.
- * @param object $oPosition Description of the token position within
- the query.
- *
- * @return True if the token is compatible with the search configuration
- * given the position.
- */
- public function isExtendable($oSearch, $oPosition)
- {
- return !$oSearch->hasHousenumber()
- && !$oSearch->hasOperator(\Nominatim\Operator::POSTCODE)
- && $oPosition->maybePhrase('street');
- }
-
- /**
- * Derive new searches by adding this token to an existing search.
- *
- * @param object $oSearch Partial search description derived so far.
- * @param object $oPosition Description of the token position within
- the query.
- *
- * @return SearchDescription[] List of derived search descriptions.
- */
- public function extendSearch($oSearch, $oPosition)
- {
- $aNewSearches = array();
-
- // sanity check: if the housenumber is not mainly made
- // up of numbers, add a penalty
- $iSearchCost = 1;
- if (preg_match('/\\d/', $this->sToken) === 0
- || preg_match_all('/[^0-9 ]/', $this->sToken, $aMatches) > 3) {
- $iSearchCost += strlen($this->sToken) - 1;
- }
- if (!$oSearch->hasOperator(\Nominatim\Operator::NONE)) {
- $iSearchCost++;
- }
- if (empty($this->iId)) {
- $iSearchCost++;
- }
- // also must not appear in the middle of the address
- if ($oSearch->hasAddress() || $oSearch->hasPostcode()) {
- $iSearchCost++;
- }
-
- $oNewSearch = $oSearch->clone($iSearchCost);
- $oNewSearch->setHousenumber($this->sToken);
- $aNewSearches[] = $oNewSearch;
-
- // Housenumbers may appear in the name when the place has its own
- // address terms.
- if ($this->iId !== null
- && ($oSearch->getNamePhrase() >= 0 || !$oSearch->hasName())
- && !$oSearch->hasAddress()
- ) {
- $oNewSearch = $oSearch->clone($iSearchCost);
- $oNewSearch->setHousenumberAsName($this->iId);
-
- $aNewSearches[] = $oNewSearch;
- }
-
- return $aNewSearches;
- }
-
-
- public function debugInfo()
- {
- return array(
- 'ID' => $this->iId,
- 'Type' => 'house number',
- 'Info' => array('nr' => $this->sToken)
- );
- }
-
- public function debugCode()
- {
- return 'H';
- }
-}
+++ /dev/null
-<?php
-/**
- * SPDX-License-Identifier: GPL-2.0-only
- *
- * This file is part of Nominatim. (https://nominatim.org)
- *
- * Copyright (C) 2022 by the Nominatim developer community.
- * For a full list of authors see the git log.
- */
-
-namespace Nominatim;
-
-require_once(CONST_LibDir.'/TokenCountry.php');
-require_once(CONST_LibDir.'/TokenHousenumber.php');
-require_once(CONST_LibDir.'/TokenPostcode.php');
-require_once(CONST_LibDir.'/TokenSpecialTerm.php');
-require_once(CONST_LibDir.'/TokenWord.php');
-require_once(CONST_LibDir.'/TokenPartial.php');
-require_once(CONST_LibDir.'/SpecialSearchOperator.php');
-
-/**
- * Saves information about the tokens that appear in a search query.
- *
- * Tokens are sorted by their normalized form, the token word. There are different
- * kinds of tokens, represented by different Token* classes. Note that
- * tokens do not have a common base class. All tokens need to have a field
- * with the word id that points to an entry in the `word` database table
- * but otherwise the information saved about a token can be very different.
- */
-class TokenList
-{
- // List of list of tokens indexed by their word_token.
- private $aTokens = array();
-
-
- /**
- * Return total number of tokens.
- *
- * @return Integer
- */
- public function count()
- {
- return count($this->aTokens);
- }
-
- /**
- * Check if there are tokens for the given token word.
- *
- * @param string $sWord Token word to look for.
- *
- * @return bool True if there is one or more token for the token word.
- */
- public function contains($sWord)
- {
- return isset($this->aTokens[$sWord]);
- }
-
- /**
- * Check if there are partial or full tokens for the given word.
- *
- * @param string $sWord Token word to look for.
- *
- * @return bool True if there is one or more token for the token word.
- */
- public function containsAny($sWord)
- {
- return isset($this->aTokens[$sWord]);
- }
-
- /**
- * Get the list of tokens for the given token word.
- *
- * @param string $sWord Token word to look for.
- *
- * @return object[] Array of tokens for the given token word or an
- * empty array if no tokens could be found.
- */
- public function get($sWord)
- {
- return isset($this->aTokens[$sWord]) ? $this->aTokens[$sWord] : array();
- }
-
- public function getFullWordIDs()
- {
- $ids = array();
-
- foreach ($this->aTokens as $aTokenList) {
- foreach ($aTokenList as $oToken) {
- if (is_a($oToken, '\Nominatim\Token\Word')) {
- $ids[$oToken->getId()] = $oToken->getId();
- }
- }
- }
-
- return $ids;
- }
-
- /**
- * Add a new token for the given word.
- *
- * @param string $sWord Word the token describes.
- * @param object $oToken Token object to add.
- *
- * @return void
- */
- public function addToken($sWord, $oToken)
- {
- if (isset($this->aTokens[$sWord])) {
- $this->aTokens[$sWord][] = $oToken;
- } else {
- $this->aTokens[$sWord] = array($oToken);
- }
- }
-
- public function debugTokenByWordIdList()
- {
- $aWordsIDs = array();
- foreach ($this->aTokens as $sToken => $aWords) {
- foreach ($aWords as $aToken) {
- $iId = $aToken->getId();
- if ($iId !== null) {
- $aWordsIDs[$iId] = '#'.$sToken.'('.$aToken->debugCode().' '.$iId.')#';
- }
- }
- }
-
- return $aWordsIDs;
- }
-
- public function debugInfo()
- {
- return $this->aTokens;
- }
-}
+++ /dev/null
-<?php
-/**
- * SPDX-License-Identifier: GPL-2.0-only
- *
- * This file is part of Nominatim. (https://nominatim.org)
- *
- * Copyright (C) 2022 by the Nominatim developer community.
- * For a full list of authors see the git log.
- */
-
-namespace Nominatim\Token;
-
-/**
- * A standard word token.
- */
-class Partial
-{
- /// Database word id, if applicable.
- private $iId;
- /// Number of appearances in the database.
- private $iSearchNameCount;
- /// True, if the token consists exclusively of digits and spaces.
- private $bNumberToken;
-
- public function __construct($iId, $sToken, $iSearchNameCount)
- {
- $this->iId = $iId;
- $this->bNumberToken = (bool) preg_match('#^[0-9 ]+$#', $sToken);
- $this->iSearchNameCount = $iSearchNameCount;
- }
-
- public function getId()
- {
- return $this->iId;
- }
-
- /**
- * Check if the token can be added to the given search.
- * Derive new searches by adding this token to an existing search.
- *
- * @param object $oSearch Partial search description derived so far.
- * @param object $oPosition Description of the token position within
- the query.
- *
- * @return True if the token is compatible with the search configuration
- * given the position.
- */
- public function isExtendable($oSearch, $oPosition)
- {
- return !$oPosition->isPhrase('country');
- }
-
- /**
- * Derive new searches by adding this token to an existing search.
- *
- * @param object $oSearch Partial search description derived so far.
- * @param object $oPosition Description of the token position within
- the query.
- *
- * @return SearchDescription[] List of derived search descriptions.
- */
- public function extendSearch($oSearch, $oPosition)
- {
- $aNewSearches = array();
-
- // Partial token in Address.
- if (($oPosition->isPhrase('') || !$oPosition->isFirstPhrase())
- && $oSearch->hasName()
- ) {
- $iSearchCost = $this->bNumberToken ? 2 : 1;
- if ($this->iSearchNameCount >= CONST_Max_Word_Frequency) {
- $iSearchCost += 1;
- }
-
- $oNewSearch = $oSearch->clone($iSearchCost);
- $oNewSearch->addAddressToken(
- $this->iId,
- $this->iSearchNameCount < CONST_Max_Word_Frequency
- );
-
- $aNewSearches[] = $oNewSearch;
- }
-
- // Partial token in Name.
- if ((!$oSearch->hasPostcode() && !$oSearch->hasAddress())
- && (!$oSearch->hasName(true)
- || $oSearch->getNamePhrase() == $oPosition->getPhrase())
- ) {
- $iSearchCost = 1;
- if (!$oSearch->hasName(true)) {
- $iSearchCost += 1;
- }
- if ($this->bNumberToken) {
- $iSearchCost += 1;
- }
-
- $oNewSearch = $oSearch->clone($iSearchCost);
- $oNewSearch->addPartialNameToken(
- $this->iId,
- $this->iSearchNameCount < CONST_Max_Word_Frequency,
- $this->iSearchNameCount > CONST_Search_NameOnlySearchFrequencyThreshold,
- $oPosition->getPhrase()
- );
-
- $aNewSearches[] = $oNewSearch;
- }
-
- return $aNewSearches;
- }
-
-
- public function debugInfo()
- {
- return array(
- 'ID' => $this->iId,
- 'Type' => 'partial',
- 'Info' => array(
- 'count' => $this->iSearchNameCount
- )
- );
- }
-
- public function debugCode()
- {
- return 'w';
- }
-}
+++ /dev/null
-<?php
-/**
- * SPDX-License-Identifier: GPL-2.0-only
- *
- * This file is part of Nominatim. (https://nominatim.org)
- *
- * Copyright (C) 2022 by the Nominatim developer community.
- * For a full list of authors see the git log.
- */
-
-namespace Nominatim\Token;
-
-/**
- * A postcode token.
- */
-class Postcode
-{
- /// Database word id, if available.
- private $iId;
- /// Full normalized postcode (upper cased).
- private $sPostcode;
- // Optional country code the postcode belongs to (currently unused).
- private $sCountryCode;
-
- public function __construct($iId, $sPostcode, $sCountryCode = '')
- {
- $this->iId = $iId;
- $iSplitPos = strpos($sPostcode, '@');
- if ($iSplitPos === false) {
- $this->sPostcode = $sPostcode;
- } else {
- $this->sPostcode = substr($sPostcode, 0, $iSplitPos);
- }
- $this->sCountryCode = empty($sCountryCode) ? '' : $sCountryCode;
- }
-
- public function getId()
- {
- return $this->iId;
- }
-
- /**
- * Check if the token can be added to the given search.
- * Derive new searches by adding this token to an existing search.
- *
- * @param object $oSearch Partial search description derived so far.
- * @param object $oPosition Description of the token position within
- the query.
- *
- * @return True if the token is compatible with the search configuration
- * given the position.
- */
- public function isExtendable($oSearch, $oPosition)
- {
- return !$oSearch->hasPostcode() && $oPosition->maybePhrase('postalcode');
- }
-
- /**
- * Derive new searches by adding this token to an existing search.
- *
- * @param object $oSearch Partial search description derived so far.
- * @param object $oPosition Description of the token position within
- the query.
- *
- * @return SearchDescription[] List of derived search descriptions.
- */
- public function extendSearch($oSearch, $oPosition)
- {
- $aNewSearches = array();
-
- // If we have structured search or this is the first term,
- // make the postcode the primary search element.
- if ($oSearch->hasOperator(\Nominatim\Operator::NONE) && $oPosition->isFirstToken()) {
- $oNewSearch = $oSearch->clone(1);
- $oNewSearch->setPostcodeAsName($this->iId, $this->sPostcode);
-
- $aNewSearches[] = $oNewSearch;
- }
-
- // If we have a structured search or this is not the first term,
- // add the postcode as an addendum.
- if (!$oSearch->hasOperator(\Nominatim\Operator::POSTCODE)
- && ($oPosition->isPhrase('postalcode') || $oSearch->hasName())
- ) {
- $iPenalty = 1;
- if (strlen($this->sPostcode) < 4) {
- $iPenalty += 4 - strlen($this->sPostcode);
- }
- $oNewSearch = $oSearch->clone($iPenalty);
- $oNewSearch->setPostcode($this->sPostcode);
-
- $aNewSearches[] = $oNewSearch;
- }
-
- return $aNewSearches;
- }
-
- public function debugInfo()
- {
- return array(
- 'ID' => $this->iId,
- 'Type' => 'postcode',
- 'Info' => $this->sPostcode.'('.$this->sCountryCode.')'
- );
- }
-
- public function debugCode()
- {
- return 'P';
- }
-}
+++ /dev/null
-<?php
-/**
- * SPDX-License-Identifier: GPL-2.0-only
- *
- * This file is part of Nominatim. (https://nominatim.org)
- *
- * Copyright (C) 2022 by the Nominatim developer community.
- * For a full list of authors see the git log.
- */
-
-namespace Nominatim\Token;
-
-require_once(CONST_LibDir.'/SpecialSearchOperator.php');
-
-/**
- * A word token describing a place type.
- */
-class SpecialTerm
-{
- /// Database word id, if applicable.
- private $iId;
- /// Class (or OSM tag key) of the place to look for.
- private $sClass;
- /// Type (or OSM tag value) of the place to look for.
- private $sType;
- /// Relationship of the operator to the object (see Operator class).
- private $iOperator;
-
- public function __construct($iID, $sClass, $sType, $iOperator)
- {
- $this->iId = $iID;
- $this->sClass = $sClass;
- $this->sType = $sType;
- $this->iOperator = $iOperator;
- }
-
- public function getId()
- {
- return $this->iId;
- }
-
- /**
- * Check if the token can be added to the given search.
- * Derive new searches by adding this token to an existing search.
- *
- * @param object $oSearch Partial search description derived so far.
- * @param object $oPosition Description of the token position within
- the query.
- *
- * @return True if the token is compatible with the search configuration
- * given the position.
- */
- public function isExtendable($oSearch, $oPosition)
- {
- return !$oSearch->hasOperator()
- && $oPosition->isPhrase('')
- && ($this->iOperator != \Nominatim\Operator::NONE
- || (!$oSearch->hasAddress() && !$oSearch->hasHousenumber() && !$oSearch->hasCountry()));
- }
-
- /**
- * Derive new searches by adding this token to an existing search.
- *
- * @param object $oSearch Partial search description derived so far.
- * @param object $oPosition Description of the token position within
- the query.
- *
- * @return SearchDescription[] List of derived search descriptions.
- */
- public function extendSearch($oSearch, $oPosition)
- {
- $iSearchCost = 2;
-
- $iOp = $this->iOperator;
- if ($iOp == \Nominatim\Operator::NONE) {
- if ($oSearch->hasName() || $oSearch->getContext()->isBoundedSearch()) {
- $iOp = \Nominatim\Operator::NAME;
- } else {
- $iOp = \Nominatim\Operator::NEAR;
- $iSearchCost += 2;
- }
- } elseif (!$oPosition->isFirstToken() && !$oPosition->isLastToken()) {
- $iSearchCost += 2;
- }
- if ($oSearch->hasHousenumber()) {
- $iSearchCost ++;
- }
-
- $oNewSearch = $oSearch->clone($iSearchCost);
- $oNewSearch->setPoiSearch($iOp, $this->sClass, $this->sType);
-
- return array($oNewSearch);
- }
-
-
- public function debugInfo()
- {
- return array(
- 'ID' => $this->iId,
- 'Type' => 'special term',
- 'Info' => array(
- 'class' => $this->sClass,
- 'type' => $this->sType,
- 'operator' => \Nominatim\Operator::toString($this->iOperator)
- )
- );
- }
-
- public function debugCode()
- {
- return 'S';
- }
-}
+++ /dev/null
-<?php
-/**
- * SPDX-License-Identifier: GPL-2.0-only
- *
- * This file is part of Nominatim. (https://nominatim.org)
- *
- * Copyright (C) 2022 by the Nominatim developer community.
- * For a full list of authors see the git log.
- */
-
-namespace Nominatim\Token;
-
-/**
- * A standard word token.
- */
-class Word
-{
- /// Database word id, if applicable.
- private $iId;
- /// Number of appearances in the database.
- private $iSearchNameCount;
- /// Number of terms in the word.
- private $iTermCount;
-
- public function __construct($iId, $iSearchNameCount, $iTermCount)
- {
- $this->iId = $iId;
- $this->iSearchNameCount = $iSearchNameCount;
- $this->iTermCount = $iTermCount;
- }
-
- public function getId()
- {
- return $this->iId;
- }
-
- /**
- * Check if the token can be added to the given search.
- * Derive new searches by adding this token to an existing search.
- *
- * @param object $oSearch Partial search description derived so far.
- * @param object $oPosition Description of the token position within
- the query.
- *
- * @return True if the token is compatible with the search configuration
- * given the position.
- */
- public function isExtendable($oSearch, $oPosition)
- {
- return !$oPosition->isPhrase('country');
- }
-
- /**
- * Derive new searches by adding this token to an existing search.
- *
- * @param object $oSearch Partial search description derived so far.
- * @param object $oPosition Description of the token position within
- the query.
- *
- * @return SearchDescription[] List of derived search descriptions.
- */
- public function extendSearch($oSearch, $oPosition)
- {
- // Full words can only be a name if they appear at the beginning
- // of the phrase. In structured search the name must forcibly in
- // the first phrase. In unstructured search it may be in a later
- // phrase when the first phrase is a house number.
- if ($oSearch->hasName()
- || !($oPosition->isFirstPhrase() || $oPosition->isPhrase(''))
- ) {
- if ($this->iTermCount > 1
- && ($oPosition->isPhrase('') || !$oPosition->isFirstPhrase())
- ) {
- $oNewSearch = $oSearch->clone(1);
- $oNewSearch->addAddressToken($this->iId);
-
- return array($oNewSearch);
- }
- } elseif (!$oSearch->hasName(true)) {
- $oNewSearch = $oSearch->clone(1);
- $oNewSearch->addNameToken(
- $this->iId,
- CONST_Search_NameOnlySearchFrequencyThreshold
- && $this->iSearchNameCount
- < CONST_Search_NameOnlySearchFrequencyThreshold
- );
-
- return array($oNewSearch);
- }
-
- return array();
- }
-
- public function debugInfo()
- {
- return array(
- 'ID' => $this->iId,
- 'Type' => 'word',
- 'Info' => array(
- 'count' => $this->iSearchNameCount,
- 'terms' => $this->iTermCount
- )
- );
- }
-
- public function debugCode()
- {
- return 'W';
- }
-}
+++ /dev/null
-<?php
-/**
- * SPDX-License-Identifier: GPL-2.0-only
- *
- * This file is part of Nominatim. (https://nominatim.org)
- *
- * Copyright (C) 2022 by the Nominatim developer community.
- * For a full list of authors see the git log.
- */
- @define('CONST_LibDir', dirname(dirname(__FILE__)));
- // Script to extract structured city and street data
- // from a running nominatim instance as CSV data
-
-
- require_once(CONST_LibDir.'/init-cmd.php');
- require_once(CONST_LibDir.'/ParameterParser.php');
- ini_set('memory_limit', '800M');
-
- $aCMDOptions = array(
- 'Export addresses as CSV file from a Nominatim database',
- array('help', 'h', 0, 1, 0, 0, false, 'Show Help'),
- array('quiet', 'q', 0, 1, 0, 0, 'bool', 'Quiet output'),
- array('verbose', 'v', 0, 1, 0, 0, 'bool', 'Verbose output'),
-
- array('output-type', '', 0, 1, 1, 1, 'str', 'Type of places to output (see below)'),
- array('output-format', '', 0, 1, 1, 1, 'str', 'Column mapping (see below)'),
- array('output-all-postcodes', '', 0, 1, 0, 0, 'bool', 'List all postcodes for address instead of just the most likely one'),
- array('language', '', 0, 1, 1, 1, 'str', 'Preferred language for output (local name, if omitted)'),
- array('restrict-to-country', '', 0, 1, 1, 1, 'str', 'Export only objects within country (country code)'),
- array('restrict-to-osm-node', '', 0, 1, 1, 1, 'int', 'Export only objects that are children of this OSM node'),
- array('restrict-to-osm-way', '', 0, 1, 1, 1, 'int', 'Export only objects that are children of this OSM way'),
- array('restrict-to-osm-relation', '', 0, 1, 1, 1, 'int', 'Export only objects that are children of this OSM relation'),
- array('project-dir', '', 0, 1, 1, 1, 'realpath', 'Base directory of the Nominatim installation (default: .)'),
- "\nAddress ranks: continent, country, state, county, city, suburb, street, path",
- 'Additional output types: postcode, placeid (placeid for each object)',
- "\noutput-format must be a semicolon-separated list of address ranks. Multiple ranks",
- 'can be merged into one column by simply using a comma-separated list.',
- "\nDefault output-type: street",
- 'Default output format: street;suburb;city;county;state;country'
- );
- getCmdOpt($_SERVER['argv'], $aCMDOptions, $aCMDResult, true, true);
-
- loadSettings($aCMDResult['project-dir'] ?? getcwd());
-
- $aRankmap = array(
- 'continent' => 1,
- 'country' => 4,
- 'state' => 8,
- 'county' => 12,
- 'city' => 16,
- 'suburb' => 20,
- 'street' => 26,
- 'path' => 27
- );
-
- $oDB = new Nominatim\DB();
- $oDB->connect();
-
- if (isset($aCMDResult['output-type'])) {
- if (!isset($aRankmap[$aCMDResult['output-type']])) {
- fail('unknown output-type: '.$aCMDResult['output-type']);
- }
- $iOutputRank = $aRankmap[$aCMDResult['output-type']];
- } else {
- $iOutputRank = $aRankmap['street'];
- }
-
-
- // Preferred language
- $oParams = new Nominatim\ParameterParser();
- if (!isset($aCMDResult['language'])) {
- $aCMDResult['language'] = 'xx';
- }
- $aLangPrefOrder = $oParams->getPreferredLanguages($aCMDResult['language']);
- $sLanguagePrefArraySQL = $oDB->getArraySQL($oDB->getDBQuotedList($aLangPrefOrder));
-
- // output formatting: build up a lookup table that maps address ranks to columns
- $aColumnMapping = array();
- $iNumCol = 0;
- if (!isset($aCMDResult['output-format'])) {
- $aCMDResult['output-format'] = 'street;suburb;city;county;state;country';
- }
- foreach (preg_split('/\s*;\s*/', $aCMDResult['output-format']) as $sColumn) {
- $bHasData = false;
- foreach (preg_split('/\s*,\s*/', $sColumn) as $sRank) {
- if ($sRank == 'postcode' || $sRank == 'placeid') {
- $aColumnMapping[$sRank] = $iNumCol;
- $bHasData = true;
- } elseif (isset($aRankmap[$sRank])) {
- $iRank = $aRankmap[$sRank];
- if ($iRank <= $iOutputRank) {
- $aColumnMapping[(string)$iRank] = $iNumCol;
- $bHasData = true;
- }
- }
- }
- if ($bHasData) {
- $iNumCol++;
- }
- }
-
- // build the query for objects
- $sPlacexSQL = 'select min(place_id) as place_id, ';
- $sPlacexSQL .= 'array_agg(place_id) as place_ids, ';
- $sPlacexSQL .= 'country_code as cc, ';
- $sPlacexSQL .= 'postcode, ';
- // get the address places excluding postcodes
- $sPlacexSQL .= 'array(select address_place_id from place_addressline a';
- $sPlacexSQL .= ' where a.place_id = placex.place_id and isaddress';
- $sPlacexSQL .= ' and address_place_id != placex.place_id';
- $sPlacexSQL .= ' and not cached_rank_address in (5,11)';
- $sPlacexSQL .= ' and cached_rank_address > 2 order by cached_rank_address)';
- $sPlacexSQL .= ' as address';
- $sPlacexSQL .= ' from placex where name is not null and linked_place_id is null';
-
- $sPlacexSQL .= ' and rank_address = '.$iOutputRank;
-
- if (isset($aCMDResult['restrict-to-country'])) {
- $sPlacexSQL .= ' and country_code = '.$oDB->getDBQuoted($aCMDResult['restrict-to-country']);
- }
-
- // restriction to parent place id
- $sParentId = false;
- $sOsmType = false;
-
- if (isset($aCMDResult['restrict-to-osm-node'])) {
- $sOsmType = 'N';
- $sOsmId = $aCMDResult['restrict-to-osm-node'];
- }
- if (isset($aCMDResult['restrict-to-osm-way'])) {
- $sOsmType = 'W';
- $sOsmId = $aCMDResult['restrict-to-osm-way'];
- }
- if (isset($aCMDResult['restrict-to-osm-relation'])) {
- $sOsmType = 'R';
- $sOsmId = $aCMDResult['restrict-to-osm-relation'];
- }
- if ($sOsmType) {
- $sSQL = 'select place_id from placex where osm_type = :osm_type and osm_id = :osm_id';
- $sParentId = $oDB->getOne($sSQL, array('osm_type' => $sOsmType, 'osm_id' => $sOsmId));
- if (!$sParentId) {
- fail('Could not find place '.$sOsmType.' '.$sOsmId);
- }
- }
- if ($sParentId) {
- $sPlacexSQL .= ' and place_id in (select place_id from place_addressline where address_place_id = '.$sParentId.' and isaddress)';
- }
-
- $sPlacexSQL .= " group by name->'name', address, postcode, country_code, placex.place_id";
-
- // Iterate over placeids
- // to get further hierarchical information
- //var_dump($sPlacexSQL);
- $oResults = $oDB->getQueryStatement($sPlacexSQL);
- $fOutstream = fopen('php://output', 'w');
- while ($aRow = $oResults->fetch()) {
- $iPlaceID = $aRow['place_id'];
- $sSQL = "select rank_address,get_name_by_language(name,$sLanguagePrefArraySQL) as localname from get_addressdata(:place_id, -1)";
- $sSQL .= ' WHERE isaddress';
- $sSQL .= ' order by rank_address desc,isaddress desc';
- $aAddressLines = $oDB->getAll($sSQL, array('place_id' => $iPlaceID));
-
- $aOutput = array_fill(0, $iNumCol, '');
- // output address parts
- foreach ($aAddressLines as $aAddress) {
- if (isset($aColumnMapping[$aAddress['rank_address']])) {
- $aOutput[$aColumnMapping[$aAddress['rank_address']]] = $aAddress['localname'];
- }
- }
- // output postcode
- if (isset($aColumnMapping['postcode'])) {
- if ($aCMDResult['output-all-postcodes']) {
- $sSQL = 'select array_agg(px.postcode) from placex px join place_addressline pa ';
- $sSQL .= 'on px.place_id = pa.address_place_id ';
- $sSQL .= 'where pa.cached_rank_address in (5,11) ';
- $sSQL .= 'and pa.place_id in (select place_id from place_addressline where address_place_id in (:first_place_id)) ';
- $sSQL .= 'group by postcode order by count(*) desc limit 1';
- $sRes = $oDB->getOne($sSQL, array('first_place_id' => substr($aRow['place_ids'], 1, -1)));
-
- $aOutput[$aColumnMapping['postcode']] = substr($sRes, 1, -1);
- } else {
- $aOutput[$aColumnMapping['postcode']] = $aRow['postcode'];
- }
- }
- if (isset($aColumnMapping['placeid'])) {
- $aOutput[$aColumnMapping['placeid']] = substr($aRow['place_ids'], 1, -1);
- }
- fputcsv($fOutstream, $aOutput);
- }
- fclose($fOutstream);
+++ /dev/null
-<?php
-/**
- * SPDX-License-Identifier: GPL-2.0-only
- *
- * This file is part of Nominatim. (https://nominatim.org)
- *
- * Copyright (C) 2022 by the Nominatim developer community.
- * For a full list of authors see the git log.
- */
-@define('CONST_LibDir', dirname(dirname(__FILE__)));
-
-require_once(CONST_LibDir.'/init-cmd.php');
-require_once(CONST_LibDir.'/log.php');
-require_once(CONST_LibDir.'/PlaceLookup.php');
-require_once(CONST_LibDir.'/ReverseGeocode.php');
-
-ini_set('memory_limit', '800M');
-
-$aCMDOptions = array(
- 'Tools to warm nominatim db',
- array('help', 'h', 0, 1, 0, 0, false, 'Show Help'),
- array('quiet', 'q', 0, 1, 0, 0, 'bool', 'Quiet output'),
- array('verbose', 'v', 0, 1, 0, 0, 'bool', 'Verbose output'),
- array('reverse-only', '', 0, 1, 0, 0, 'bool', 'Warm reverse only'),
- array('search-only', '', 0, 1, 0, 0, 'bool', 'Warm search only'),
- array('project-dir', '', 0, 1, 1, 1, 'realpath', 'Base directory of the Nominatim installation (default: .)'),
- );
-getCmdOpt($_SERVER['argv'], $aCMDOptions, $aResult, true, true);
-
-loadSettings($aCMDResult['project-dir'] ?? getcwd());
-
-@define('CONST_Database_DSN', getSetting('DATABASE_DSN'));
-@define('CONST_Default_Language', getSetting('DEFAULT_LANGUAGE', false));
-@define('CONST_Log_DB', getSettingBool('LOG_DB'));
-@define('CONST_Log_File', getSetting('LOG_FILE', false));
-@define('CONST_NoAccessControl', getSettingBool('CORS_NOACCESSCONTROL'));
-@define('CONST_Places_Max_ID_count', getSetting('LOOKUP_MAX_COUNT'));
-@define('CONST_PolygonOutput_MaximumTypes', getSetting('POLYGON_OUTPUT_MAX_TYPES'));
-@define('CONST_Search_BatchMode', getSettingBool('SEARCH_BATCH_MODE'));
-@define('CONST_Search_NameOnlySearchFrequencyThreshold', getSetting('SEARCH_NAME_ONLY_THRESHOLD'));
-@define('CONST_Use_US_Tiger_Data', getSettingBool('USE_US_TIGER_DATA'));
-@define('CONST_MapIcon_URL', getSetting('MAPICON_URL', false));
-@define('CONST_TokenizerDir', CONST_InstallDir.'/tokenizer');
-
-require_once(CONST_LibDir.'/Geocode.php');
-
-$oDB = new Nominatim\DB();
-$oDB->connect();
-
-$bVerbose = $aResult['verbose'];
-
-function print_results($aResults, $bVerbose)
-{
- if ($bVerbose) {
- if ($aResults && count($aResults)) {
- echo $aResults[0]['langaddress']."\n";
- } else {
- echo "<not found>\n";
- }
- } else {
- echo '.';
- }
-}
-
-if (!$aResult['search-only']) {
- $oReverseGeocode = new Nominatim\ReverseGeocode($oDB);
- $oReverseGeocode->setZoom(20);
- $oPlaceLookup = new Nominatim\PlaceLookup($oDB);
- $oPlaceLookup->setIncludeAddressDetails(true);
- $oPlaceLookup->setLanguagePreference(array('en'));
-
- echo 'Warm reverse: ';
- if ($bVerbose) {
- echo "\n";
- }
- for ($i = 0; $i < 1000; $i++) {
- $fLat = rand(-9000, 9000) / 100;
- $fLon = rand(-18000, 18000) / 100;
- if ($bVerbose) {
- echo "$fLat, $fLon = ";
- }
-
- $oLookup = $oReverseGeocode->lookup($fLat, $fLon);
- $aSearchResults = $oLookup ? $oPlaceLookup->lookup(array($oLookup->iId => $oLookup)) : null;
- print_results($aSearchResults, $bVerbose);
- }
- echo "\n";
-}
-
-if (!$aResult['reverse-only']) {
- $oGeocode = new Nominatim\Geocode($oDB);
-
- echo 'Warm search: ';
- if ($bVerbose) {
- echo "\n";
- }
-
- $oTokenizer = new \Nominatim\Tokenizer($oDB);
-
- $aWords = $oTokenizer->mostFrequentWords(1000);
-
- $sSQL = 'SELECT word FROM word WHERE word is not null ORDER BY search_name_count DESC LIMIT 1000';
- foreach ($aWords as $sWord) {
- if ($bVerbose) {
- echo "$sWord = ";
- }
-
- $oGeocode->setLanguagePreference(array('en'));
- $oGeocode->setQuery($sWord);
- $aSearchResults = $oGeocode->lookup();
- print_results($aSearchResults, $bVerbose);
- }
- echo "\n";
-}
+++ /dev/null
-<?php
-/**
- * SPDX-License-Identifier: GPL-2.0-only
- *
- * This file is part of Nominatim. (https://nominatim.org)
- *
- * Copyright (C) 2022 by the Nominatim developer community.
- * For a full list of authors see the git log.
- */
-
-require_once(CONST_LibDir.'/Shell.php');
-
-function getCmdOpt($aArg, $aSpec, &$aResult, $bExitOnError = false, $bExitOnUnknown = false)
-{
- $aQuick = array();
- $aCounts = array();
-
- foreach ($aSpec as $aLine) {
- if (is_array($aLine)) {
- if ($aLine[0]) {
- $aQuick['--'.$aLine[0]] = $aLine;
- }
- if ($aLine[1]) {
- $aQuick['-'.$aLine[1]] = $aLine;
- }
- $aCounts[$aLine[0]] = 0;
- }
- }
-
- $aResult = array();
- $bUnknown = false;
- $iSize = count($aArg);
- for ($i = 1; $i < $iSize; $i++) {
- if (isset($aQuick[$aArg[$i]])) {
- $aLine = $aQuick[$aArg[$i]];
- $aCounts[$aLine[0]]++;
- $xVal = null;
- if ($aLine[4] == $aLine[5]) {
- if ($aLine[4]) {
- $xVal = array();
- for ($n = $aLine[4]; $i < $iSize && $n; $n--) {
- $i++;
- if ($i >= $iSize || $aArg[$i][0] == '-') {
- showUsage($aSpec, $bExitOnError, 'Parameter of \''.$aLine[0].'\' is missing');
- }
-
- switch ($aLine[6]) {
- case 'realpath':
- $xVal[] = realpath($aArg[$i]);
- break;
- case 'realdir':
- $sPath = realpath(dirname($aArg[$i]));
- if ($sPath) {
- $xVal[] = $sPath . '/' . basename($aArg[$i]);
- } else {
- $xVal[] = $sPath;
- }
- break;
- case 'bool':
- $xVal[] = (bool)$aArg[$i];
- break;
- case 'int':
- $xVal[] = (int)$aArg[$i];
- break;
- case 'float':
- $xVal[] = (float)$aArg[$i];
- break;
- default:
- $xVal[] = $aArg[$i];
- break;
- }
- }
- if ($aLine[4] == 1) {
- $xVal = $xVal[0];
- }
- } else {
- $xVal = true;
- }
- } else {
- fail('Variable numbers of params not yet supported');
- }
-
- if ($aLine[3] > 1) {
- if (!array_key_exists($aLine[0], $aResult)) {
- $aResult[$aLine[0]] = array();
- }
- $aResult[$aLine[0]][] = $xVal;
- } else {
- $aResult[$aLine[0]] = $xVal;
- }
- } else {
- $bUnknown = $aArg[$i];
- }
- }
-
- if (array_key_exists('help', $aResult)) {
- showUsage($aSpec);
- }
- if ($bUnknown && $bExitOnUnknown) {
- showUsage($aSpec, $bExitOnError, 'Unknown option \''.$bUnknown.'\'');
- }
-
- foreach ($aSpec as $aLine) {
- if (is_array($aLine)) {
- if ($aCounts[$aLine[0]] < $aLine[2]) {
- showUsage($aSpec, $bExitOnError, 'Option \''.$aLine[0].'\' is missing');
- }
- if ($aCounts[$aLine[0]] > $aLine[3]) {
- showUsage($aSpec, $bExitOnError, 'Option \''.$aLine[0].'\' is present too many times');
- }
- if ($aLine[6] == 'bool' && !array_key_exists($aLine[0], $aResult)) {
- $aResult[$aLine[0]] = false;
- }
- }
- }
- return $bUnknown;
-}
-
-function showUsage($aSpec, $bExit = false, $sError = false)
-{
- if ($sError) {
- echo basename($_SERVER['argv'][0]).': '.$sError."\n";
- echo 'Try `'.basename($_SERVER['argv'][0]).' --help` for more information.'."\n";
- exit;
- }
- echo 'Usage: '.basename($_SERVER['argv'][0])."\n";
- $bFirst = true;
- foreach ($aSpec as $aLine) {
- if (is_array($aLine)) {
- if ($bFirst) {
- $bFirst = false;
- echo "\n";
- }
- $aNames = array();
- if ($aLine[1]) {
- $aNames[] = '-'.$aLine[1];
- }
- if ($aLine[0]) {
- $aNames[] = '--'.$aLine[0];
- }
- $sName = join(', ', $aNames);
- echo ' '.$sName.str_repeat(' ', 30-strlen($sName)).$aLine[7]."\n";
- } else {
- echo $aLine."\n";
- }
- }
- echo "\n";
- exit;
-}
-
-function info($sMsg)
-{
- echo date('Y-m-d H:i:s == ').$sMsg."\n";
-}
-
-$aWarnings = array();
-
-
-function warn($sMsg)
-{
- $GLOBALS['aWarnings'][] = $sMsg;
- echo date('Y-m-d H:i:s == ').'WARNING: '.$sMsg."\n";
-}
-
-
-function repeatWarnings()
-{
- foreach ($GLOBALS['aWarnings'] as $sMsg) {
- echo ' * ',$sMsg."\n";
- }
-}
-
-
-function setupHTTPProxy()
-{
- if (!getSettingBool('HTTP_PROXY')) {
- return;
- }
-
- $sProxy = 'tcp://'.getSetting('HTTP_PROXY_HOST').':'.getSetting('HTTP_PROXY_PROT');
- $aHeaders = array();
-
- $sLogin = getSetting('HTTP_PROXY_LOGIN');
- $sPassword = getSetting('HTTP_PROXY_PASSWORD');
-
- if ($sLogin && $sPassword) {
- $sAuth = base64_encode($sLogin.':'.$sPassword);
- $aHeaders = array('Proxy-Authorization: Basic '.$sAuth);
- }
-
- $aProxyHeader = array(
- 'proxy' => $sProxy,
- 'request_fulluri' => true,
- 'header' => $aHeaders
- );
-
- $aContext = array('http' => $aProxyHeader, 'https' => $aProxyHeader);
- stream_context_set_default($aContext);
-}
+++ /dev/null
-<?php
-/**
- * SPDX-License-Identifier: GPL-2.0-only
- *
- * This file is part of Nominatim. (https://nominatim.org)
- *
- * Copyright (C) 2022 by the Nominatim developer community.
- * For a full list of authors see the git log.
- */
-
-require('Symfony/Component/Dotenv/autoload.php');
-
-function loadDotEnv()
-{
- $dotenv = new \Symfony\Component\Dotenv\Dotenv();
- $dotenv->load(CONST_ConfigDir.'/env.defaults');
-
- if (file_exists('.env')) {
- $dotenv->load('.env');
- }
-}
+++ /dev/null
-<?php
-/**
- * SPDX-License-Identifier: GPL-2.0-only
- *
- * This file is part of Nominatim. (https://nominatim.org)
- *
- * Copyright (C) 2022 by the Nominatim developer community.
- * For a full list of authors see the git log.
- */
-
-require_once('init.php');
-require_once('cmd.php');
-require_once('DebugNone.php');
+++ /dev/null
-<?php
-/**
- * SPDX-License-Identifier: GPL-2.0-only
- *
- * This file is part of Nominatim. (https://nominatim.org)
- *
- * Copyright (C) 2022 by the Nominatim developer community.
- * For a full list of authors see the git log.
- */
-
-require_once('init.php');
-require_once('ParameterParser.php');
-require_once(CONST_Debug ? 'DebugHtml.php' : 'DebugNone.php');
-
-/***************************************************************************
- *
- * Error handling functions
- *
- */
-
-function userError($sMsg)
-{
- throw new \Exception($sMsg, 400);
-}
-
-
-function exception_handler_json($exception)
-{
- http_response_code($exception->getCode() == 0 ? 500 : $exception->getCode());
- header('Content-type: application/json; charset=utf-8');
- include(CONST_LibDir.'/template/error-json.php');
- exit();
-}
-
-function exception_handler_xml($exception)
-{
- http_response_code($exception->getCode() == 0 ? 500 : $exception->getCode());
- header('Content-type: text/xml; charset=utf-8');
- echo '<?xml version="1.0" encoding="UTF-8" ?>'."\n";
- include(CONST_LibDir.'/template/error-xml.php');
- exit();
-}
-
-function shutdown_exception_handler_xml()
-{
- $error = error_get_last();
- if ($error !== null && $error['type'] === E_ERROR) {
- exception_handler_xml(new \Exception($error['message'], 500));
- }
-}
-
-function shutdown_exception_handler_json()
-{
- $error = error_get_last();
- if ($error !== null && $error['type'] === E_ERROR) {
- exception_handler_json(new \Exception($error['message'], 500));
- }
-}
-
-
-function set_exception_handler_by_format($sFormat = null)
-{
- // Multiple calls to register_shutdown_function will cause multiple callbacks
- // to be executed, we only want the last executed. Thus we don't want to register
- // one by default without an explicit $sFormat set.
-
- if (!isset($sFormat)) {
- set_exception_handler('exception_handler_json');
- } elseif ($sFormat == 'xml') {
- set_exception_handler('exception_handler_xml');
- register_shutdown_function('shutdown_exception_handler_xml');
- } else {
- set_exception_handler('exception_handler_json');
- register_shutdown_function('shutdown_exception_handler_json');
- }
-}
-// set a default
-set_exception_handler_by_format();
-
-
-/***************************************************************************
- * HTTP Reply header setup
- */
-
-if (CONST_NoAccessControl) {
- header('Access-Control-Allow-Origin: *');
- header('Access-Control-Allow-Methods: OPTIONS,GET');
- if (!empty($_SERVER['HTTP_ACCESS_CONTROL_REQUEST_HEADERS'])) {
- header('Access-Control-Allow-Headers: '.$_SERVER['HTTP_ACCESS_CONTROL_REQUEST_HEADERS']);
- }
-}
-if (isset($_SERVER['REQUEST_METHOD']) && $_SERVER['REQUEST_METHOD'] == 'OPTIONS') {
- exit;
-}
-
-if (CONST_Debug) {
- header('Content-type: text/html; charset=utf-8');
-}
+++ /dev/null
-<?php
-/**
- * SPDX-License-Identifier: GPL-2.0-only
- *
- * This file is part of Nominatim. (https://nominatim.org)
- *
- * Copyright (C) 2022 by the Nominatim developer community.
- * For a full list of authors see the git log.
- */
-
-require_once(CONST_LibDir.'/lib.php');
-require_once(CONST_LibDir.'/DB.php');
+++ /dev/null
-<?php
-/**
- * SPDX-License-Identifier: GPL-2.0-only
- *
- * This file is part of Nominatim. (https://nominatim.org)
- *
- * Copyright (C) 2022 by the Nominatim developer community.
- * For a full list of authors see the git log.
- */
-
-function loadSettings($sProjectDir)
-{
- @define('CONST_InstallDir', $sProjectDir);
- // Temporary hack to set the directory via environment instead of
- // the installed scripts. Neither setting is part of the official
- // set of settings.
- defined('CONST_ConfigDir') or define('CONST_ConfigDir', $_SERVER['NOMINATIM_CONFIGDIR']);
-}
-
-function getSetting($sConfName, $sDefault = null)
-{
- $sValue = $_SERVER['NOMINATIM_'.$sConfName];
-
- if ($sDefault !== null && !$sValue) {
- return $sDefault;
- }
-
- return $sValue;
-}
-
-function getSettingBool($sConfName)
-{
- $sVal = strtolower(getSetting($sConfName));
-
- return strcmp($sVal, 'yes') == 0
- || strcmp($sVal, 'true') == 0
- || strcmp($sVal, '1') == 0;
-}
-
-function fail($sError, $sUserError = false)
-{
- if (!$sUserError) {
- $sUserError = $sError;
- }
- error_log('ERROR: '.$sError);
- var_dump($sUserError);
- echo "\n";
- exit(-1);
-}
-
-
-function getProcessorCount()
-{
- $sCPU = file_get_contents('/proc/cpuinfo');
- preg_match_all('#processor\s+: [0-9]+#', $sCPU, $aMatches);
- return count($aMatches[0]);
-}
-
-
-function getTotalMemoryMB()
-{
- $sCPU = file_get_contents('/proc/meminfo');
- preg_match('#MemTotal: +([0-9]+) kB#', $sCPU, $aMatches);
- return (int)($aMatches[1]/1024);
-}
-
-
-function getCacheMemoryMB()
-{
- $sCPU = file_get_contents('/proc/meminfo');
- preg_match('#Cached: +([0-9]+) kB#', $sCPU, $aMatches);
- return (int)($aMatches[1]/1024);
-}
-
-function getDatabaseDate(&$oDB)
-{
- // Find the newest node in the DB
- $iLastOSMID = $oDB->getOne("select max(osm_id) from place where osm_type = 'N'");
- // Lookup the timestamp that node was created
- $sLastNodeURL = 'https://www.openstreetmap.org/api/0.6/node/'.$iLastOSMID.'/1';
- $sLastNodeXML = file_get_contents($sLastNodeURL);
-
- if ($sLastNodeXML === false) {
- return false;
- }
-
- preg_match('#timestamp="(([0-9]{4})-([0-9]{2})-([0-9]{2})T([0-9]{2}):([0-9]{2}):([0-9]{2})Z)"#', $sLastNodeXML, $aLastNodeDate);
-
- return $aLastNodeDate[1];
-}
-
-
-function byImportance($a, $b)
-{
- if ($a['importance'] != $b['importance']) {
- return ($a['importance'] > $b['importance']?-1:1);
- }
-
- return $a['foundorder'] <=> $b['foundorder'];
-}
-
-
-function javascript_renderData($xVal, $iOptions = 0)
-{
- $sCallback = isset($_GET['json_callback']) ? $_GET['json_callback'] : '';
- if ($sCallback && !preg_match('/^[$_\p{L}][$_\p{L}\p{Nd}.[\]]*$/u', $sCallback)) {
- // Unset, we call javascript_renderData again during exception handling
- unset($_GET['json_callback']);
- throw new Exception('Invalid json_callback value', 400);
- }
-
- $iOptions |= JSON_UNESCAPED_UNICODE | JSON_UNESCAPED_SLASHES;
- if (isset($_GET['pretty']) && in_array(strtolower($_GET['pretty']), array('1', 'true'))) {
- $iOptions |= JSON_PRETTY_PRINT;
- }
-
- $jsonout = json_encode($xVal, $iOptions);
-
- if ($sCallback) {
- header('Content-Type: application/javascript; charset=UTF-8');
- echo $_GET['json_callback'].'('.$jsonout.')';
- } else {
- header('Content-Type: application/json; charset=UTF-8');
- echo $jsonout;
- }
-}
-
-function addQuotes($s)
-{
- return "'".$s."'";
-}
-
-function parseLatLon($sQuery)
-{
- $sFound = null;
- $fQueryLat = null;
- $fQueryLon = null;
-
- if (preg_match('/\\s*([NS])[\s]+([0-9]+[0-9.]*)[°\s]+([0-9.]+)?[′\']*[,\s]+([EW])[\s]+([0-9]+)[°\s]+([0-9]+[0-9.]*)[′\']*\\s*/', $sQuery, $aData)) {
- /* 1 2 3 4 5 6
- * degrees decimal minutes
- * N 40 26.767, W 79 58.933
- * N 40°26.767′, W 79°58.933′
- */
- $sFound = $aData[0];
- $fQueryLat = ($aData[1]=='N'?1:-1) * ($aData[2] + $aData[3]/60);
- $fQueryLon = ($aData[4]=='E'?1:-1) * ($aData[5] + $aData[6]/60);
- } elseif (preg_match('/\\s*([0-9]+)[°\s]+([0-9]+[0-9.]*)?[′\']*[\s]+([NS])[,\s]+([0-9]+)[°\s]+([0-9]+[0-9.]*)?[′\'\s]+([EW])\\s*/', $sQuery, $aData)) {
- /* 1 2 3 4 5 6
- * degrees decimal minutes
- * 40 26.767 N, 79 58.933 W
- * 40° 26.767′ N 79° 58.933′ W
- */
- $sFound = $aData[0];
- $fQueryLat = ($aData[3]=='N'?1:-1) * ($aData[1] + $aData[2]/60);
- $fQueryLon = ($aData[6]=='E'?1:-1) * ($aData[4] + $aData[5]/60);
- } elseif (preg_match('/\\s*([NS])[\s]+([0-9]+)[°\s]+([0-9]+)[′\'\s]+([0-9]+)[″"]*[,\s]+([EW])[\s]+([0-9]+)[°\s]+([0-9]+)[′\'\s]+([0-9]+)[″"]*\\s*/', $sQuery, $aData)) {
- /* 1 2 3 4 5 6 7 8
- * degrees decimal seconds
- * N 40 26 46 W 79 58 56
- * N 40° 26′ 46″, W 79° 58′ 56″
- */
- $sFound = $aData[0];
- $fQueryLat = ($aData[1]=='N'?1:-1) * ($aData[2] + $aData[3]/60 + $aData[4]/3600);
- $fQueryLon = ($aData[5]=='E'?1:-1) * ($aData[6] + $aData[7]/60 + $aData[8]/3600);
- } elseif (preg_match('/\\s*([0-9]+)[°\s]+([0-9]+)[′\'\s]+([0-9]+[0-9.]*)[″"\s]+([NS])[,\s]+([0-9]+)[°\s]+([0-9]+)[′\'\s]+([0-9]+[0-9.]*)[″"\s]+([EW])\\s*/', $sQuery, $aData)) {
- /* 1 2 3 4 5 6 7 8
- * degrees decimal seconds
- * 40 26 46 N 79 58 56 W
- * 40° 26′ 46″ N, 79° 58′ 56″ W
- * 40° 26′ 46.78″ N, 79° 58′ 56.89″ W
- */
- $sFound = $aData[0];
- $fQueryLat = ($aData[4]=='N'?1:-1) * ($aData[1] + $aData[2]/60 + $aData[3]/3600);
- $fQueryLon = ($aData[8]=='E'?1:-1) * ($aData[5] + $aData[6]/60 + $aData[7]/3600);
- } elseif (preg_match('/\\s*([NS])[\s]+([0-9]+[0-9]*\\.[0-9]+)[°]*[,\s]+([EW])[\s]+([0-9]+[0-9]*\\.[0-9]+)[°]*\\s*/', $sQuery, $aData)) {
- /* 1 2 3 4
- * degrees decimal
- * N 40.446° W 79.982°
- */
- $sFound = $aData[0];
- $fQueryLat = ($aData[1]=='N'?1:-1) * ($aData[2]);
- $fQueryLon = ($aData[3]=='E'?1:-1) * ($aData[4]);
- } elseif (preg_match('/\\s*([0-9]+[0-9]*\\.[0-9]+)[°\s]+([NS])[,\s]+([0-9]+[0-9]*\\.[0-9]+)[°\s]+([EW])\\s*/', $sQuery, $aData)) {
- /* 1 2 3 4
- * degrees decimal
- * 40.446° N 79.982° W
- */
- $sFound = $aData[0];
- $fQueryLat = ($aData[2]=='N'?1:-1) * ($aData[1]);
- $fQueryLon = ($aData[4]=='E'?1:-1) * ($aData[3]);
- } elseif (preg_match('/(\\s*\\[|^\\s*|\\s*)(-?[0-9]+[0-9]*\\.[0-9]+)[,\s]+(-?[0-9]+[0-9]*\\.[0-9]+)(\\]\\s*|\\s*$|\\s*)/', $sQuery, $aData)) {
- /* 1 2 3 4
- * degrees decimal
- * 12.34, 56.78
- * 12.34 56.78
- * [12.456,-78.90]
- */
- $sFound = $aData[0];
- $fQueryLat = $aData[2];
- $fQueryLon = $aData[3];
- } else {
- return false;
- }
-
- return array($sFound, $fQueryLat, $fQueryLon);
-}
-
-function addressRankToGeocodeJsonType($iAddressRank)
-{
- if ($iAddressRank >= 29 && $iAddressRank <= 30) {
- return 'house';
- }
- if ($iAddressRank >= 26 && $iAddressRank < 28) {
- return 'street';
- }
- if ($iAddressRank >= 22 && $iAddressRank < 26) {
- return 'locality';
- }
- if ($iAddressRank >= 17 && $iAddressRank < 22) {
- return 'district';
- }
- if ($iAddressRank >= 13 && $iAddressRank < 17) {
- return 'city';
- }
- if ($iAddressRank >= 10 && $iAddressRank < 13) {
- return 'county';
- }
- if ($iAddressRank >= 5 && $iAddressRank < 10) {
- return 'state';
- }
- if ($iAddressRank >= 4 && $iAddressRank < 5) {
- return 'country';
- }
-
- return 'locality';
-}
-
-if (!function_exists('array_key_last')) {
- function array_key_last(array $array)
- {
- if (!empty($array)) {
- return key(array_slice($array, -1, 1, true));
- }
- }
-}
+++ /dev/null
-<?php
-/**
- * SPDX-License-Identifier: GPL-2.0-only
- *
- * This file is part of Nominatim. (https://nominatim.org)
- *
- * Copyright (C) 2022 by the Nominatim developer community.
- * For a full list of authors see the git log.
- */
-
-
-function logStart(&$oDB, $sType = '', $sQuery = '', $aLanguageList = array())
-{
- $fStartTime = microtime(true);
- $aStartTime = explode('.', $fStartTime);
- if (!isset($aStartTime[1])) {
- $aStartTime[1] = '0';
- }
-
- $sOutputFormat = '';
- if (isset($_GET['format'])) {
- $sOutputFormat = $_GET['format'];
- }
-
- if ($sType == 'reverse') {
- $sOutQuery = (isset($_GET['lat'])?$_GET['lat']:'').'/';
- if (isset($_GET['lon'])) {
- $sOutQuery .= $_GET['lon'];
- }
- if (isset($_GET['zoom'])) {
- $sOutQuery .= '/'.$_GET['zoom'];
- }
- } else {
- $sOutQuery = $sQuery;
- }
-
- $hLog = array(
- date('Y-m-d H:i:s', $aStartTime[0]).'.'.$aStartTime[1],
- $_SERVER['REMOTE_ADDR'],
- $_SERVER['QUERY_STRING'],
- $sOutQuery,
- $sType,
- $fStartTime
- );
-
- if (CONST_Log_DB) {
- if (isset($_GET['email'])) {
- $sUserAgent = $_GET['email'];
- } elseif (isset($_SERVER['HTTP_REFERER'])) {
- $sUserAgent = $_SERVER['HTTP_REFERER'];
- } elseif (isset($_SERVER['HTTP_USER_AGENT'])) {
- $sUserAgent = $_SERVER['HTTP_USER_AGENT'];
- } else {
- $sUserAgent = '';
- }
- $sSQL = 'insert into new_query_log (type,starttime,query,ipaddress,useragent,language,format,searchterm)';
- $sSQL .= ' values (';
- $sSQL .= join(',', $oDB->getDBQuotedList(array(
- $sType,
- $hLog[0],
- $hLog[2],
- $hLog[1],
- $sUserAgent,
- join(',', $aLanguageList),
- $sOutputFormat,
- $hLog[3]
- )));
- $sSQL .= ')';
- $oDB->exec($sSQL);
- }
-
- return $hLog;
-}
-
-function logEnd(&$oDB, $hLog, $iNumResults)
-{
- $fEndTime = microtime(true);
-
- if (CONST_Log_DB) {
- $aEndTime = explode('.', $fEndTime);
- if (!isset($aEndTime[1])) {
- $aEndTime[1] = '0';
- }
- $sEndTime = date('Y-m-d H:i:s', $aEndTime[0]).'.'.$aEndTime[1];
-
- $sSQL = 'update new_query_log set endtime = '.$oDB->getDBQuoted($sEndTime).', results = '.$iNumResults;
- $sSQL .= ' where starttime = '.$oDB->getDBQuoted($hLog[0]);
- $sSQL .= ' and ipaddress = '.$oDB->getDBQuoted($hLog[1]);
- $sSQL .= ' and query = '.$oDB->getDBQuoted($hLog[2]);
- $oDB->exec($sSQL);
- }
-
- if (CONST_Log_File) {
- $aOutdata = sprintf(
- "[%s] %.4f %d %s \"%s\"\n",
- $hLog[0],
- $fEndTime-$hLog[5],
- $iNumResults,
- $hLog[4],
- $hLog[2]
- );
- file_put_contents(CONST_Log_File, $aOutdata, FILE_APPEND | LOCK_EX);
- }
-}
+++ /dev/null
-<?php
-/**
- * SPDX-License-Identifier: GPL-2.0-only
- *
- * This file is part of Nominatim. (https://nominatim.org)
- *
- * Copyright (C) 2022 by the Nominatim developer community.
- * For a full list of authors see the git log.
- */
-
-
-function formatOSMType($sType, $bIncludeExternal = true)
-{
- if ($sType == 'N') {
- return 'node';
- }
- if ($sType == 'W') {
- return 'way';
- }
- if ($sType == 'R') {
- return 'relation';
- }
-
- if (!$bIncludeExternal) {
- return '';
- }
-
- if ($sType == 'T') {
- return 'way';
- }
- if ($sType == 'I') {
- return 'way';
- }
-
- // not handled: P, L
-
- return '';
-}
+++ /dev/null
-<?php
-/**
- * SPDX-License-Identifier: GPL-2.0-only
- *
- * This file is part of Nominatim. (https://nominatim.org)
- *
- * Copyright (C) 2022 by the Nominatim developer community.
- * For a full list of authors see the git log.
- */
-
-function getOsm2pgsqlBinary()
-{
- $sBinary = getSetting('OSM2PGSQL_BINARY');
-
- return $sBinary ? $sBinary : CONST_Default_Osm2pgsql;
-}
-
-function getImportStyle()
-{
- $sStyle = getSetting('IMPORT_STYLE');
-
- if (in_array($sStyle, array('admin', 'street', 'address', 'full', 'extratags'))) {
- return CONST_ConfigDir.'/import-'.$sStyle.'.style';
- }
-
- return $sStyle;
-}
+++ /dev/null
-<?php
-/**
- * SPDX-License-Identifier: GPL-2.0-only
- *
- * This file is part of Nominatim. (https://nominatim.org)
- *
- * Copyright (C) 2022 by the Nominatim developer community.
- * For a full list of authors see the git log.
- */
-
-// https://github.com/geocoders/geocodejson-spec/
-
-$aFilteredPlaces = array();
-
-if (empty($aPlace)) {
- if (isset($sError)) {
- $aFilteredPlaces['error'] = $sError;
- } else {
- $aFilteredPlaces['error'] = 'Unable to geocode';
- }
- javascript_renderData($aFilteredPlaces);
-} else {
- $aFilteredPlaces = array(
- 'type' => 'Feature',
- 'properties' => array(
- 'geocoding' => array()
- )
- );
-
- if (isset($aPlace['place_id'])) {
- $aFilteredPlaces['properties']['geocoding']['place_id'] = $aPlace['place_id'];
- }
- $sOSMType = formatOSMType($aPlace['osm_type']);
- if ($sOSMType) {
- $aFilteredPlaces['properties']['geocoding']['osm_type'] = $sOSMType;
- $aFilteredPlaces['properties']['geocoding']['osm_id'] = $aPlace['osm_id'];
- }
-
- $aFilteredPlaces['properties']['geocoding']['type'] = addressRankToGeocodeJsonType($aPlace['rank_address']);
-
- $aFilteredPlaces['properties']['geocoding']['accuracy'] = (int) $fDistance;
-
- $aFilteredPlaces['properties']['geocoding']['label'] = $aPlace['langaddress'];
-
- if ($aPlace['placename'] !== null) {
- $aFilteredPlaces['properties']['geocoding']['name'] = $aPlace['placename'];
- }
-
- if (isset($aPlace['address'])) {
- $aPlace['address']->addGeocodeJsonAddressParts(
- $aFilteredPlaces['properties']['geocoding']
- );
-
- $aFilteredPlaces['properties']['geocoding']['admin']
- = $aPlace['address']->getAdminLevels();
- }
-
- if (isset($aPlace['asgeojson'])) {
- $aFilteredPlaces['geometry'] = json_decode($aPlace['asgeojson'], true);
- } else {
- $aFilteredPlaces['geometry'] = array(
- 'type' => 'Point',
- 'coordinates' => array(
- (float) $aPlace['lon'],
- (float) $aPlace['lat']
- )
- );
- }
-
- javascript_renderData(array(
- 'type' => 'FeatureCollection',
- 'geocoding' => array(
- 'version' => '0.1.0',
- 'attribution' => 'Data © OpenStreetMap contributors, ODbL 1.0. https://osm.org/copyright',
- 'licence' => 'ODbL',
- 'query' => $sQuery
- ),
- 'features' => array($aFilteredPlaces)
- ));
-}
+++ /dev/null
-<?php
-/**
- * SPDX-License-Identifier: GPL-2.0-only
- *
- * This file is part of Nominatim. (https://nominatim.org)
- *
- * Copyright (C) 2022 by the Nominatim developer community.
- * For a full list of authors see the git log.
- */
-
-$aFilteredPlaces = array();
-
-if (empty($aPlace)) {
- if (isset($sError)) {
- $aFilteredPlaces['error'] = $sError;
- } else {
- $aFilteredPlaces['error'] = 'Unable to geocode';
- }
- javascript_renderData($aFilteredPlaces);
-} else {
- $aFilteredPlaces = array(
- 'type' => 'Feature',
- 'properties' => array()
- );
-
- if (isset($aPlace['place_id'])) {
- $aFilteredPlaces['properties']['place_id'] = $aPlace['place_id'];
- }
- $sOSMType = formatOSMType($aPlace['osm_type']);
- if ($sOSMType) {
- $aFilteredPlaces['properties']['osm_type'] = $sOSMType;
- $aFilteredPlaces['properties']['osm_id'] = $aPlace['osm_id'];
- }
-
- $aFilteredPlaces['properties']['place_rank'] = $aPlace['rank_search'];
-
- $aFilteredPlaces['properties']['category'] = $aPlace['class'];
- $aFilteredPlaces['properties']['type'] = $aPlace['type'];
-
- $aFilteredPlaces['properties']['importance'] = $aPlace['importance'];
-
- $aFilteredPlaces['properties']['addresstype'] = strtolower($aPlace['addresstype']);
-
- $aFilteredPlaces['properties']['name'] = $aPlace['placename'];
-
- $aFilteredPlaces['properties']['display_name'] = $aPlace['langaddress'];
-
- if (isset($aPlace['address'])) {
- $aFilteredPlaces['properties']['address'] = $aPlace['address']->getAddressNames();
- }
- if (isset($aPlace['sExtraTags'])) {
- $aFilteredPlaces['properties']['extratags'] = $aPlace['sExtraTags'];
- }
- if (isset($aPlace['sNameDetails'])) {
- $aFilteredPlaces['properties']['namedetails'] = $aPlace['sNameDetails'];
- }
-
- if (isset($aPlace['aBoundingBox'])) {
- $aFilteredPlaces['bbox'] = array(
- (float) $aPlace['aBoundingBox'][2], // minlon
- (float) $aPlace['aBoundingBox'][0], // minlat
- (float) $aPlace['aBoundingBox'][3], // maxlon
- (float) $aPlace['aBoundingBox'][1] // maxlat
- );
- }
-
- if (isset($aPlace['asgeojson'])) {
- $aFilteredPlaces['geometry'] = json_decode($aPlace['asgeojson'], true);
- } else {
- $aFilteredPlaces['geometry'] = array(
- 'type' => 'Point',
- 'coordinates' => array(
- (float) $aPlace['lon'],
- (float) $aPlace['lat']
- )
- );
- }
-
-
- javascript_renderData(array(
- 'type' => 'FeatureCollection',
- 'licence' => 'Data © OpenStreetMap contributors, ODbL 1.0. https://osm.org/copyright',
- 'features' => array($aFilteredPlaces)
- ));
-}
+++ /dev/null
-<?php
-/**
- * SPDX-License-Identifier: GPL-2.0-only
- *
- * This file is part of Nominatim. (https://nominatim.org)
- *
- * Copyright (C) 2022 by the Nominatim developer community.
- * For a full list of authors see the git log.
- */
-
-$aFilteredPlaces = array();
-
-if (empty($aPlace)) {
- if (isset($sError)) {
- $aFilteredPlaces['error'] = $sError;
- } else {
- $aFilteredPlaces['error'] = 'Unable to geocode';
- }
-} else {
- if (isset($aPlace['place_id'])) {
- $aFilteredPlaces['place_id'] = $aPlace['place_id'];
- }
- $aFilteredPlaces['licence'] = 'Data © OpenStreetMap contributors, ODbL 1.0. https://osm.org/copyright';
- $sOSMType = formatOSMType($aPlace['osm_type']);
- if ($sOSMType) {
- $aFilteredPlaces['osm_type'] = $sOSMType;
- $aFilteredPlaces['osm_id'] = $aPlace['osm_id'];
- }
- if (isset($aPlace['lat'])) {
- $aFilteredPlaces['lat'] = $aPlace['lat'];
- }
- if (isset($aPlace['lon'])) {
- $aFilteredPlaces['lon'] = $aPlace['lon'];
- }
-
- if ($sOutputFormat == 'jsonv2' || $sOutputFormat == 'geojson') {
- $aFilteredPlaces['place_rank'] = $aPlace['rank_search'];
-
- $aFilteredPlaces['category'] = $aPlace['class'];
- $aFilteredPlaces['type'] = $aPlace['type'];
-
- $aFilteredPlaces['importance'] = $aPlace['importance'];
-
- $aFilteredPlaces['addresstype'] = strtolower($aPlace['addresstype']);
-
- $aFilteredPlaces['name'] = $aPlace['placename'];
- }
-
- $aFilteredPlaces['display_name'] = $aPlace['langaddress'];
-
- if (isset($aPlace['address'])) {
- $aFilteredPlaces['address'] = $aPlace['address']->getAddressNames();
- }
- if (isset($aPlace['sExtraTags'])) {
- $aFilteredPlaces['extratags'] = $aPlace['sExtraTags'];
- }
- if (isset($aPlace['sNameDetails'])) {
- $aFilteredPlaces['namedetails'] = $aPlace['sNameDetails'];
- }
-
- if (isset($aPlace['aBoundingBox'])) {
- $aFilteredPlaces['boundingbox'] = $aPlace['aBoundingBox'];
- }
-
- if (isset($aPlace['asgeojson'])) {
- $aFilteredPlaces['geojson'] = json_decode($aPlace['asgeojson'], true);
- }
-
- if (isset($aPlace['assvg'])) {
- $aFilteredPlaces['svg'] = $aPlace['assvg'];
- }
-
- if (isset($aPlace['astext'])) {
- $aFilteredPlaces['geotext'] = $aPlace['astext'];
- }
-
- if (isset($aPlace['askml'])) {
- $aFilteredPlaces['geokml'] = $aPlace['askml'];
- }
-}
-
-javascript_renderData($aFilteredPlaces);
+++ /dev/null
-<?php
-/**
- * SPDX-License-Identifier: GPL-2.0-only
- *
- * This file is part of Nominatim. (https://nominatim.org)
- *
- * Copyright (C) 2022 by the Nominatim developer community.
- * For a full list of authors see the git log.
- */
-header('content-type: text/xml; charset=UTF-8');
-
-echo '<';
-echo '?xml version="1.0" encoding="UTF-8" ?';
-echo ">\n";
-
-echo '<reversegeocode';
-echo " timestamp='".date(DATE_RFC822)."'";
-echo " attribution='Data © OpenStreetMap contributors, ODbL 1.0. http://www.openstreetmap.org/copyright'";
-echo " querystring='".htmlspecialchars($_SERVER['QUERY_STRING'], ENT_QUOTES)."'";
-echo ">\n";
-
-if (empty($aPlace)) {
- if (isset($sError)) {
- echo "<error>$sError</error>";
- } else {
- echo '<error>Unable to geocode</error>';
- }
-} else {
- echo '<result';
- if ($aPlace['place_id']) {
- echo ' place_id="'.$aPlace['place_id'].'"';
- }
- $sOSMType = formatOSMType($aPlace['osm_type']);
- if ($sOSMType) {
- echo ' osm_type="'.$sOSMType.'"'.' osm_id="'.$aPlace['osm_id'].'"';
- }
- if ($aPlace['ref']) {
- echo ' ref="'.htmlspecialchars($aPlace['ref']).'"';
- }
- if (isset($aPlace['lat'])) {
- echo ' lat="'.htmlspecialchars($aPlace['lat']).'"';
- }
- if (isset($aPlace['lon'])) {
- echo ' lon="'.htmlspecialchars($aPlace['lon']).'"';
- }
- if (isset($aPlace['aBoundingBox'])) {
- echo ' boundingbox="';
- echo join(',', $aPlace['aBoundingBox']);
- echo '"';
- }
- echo " place_rank='".$aPlace['rank_search']."'";
- echo " address_rank='".$aPlace['rank_address']."'";
-
-
- if (isset($aPlace['asgeojson'])) {
- echo ' geojson=\'';
- echo $aPlace['asgeojson'];
- echo '\'';
- }
-
- if (isset($aPlace['assvg'])) {
- echo ' geosvg=\'';
- echo $aPlace['assvg'];
- echo '\'';
- }
-
- if (isset($aPlace['astext'])) {
- echo ' geotext=\'';
- echo $aPlace['astext'];
- echo '\'';
- }
- echo '>'.htmlspecialchars($aPlace['langaddress']).'</result>';
-
- if (isset($aPlace['address'])) {
- echo '<addressparts>';
- foreach ($aPlace['address']->getAddressNames() as $sKey => $sValue) {
- $sKey = str_replace(' ', '_', $sKey);
- echo "<$sKey>";
- echo htmlspecialchars($sValue);
- echo "</$sKey>";
- }
- echo '</addressparts>';
- }
-
- if (isset($aPlace['sExtraTags'])) {
- echo '<extratags>';
- foreach ($aPlace['sExtraTags'] as $sKey => $sValue) {
- echo '<tag key="'.htmlspecialchars($sKey).'" value="'.htmlspecialchars($sValue).'"/>';
- }
- echo '</extratags>';
- }
-
- if (isset($aPlace['sNameDetails'])) {
- echo '<namedetails>';
- foreach ($aPlace['sNameDetails'] as $sKey => $sValue) {
- echo '<name desc="'.htmlspecialchars($sKey).'">';
- echo htmlspecialchars($sValue);
- echo '</name>';
- }
- echo '</namedetails>';
- }
-
- if (isset($aPlace['askml'])) {
- echo "\n<geokml>";
- echo $aPlace['askml'];
- echo '</geokml>';
- }
-}
-
-echo '</reversegeocode>';
+++ /dev/null
-<?php
-/**
- * SPDX-License-Identifier: GPL-2.0-only
- *
- * This file is part of Nominatim. (https://nominatim.org)
- *
- * Copyright (C) 2022 by the Nominatim developer community.
- * For a full list of authors see the git log.
- */
-
-$aPlaceDetails = array();
-
-$aPlaceDetails['place_id'] = (int) $aPointDetails['place_id'];
-$aPlaceDetails['parent_place_id'] = (int) $aPointDetails['parent_place_id'];
-
-$aPlaceDetails['osm_type'] = $aPointDetails['osm_type'];
-$aPlaceDetails['osm_id'] = (int) $aPointDetails['osm_id'];
-
-$aPlaceDetails['category'] = $aPointDetails['class'];
-$aPlaceDetails['type'] = $aPointDetails['type'];
-$aPlaceDetails['admin_level'] = $aPointDetails['admin_level'];
-
-$aPlaceDetails['localname'] = $aPointDetails['localname'];
-$aPlaceDetails['names'] = $aPointDetails['aNames'];
-
-$aPlaceDetails['addresstags'] = $aPointDetails['aAddressTags'];
-$aPlaceDetails['housenumber'] = $aPointDetails['housenumber'];
-$aPlaceDetails['calculated_postcode'] = $aPointDetails['postcode'];
-$aPlaceDetails['country_code'] = $aPointDetails['country_code'];
-
-$aPlaceDetails['indexed_date'] = (new DateTime('@'.$aPointDetails['indexed_epoch']))->format(DateTime::RFC3339);
-$aPlaceDetails['importance'] = (float) $aPointDetails['importance'];
-$aPlaceDetails['calculated_importance'] = (float) $aPointDetails['calculated_importance'];
-
-$aPlaceDetails['extratags'] = $aPointDetails['aExtraTags'];
-$aPlaceDetails['calculated_wikipedia'] = $aPointDetails['wikipedia'];
-$sIcon = Nominatim\ClassTypes\getIconFile($aPointDetails);
-if (isset($sIcon)) {
- $aPlaceDetails['icon'] = $sIcon;
-}
-
-$aPlaceDetails['rank_address'] = (int) $aPointDetails['rank_address'];
-$aPlaceDetails['rank_search'] = (int) $aPointDetails['rank_search'];
-
-$aPlaceDetails['isarea'] = $aPointDetails['isarea'];
-$aPlaceDetails['centroid'] = array(
- 'type' => 'Point',
- 'coordinates' => array( (float) $aPointDetails['lon'], (float) $aPointDetails['lat'] )
- );
-
-$aPlaceDetails['geometry'] = json_decode($aPointDetails['asgeojson'], true);
-
-$funcMapAddressLine = function ($aFull) {
- return array(
- 'localname' => $aFull['localname'],
- 'place_id' => isset($aFull['place_id']) ? (int) $aFull['place_id'] : null,
- 'osm_id' => isset($aFull['osm_id']) ? (int) $aFull['osm_id'] : null,
- 'osm_type' => isset($aFull['osm_type']) ? $aFull['osm_type'] : null,
- 'place_type' => isset($aFull['place_type']) ? $aFull['place_type'] : null,
- 'class' => $aFull['class'],
- 'type' => $aFull['type'],
- 'admin_level' => isset($aFull['admin_level']) ? (int) $aFull['admin_level'] : null,
- 'rank_address' => $aFull['rank_address'] ? (int) $aFull['rank_address'] : null,
- 'distance' => (float) $aFull['distance'],
- 'isaddress' => isset($aFull['isaddress']) ? (bool) $aFull['isaddress'] : null
- );
-};
-
-$funcMapKeyword = function ($aFull) {
- return array(
- 'id' => (int) $aFull['word_id'],
- 'token' => $aFull['word_token']
- );
-};
-
-if ($aAddressLines) {
- $aPlaceDetails['address'] = array_map($funcMapAddressLine, $aAddressLines);
-}
-
-if ($aLinkedLines) {
- $aPlaceDetails['linked_places'] = array_map($funcMapAddressLine, $aLinkedLines);
-}
-
-if ($bIncludeKeywords) {
- $aPlaceDetails['keywords'] = array();
-
- if ($aPlaceSearchNameKeywords) {
- $aPlaceDetails['keywords']['name'] = array_map($funcMapKeyword, $aPlaceSearchNameKeywords);
- } else {
- $aPlaceDetails['keywords']['name'] = array();
- }
-
- if ($aPlaceSearchAddressKeywords) {
- $aPlaceDetails['keywords']['address'] = array_map($funcMapKeyword, $aPlaceSearchAddressKeywords);
- } else {
- $aPlaceDetails['keywords']['address'] = array();
- }
-}
-
-if ($bIncludeHierarchy) {
- if ($bGroupHierarchy) {
- $aPlaceDetails['hierarchy'] = array();
- foreach ($aHierarchyLines as $aAddressLine) {
- if ($aAddressLine['type'] == 'yes') {
- $sType = $aAddressLine['class'];
- } else {
- $sType = $aAddressLine['type'];
- }
-
- if (!isset($aPlaceDetails['hierarchy'][$sType])) {
- $aPlaceDetails['hierarchy'][$sType] = array();
- }
- $aPlaceDetails['hierarchy'][$sType][] = $funcMapAddressLine($aAddressLine);
- }
- } else {
- $aPlaceDetails['hierarchy'] = array_map($funcMapAddressLine, $aHierarchyLines);
- }
-}
-
-javascript_renderData($aPlaceDetails);
+++ /dev/null
-<?php
-/**
- * SPDX-License-Identifier: GPL-2.0-only
- *
- * This file is part of Nominatim. (https://nominatim.org)
- *
- * Copyright (C) 2022 by the Nominatim developer community.
- * For a full list of authors see the git log.
- */
- $error = array(
- 'code' => $exception->getCode(),
- 'message' => $exception->getMessage()
- );
-
- if (CONST_Debug) {
- $error['details'] = $exception->getFile() . '('. $exception->getLine() . ')';
- }
-
- javascript_renderData(array('error' => $error));
+++ /dev/null
-<error>
- <code><?php echo $exception->getCode() ?></code>
- <message><?php echo $exception->getMessage() ?></message>
- <?php if (CONST_Debug) { ?>
- <details><?php echo $exception->getFile() . '('. $exception->getLine() . ')' ?></details>
- <?php } ?>
-</error>
\ No newline at end of file
+++ /dev/null
-<?php
-/**
- * SPDX-License-Identifier: GPL-2.0-only
- *
- * This file is part of Nominatim. (https://nominatim.org)
- *
- * Copyright (C) 2022 by the Nominatim developer community.
- * For a full list of authors see the git log.
- */
-
-$aOutput = array();
-$aOutput['licence'] = 'Data © OpenStreetMap contributors, ODbL 1.0. https://osm.org/copyright';
-$aOutput['batch'] = array();
-
-foreach ($aBatchResults as $aSearchResults) {
- if (!$aSearchResults) {
- $aSearchResults = array();
- }
- $aFilteredPlaces = array();
- foreach ($aSearchResults as $iResNum => $aPointDetails) {
- $aPlace = array(
- 'place_id'=>$aPointDetails['place_id'],
- );
-
- $sOSMType = formatOSMType($aPointDetails['osm_type']);
- if ($sOSMType) {
- $aPlace['osm_type'] = $sOSMType;
- $aPlace['osm_id'] = $aPointDetails['osm_id'];
- }
-
- if (isset($aPointDetails['aBoundingBox'])) {
- $aPlace['boundingbox'] = array(
- $aPointDetails['aBoundingBox'][0],
- $aPointDetails['aBoundingBox'][1],
- $aPointDetails['aBoundingBox'][2],
- $aPointDetails['aBoundingBox'][3]
- );
- }
-
- if (isset($aPointDetails['zoom'])) {
- $aPlace['zoom'] = $aPointDetails['zoom'];
- }
-
- $aPlace['lat'] = $aPointDetails['lat'];
- $aPlace['lon'] = $aPointDetails['lon'];
- $aPlace['display_name'] = $aPointDetails['name'];
- $aPlace['place_rank'] = $aPointDetails['rank_search'];
-
- $aPlace['category'] = $aPointDetails['class'];
- $aPlace['type'] = $aPointDetails['type'];
-
- $aPlace['importance'] = $aPointDetails['importance'];
-
- if (isset($aPointDetails['icon'])) {
- $aPlace['icon'] = $aPointDetails['icon'];
- }
-
- if (isset($aPointDetails['address'])) {
- $aPlace['address'] = $aPointDetails['address']->getAddressNames();
- }
-
- if (isset($aPointDetails['asgeojson'])) {
- $aPlace['geojson'] = json_decode($aPointDetails['asgeojson'], true);
- }
-
- if (isset($aPointDetails['assvg'])) {
- $aPlace['svg'] = $aPointDetails['assvg'];
- }
-
- if (isset($aPointDetails['astext'])) {
- $aPlace['geotext'] = $aPointDetails['astext'];
- }
-
- if (isset($aPointDetails['askml'])) {
- $aPlace['geokml'] = $aPointDetails['askml'];
- }
-
- $aFilteredPlaces[] = $aPlace;
- }
- $aOutput['batch'][] = $aFilteredPlaces;
-}
-
-javascript_renderData($aOutput, array('geojson'));
+++ /dev/null
-<?php
-/**
- * SPDX-License-Identifier: GPL-2.0-only
- *
- * This file is part of Nominatim. (https://nominatim.org)
- *
- * Copyright (C) 2022 by the Nominatim developer community.
- * For a full list of authors see the git log.
- */
-
-$aFilteredPlaces = array();
-foreach ($aSearchResults as $iResNum => $aPointDetails) {
- $aPlace = array(
- 'type' => 'Feature',
- 'properties' => array(
- 'geocoding' => array()
- )
- );
-
- if (isset($aPointDetails['place_id'])) {
- $aPlace['properties']['geocoding']['place_id'] = $aPointDetails['place_id'];
- }
- $sOSMType = formatOSMType($aPointDetails['osm_type']);
- if ($sOSMType) {
- $aPlace['properties']['geocoding']['osm_type'] = $sOSMType;
- $aPlace['properties']['geocoding']['osm_id'] = $aPointDetails['osm_id'];
- }
- $aPlace['properties']['geocoding']['osm_key'] = $aPointDetails['class'];
- $aPlace['properties']['geocoding']['osm_value'] = $aPointDetails['type'];
-
- $aPlace['properties']['geocoding']['type'] = addressRankToGeocodeJsonType($aPointDetails['rank_address']);
-
- $aPlace['properties']['geocoding']['label'] = $aPointDetails['langaddress'];
-
- if ($aPointDetails['placename'] !== null) {
- $aPlace['properties']['geocoding']['name'] = $aPointDetails['placename'];
- }
-
- if (isset($aPointDetails['address'])) {
- $aPointDetails['address']->addGeocodeJsonAddressParts(
- $aPlace['properties']['geocoding']
- );
-
- $aPlace['properties']['geocoding']['admin']
- = $aPointDetails['address']->getAdminLevels();
- }
-
- if (isset($aPointDetails['asgeojson'])) {
- $aPlace['geometry'] = json_decode($aPointDetails['asgeojson'], true);
- } else {
- $aPlace['geometry'] = array(
- 'type' => 'Point',
- 'coordinates' => array(
- (float) $aPointDetails['lon'],
- (float) $aPointDetails['lat']
- )
- );
- }
- $aFilteredPlaces[] = $aPlace;
-}
-
-
-javascript_renderData(array(
- 'type' => 'FeatureCollection',
- 'geocoding' => array(
- 'version' => '0.1.0',
- 'attribution' => 'Data © OpenStreetMap contributors, ODbL 1.0. https://osm.org/copyright',
- 'licence' => 'ODbL',
- 'query' => $sQuery
- ),
- 'features' => $aFilteredPlaces
- ));
+++ /dev/null
-<?php
-/**
- * SPDX-License-Identifier: GPL-2.0-only
- *
- * This file is part of Nominatim. (https://nominatim.org)
- *
- * Copyright (C) 2022 by the Nominatim developer community.
- * For a full list of authors see the git log.
- */
-
-$aFilteredPlaces = array();
-foreach ($aSearchResults as $iResNum => $aPointDetails) {
- $aPlace = array(
- 'type' => 'Feature',
- 'properties' => array(
- 'place_id'=>$aPointDetails['place_id'],
- )
- );
-
- $sOSMType = formatOSMType($aPointDetails['osm_type']);
- if ($sOSMType) {
- $aPlace['properties']['osm_type'] = $sOSMType;
- $aPlace['properties']['osm_id'] = $aPointDetails['osm_id'];
- }
-
- if (isset($aPointDetails['aBoundingBox'])) {
- $aPlace['bbox'] = array(
- (float) $aPointDetails['aBoundingBox'][2], // minlon
- (float) $aPointDetails['aBoundingBox'][0], // minlat
- (float) $aPointDetails['aBoundingBox'][3], // maxlon
- (float) $aPointDetails['aBoundingBox'][1] // maxlat
- );
- }
-
- if (isset($aPointDetails['zoom'])) {
- $aPlace['properties']['zoom'] = $aPointDetails['zoom'];
- }
-
- $aPlace['properties']['display_name'] = $aPointDetails['name'];
-
- $aPlace['properties']['place_rank'] = $aPointDetails['rank_search'];
- $aPlace['properties']['category'] = $aPointDetails['class'];
-
- $aPlace['properties']['type'] = $aPointDetails['type'];
-
- $aPlace['properties']['importance'] = $aPointDetails['importance'];
-
- if (isset($aPointDetails['icon']) && $aPointDetails['icon']) {
- $aPlace['properties']['icon'] = $aPointDetails['icon'];
- }
-
- if (isset($aPointDetails['address'])) {
- $aPlace['properties']['address'] = $aPointDetails['address']->getAddressNames();
- }
-
- if (isset($aPointDetails['asgeojson'])) {
- $aPlace['geometry'] = json_decode($aPointDetails['asgeojson'], true);
- } else {
- $aPlace['geometry'] = array(
- 'type' => 'Point',
- 'coordinates' => array(
- (float) $aPointDetails['lon'],
- (float) $aPointDetails['lat']
- )
- );
- }
-
-
- if (isset($aPointDetails['sExtraTags'])) {
- $aPlace['properties']['extratags'] = $aPointDetails['sExtraTags'];
- }
- if (isset($aPointDetails['sNameDetails'])) {
- $aPlace['properties']['namedetails'] = $aPointDetails['sNameDetails'];
- }
-
- $aFilteredPlaces[] = $aPlace;
-}
-
-javascript_renderData(array(
- 'type' => 'FeatureCollection',
- 'licence' => 'Data © OpenStreetMap contributors, ODbL 1.0. https://osm.org/copyright',
- 'features' => $aFilteredPlaces
- ));
+++ /dev/null
-<?php
-/**
- * SPDX-License-Identifier: GPL-2.0-only
- *
- * This file is part of Nominatim. (https://nominatim.org)
- *
- * Copyright (C) 2022 by the Nominatim developer community.
- * For a full list of authors see the git log.
- */
-
-$aFilteredPlaces = array();
-foreach ($aSearchResults as $iResNum => $aPointDetails) {
- $aPlace = array(
- 'place_id'=>$aPointDetails['place_id'],
- 'licence'=>'Data © OpenStreetMap contributors, ODbL 1.0. https://osm.org/copyright',
- );
-
- $sOSMType = formatOSMType($aPointDetails['osm_type']);
- if ($sOSMType) {
- $aPlace['osm_type'] = $sOSMType;
- $aPlace['osm_id'] = $aPointDetails['osm_id'];
- }
-
- if (isset($aPointDetails['aBoundingBox'])) {
- $aPlace['boundingbox'] = $aPointDetails['aBoundingBox'];
- }
-
- if (isset($aPointDetails['zoom'])) {
- $aPlace['zoom'] = $aPointDetails['zoom'];
- }
-
- $aPlace['lat'] = $aPointDetails['lat'];
- $aPlace['lon'] = $aPointDetails['lon'];
-
- $aPlace['display_name'] = $aPointDetails['name'];
-
- if ($sOutputFormat == 'jsonv2' || $sOutputFormat == 'geojson') {
- $aPlace['place_rank'] = $aPointDetails['rank_search'];
- $aPlace['category'] = $aPointDetails['class'];
- } else {
- $aPlace['class'] = $aPointDetails['class'];
- }
- $aPlace['type'] = $aPointDetails['type'];
-
- $aPlace['importance'] = $aPointDetails['importance'];
-
- if (isset($aPointDetails['icon']) && $aPointDetails['icon']) {
- $aPlace['icon'] = $aPointDetails['icon'];
- }
-
- if (isset($aPointDetails['address'])) {
- $aPlace['address'] = $aPointDetails['address']->getAddressNames();
- }
-
- if (isset($aPointDetails['asgeojson'])) {
- $aPlace['geojson'] = json_decode($aPointDetails['asgeojson'], true);
- }
-
- if (isset($aPointDetails['assvg'])) {
- $aPlace['svg'] = $aPointDetails['assvg'];
- }
-
- if (isset($aPointDetails['astext'])) {
- $aPlace['geotext'] = $aPointDetails['astext'];
- }
-
- if (isset($aPointDetails['askml'])) {
- $aPlace['geokml'] = $aPointDetails['askml'];
- }
-
- if (isset($aPointDetails['sExtraTags'])) {
- $aPlace['extratags'] = $aPointDetails['sExtraTags'];
- }
- if (isset($aPointDetails['sNameDetails'])) {
- $aPlace['namedetails'] = $aPointDetails['sNameDetails'];
- }
-
- $aFilteredPlaces[] = $aPlace;
-}
-
-javascript_renderData($aFilteredPlaces);
+++ /dev/null
-<?php
-/**
- * SPDX-License-Identifier: GPL-2.0-only
- *
- * This file is part of Nominatim. (https://nominatim.org)
- *
- * Copyright (C) 2022 by the Nominatim developer community.
- * For a full list of authors see the git log.
- */
-header('content-type: text/xml; charset=UTF-8');
-
-echo '<';
-echo '?xml version="1.0" encoding="UTF-8" ?';
-echo ">\n";
-
-echo '<';
-echo (isset($sXmlRootTag)?$sXmlRootTag:'searchresults');
-echo " timestamp='".date(DATE_RFC822)."'";
-echo " attribution='Data © OpenStreetMap contributors, ODbL 1.0. http://www.openstreetmap.org/copyright'";
-echo " querystring='".htmlspecialchars($sQuery, ENT_QUOTES)."'";
-if (isset($aMoreParams['viewbox'])) {
- echo " viewbox='".htmlspecialchars($aMoreParams['viewbox'], ENT_QUOTES)."'";
-}
-if (isset($aMoreParams['exclude_place_ids'])) {
- echo " exclude_place_ids='".htmlspecialchars($aMoreParams['exclude_place_ids'])."'";
-}
-echo " more_url='".htmlspecialchars($sMoreURL)."'";
-echo ">\n";
-
-foreach ($aSearchResults as $iResNum => $aResult) {
- echo "<place place_id='".$aResult['place_id']."'";
- $sOSMType = formatOSMType($aResult['osm_type']);
- if ($sOSMType) {
- echo " osm_type='$sOSMType'";
- echo " osm_id='".$aResult['osm_id']."'";
- }
- echo " place_rank='".$aResult['rank_search']."'";
- echo " address_rank='".$aResult['rank_address']."'";
-
- if (isset($aResult['aBoundingBox'])) {
- echo ' boundingbox="';
- echo join(',', $aResult['aBoundingBox']);
- echo '"';
- }
-
- if (isset($aResult['asgeojson'])) {
- echo ' geojson=\'';
- echo $aResult['asgeojson'];
- echo '\'';
- }
-
- if (isset($aResult['assvg'])) {
- echo ' geosvg=\'';
- echo $aResult['assvg'];
- echo '\'';
- }
-
- if (isset($aResult['astext'])) {
- echo ' geotext=\'';
- echo $aResult['astext'];
- echo '\'';
- }
-
- if (isset($aResult['zoom'])) {
- echo " zoom='".$aResult['zoom']."'";
- }
-
- echo " lat='".$aResult['lat']."'";
- echo " lon='".$aResult['lon']."'";
- echo " display_name='".htmlspecialchars($aResult['name'], ENT_QUOTES)."'";
-
- echo " class='".htmlspecialchars($aResult['class'])."'";
- echo " type='".htmlspecialchars($aResult['type'], ENT_QUOTES)."'";
- echo " importance='".htmlspecialchars($aResult['importance'])."'";
- if (isset($aResult['icon']) && $aResult['icon']) {
- echo " icon='".htmlspecialchars($aResult['icon'], ENT_QUOTES)."'";
- }
-
- $bHasDelim = false;
-
- if (isset($aResult['askml'])) {
- if (!$bHasDelim) {
- $bHasDelim = true;
- echo '>';
- }
- echo "\n<geokml>";
- echo $aResult['askml'];
- echo '</geokml>';
- }
-
- if (isset($aResult['sExtraTags'])) {
- if (!$bHasDelim) {
- $bHasDelim = true;
- echo '>';
- }
- echo "\n<extratags>";
- foreach ($aResult['sExtraTags'] as $sKey => $sValue) {
- echo '<tag key="'.htmlspecialchars($sKey).'" value="'.htmlspecialchars($sValue).'"/>';
- }
- echo '</extratags>';
- }
-
- if (isset($aResult['sNameDetails'])) {
- if (!$bHasDelim) {
- $bHasDelim = true;
- echo '>';
- }
- echo "\n<namedetails>";
- foreach ($aResult['sNameDetails'] as $sKey => $sValue) {
- echo '<name desc="'.htmlspecialchars($sKey).'">';
- echo htmlspecialchars($sValue);
- echo '</name>';
- }
- echo '</namedetails>';
- }
-
- if (isset($aResult['address'])) {
- if (!$bHasDelim) {
- $bHasDelim = true;
- echo '>';
- }
- echo "\n";
- foreach ($aResult['address']->getAddressNames() as $sKey => $sValue) {
- $sKey = str_replace(' ', '_', $sKey);
- echo "<$sKey>";
- echo htmlspecialchars($sValue);
- echo "</$sKey>";
- }
- }
-
- if ($bHasDelim) {
- echo '</place>';
- } else {
- echo '/>';
- }
-}
-
-echo '</' . (isset($sXmlRootTag)?$sXmlRootTag:'searchresults') . '>';
+++ /dev/null
-<?php
-/**
- * SPDX-License-Identifier: GPL-2.0-only
- *
- * This file is part of Nominatim. (https://nominatim.org)
- *
- * Copyright (C) 2022 by the Nominatim developer community.
- * For a full list of authors see the git log.
- */
-
-namespace Nominatim;
-
-require_once(CONST_LibDir.'/SimpleWordList.php');
-
-class Tokenizer
-{
- private $oDB;
-
- private $oNormalizer;
- private $oTransliterator;
-
- public function __construct(&$oDB)
- {
- $this->oDB =& $oDB;
- $this->oNormalizer = \Transliterator::createFromRules(CONST_Term_Normalization_Rules);
- $this->oTransliterator = \Transliterator::createFromRules(CONST_Transliteration);
- }
-
- public function checkStatus()
- {
- $sSQL = 'SELECT word_id FROM word WHERE word_id is not null limit 1';
- $iWordID = $this->oDB->getOne($sSQL);
- if ($iWordID === false) {
- throw new \Exception('Query failed', 703);
- }
- if (!$iWordID) {
- throw new \Exception('No value', 704);
- }
- }
-
-
- public function normalizeString($sTerm)
- {
- if ($this->oNormalizer === null) {
- return $sTerm;
- }
-
- return $this->oNormalizer->transliterate($sTerm);
- }
-
-
- public function mostFrequentWords($iNum)
- {
- $sSQL = "SELECT word FROM word WHERE type = 'W'";
- $sSQL .= "ORDER BY info->'count' DESC LIMIT ".$iNum;
- return $this->oDB->getCol($sSQL);
- }
-
-
- private function makeStandardWord($sTerm)
- {
- return trim($this->oTransliterator->transliterate(' '.$sTerm.' '));
- }
-
-
- public function tokensForSpecialTerm($sTerm)
- {
- $aResults = array();
-
- $sSQL = "SELECT word_id, info->>'class' as class, info->>'type' as type ";
- $sSQL .= ' FROM word WHERE word_token = :term and type = \'S\'';
-
- Debug::printVar('Term', $sTerm);
- Debug::printSQL($sSQL);
- $aSearchWords = $this->oDB->getAll($sSQL, array(':term' => $this->makeStandardWord($sTerm)));
-
- Debug::printVar('Results', $aSearchWords);
-
- foreach ($aSearchWords as $aSearchTerm) {
- $aResults[] = new \Nominatim\Token\SpecialTerm(
- $aSearchTerm['word_id'],
- $aSearchTerm['class'],
- $aSearchTerm['type'],
- \Nominatim\Operator::TYPE
- );
- }
-
- Debug::printVar('Special term tokens', $aResults);
-
- return $aResults;
- }
-
-
- public function extractTokensFromPhrases(&$aPhrases)
- {
- $sNormQuery = '';
- $aWordLists = array();
- $aTokens = array();
- foreach ($aPhrases as $iPhrase => $oPhrase) {
- $sNormQuery .= ','.$this->normalizeString($oPhrase->getPhrase());
- $sPhrase = $this->makeStandardWord($oPhrase->getPhrase());
- Debug::printVar('Phrase', $sPhrase);
-
- $oWordList = new SimpleWordList($sPhrase);
- $aTokens = array_merge($aTokens, $oWordList->getTokens());
- $aWordLists[] = $oWordList;
- }
-
- Debug::printVar('Tokens', $aTokens);
- Debug::printVar('WordLists', $aWordLists);
-
- $oValidTokens = $this->computeValidTokens($aTokens, $sNormQuery);
-
- foreach ($aPhrases as $iPhrase => $oPhrase) {
- $oPhrase->setWordSets($aWordLists[$iPhrase]->getWordSets($oValidTokens));
- }
-
- return $oValidTokens;
- }
-
-
- private function computeValidTokens($aTokens, $sNormQuery)
- {
- $oValidTokens = new TokenList();
-
- if (!empty($aTokens)) {
- $this->addTokensFromDB($oValidTokens, $aTokens, $sNormQuery);
-
- // Try more interpretations for Tokens that could not be matched.
- foreach ($aTokens as $sToken) {
- if ($sToken[0] != ' ' && !$oValidTokens->contains($sToken)) {
- if (preg_match('/^([0-9]{5}) [0-9]{4}$/', $sToken, $aData)) {
- // US ZIP+4 codes - merge in the 5-digit ZIP code
- $oValidTokens->addToken(
- $sToken,
- new Token\Postcode(null, $aData[1], 'us')
- );
- } elseif (preg_match('/^[0-9]+$/', $sToken)) {
- // Unknown single word token with a number.
- // Assume it is a house number.
- $oValidTokens->addToken(
- $sToken,
- new Token\HouseNumber(null, trim($sToken))
- );
- }
- }
- }
- }
-
- return $oValidTokens;
- }
-
-
- private function addTokensFromDB(&$oValidTokens, $aTokens, $sNormQuery)
- {
- // Check which tokens we have, get the ID numbers
- $sSQL = 'SELECT word_id, word_token, type, word,';
- $sSQL .= " info->>'op' as operator,";
- $sSQL .= " info->>'class' as class, info->>'type' as ctype,";
- $sSQL .= " info->>'count' as count,";
- $sSQL .= " info->>'lookup' as lookup";
- $sSQL .= ' FROM word WHERE word_token in (';
- $sSQL .= join(',', $this->oDB->getDBQuotedList($aTokens)).')';
-
- Debug::printSQL($sSQL);
-
- $aDBWords = $this->oDB->getAll($sSQL, null, 'Could not get word tokens.');
-
- foreach ($aDBWords as $aWord) {
- $iId = (int) $aWord['word_id'];
- $sTok = $aWord['word_token'];
-
- switch ($aWord['type']) {
- case 'C': // country name tokens
- if ($aWord['word'] !== null) {
- $oValidTokens->addToken(
- $sTok,
- new Token\Country($iId, $aWord['word'])
- );
- }
- break;
- case 'H': // house number tokens
- $sLookup = $aWord['lookup'] ?? $aWord['word_token'];
- $oValidTokens->addToken($sTok, new Token\HouseNumber($iId, $sLookup));
- break;
- case 'P': // postcode tokens
- // Postcodes are not normalized, so they may have content
- // that makes SQL injection possible. Reject postcodes
- // that would need special escaping.
- if ($aWord['word'] !== null
- && pg_escape_string($aWord['word']) == $aWord['word']
- ) {
- $iSplitPos = strpos($aWord['word'], '@');
- if ($iSplitPos === false) {
- $sPostcode = $aWord['word'];
- } else {
- $sPostcode = substr($aWord['word'], 0, $iSplitPos);
- }
-
- $oValidTokens->addToken(
- $sTok,
- new Token\Postcode($iId, $sPostcode, null)
- );
- }
- break;
- case 'S': // tokens for classification terms (special phrases)
- if ($aWord['class'] !== null && $aWord['ctype'] !== null) {
- $oValidTokens->addToken($sTok, new Token\SpecialTerm(
- $iId,
- $aWord['class'],
- $aWord['ctype'],
- (isset($aWord['operator'])) ? Operator::NEAR : Operator::NONE
- ));
- }
- break;
- case 'W': // full-word tokens
- $oValidTokens->addToken($sTok, new Token\Word(
- $iId,
- (int) $aWord['count'],
- substr_count($aWord['word_token'], ' ')
- ));
- break;
- case 'w': // partial word terms
- $oValidTokens->addToken($sTok, new Token\Partial(
- $iId,
- $aWord['word_token'],
- (int) $aWord['count']
- ));
- break;
- default:
- break;
- }
- }
- }
-}
+++ /dev/null
-<?php
-/**
- * SPDX-License-Identifier: GPL-2.0-only
- *
- * This file is part of Nominatim. (https://nominatim.org)
- *
- * Copyright (C) 2022 by the Nominatim developer community.
- * For a full list of authors see the git log.
- */
-
-namespace Nominatim;
-
-require_once(CONST_LibDir.'/SimpleWordList.php');
-
-class Tokenizer
-{
- private $oDB;
-
- private $oNormalizer = null;
-
- public function __construct(&$oDB)
- {
- $this->oDB =& $oDB;
- $this->oNormalizer = \Transliterator::createFromRules(CONST_Term_Normalization_Rules);
- }
-
- public function checkStatus()
- {
- $sStandardWord = $this->oDB->getOne("SELECT make_standard_name('a')");
- if ($sStandardWord === false) {
- throw new \Exception('Module failed', 701);
- }
-
- if ($sStandardWord != 'a') {
- throw new \Exception('Module call failed', 702);
- }
-
- $sSQL = "SELECT word_id FROM word WHERE word_token IN (' a')";
- $iWordID = $this->oDB->getOne($sSQL);
- if ($iWordID === false) {
- throw new \Exception('Query failed', 703);
- }
- if (!$iWordID) {
- throw new \Exception('No value', 704);
- }
- }
-
-
- public function normalizeString($sTerm)
- {
- if ($this->oNormalizer === null) {
- return $sTerm;
- }
-
- return $this->oNormalizer->transliterate($sTerm);
- }
-
-
- public function mostFrequentWords($iNum)
- {
- $sSQL = 'SELECT word FROM word WHERE word is not null ';
- $sSQL .= 'ORDER BY search_name_count DESC LIMIT '.$iNum;
- return $this->oDB->getCol($sSQL);
- }
-
-
- public function tokensForSpecialTerm($sTerm)
- {
- $aResults = array();
-
- $sSQL = 'SELECT word_id, class, type FROM word ';
- $sSQL .= ' WHERE word_token = \' \' || make_standard_name(:term)';
- $sSQL .= ' AND class is not null AND class not in (\'place\')';
-
- Debug::printVar('Term', $sTerm);
- Debug::printSQL($sSQL);
- $aSearchWords = $this->oDB->getAll($sSQL, array(':term' => $sTerm));
-
- Debug::printVar('Results', $aSearchWords);
-
- foreach ($aSearchWords as $aSearchTerm) {
- $aResults[] = new \Nominatim\Token\SpecialTerm(
- $aSearchTerm['word_id'],
- $aSearchTerm['class'],
- $aSearchTerm['type'],
- \Nominatim\Operator::TYPE
- );
- }
-
- Debug::printVar('Special term tokens', $aResults);
-
- return $aResults;
- }
-
-
- public function extractTokensFromPhrases(&$aPhrases)
- {
- // First get the normalized version of all phrases
- $sNormQuery = '';
- $sSQL = 'SELECT ';
- $aParams = array();
- foreach ($aPhrases as $iPhrase => $oPhrase) {
- $sNormQuery .= ','.$this->normalizeString($oPhrase->getPhrase());
- $sSQL .= 'make_standard_name(:' .$iPhrase.') as p'.$iPhrase.',';
- $aParams[':'.$iPhrase] = $oPhrase->getPhrase();
-
- // Conflicts between US state abbreviations and various words
- // for 'the' in different languages
- switch (strtolower($oPhrase->getPhrase())) {
- case 'il':
- $aParams[':'.$iPhrase] = 'illinois';
- break;
- case 'al':
- $aParams[':'.$iPhrase] = 'alabama';
- break;
- case 'la':
- $aParams[':'.$iPhrase] = 'louisiana';
- break;
- default:
- $aParams[':'.$iPhrase] = $oPhrase->getPhrase();
- break;
- }
- }
- $sSQL = substr($sSQL, 0, -1);
-
- Debug::printSQL($sSQL);
- Debug::printVar('SQL parameters', $aParams);
-
- $aNormPhrases = $this->oDB->getRow($sSQL, $aParams);
-
- Debug::printVar('SQL result', $aNormPhrases);
-
- // now compute all possible tokens
- $aWordLists = array();
- $aTokens = array();
- foreach ($aNormPhrases as $sPhrase) {
- $oWordList = new SimpleWordList($sPhrase);
-
- foreach ($oWordList->getTokens() as $sToken) {
- $aTokens[' '.$sToken] = ' '.$sToken;
- $aTokens[$sToken] = $sToken;
- }
-
- $aWordLists[] = $oWordList;
- }
-
- Debug::printVar('Tokens', $aTokens);
- Debug::printVar('WordLists', $aWordLists);
-
- $oValidTokens = $this->computeValidTokens($aTokens, $sNormQuery);
-
- foreach ($aPhrases as $iPhrase => $oPhrase) {
- $oPhrase->setWordSets($aWordLists[$iPhrase]->getWordSets($oValidTokens));
- }
-
- return $oValidTokens;
- }
-
-
- private function computeValidTokens($aTokens, $sNormQuery)
- {
- $oValidTokens = new TokenList();
-
- if (!empty($aTokens)) {
- $this->addTokensFromDB($oValidTokens, $aTokens, $sNormQuery);
-
- // Try more interpretations for Tokens that could not be matched.
- foreach ($aTokens as $sToken) {
- if ($sToken[0] != ' ' && !$oValidTokens->contains($sToken)) {
- if (preg_match('/^([0-9]{5}) [0-9]{4}$/', $sToken, $aData)) {
- // US ZIP+4 codes - merge in the 5-digit ZIP code
- $oValidTokens->addToken(
- $sToken,
- new Token\Postcode(null, $aData[1], 'us')
- );
- } elseif (preg_match('/^[0-9]+$/', $sToken)) {
- // Unknown single word token with a number.
- // Assume it is a house number.
- $oValidTokens->addToken(
- $sToken,
- new Token\HouseNumber(null, trim($sToken))
- );
- }
- }
- }
- }
-
- return $oValidTokens;
- }
-
-
- private function addTokensFromDB(&$oValidTokens, $aTokens, $sNormQuery)
- {
- // Check which tokens we have, get the ID numbers
- $sSQL = 'SELECT word_id, word_token, word, class, type, country_code,';
- $sSQL .= ' operator, coalesce(search_name_count, 0) as count';
- $sSQL .= ' FROM word WHERE word_token in (';
- $sSQL .= join(',', $this->oDB->getDBQuotedList($aTokens)).')';
-
- Debug::printSQL($sSQL);
-
- $aDBWords = $this->oDB->getAll($sSQL, null, 'Could not get word tokens.');
-
- foreach ($aDBWords as $aWord) {
- $oToken = null;
- $iId = (int) $aWord['word_id'];
-
- if ($aWord['class']) {
- // Special terms need to appear in their normalized form.
- // (postcodes are not normalized in the word table)
- $sNormWord = $this->normalizeString($aWord['word']);
- if ($aWord['word'] && strpos($sNormQuery, $sNormWord) === false) {
- continue;
- }
-
- if ($aWord['class'] == 'place' && $aWord['type'] == 'house') {
- $oToken = new Token\HouseNumber($iId, trim($aWord['word_token']));
- } elseif ($aWord['class'] == 'place' && $aWord['type'] == 'postcode') {
- if ($aWord['word']
- && pg_escape_string($aWord['word']) == $aWord['word']
- ) {
- $oToken = new Token\Postcode(
- $iId,
- $aWord['word'],
- $aWord['country_code']
- );
- }
- } else {
- // near and in operator the same at the moment
- $oToken = new Token\SpecialTerm(
- $iId,
- $aWord['class'],
- $aWord['type'],
- $aWord['operator'] ? Operator::NEAR : Operator::NONE
- );
- }
- } elseif ($aWord['country_code']) {
- $oToken = new Token\Country($iId, $aWord['country_code']);
- } elseif ($aWord['word_token'][0] == ' ') {
- $oToken = new Token\Word(
- $iId,
- (int) $aWord['count'],
- substr_count($aWord['word_token'], ' ')
- );
- // For backward compatibility: ignore all partial tokens with more
- // than one word.
- } elseif (strpos($aWord['word_token'], ' ') === false) {
- $oToken = new Token\Partial(
- $iId,
- $aWord['word_token'],
- (int) $aWord['count']
- );
- }
-
- if ($oToken) {
- // remove any leading spaces
- if ($aWord['word_token'][0] == ' ') {
- $oValidTokens->addToken(substr($aWord['word_token'], 1), $oToken);
- } else {
- $oValidTokens->addToken($aWord['word_token'], $oToken);
- }
- }
- }
- }
-}
+++ /dev/null
-<?php
-/**
- * SPDX-License-Identifier: GPL-2.0-only
- *
- * This file is part of Nominatim. (https://nominatim.org)
- *
- * Copyright (C) 2022 by the Nominatim developer community.
- * For a full list of authors see the git log.
- */
-
-require_once(CONST_LibDir.'/init-website.php');
-require_once(CONST_LibDir.'/log.php');
-require_once(CONST_LibDir.'/output.php');
-ini_set('memory_limit', '200M');
-
-$oParams = new Nominatim\ParameterParser();
-$sOutputFormat = $oParams->getSet('format', array('json'), 'json');
-set_exception_handler_by_format($sOutputFormat);
-
-$oDB = new Nominatim\DB(CONST_Database_DSN);
-$oDB->connect();
-
-$sSQL = 'select placex.place_id, country_code,';
-$sSQL .= " name->'name' as name, i.* from placex, import_polygon_delete i";
-$sSQL .= ' where placex.osm_id = i.osm_id and placex.osm_type = i.osm_type';
-$sSQL .= ' and placex.class = i.class and placex.type = i.type';
-$aPolygons = $oDB->getAll($sSQL, null, 'Could not get list of deleted OSM elements.');
-
-if (CONST_Debug) {
- var_dump($aPolygons);
- exit;
-}
-
-if ($sOutputFormat == 'json') {
- javascript_renderData($aPolygons);
-}
+++ /dev/null
-<?php
-/**
- * SPDX-License-Identifier: GPL-2.0-only
- *
- * This file is part of Nominatim. (https://nominatim.org)
- *
- * Copyright (C) 2022 by the Nominatim developer community.
- * For a full list of authors see the git log.
- */
-
-require_once(CONST_LibDir.'/init-website.php');
-require_once(CONST_LibDir.'/log.php');
-require_once(CONST_LibDir.'/output.php');
-require_once(CONST_LibDir.'/AddressDetails.php');
-ini_set('memory_limit', '200M');
-
-$oParams = new Nominatim\ParameterParser();
-
-$sOutputFormat = $oParams->getSet('format', array('json'), 'json');
-set_exception_handler_by_format($sOutputFormat);
-
-$aLangPrefOrder = $oParams->getPreferredLanguages();
-
-$sPlaceId = $oParams->getString('place_id');
-$sOsmType = $oParams->getSet('osmtype', array('N', 'W', 'R'));
-$iOsmId = $oParams->getInt('osmid', -1);
-$sClass = $oParams->getString('class');
-
-$bIncludeKeywords = $oParams->getBool('keywords', false);
-$bIncludeAddressDetails = $oParams->getBool('addressdetails', false);
-$bIncludeLinkedPlaces = $oParams->getBool('linkedplaces', true);
-$bIncludeHierarchy = $oParams->getBool('hierarchy', false);
-$bGroupHierarchy = $oParams->getBool('group_hierarchy', false);
-$bIncludePolygonAsGeoJSON = $oParams->getBool('polygon_geojson', false);
-
-$oDB = new Nominatim\DB(CONST_Database_DSN);
-$oDB->connect();
-
-$sLanguagePrefArraySQL = $oDB->getArraySQL($oDB->getDBQuotedList($aLangPrefOrder));
-
-if ($sOsmType && $iOsmId > 0) {
- $sSQL = 'SELECT place_id FROM placex WHERE osm_type = :type AND osm_id = :id';
- $aSQLParams = array(':type' => $sOsmType, ':id' => $iOsmId);
- // osm_type and osm_id are not unique enough
- if ($sClass) {
- $sSQL .= ' AND class= :class';
- $aSQLParams[':class'] = $sClass;
- }
- $sSQL .= ' ORDER BY class ASC';
- $sPlaceId = $oDB->getOne($sSQL, $aSQLParams);
-
-
- // Nothing? Maybe it's an interpolation.
- // XXX Simply returns the first parent street it finds. It should
- // get a house number and get the right interpolation.
- if (!$sPlaceId && $sOsmType == 'W' && (!$sClass || $sClass == 'place')) {
- $sSQL = 'SELECT place_id FROM location_property_osmline'
- .' WHERE osm_id = :id LIMIT 1';
- $sPlaceId = $oDB->getOne($sSQL, array(':id' => $iOsmId));
- }
-
- // Be nice about our error messages for broken geometry
-
- if (!$sPlaceId && $oDB->tableExists('import_polygon_error')) {
- $sSQL = 'SELECT ';
- $sSQL .= ' osm_type, ';
- $sSQL .= ' osm_id, ';
- $sSQL .= ' errormessage, ';
- $sSQL .= ' class, ';
- $sSQL .= ' type, ';
- $sSQL .= " get_name_by_language(name,$sLanguagePrefArraySQL) AS localname,";
- $sSQL .= ' ST_AsText(prevgeometry) AS prevgeom, ';
- $sSQL .= ' ST_AsText(newgeometry) AS newgeom';
- $sSQL .= ' FROM import_polygon_error ';
- $sSQL .= ' WHERE osm_type = :type';
- $sSQL .= ' AND osm_id = :id';
- $sSQL .= ' ORDER BY updated DESC';
- $sSQL .= ' LIMIT 1';
- $aPointDetails = $oDB->getRow($sSQL, array(':type' => $sOsmType, ':id' => $iOsmId));
- if ($aPointDetails) {
- if (preg_match('/\[(-?\d+\.\d+) (-?\d+\.\d+)\]/', $aPointDetails['errormessage'], $aMatches)) {
- $aPointDetails['error_x'] = $aMatches[1];
- $aPointDetails['error_y'] = $aMatches[2];
- } else {
- $aPointDetails['error_x'] = 0;
- $aPointDetails['error_y'] = 0;
- }
- include(CONST_LibDir.'/template/details-error-'.$sOutputFormat.'.php');
- exit;
- }
- }
-
- if ($sPlaceId === false) {
- throw new \Exception('No place with that OSM ID found.', 404);
- }
-} else {
- if ($sPlaceId === false) {
- userError('Required parameters missing. Need either osmtype/osmid or place_id.');
- }
-}
-
-$iPlaceID = (int)$sPlaceId;
-
-if (CONST_Use_US_Tiger_Data) {
- $iParentPlaceID = $oDB->getOne('SELECT parent_place_id FROM location_property_tiger WHERE place_id = '.$iPlaceID);
- if ($iParentPlaceID) {
- $iPlaceID = $iParentPlaceID;
- }
-}
-
-// interpolated house numbers
-$iParentPlaceID = $oDB->getOne('SELECT parent_place_id FROM location_property_osmline WHERE place_id = '.$iPlaceID);
-if ($iParentPlaceID) {
- $iPlaceID = $iParentPlaceID;
-}
-
-// artificial postcodes
-$iParentPlaceID = $oDB->getOne('SELECT parent_place_id FROM location_postcode WHERE place_id = '.$iPlaceID);
-if ($iParentPlaceID) {
- $iPlaceID = $iParentPlaceID;
-}
-
-$hLog = logStart($oDB, 'details', $_SERVER['QUERY_STRING'], $aLangPrefOrder);
-
-// Get the details for this point
-$sSQL = 'SELECT place_id, osm_type, osm_id, class, type, name, admin_level,';
-$sSQL .= ' housenumber, postcode, country_code,';
-$sSQL .= ' importance, wikipedia,';
-$sSQL .= ' ROUND(EXTRACT(epoch FROM indexed_date)) AS indexed_epoch,';
-$sSQL .= ' parent_place_id, ';
-$sSQL .= ' rank_address, ';
-$sSQL .= ' rank_search, ';
-$sSQL .= " get_name_by_language(name,$sLanguagePrefArraySQL) AS localname, ";
-$sSQL .= " ST_GeometryType(geometry) in ('ST_Polygon','ST_MultiPolygon') AS isarea, ";
-$sSQL .= ' ST_y(centroid) AS lat, ';
-$sSQL .= ' ST_x(centroid) AS lon, ';
-$sSQL .= ' CASE ';
-$sSQL .= ' WHEN importance = 0 OR importance IS NULL ';
-$sSQL .= ' THEN 0.75-(rank_search::float/40) ';
-$sSQL .= ' ELSE importance ';
-$sSQL .= ' END as calculated_importance, ';
-if ($bIncludePolygonAsGeoJSON) {
- $sSQL .= ' ST_AsGeoJSON(CASE ';
- $sSQL .= ' WHEN ST_NPoints(geometry) > 5000 ';
- $sSQL .= ' THEN ST_SimplifyPreserveTopology(geometry, 0.0001) ';
- $sSQL .= ' ELSE geometry ';
- $sSQL .= ' END) as asgeojson';
-} else {
- $sSQL .= ' ST_AsGeoJSON(centroid) as asgeojson';
-}
-$sSQL .= ' FROM placex ';
-$sSQL .= " WHERE place_id = $iPlaceID";
-
-$aPointDetails = $oDB->getRow($sSQL, null, 'Could not get details of place object.');
-
-if (!$aPointDetails) {
- throw new \Exception('No place with that place ID found.', 404);
-}
-
-$aPointDetails['localname'] = $aPointDetails['localname']?$aPointDetails['localname']:$aPointDetails['housenumber'];
-
-// Get all alternative names (languages, etc)
-$sSQL = 'SELECT (each(name)).key,(each(name)).value FROM placex ';
-$sSQL .= "WHERE place_id = $iPlaceID ORDER BY (each(name)).key";
-$aPointDetails['aNames'] = $oDB->getAssoc($sSQL);
-
-// Address tags
-$sSQL = 'SELECT (each(address)).key as key,(each(address)).value FROM placex ';
-$sSQL .= "WHERE place_id = $iPlaceID ORDER BY key";
-$aPointDetails['aAddressTags'] = $oDB->getAssoc($sSQL);
-
-// Extra tags
-$sSQL = 'SELECT (each(extratags)).key,(each(extratags)).value FROM placex ';
-$sSQL .= "WHERE place_id = $iPlaceID ORDER BY (each(extratags)).key";
-$aPointDetails['aExtraTags'] = $oDB->getAssoc($sSQL);
-
-// Address
-$aAddressLines = false;
-if ($bIncludeAddressDetails) {
- $oDetails = new Nominatim\AddressDetails($oDB, $iPlaceID, -1, $sLanguagePrefArraySQL);
- $aAddressLines = $oDetails->getAddressDetails(true);
-}
-
-// Linked places
-$aLinkedLines = false;
-if ($bIncludeLinkedPlaces) {
- $sSQL = 'SELECT placex.place_id, osm_type, osm_id, class, type, housenumber,';
- $sSQL .= ' admin_level, rank_address, ';
- $sSQL .= " ST_GeometryType(geometry) in ('ST_Polygon','ST_MultiPolygon') AS isarea,";
- $sSQL .= " ST_DistanceSpheroid(geometry, placegeometry, 'SPHEROID[\"WGS 84\",6378137,298.257223563, AUTHORITY[\"EPSG\",\"7030\"]]') AS distance, ";
- $sSQL .= " get_name_by_language(name,$sLanguagePrefArraySQL) AS localname, ";
- $sSQL .= ' length(name::text) AS namelength ';
- $sSQL .= ' FROM ';
- $sSQL .= ' placex, ';
- $sSQL .= ' ( ';
- $sSQL .= ' SELECT centroid AS placegeometry ';
- $sSQL .= ' FROM placex ';
- $sSQL .= " WHERE place_id = $iPlaceID ";
- $sSQL .= ' ) AS x';
- $sSQL .= " WHERE linked_place_id = $iPlaceID";
- $sSQL .= ' ORDER BY ';
- $sSQL .= ' rank_address ASC, ';
- $sSQL .= ' rank_search ASC, ';
- $sSQL .= " get_name_by_language(name, $sLanguagePrefArraySQL), ";
- $sSQL .= ' housenumber';
- $aLinkedLines = $oDB->getAll($sSQL);
-}
-
-// All places this is an immediate parent of
-$aHierarchyLines = false;
-if ($bIncludeHierarchy) {
- $sSQL = 'SELECT obj.place_id, osm_type, osm_id, class, type, housenumber,';
- $sSQL .= " admin_level, rank_address, ST_GeometryType(geometry) in ('ST_Polygon','ST_MultiPolygon') AS isarea,";
- $sSQL .= " ST_DistanceSpheroid(geometry, placegeometry, 'SPHEROID[\"WGS 84\",6378137,298.257223563, AUTHORITY[\"EPSG\",\"7030\"]]') AS distance, ";
- $sSQL .= " get_name_by_language(name,$sLanguagePrefArraySQL) AS localname, ";
- $sSQL .= ' length(name::text) AS namelength ';
- $sSQL .= ' FROM ';
- $sSQL .= ' ( ';
- $sSQL .= ' SELECT placex.place_id, osm_type, osm_id, class, type, housenumber, admin_level, rank_address, rank_search, geometry, name ';
- $sSQL .= ' FROM placex ';
- $sSQL .= " WHERE parent_place_id = $iPlaceID ";
- $sSQL .= ' ORDER BY ';
- $sSQL .= ' rank_address ASC, ';
- $sSQL .= ' rank_search ASC ';
- $sSQL .= ' LIMIT 500 ';
- $sSQL .= ' ) AS obj,';
- $sSQL .= ' ( ';
- $sSQL .= ' SELECT centroid AS placegeometry ';
- $sSQL .= ' FROM placex ';
- $sSQL .= " WHERE place_id = $iPlaceID ";
- $sSQL .= ' ) AS x';
- $sSQL .= ' ORDER BY ';
- $sSQL .= ' rank_address ASC, ';
- $sSQL .= ' rank_search ASC, ';
- $sSQL .= ' localname, ';
- $sSQL .= ' housenumber';
- $aHierarchyLines = $oDB->getAll($sSQL);
-}
-
-$aPlaceSearchNameKeywords = false;
-$aPlaceSearchAddressKeywords = false;
-if ($bIncludeKeywords) {
- $sSQL = "SELECT * FROM search_name WHERE place_id = $iPlaceID";
- $aPlaceSearchName = $oDB->getRow($sSQL);
-
- if (!empty($aPlaceSearchName)) {
- $sWordIds = substr($aPlaceSearchName['name_vector'], 1, -1);
- if (!empty($sWordIds)) {
- $sSQL = 'SELECT * FROM word WHERE word_id in ('.$sWordIds.')';
- $aPlaceSearchNameKeywords = $oDB->getAll($sSQL);
- }
-
- $sWordIds = substr($aPlaceSearchName['nameaddress_vector'], 1, -1);
- if (!empty($sWordIds)) {
- $sSQL = 'SELECT * FROM word WHERE word_id in ('.$sWordIds.')';
- $aPlaceSearchAddressKeywords = $oDB->getAll($sSQL);
- }
- }
-}
-
-logEnd($oDB, $hLog, 1);
-
-include(CONST_LibDir.'/template/details-'.$sOutputFormat.'.php');
+++ /dev/null
-<?php
-/**
- * SPDX-License-Identifier: GPL-2.0-only
- *
- * This file is part of Nominatim. (https://nominatim.org)
- *
- * Copyright (C) 2022 by the Nominatim developer community.
- * For a full list of authors see the git log.
- */
-
-require_once(CONST_LibDir.'/init-website.php');
-require_once(CONST_LibDir.'/log.php');
-require_once(CONST_LibDir.'/PlaceLookup.php');
-require_once(CONST_LibDir.'/output.php');
-ini_set('memory_limit', '200M');
-
-$oParams = new Nominatim\ParameterParser();
-
-// Format for output
-$sOutputFormat = $oParams->getSet('format', array('xml', 'json', 'jsonv2', 'geojson', 'geocodejson'), 'xml');
-set_exception_handler_by_format($sOutputFormat);
-
-// Preferred language
-$aLangPrefOrder = $oParams->getPreferredLanguages();
-
-$oDB = new Nominatim\DB(CONST_Database_DSN);
-$oDB->connect();
-
-$hLog = logStart($oDB, 'place', $_SERVER['QUERY_STRING'], $aLangPrefOrder);
-
-$aSearchResults = array();
-$aCleanedQueryParts = array();
-
-$oPlaceLookup = new Nominatim\PlaceLookup($oDB);
-$oPlaceLookup->loadParamArray($oParams);
-$oPlaceLookup->setIncludeAddressDetails($oParams->getBool('addressdetails', true));
-
-$aOsmIds = explode(',', $oParams->getString('osm_ids', ''));
-
-if (count($aOsmIds) > CONST_Places_Max_ID_count) {
- userError('Bulk User: Only ' . CONST_Places_Max_ID_count . ' ids are allowed in one request.');
-}
-
-foreach ($aOsmIds as $sItem) {
- // Skip empty sItem
- if (empty($sItem)) {
- continue;
- }
-
- $sType = $sItem[0];
- $iId = (int) substr($sItem, 1);
- if ($iId > 0 && ($sType == 'N' || $sType == 'W' || $sType == 'R')) {
- $aCleanedQueryParts[] = $sType . $iId;
- $oPlace = $oPlaceLookup->lookupOSMID($sType, $iId);
- if ($oPlace) {
- // we want to use the search-* output templates, so we need to fill
- // $aSearchResults and slightly change the (reverse search) oPlace
- // key names
- $oResult = $oPlace;
- unset($oResult['aAddress']);
- if (isset($oPlace['aAddress'])) {
- $oResult['address'] = $oPlace['aAddress'];
- }
- if ($sOutputFormat != 'geocodejson') {
- unset($oResult['langaddress']);
- $oResult['name'] = $oPlace['langaddress'];
- }
-
- $aOutlineResult = $oPlaceLookup->getOutlines(
- $oPlace['place_id'],
- $oPlace['lon'],
- $oPlace['lat'],
- Nominatim\ClassTypes\getDefRadius($oPlace)
- );
-
- if ($aOutlineResult) {
- $oResult = array_merge($oResult, $aOutlineResult);
- }
-
- $aSearchResults[] = $oResult;
- }
- }
-}
-
-
-if (CONST_Debug) {
- exit;
-}
-
-$sXmlRootTag = 'lookupresults';
-$sQuery = join(',', $aCleanedQueryParts);
-// we initialize these to avoid warnings in our logfile
-$sViewBox = '';
-$bShowPolygons = '';
-$aExcludePlaceIDs = array();
-$sMoreURL = '';
-
-logEnd($oDB, $hLog, 1);
-
-$sOutputTemplate = ($sOutputFormat == 'jsonv2') ? 'json' : $sOutputFormat;
-include(CONST_LibDir.'/template/search-'.$sOutputTemplate.'.php');
+++ /dev/null
-<?php
-/**
- * SPDX-License-Identifier: GPL-2.0-only
- *
- * This file is part of Nominatim. (https://nominatim.org)
- *
- * Copyright (C) 2022 by the Nominatim developer community.
- * For a full list of authors see the git log.
- */
-
-require_once(CONST_LibDir.'/init-website.php');
-require_once(CONST_LibDir.'/log.php');
-require_once(CONST_LibDir.'/output.php');
-ini_set('memory_limit', '200M');
-
-$oParams = new Nominatim\ParameterParser();
-$sOutputFormat = $oParams->getSet('format', array('json'), 'json');
-set_exception_handler_by_format($sOutputFormat);
-
-$iDays = $oParams->getInt('days', false);
-$bReduced = $oParams->getBool('reduced', false);
-$sClass = $oParams->getString('class', false);
-
-$oDB = new Nominatim\DB(CONST_Database_DSN);
-$oDB->connect();
-
-$iTotalBroken = (int) $oDB->getOne('SELECT count(*) FROM import_polygon_error');
-
-$aPolygons = array();
-while ($iTotalBroken && empty($aPolygons)) {
- $sSQL = 'SELECT osm_type, osm_id, class, type, name->\'name\' as "name",';
- $sSQL .= 'country_code, errormessage, updated';
- $sSQL .= ' FROM import_polygon_error';
-
- $aWhere = array();
- if ($iDays) {
- $aWhere[] = "updated > 'now'::timestamp - '".$iDays." day'::interval";
- $iDays++;
- }
-
- if ($bReduced) {
- $aWhere[] = "errormessage like 'Area reduced%'";
- }
- if ($sClass) {
- $sWhere[] = "class = '".pg_escape_string($sClass)."'";
- }
-
- if (!empty($aWhere)) {
- $sSQL .= ' WHERE '.join(' and ', $aWhere);
- }
-
- $sSQL .= ' ORDER BY updated desc LIMIT 1000';
- $aPolygons = $oDB->getAll($sSQL);
-}
-
-if (CONST_Debug) {
- var_dump($aPolygons);
- exit;
-}
-
-if ($sOutputFormat == 'json') {
- javascript_renderData($aPolygons);
-}
+++ /dev/null
-<?php
-/**
- * SPDX-License-Identifier: GPL-2.0-only
- *
- * This file is part of Nominatim. (https://nominatim.org)
- *
- * Copyright (C) 2022 by the Nominatim developer community.
- * For a full list of authors see the git log.
- */
-
-require_once(CONST_LibDir.'/init-website.php');
-require_once(CONST_LibDir.'/ParameterParser.php');
-
-$oParams = new Nominatim\ParameterParser();
-
-// Format for output
-$sOutputFormat = $oParams->getSet('format', array('xml', 'json', 'jsonv2', 'geojson', 'geocodejson'), 'jsonv2');
-set_exception_handler_by_format($sOutputFormat);
-
-throw new Exception('Reverse-only import does not support forward searching.', 404);
+++ /dev/null
-<?php
-/**
- * SPDX-License-Identifier: GPL-2.0-only
- *
- * This file is part of Nominatim. (https://nominatim.org)
- *
- * Copyright (C) 2022 by the Nominatim developer community.
- * For a full list of authors see the git log.
- */
-
-require_once(CONST_LibDir.'/init-website.php');
-require_once(CONST_LibDir.'/log.php');
-require_once(CONST_LibDir.'/PlaceLookup.php');
-require_once(CONST_LibDir.'/ReverseGeocode.php');
-require_once(CONST_LibDir.'/output.php');
-ini_set('memory_limit', '200M');
-
-$oParams = new Nominatim\ParameterParser();
-
-// Format for output
-$sOutputFormat = $oParams->getSet('format', array('xml', 'json', 'jsonv2', 'geojson', 'geocodejson'), 'xml');
-set_exception_handler_by_format($sOutputFormat);
-
-// Preferred language
-$aLangPrefOrder = $oParams->getPreferredLanguages();
-
-$oDB = new Nominatim\DB(CONST_Database_DSN);
-$oDB->connect();
-
-$hLog = logStart($oDB, 'reverse', $_SERVER['QUERY_STRING'], $aLangPrefOrder);
-
-$oPlaceLookup = new Nominatim\PlaceLookup($oDB);
-$oPlaceLookup->loadParamArray($oParams);
-$oPlaceLookup->setIncludeAddressDetails($oParams->getBool('addressdetails', true));
-
-$sOsmType = $oParams->getSet('osm_type', array('N', 'W', 'R'));
-$iOsmId = $oParams->getInt('osm_id', -1);
-$fLat = $oParams->getFloat('lat');
-$fLon = $oParams->getFloat('lon');
-$iZoom = $oParams->getInt('zoom', 18);
-
-if ($sOsmType && $iOsmId > 0) {
- $aPlace = $oPlaceLookup->lookupOSMID($sOsmType, $iOsmId);
-} elseif ($fLat !== false && $fLon !== false) {
- $oReverseGeocode = new Nominatim\ReverseGeocode($oDB);
- $oReverseGeocode->setZoom($iZoom);
-
- $oLookup = $oReverseGeocode->lookup($fLat, $fLon);
-
- if ($oLookup) {
- $aPlaces = $oPlaceLookup->lookup(array($oLookup->iId => $oLookup));
- if (!empty($aPlaces)) {
- $aPlace = reset($aPlaces);
- }
- }
-} else {
- userError('Need coordinates or OSM object to lookup.');
-}
-
-if (isset($aPlace)) {
- $aOutlineResult = $oPlaceLookup->getOutlines(
- $aPlace['place_id'],
- $aPlace['lon'],
- $aPlace['lat'],
- Nominatim\ClassTypes\getDefRadius($aPlace),
- $fLat,
- $fLon
- );
-
- if ($aOutlineResult) {
- $aPlace = array_merge($aPlace, $aOutlineResult);
- }
-} else {
- $aPlace = array();
-}
-
-logEnd($oDB, $hLog, count($aPlace) ? 1 : 0);
-
-if (CONST_Debug) {
- var_dump($aPlace);
- exit;
-}
-
-if ($sOutputFormat == 'geocodejson') {
- $sQuery = $fLat.','.$fLon;
- if (isset($aPlace['place_id'])) {
- $fDistance = $oDB->getOne(
- 'SELECT ST_Distance(ST_SetSRID(ST_Point(:lon,:lat),4326), centroid) FROM placex where place_id = :placeid',
- array(':lon' => $fLon, ':lat' => $fLat, ':placeid' => $aPlace['place_id'])
- );
- }
-}
-
-$sOutputTemplate = ($sOutputFormat == 'jsonv2') ? 'json' : $sOutputFormat;
-include(CONST_LibDir.'/template/address-'.$sOutputTemplate.'.php');
+++ /dev/null
-<?php
-/**
- * SPDX-License-Identifier: GPL-2.0-only
- *
- * This file is part of Nominatim. (https://nominatim.org)
- *
- * Copyright (C) 2022 by the Nominatim developer community.
- * For a full list of authors see the git log.
- */
-
-require_once(CONST_LibDir.'/init-website.php');
-require_once(CONST_LibDir.'/log.php');
-require_once(CONST_LibDir.'/Geocode.php');
-require_once(CONST_LibDir.'/output.php');
-ini_set('memory_limit', '200M');
-
-$oDB = new Nominatim\DB(CONST_Database_DSN);
-$oDB->connect();
-$oParams = new Nominatim\ParameterParser();
-
-$oGeocode = new Nominatim\Geocode($oDB);
-
-$aLangPrefOrder = $oParams->getPreferredLanguages();
-$oGeocode->setLanguagePreference($aLangPrefOrder);
-
-// Format for output
-$sOutputFormat = $oParams->getSet('format', array('xml', 'json', 'jsonv2', 'geojson', 'geocodejson'), 'jsonv2');
-set_exception_handler_by_format($sOutputFormat);
-
-$oGeocode->loadParamArray($oParams, null);
-
-if (CONST_Search_BatchMode && isset($_GET['batch'])) {
- $aBatch = json_decode($_GET['batch'], true);
- $aBatchResults = array();
- foreach ($aBatch as $aBatchParams) {
- $oBatchGeocode = clone $oGeocode;
- $oBatchParams = new Nominatim\ParameterParser($aBatchParams);
- $oBatchGeocode->loadParamArray($oBatchParams);
- $oBatchGeocode->setQueryFromParams($oBatchParams);
- $aSearchResults = $oBatchGeocode->lookup();
- $aBatchResults[] = $aSearchResults;
- }
- include(CONST_LibDir.'/template/search-batch-json.php');
- exit;
-}
-
-$oGeocode->setQueryFromParams($oParams);
-
-if (!$oGeocode->getQueryString()
- && isset($_SERVER['PATH_INFO'])
- && strlen($_SERVER['PATH_INFO']) > 0
- && $_SERVER['PATH_INFO'][0] == '/'
-) {
- $sQuery = substr(rawurldecode($_SERVER['PATH_INFO']), 1);
-
- // reverse order of '/' separated string
- $aPhrases = explode('/', $sQuery);
- $aPhrases = array_reverse($aPhrases);
- $sQuery = join(', ', $aPhrases);
- $oGeocode->setQuery($sQuery);
-}
-
-$hLog = logStart($oDB, 'search', $oGeocode->getQueryString(), $aLangPrefOrder);
-
-$aSearchResults = $oGeocode->lookup();
-
-logEnd($oDB, $hLog, count($aSearchResults));
-
-$sQuery = $oGeocode->getQueryString();
-
-$aMoreParams = $oGeocode->getMoreUrlParams();
-$aMoreParams['format'] = $sOutputFormat;
-if (isset($_SERVER['HTTP_ACCEPT_LANGUAGE'])) {
- $aMoreParams['accept-language'] = $_SERVER['HTTP_ACCEPT_LANGUAGE'];
-}
-
-if (isset($_SERVER['REQUEST_SCHEME'])
- && isset($_SERVER['HTTP_HOST'])
- && isset($_SERVER['DOCUMENT_URI'])
-) {
- $sMoreURL = $_SERVER['REQUEST_SCHEME'].'://'
- .$_SERVER['HTTP_HOST'].$_SERVER['DOCUMENT_URI'].'/?'
- .http_build_query($aMoreParams);
-} else {
- $sMoreURL = '/search.php?'.http_build_query($aMoreParams);
-}
-
-if (CONST_Debug) {
- exit;
-}
-
-$sOutputTemplate = ($sOutputFormat == 'jsonv2') ? 'json' : $sOutputFormat;
-include(CONST_LibDir.'/template/search-'.$sOutputTemplate.'.php');
+++ /dev/null
-<?php
-/**
- * SPDX-License-Identifier: GPL-2.0-only
- *
- * This file is part of Nominatim. (https://nominatim.org)
- *
- * Copyright (C) 2022 by the Nominatim developer community.
- * For a full list of authors see the git log.
- */
-
-require_once(CONST_LibDir.'/init-website.php');
-require_once(CONST_LibDir.'/ParameterParser.php');
-require_once(CONST_LibDir.'/Status.php');
-
-$oParams = new Nominatim\ParameterParser();
-$sOutputFormat = $oParams->getSet('format', array('text', 'json'), 'text');
-
-$oDB = new Nominatim\DB(CONST_Database_DSN);
-
-if ($sOutputFormat == 'json') {
- header('content-type: application/json; charset=UTF-8');
-}
-
-
-try {
- $oStatus = new Nominatim\Status($oDB);
- $oStatus->status();
-
- if ($sOutputFormat == 'json') {
- $epoch = $oStatus->dataDate();
- $aResponse = array(
- 'status' => 0,
- 'message' => 'OK',
- 'data_updated' => (new DateTime('@'.$epoch))->format(DateTime::RFC3339),
- 'software_version' => CONST_NominatimVersion
- );
- $sDatabaseVersion = $oStatus->databaseVersion();
- if ($sDatabaseVersion) {
- $aResponse['database_version'] = $sDatabaseVersion;
- }
- javascript_renderData($aResponse);
- } else {
- echo 'OK';
- }
-} catch (Exception $oErr) {
- if ($sOutputFormat == 'json') {
- $aResponse = array(
- 'status' => $oErr->getCode(),
- 'message' => $oErr->getMessage()
- );
- javascript_renderData($aResponse);
- } else {
- header('HTTP/1.0 500 Internal Server Error');
- echo 'ERROR: '.$oErr->getMessage();
- }
-}
-- --- Return the record for the base entry.
+ current_rank_address := 1000;
FOR location IN
SELECT placex.place_id, osm_type, osm_id, name,
coalesce(extratags->'linked_place', extratags->'place') as place_type,
-- If the place had a postcode assigned, take this one only
-- into consideration when it is an area and the place does not have
-- a postcode itself.
- IF location.fromarea AND location.isaddress
+ IF location.fromarea AND location_isaddress
AND (place.address is null or not place.address ? 'postcode')
THEN
place.postcode := null; -- remove the less exact postcode
wikipedia TEXT
);
+{% if 'wikimedia_importance' in db.tables %}
+
+CREATE OR REPLACE FUNCTION get_wikipedia_match(extratags HSTORE, country_code varchar(2))
+ RETURNS wikipedia_article_match
+ AS $$
+DECLARE
+ i INT;
+ wiki_article_title TEXT;
+ wiki_article_language TEXT;
+ result wikipedia_article_match;
+ entry RECORD;
+BEGIN
+ IF extratags ? 'wikipedia' and strpos(extratags->'wikipedia', ':') IN (3,4) THEN
+ wiki_article_language := lower(trim(split_part(extratags->'wikipedia', ':', 1)));
+ wiki_article_title := trim(substr(extratags->'wikipedia',
+ strpos(extratags->'wikipedia', ':') + 1));
+
+ FOR result IN
+ SELECT language, title, importance FROM wikimedia_importance
+ WHERE language = wiki_article_language
+ and title = replace(wiki_article_title, ' ', '_')
+ LOOP
+ RETURN result;
+ END LOOP;
+ END IF;
+
+ FOREACH wiki_article_language IN ARRAY ARRAY['ar','bg','ca','cs','da','de','en','es','eo','eu','fa','fr','ko','hi','hr','id','it','he','lt','hu','ms','nl','ja','no','pl','pt','kk','ro','ru','sk','sl','sr','fi','sv','tr','uk','vi','vo','war','zh']
+ LOOP
+ IF extratags ? ('wikipedia:' || wiki_article_language) THEN
+ wiki_article_title := extratags->('wikipedia:' || wiki_article_language);
+
+ FOR result IN
+ SELECT language, title, importance FROM wikimedia_importance
+ WHERE language = wiki_article_language
+ and title = replace(wiki_article_title, ' ', '_')
+ LOOP
+ RETURN result;
+ END LOOP;
+ END IF;
+
+ END LOOP;
+
+ RETURN NULL;
+END;
+$$
+LANGUAGE plpgsql IMMUTABLE;
+
+{% else %}
-- See: http://stackoverflow.com/questions/6410088/how-can-i-mimic-the-php-urldecode-function-in-postgresql
CREATE OR REPLACE FUNCTION decode_url_part(p varchar)
WHILE langs[i] IS NOT NULL LOOP
wiki_article := extratags->(case when langs[i] in ('english','country') THEN 'wikipedia' ELSE 'wikipedia:'||langs[i] END);
IF wiki_article is not null THEN
- wiki_article := regexp_replace(wiki_article,E'^(.*?)([a-z]{2,3}).wikipedia.org/wiki/',E'\\2:');
- wiki_article := regexp_replace(wiki_article,E'^(.*?)([a-z]{2,3}).wikipedia.org/w/index.php\\?title=',E'\\2:');
- wiki_article := regexp_replace(wiki_article,E'^(.*?)/([a-z]{2,3})/wiki/',E'\\2:');
- --wiki_article := regexp_replace(wiki_article,E'^(.*?)([a-z]{2,3})[=:]',E'\\2:');
wiki_article := replace(wiki_article,' ','_');
IF strpos(wiki_article, ':') IN (3,4) THEN
wiki_article_language := lower(trim(split_part(wiki_article, ':', 1)));
$$
LANGUAGE plpgsql STABLE;
+{% endif %}
CREATE OR REPLACE FUNCTION compute_importance(extratags HSTORE,
country_code varchar(2),
-- Nothing? Then try with the wikidata tag.
IF result.importance is null AND extratags ? 'wikidata' THEN
- FOR match IN SELECT * FROM wikipedia_article
- WHERE wd_page_title = extratags->'wikidata'
- ORDER BY language = 'en' DESC, langcount DESC LIMIT 1
+ FOR match IN
+{% if 'wikimedia_importance' in db.tables %}
+ SELECT * FROM wikimedia_importance
+ WHERE wikidata = extratags->'wikidata'
+ LIMIT 1
+{% else %}
+ SELECT * FROM wikipedia_article
+ WHERE wd_page_title = extratags->'wikidata'
+ ORDER BY language = 'en' DESC, langcount DESC LIMIT 1
+{% endif %}
LOOP
result.importance := match.importance;
result.wikipedia := match.language || ':' || match.title;
-- Still nothing? Fall back to a default.
IF result.importance is null THEN
- result.importance := 0.75001 - (rank_search::float / 40);
+ result.importance := 0.40001 - (rank_search::float / 75);
END IF;
{% if 'secondary_importance' in db.tables %}
END LOOP;
END IF;
- IF parent_place_id is null THEN
- RETURN 0;
- END IF;
-
RETURN parent_place_id;
END;
$$
DECLARE
existing BIGINT[];
BEGIN
- -- Get the existing entry from the interpolation table.
- SELECT array_agg(place_id) INTO existing
- FROM location_property_osmline WHERE osm_id = way_id;
-
- IF existing IS NULL or array_length(existing, 1) = 0 THEN
- INSERT INTO location_property_osmline (osm_id, address, linegeo)
- VALUES (way_id, addr, geom);
+ IF addr is NULL OR NOT addr ? 'interpolation'
+ OR NOT (addr->'interpolation' in ('odd', 'even', 'all')
+ or addr->'interpolation' similar to '[1-9]')
+ THEN
+ -- the new interpolation is illegal, simply remove existing entries
+ DELETE FROM location_property_osmline WHERE osm_id = way_id;
ELSE
- -- Update the interpolation table:
- -- The first entry gets the original data, all other entries
- -- are removed and will be recreated on indexing.
- -- (An interpolation can be split up, if it has more than 2 address nodes)
- UPDATE location_property_osmline
- SET address = addr,
- linegeo = geom,
- startnumber = null,
- indexed_status = 1
- WHERE place_id = existing[1];
- IF array_length(existing, 1) > 1 THEN
- DELETE FROM location_property_osmline
- WHERE place_id = any(existing[2:]);
+ -- Get the existing entry from the interpolation table.
+ SELECT array_agg(place_id) INTO existing
+ FROM location_property_osmline WHERE osm_id = way_id;
+
+ IF existing IS NULL or array_length(existing, 1) = 0 THEN
+ INSERT INTO location_property_osmline (osm_id, address, linegeo)
+ VALUES (way_id, addr, geom);
+ ELSE
+ -- Update the interpolation table:
+ -- The first entry gets the original data, all other entries
+ -- are removed and will be recreated on indexing.
+ -- (An interpolation can be split up, if it has more than 2 address nodes)
+ UPDATE location_property_osmline
+ SET address = addr,
+ linegeo = geom,
+ startnumber = null,
+ indexed_status = 1
+ WHERE place_id = existing[1];
+ IF array_length(existing, 1) > 1 THEN
+ DELETE FROM location_property_osmline
+ WHERE place_id = any(existing[2:]);
+ END IF;
END IF;
END IF;
CREATE OR REPLACE FUNCTION osmline_insert()
RETURNS TRIGGER
AS $$
+DECLARE
+ centroid GEOMETRY;
BEGIN
NEW.place_id := nextval('seq_place');
NEW.indexed_date := now();
END IF;
NEW.indexed_status := 1; --STATUS_NEW
- NEW.country_code := lower(get_country_code(NEW.linegeo));
+ centroid := get_center_point(NEW.linegeo);
+ NEW.country_code := lower(get_country_code(centroid));
NEW.partition := get_partition(NEW.country_code);
- NEW.geometry_sector := geometry_sector(NEW.partition, NEW.linegeo);
+ NEW.geometry_sector := geometry_sector(NEW.partition, centroid);
END IF;
RETURN NEW;
newend INTEGER;
moddiff SMALLINT;
linegeo GEOMETRY;
- splitline GEOMETRY;
+ splitpoint FLOAT;
sectiongeo GEOMETRY;
postcode TEXT;
stepmod SMALLINT;
END IF;
NEW.parent_place_id := get_interpolation_parent(NEW.token_info, NEW.partition,
- ST_PointOnSurface(NEW.linegeo),
- NEW.linegeo);
+ get_center_point(NEW.linegeo),
+ NEW.linegeo);
+
+ -- Cannot find a parent street. We will not be able to display a reliable
+ -- address, so drop the entire interpolation.
+ IF NEW.parent_place_id is NULL THEN
+ DELETE FROM location_property_osmline where place_id = OLD.place_id;
+ RETURN NULL;
+ END IF;
NEW.token_info := token_strip_info(NEW.token_info);
IF NEW.address ? '_inherited' THEN
-- formatted postcode and therefore 'postcode' contains a derived
-- variant.
CASE WHEN address ? 'postcode' THEN placex.postcode ELSE NULL::text END as postcode,
- substring(address->'housenumber','[0-9]+')::integer as hnr
+ (address->'housenumber')::integer as hnr
FROM placex, generate_series(1, array_upper(waynodes, 1)) nodeidpos
WHERE osm_type = 'N' and osm_id = waynodes[nodeidpos]::BIGINT
and address is not NULL and address ? 'housenumber'
+ and address->'housenumber' ~ '^[0-9]{1,6}$'
+ and ST_Distance(NEW.linegeo, geometry) < 0.0005
ORDER BY nodeidpos
LOOP
{% if debug %}RAISE WARNING 'processing point % (%)', nextnode.hnr, ST_AsText(nextnode.geometry);{% endif %}
IF linegeo is null THEN
linegeo := NEW.linegeo;
ELSE
- splitline := ST_Split(ST_Snap(linegeo, nextnode.geometry, 0.0005), nextnode.geometry);
- sectiongeo := ST_GeometryN(splitline, 1);
- linegeo := ST_GeometryN(splitline, 2);
+ splitpoint := ST_LineLocatePoint(linegeo, nextnode.geometry);
+ IF splitpoint = 0 THEN
+ -- Corner case where the splitpoint falls on the first point
+ -- and thus would not return a geometry. Skip that section.
+ sectiongeo := NULL;
+ ELSEIF splitpoint = 1 THEN
+ -- Point is at the end of the line.
+ sectiongeo := linegeo;
+ linegeo := NULL;
+ ELSE
+ -- Split the line.
+ sectiongeo := ST_LineSubstring(linegeo, 0, splitpoint);
+ linegeo := ST_LineSubstring(linegeo, splitpoint, 1);
+ END IF;
END IF;
IF prevnode.hnr is not null
-- regularly mapped housenumbers.
-- (Conveniently also fails if one of the house numbers is not a number.)
and abs(prevnode.hnr - nextnode.hnr) > NEW.step
+ -- If the interpolation geometry is broken or two nodes are at the
+ -- same place, then splitting might produce a point. Ignore that.
+ and ST_GeometryType(sectiongeo) = 'ST_LineString'
THEN
IF prevnode.hnr < nextnode.hnr THEN
startnumber := prevnode.hnr;
NEW.address, postcode,
NEW.country_code, NEW.geometry_sector, 0);
END IF;
+ END IF;
- -- early break if we are out of line string,
- -- might happen when a line string loops back on itself
- IF ST_GeometryType(linegeo) != 'ST_LineString' THEN
- RETURN NEW;
- END IF;
+ -- early break if we are out of line string,
+ -- might happen when a line string loops back on itself
+ IF linegeo is null or ST_GeometryType(linegeo) != 'ST_LineString' THEN
+ RETURN NEW;
END IF;
prevnode := nextnode;
centroid GEOMETRY
);
--- feature intersects geometry
--- for areas and linestrings they must touch at least along a line
-CREATE OR REPLACE FUNCTION is_relevant_geometry(de9im TEXT, geom_type TEXT)
-RETURNS BOOLEAN
-AS $$
-BEGIN
- IF substring(de9im from 1 for 2) != 'FF' THEN
- RETURN TRUE;
- END IF;
-
- IF geom_type = 'ST_Point' THEN
- RETURN substring(de9im from 4 for 1) = '0';
- END IF;
-
- IF geom_type in ('ST_LineString', 'ST_MultiLineString') THEN
- RETURN substring(de9im from 4 for 1) = '1';
- END IF;
-
- RETURN substring(de9im from 4 for 1) = '2';
-END
-$$ LANGUAGE plpgsql IMMUTABLE;
-
CREATE OR REPLACE function getNearFeatures(in_partition INTEGER, feature GEOMETRY,
feature_centroid GEOMETRY,
maxrank INTEGER)
isguess, postcode, centroid
FROM location_area_large_{{ partition }}
WHERE geometry && feature
- AND is_relevant_geometry(ST_Relate(geometry, feature), ST_GeometryType(feature))
+ AND CASE WHEN ST_Dimension(feature) = 0
+ THEN _ST_Covers(geometry, feature)
+ WHEN ST_Dimension(feature) = 2
+ THEN ST_Relate(geometry, feature, 'T********')
+ ELSE ST_NPoints(ST_Intersection(geometry, feature)) > 1
+ END
AND rank_address < maxrank
-- Postcodes currently still use rank_search to define for which
-- features they are relevant.
IF in_rank_search <= 4 and not in_estimate THEN
INSERT INTO location_area_country (place_id, country_code, geometry)
- values (in_place_id, in_country_code, in_geometry);
+ (SELECT in_place_id, in_country_code, geom
+ FROM split_geometry(in_geometry) as geom);
RETURN TRUE;
END IF;
{% for partition in db.partitions %}
IF in_partition = {{ partition }} THEN
INSERT INTO location_area_large_{{ partition }} (partition, place_id, country_code, keywords, rank_search, rank_address, isguess, postcode, centroid, geometry)
- values (in_partition, in_place_id, in_country_code, in_keywords, in_rank_search, in_rank_address, in_estimate, postcode, in_centroid, in_geometry);
+ (SELECT in_partition, in_place_id, in_country_code, in_keywords, in_rank_search, in_rank_address, in_estimate, postcode, in_centroid, geom
+ FROM split_geometry(in_geometry) as geom);
RETURN TRUE;
END IF;
{% endfor %}
RETURN null;
END IF;
+ -- Remove the place from the list of places to be deleted
+ DELETE FROM place_to_be_deleted pdel
+ WHERE pdel.osm_type = NEW.osm_type and pdel.osm_id = NEW.osm_id
+ and pdel.class = NEW.class and pdel.type = NEW.type;
+
-- Have we already done this place?
SELECT * INTO existing
FROM place
{% if debug %}RAISE WARNING 'Existing: %',existing.osm_id;{% endif %}
- -- Handle a place changing type by removing the old data.
- -- (This trigger is executed BEFORE INSERT of the NEW tuple.)
IF existing.osm_type IS NULL THEN
DELETE FROM place where osm_type = NEW.osm_type and osm_id = NEW.osm_id and class = NEW.class;
END IF;
END IF;
{% endif %}
- IF existing.osm_type IS NOT NULL THEN
- -- Pathological case caused by the triggerless copy into place during initial import
- -- force delete even for large areas, it will be reinserted later
- UPDATE place SET geometry = ST_SetSRID(ST_Point(0,0), 4326)
- WHERE osm_type = NEW.osm_type and osm_id = NEW.osm_id
- and class = NEW.class and type = NEW.type;
- DELETE FROM place
- WHERE osm_type = NEW.osm_type and osm_id = NEW.osm_id
- and class = NEW.class and type = NEW.type;
+ IF existingplacex.osm_type is not NULL THEN
+ -- Mark any existing place for delete in the placex table
+ UPDATE placex SET indexed_status = 100
+ WHERE placex.osm_type = NEW.osm_type and placex.osm_id = NEW.osm_id
+ and placex.class = NEW.class and placex.type = NEW.type;
END IF;
-- Process it as a new insertion
{% if debug %}RAISE WARNING 'insert done % % % % %',NEW.osm_type,NEW.osm_id,NEW.class,NEW.type,NEW.name;{% endif %}
+ IF existing.osm_type is not NULL THEN
+ -- If there is already an entry in place, just update that, if necessary.
+ IF coalesce(existing.name, ''::hstore) != coalesce(NEW.name, ''::hstore)
+ or coalesce(existing.address, ''::hstore) != coalesce(NEW.address, ''::hstore)
+ or coalesce(existing.extratags, ''::hstore) != coalesce(NEW.extratags, ''::hstore)
+ or coalesce(existing.admin_level, 15) != coalesce(NEW.admin_level, 15)
+ or existing.geometry::text != NEW.geometry::text
+ THEN
+ UPDATE place
+ SET name = NEW.name,
+ address = NEW.address,
+ extratags = NEW.extratags,
+ admin_level = NEW.admin_level,
+ geometry = NEW.geometry
+ WHERE osm_type = NEW.osm_type and osm_id = NEW.osm_id
+ and class = NEW.class and type = NEW.type;
+ END IF;
+
+ RETURN NULL;
+ END IF;
+
RETURN NEW;
END IF;
extratags = NEW.extratags,
admin_level = NEW.admin_level,
indexed_status = 2,
- geometry = NEW.geometry
+ geometry = CASE WHEN existingplacex.rank_address = 0
+ THEN simplify_large_polygons(NEW.geometry)
+ ELSE NEW.geometry END
WHERE place_id = existingplacex.place_id;
-- Invalidate linked places: they potentially get a new name and addresses.
END;
$$ LANGUAGE plpgsql;
-
CREATE OR REPLACE FUNCTION place_delete()
RETURNS TRIGGER
AS $$
DECLARE
- has_rank BOOLEAN;
+ deferred BOOLEAN;
BEGIN
-
- {% if debug %}RAISE WARNING 'delete: % % % %',OLD.osm_type,OLD.osm_id,OLD.class,OLD.type;{% endif %}
-
- -- deleting large polygons can have a massive effect on the system - require manual intervention to let them through
- IF st_area(OLD.geometry) > 2 and st_isvalid(OLD.geometry) THEN
- SELECT bool_or(not (rank_address = 0 or rank_address > 25)) as ranked FROM placex WHERE osm_type = OLD.osm_type and osm_id = OLD.osm_id and class = OLD.class and type = OLD.type INTO has_rank;
- IF has_rank THEN
- insert into import_polygon_delete (osm_type, osm_id, class, type) values (OLD.osm_type,OLD.osm_id,OLD.class,OLD.type);
- RETURN NULL;
- END IF;
+ {% if debug %}RAISE WARNING 'Delete for % % %/%', OLD.osm_type, OLD.osm_id, OLD.class, OLD.type;{% endif %}
+
+ deferred := ST_IsValid(OLD.geometry) and ST_Area(OLD.geometry) > 2;
+ IF deferred THEN
+ SELECT bool_or(not (rank_address = 0 or rank_address > 25)) INTO deferred
+ FROM placex
+ WHERE osm_type = OLD.osm_type and osm_id = OLD.osm_id
+ and class = OLD.class and type = OLD.type;
END IF;
- -- mark for delete
- UPDATE placex set indexed_status = 100 where osm_type = OLD.osm_type and osm_id = OLD.osm_id and class = OLD.class and type = OLD.type;
-
- -- interpolations are special
- IF OLD.osm_type='W' and OLD.class = 'place' and OLD.type = 'houses' THEN
- UPDATE location_property_osmline set indexed_status = 100 where osm_id = OLD.osm_id; -- osm_id = wayid (=old.osm_id)
- END IF;
+ INSERT INTO place_to_be_deleted (osm_type, osm_id, class, type, deferred)
+ VALUES(OLD.osm_type, OLD.osm_id, OLD.class, OLD.type, deferred);
- RETURN OLD;
+ RETURN NULL;
END;
-$$
-LANGUAGE plpgsql;
-
+$$ LANGUAGE plpgsql;
--
-- This file is part of Nominatim. (https://nominatim.org)
--
--- Copyright (C) 2022 by the Nominatim developer community.
+-- Copyright (C) 2024 by the Nominatim developer community.
-- For a full list of authors see the git log.
-- Trigger functions for the placex table.
-- Add all names from the place nodes that deviate from the name
-- in the relation with the prefix '_place_'. Deviation means that
-- either the value is different or a given key is missing completely
- SELECT hstore(array_agg('_place_' || key), array_agg(value)) INTO extra_names
- FROM each(location.name - result.name);
- {% if debug %}RAISE WARNING 'Extra names: %', extra_names;{% endif %}
+ IF result.name is null THEN
+ SELECT hstore(array_agg('_place_' || key), array_agg(value))
+ INTO result.name
+ FROM each(location.name);
+ ELSE
+ SELECT hstore(array_agg('_place_' || key), array_agg(value)) INTO extra_names
+ FROM each(location.name - result.name);
+ {% if debug %}RAISE WARNING 'Extra names: %', extra_names;{% endif %}
- IF extra_names is not null THEN
- result.name := result.name || extra_names;
+ IF extra_names is not null THEN
+ result.name := result.name || extra_names;
+ END IF;
END IF;
{% if debug %}RAISE WARNING 'Final names: %', result.name;{% endif %}
CREATE OR REPLACE FUNCTION find_associated_street(poi_osm_type CHAR(1),
- poi_osm_id BIGINT)
+ poi_osm_id BIGINT,
+ bbox GEOMETRY)
RETURNS BIGINT
AS $$
DECLARE
location RECORD;
+ member JSONB;
parent RECORD;
+ result BIGINT;
+ distance FLOAT;
+ new_distance FLOAT;
+ waygeom GEOMETRY;
BEGIN
+{% if db.middle_db_format == '1' %}
FOR location IN
SELECT members FROM planet_osm_rels
WHERE parts @> ARRAY[poi_osm_id]
FOR i IN 1..array_upper(location.members, 1) BY 2 LOOP
IF location.members[i+1] = 'street' THEN
FOR parent IN
- SELECT place_id from placex
+ SELECT place_id, geometry
+ FROM placex
WHERE osm_type = upper(substring(location.members[i], 1, 1))::char(1)
and osm_id = substring(location.members[i], 2)::bigint
and name is not null
and rank_search between 26 and 27
LOOP
- RETURN parent.place_id;
+ -- Find the closest 'street' member.
+ -- Avoid distance computation for the frequent case where there is
+ -- only one street member.
+ IF waygeom is null THEN
+ result := parent.place_id;
+ waygeom := parent.geometry;
+ ELSE
+ distance := coalesce(distance, ST_Distance(waygeom, bbox));
+ new_distance := ST_Distance(parent.geometry, bbox);
+ IF new_distance < distance THEN
+ distance := new_distance;
+ result := parent.place_id;
+ waygeom := parent.geometry;
+ END IF;
+ END IF;
END LOOP;
END IF;
END LOOP;
END LOOP;
- RETURN NULL;
+{% else %}
+ FOR member IN
+ SELECT value FROM planet_osm_rels r, LATERAL jsonb_array_elements(members)
+ WHERE planet_osm_member_ids(members, poi_osm_type::char(1)) && ARRAY[poi_osm_id]
+ and tags->>'type' = 'associatedStreet'
+ and value->>'role' = 'street'
+ LOOP
+ FOR parent IN
+ SELECT place_id, geometry
+ FROM placex
+ WHERE osm_type = (member->>'type')::char(1)
+ and osm_id = (member->>'ref')::bigint
+ and name is not null
+ and rank_search between 26 and 27
+ LOOP
+ -- Find the closest 'street' member.
+ -- Avoid distance computation for the frequent case where there is
+ -- only one street member.
+ IF waygeom is null THEN
+ result := parent.place_id;
+ waygeom := parent.geometry;
+ ELSE
+ distance := coalesce(distance, ST_Distance(waygeom, bbox));
+ new_distance := ST_Distance(parent.geometry, bbox);
+ IF new_distance < distance THEN
+ distance := new_distance;
+ result := parent.place_id;
+ waygeom := parent.geometry;
+ END IF;
+ END IF;
+ END LOOP;
+ END LOOP;
+{% endif %}
+
+ RETURN result;
END;
$$
LANGUAGE plpgsql STABLE;
{% if debug %}RAISE WARNING 'finding street for % %', poi_osm_type, poi_osm_id;{% endif %}
-- Is this object part of an associatedStreet relation?
- parent_place_id := find_associated_street(poi_osm_type, poi_osm_id);
+ parent_place_id := find_associated_street(poi_osm_type, poi_osm_id, bbox);
IF parent_place_id is null THEN
parent_place_id := find_parent_for_address(token_info, poi_partition, bbox);
RETURN location.place_id;
END IF;
- parent_place_id := find_associated_street('W', location.osm_id);
+ parent_place_id := find_associated_street('W', location.osm_id, bbox);
END LOOP;
END IF;
RETURNS placex
AS $$
DECLARE
+{% if db.middle_db_format == '1' %}
relation_members TEXT[];
+{% else %}
+ relation_members JSONB;
+{% endif %}
rel_member RECORD;
linked_placex placex%ROWTYPE;
bnd_name TEXT;
name_vector := array_merge(name_vector, hnr_vector);
END IF;
- IF is_place_addr THEN
- addr_place_ids := token_addr_place_search_tokens(token_info);
- IF not addr_place_ids <@ parent_name_vector THEN
- -- make sure addr:place terms are always searchable
- nameaddress_vector := array_merge(nameaddress_vector, addr_place_ids);
- -- If there is a housenumber, also add the place name as a name,
- -- so we can search it by the usual housenumber+place algorithms.
- IF hnr_vector is not null THEN
- name_vector := array_merge(name_vector, addr_place_ids);
- END IF;
- END IF;
- END IF;
-
-- Cheating here by not recomputing all terms but simply using the ones
-- from the parent object.
nameaddress_vector := array_merge(nameaddress_vector, parent_name_vector);
nameaddress_vector := array_merge(nameaddress_vector, parent_address_vector);
+ -- make sure addr:place terms are always searchable
+ IF is_place_addr THEN
+ addr_place_ids := token_addr_place_search_tokens(token_info);
+ IF hnr_vector is not null AND not addr_place_ids <@ parent_name_vector
+ THEN
+ name_vector := array_merge(name_vector, hnr_vector);
+ END IF;
+ nameaddress_vector := array_merge(nameaddress_vector, addr_place_ids);
+ END IF;
END;
$$
LANGUAGE plpgsql;
NEW.place_id := nextval('seq_place');
NEW.indexed_status := 1; --STATUS_NEW
- NEW.centroid := ST_PointOnSurface(NEW.geometry);
+ NEW.centroid := get_center_point(NEW.geometry);
NEW.country_code := lower(get_country_code(NEW.centroid));
NEW.partition := get_partition(NEW.country_code);
NEW.country_code := NULL;
END IF;
+ -- Simplify polygons with a very large memory footprint when they
+ -- do not take part in address computation.
+ IF NEW.rank_address = 0 THEN
+ NEW.geometry := simplify_large_polygons(NEW.geometry);
+ END IF;
+
END IF;
{% if debug %}RAISE WARNING 'placex_insert:END: % % % %',NEW.osm_type,NEW.osm_id,NEW.class,NEW.type;{% endif %}
{% if not disable_diff_updates %}
-- The following is not needed until doing diff updates, and slows the main index process down
- IF NEW.rank_address > 0 THEN
+ IF NEW.rank_address between 2 and 27 THEN
IF (ST_GeometryType(NEW.geometry) in ('ST_Polygon','ST_MultiPolygon') AND ST_IsValid(NEW.geometry)) THEN
-- Performance: We just can't handle re-indexing for country level changes
- IF st_area(NEW.geometry) < 1 THEN
+ IF (NEW.rank_address < 26 and st_area(NEW.geometry) < 1)
+ OR (NEW.rank_address >= 26 and st_area(NEW.geometry) < 0.01)
+ THEN
-- mark items within the geometry for re-indexing
-- RAISE WARNING 'placex poly insert: % % % %',NEW.osm_type,NEW.osm_id,NEW.class,NEW.type;
or name is not null
or (NEW.rank_address >= 16 and address ? 'place'));
END IF;
- ELSE
+ ELSEIF ST_GeometryType(NEW.geometry) not in ('ST_LineString', 'ST_MultiLineString')
+ OR ST_Length(NEW.geometry) < 0.5
+ THEN
-- mark nearby items for re-indexing, where 'nearby' depends on the features rank_search and is a complete guess :(
- diameter := update_place_diameter(NEW.rank_search);
+ diameter := update_place_diameter(NEW.rank_address);
IF diameter > 0 THEN
-- RAISE WARNING 'placex point insert: % % % % %',NEW.osm_type,NEW.osm_id,NEW.class,NEW.type,diameter;
IF NEW.rank_search >= 26 THEN
DECLARE
i INTEGER;
location RECORD;
+{% if db.middle_db_format == '1' %}
relation_members TEXT[];
+{% else %}
+ relation_member JSONB;
+{% endif %}
geom GEOMETRY;
parent_address_level SMALLINT;
result := deleteLocationArea(NEW.partition, NEW.place_id, NEW.rank_search);
NEW.extratags := NEW.extratags - 'linked_place'::TEXT;
+ IF NEW.extratags = ''::hstore THEN
+ NEW.extratags := NULL;
+ END IF;
-- NEW.linked_place_id contains the precomputed linkee. Save this and restore
-- the previous link status.
END IF;
-- Compute a preliminary centroid.
- NEW.centroid := ST_PointOnSurface(NEW.geometry);
+ NEW.centroid := get_center_point(NEW.geometry);
-- recalculate country and partition
IF NEW.rank_search = 4 AND NEW.address is not NULL AND NEW.address ? 'country' THEN
-- waterway ways are linked when they are part of a relation and have the same class/type
IF NEW.osm_type = 'R' and NEW.class = 'waterway' THEN
+{% if db.middle_db_format == '1' %}
FOR relation_members IN select members from planet_osm_rels r where r.id = NEW.osm_id and r.parts != array[]::bigint[]
LOOP
FOR i IN 1..array_upper(relation_members, 1) BY 2 LOOP
END IF;
END LOOP;
END LOOP;
+{% else %}
+ FOR relation_member IN
+ SELECT value FROM planet_osm_rels r, LATERAL jsonb_array_elements(r.members)
+ WHERE r.id = NEW.osm_id
+ LOOP
+ IF relation_member->>'role' IN ('', 'main_stream', 'side_stream')
+ and relation_member->>'type' = 'W'
+ THEN
+ {% if debug %}RAISE WARNING 'waterway parent %, child %', NEW.osm_id, relation_member;{% endif %}
+ FOR linked_node_id IN
+ SELECT place_id FROM placex
+ WHERE osm_type = 'W' and osm_id = (relation_member->>'ref')::bigint
+ and class = NEW.class and type in ('river', 'stream', 'canal', 'drain', 'ditch')
+ and (relation_member->>'role' != 'side_stream' or NEW.name->'name' = name->'name')
+ LOOP
+ UPDATE placex SET linked_place_id = NEW.place_id WHERE place_id = linked_node_id;
+ {% if 'search_name' in db.tables %}
+ DELETE FROM search_name WHERE place_id = linked_node_id;
+ {% endif %}
+ END LOOP;
+ END IF;
+ END LOOP;
+{% endif %}
{% if debug %}RAISE WARNING 'Waterway processed';{% endif %}
END IF;
{% if debug %}RAISE WARNING 'finding street for % %', NEW.osm_type, NEW.osm_id;{% endif %}
NEW.parent_place_id := null;
- is_place_address := coalesce(not NEW.address ? 'street' and NEW.address ? 'place', FALSE);
+ is_place_address := not token_is_street_address(NEW.token_info);
-- We have to find our parent road.
NEW.parent_place_id := find_parent_for_poi(NEW.osm_type, NEW.osm_id,
SELECT p.country_code, p.postcode, p.name FROM placex p
WHERE p.place_id = NEW.parent_place_id INTO location;
- IF is_place_address THEN
+ IF is_place_address and NEW.address ? 'place' THEN
-- Check if the addr:place tag is part of the parent name
SELECT count(*) INTO i
FROM svals(location.name) AS pname WHERE pname = NEW.address->'place';
ELSE
-- No linked place? As a last resort check if the boundary is tagged with
-- a place type and adapt the rank address.
- IF NEW.rank_address > 0 and NEW.extratags ? 'place' THEN
+ IF NEW.rank_address between 4 and 25 and NEW.extratags ? 'place' THEN
SELECT address_rank INTO place_address_level
FROM compute_place_rank(NEW.country_code, 'A', 'place',
NEW.extratags->'place', 0::SMALLINT, False, null);
END IF;
ELSEIF NEW.rank_address > 25 THEN
max_rank := 25;
+ ELSEIF NEW.class in ('place','boundary') and NEW.type in ('postcode','postal_code') THEN
+ max_rank := NEW.rank_search;
ELSE
max_rank := NEW.rank_address;
END IF;
{% endif %}
END IF;
- IF NEW.postcode is null AND NEW.rank_search > 8 THEN
- NEW.postcode := get_nearest_postcode(NEW.country_code, NEW.geometry);
+ IF NEW.postcode is null AND NEW.rank_search > 8
+ AND (NEW.rank_address > 0
+ OR ST_GeometryType(NEW.geometry) not in ('ST_LineString','ST_MultiLineString')
+ OR ST_Length(NEW.geometry) < 0.02)
+ THEN
+ NEW.postcode := get_nearest_postcode(NEW.country_code,
+ CASE WHEN NEW.rank_address > 25
+ THEN NEW.centroid ELSE NEW.geometry END);
END IF;
{% if debug %}RAISE WARNING 'place update % % finished.', NEW.osm_type, NEW.osm_id;{% endif %}
{% if debug %}RAISE WARNING 'placex_delete:12 % %',OLD.osm_type,OLD.osm_id;{% endif %}
+ UPDATE location_postcode SET indexed_status = 2 WHERE parent_place_id = OLD.place_id;
+
RETURN OLD;
END;
END;
$$
LANGUAGE plpgsql IMMUTABLE;
+
+
+CREATE OR REPLACE FUNCTION weigh_search(search_vector INT[],
+ rankings TEXT,
+ def_weight FLOAT)
+ RETURNS FLOAT
+ AS $$
+DECLARE
+ rank JSON;
+BEGIN
+ FOR rank IN
+ SELECT * FROM json_array_elements(rankings::JSON)
+ LOOP
+ IF true = ALL(SELECT x::int = ANY(search_vector) FROM json_array_elements_text(rank->1) as x) THEN
+ RETURN (rank->>0)::float;
+ END IF;
+ END LOOP;
+ RETURN def_weight;
+END;
+$$
+LANGUAGE plpgsql IMMUTABLE;
-- Assorted helper functions for the triggers.
-CREATE OR REPLACE FUNCTION geometry_sector(partition INTEGER, place geometry)
- RETURNS INTEGER
+CREATE OR REPLACE FUNCTION get_center_point(place GEOMETRY)
+ RETURNS GEOMETRY
AS $$
DECLARE
- NEWgeometry geometry;
+ geom_type TEXT;
+BEGIN
+ geom_type := ST_GeometryType(place);
+  IF geom_type = 'ST_Point' THEN
+ RETURN place;
+ END IF;
+ IF geom_type = 'ST_LineString' THEN
+ RETURN ST_LineInterpolatePoint(place, 0.5);
+ END IF;
+
+ RETURN ST_PointOnSurface(place);
+END;
+$$
+LANGUAGE plpgsql IMMUTABLE;
+
+
+CREATE OR REPLACE FUNCTION geometry_sector(partition INTEGER, place GEOMETRY)
+ RETURNS INTEGER
+ AS $$
BEGIN
--- RAISE WARNING '%',place;
- NEWgeometry := ST_PointOnSurface(place);
- RETURN (partition*1000000) + (500-ST_X(NEWgeometry)::integer)*1000 + (500-ST_Y(NEWgeometry)::integer);
+ RETURN (partition*1000000) + (500-ST_X(place)::INTEGER)*1000 + (500-ST_Y(place)::INTEGER);
END;
$$
LANGUAGE plpgsql IMMUTABLE;
+
CREATE OR REPLACE FUNCTION array_merge(a INTEGER[], b INTEGER[])
RETURNS INTEGER[]
AS $$
$$
LANGUAGE plpgsql IMMUTABLE;
+
+CREATE OR REPLACE FUNCTION get_rel_node_members(members JSONB, memberLabels TEXT[])
+ RETURNS SETOF BIGINT
+ AS $$
+DECLARE
+ member JSONB;
+BEGIN
+ FOR member IN SELECT * FROM jsonb_array_elements(members)
+ LOOP
+ IF member->>'type' = 'N' and member->>'role' = ANY(memberLabels) THEN
+ RETURN NEXT (member->>'ref')::bigint;
+ END IF;
+ END LOOP;
+
+ RETURN;
+END;
+$$
+LANGUAGE plpgsql IMMUTABLE;
+
+
-- Copy 'name' to or from the default language.
--
-- \param country_code Country code of the object being named.
RETURNS TEXT
AS $$
DECLARE
- place_centre GEOMETRY;
nearcountry RECORD;
+ countries TEXT[];
BEGIN
- place_centre := ST_PointOnSurface(place);
-
--- RAISE WARNING 'get_country_code, start: %', ST_AsText(place_centre);
+-- RAISE WARNING 'get_country_code, start: %', ST_AsText(place);
-- Try for a OSM polygon
- FOR nearcountry IN
- SELECT country_code from location_area_country
- WHERE country_code is not null and st_covers(geometry, place_centre) limit 1
- LOOP
- RETURN nearcountry.country_code;
- END LOOP;
+ SELECT array_agg(country_code) FROM location_area_country
+ WHERE country_code is not null and st_covers(geometry, place)
+ INTO countries;
--- RAISE WARNING 'osm fallback: %', ST_AsText(place_centre);
+ IF array_length(countries, 1) = 1 THEN
+ RETURN countries[1];
+ END IF;
+
+ IF array_length(countries, 1) > 1 THEN
+ -- more than one country found, confirm against the fallback data what to choose
+ FOR nearcountry IN
+ SELECT country_code FROM country_osm_grid
+ WHERE ST_Covers(geometry, place) AND country_code = ANY(countries)
+ ORDER BY area ASC
+ LOOP
+ RETURN nearcountry.country_code;
+ END LOOP;
+ -- Still nothing? Choose the country code with the smallest partition number.
+ -- And failing that, just go by the alphabet.
+ FOR nearcountry IN
+ SELECT cc,
+ (SELECT partition FROM country_name WHERE country_code = cc) as partition
+ FROM unnest(countries) cc
+ ORDER BY partition, cc
+ LOOP
+ RETURN nearcountry.cc;
+ END LOOP;
+
+ -- Should never be reached.
+ RETURN countries[1];
+ END IF;
+
+-- RAISE WARNING 'osm fallback: %', ST_AsText(place);
-- Try for OSM fallback data
-- The order is to deal with places like HongKong that are 'states' within another polygon
FOR nearcountry IN
SELECT country_code from country_osm_grid
- WHERE st_covers(geometry, place_centre) order by area asc limit 1
+ WHERE st_covers(geometry, place) order by area asc limit 1
LOOP
RETURN nearcountry.country_code;
END LOOP;
--- RAISE WARNING 'near osm fallback: %', ST_AsText(place_centre);
+-- RAISE WARNING 'near osm fallback: %', ST_AsText(place);
RETURN NULL;
END;
END IF;
RETURN ST_Envelope(ST_Collect(
- ST_Project(geom, radius, 0.785398)::geometry,
- ST_Project(geom, radius, 3.9269908)::geometry));
+ ST_Project(geom::geography, radius, 0.785398)::geometry,
+ ST_Project(geom::geography, radius, 3.9269908)::geometry));
END;
$$
LANGUAGE plpgsql IMMUTABLE;
RETURNS BOOLEAN
AS $$
DECLARE
- locationid INTEGER;
- secgeo GEOMETRY;
postcode TEXT;
BEGIN
PERFORM deleteLocationArea(partition, place_id, rank_search);
postcode := upper(trim (in_postcode));
END IF;
- IF ST_GeometryType(geometry) in ('ST_Polygon','ST_MultiPolygon') THEN
- FOR secgeo IN select split_geometry(geometry) AS geom LOOP
- PERFORM insertLocationAreaLarge(partition, place_id, country_code, keywords, rank_search, rank_address, false, postcode, centroid, secgeo);
- END LOOP;
-
- ELSEIF ST_GeometryType(geometry) = 'ST_Point' THEN
- secgeo := place_node_fuzzy_area(geometry, rank_search);
- PERFORM insertLocationAreaLarge(partition, place_id, country_code, keywords, rank_search, rank_address, true, postcode, centroid, secgeo);
+ IF ST_Dimension(geometry) = 2 THEN
+ RETURN insertLocationAreaLarge(partition, place_id, country_code, keywords,
+ rank_search, rank_address, false, postcode,
+ centroid, geometry);
+ END IF;
+ IF ST_Dimension(geometry) = 0 THEN
+ RETURN insertLocationAreaLarge(partition, place_id, country_code, keywords,
+ rank_search, rank_address, true, postcode,
+ centroid, place_node_fuzzy_area(geometry, rank_search));
END IF;
- RETURN true;
+ RETURN false;
END;
$$
LANGUAGE plpgsql;
geo RECORD;
area FLOAT;
remainingdepth INTEGER;
- added INTEGER;
BEGIN
-
-- RAISE WARNING 'quad_split_geometry: maxarea=%, depth=%',maxarea,maxdepth;
- IF (ST_GeometryType(geometry) not in ('ST_Polygon','ST_MultiPolygon') OR NOT ST_IsValid(geometry)) THEN
+ IF not ST_IsValid(geometry) THEN
+ RETURN;
+ END IF;
+
+ IF ST_Dimension(geometry) != 2 OR maxdepth <= 1 THEN
RETURN NEXT geometry;
RETURN;
END IF;
remainingdepth := maxdepth - 1;
area := ST_AREA(geometry);
- IF remainingdepth < 1 OR area < maxarea THEN
+ IF area < maxarea THEN
RETURN NEXT geometry;
RETURN;
END IF;
xmid := (xmin+xmax)/2;
ymid := (ymin+ymax)/2;
- added := 0;
FOR seg IN 1..4 LOOP
IF seg = 1 THEN
secbox := ST_SetSRID(ST_MakeBox2D(ST_Point(xmid,ymid),ST_Point(xmax,ymax)),4326);
END IF;
- IF st_intersects(geometry, secbox) THEN
- secgeo := st_intersection(geometry, secbox);
- IF NOT ST_IsEmpty(secgeo) AND ST_GeometryType(secgeo) in ('ST_Polygon','ST_MultiPolygon') THEN
- FOR geo IN select quad_split_geometry(secgeo, maxarea, remainingdepth) as geom LOOP
- IF NOT ST_IsEmpty(geo.geom) AND ST_GeometryType(geo.geom) in ('ST_Polygon','ST_MultiPolygon') THEN
- added := added + 1;
- RETURN NEXT geo.geom;
- END IF;
- END LOOP;
- END IF;
+ secgeo := st_intersection(geometry, secbox);
+ IF NOT ST_IsEmpty(secgeo) AND ST_Dimension(secgeo) = 2 THEN
+ FOR geo IN SELECT quad_split_geometry(secgeo, maxarea, remainingdepth) as geom LOOP
+ IF NOT ST_IsEmpty(geo.geom) AND ST_Dimension(geo.geom) = 2 THEN
+ RETURN NEXT geo.geom;
+ END IF;
+ END LOOP;
END IF;
END LOOP;
DECLARE
geo RECORD;
BEGIN
- -- 10000000000 is ~~ 1x1 degree
- FOR geo IN select quad_split_geometry(geometry, 0.25, 20) as geom LOOP
- RETURN NEXT geo.geom;
- END LOOP;
+ IF ST_GeometryType(geometry) = 'ST_MultiPolygon'
+ and ST_Area(geometry) * 10 > ST_Area(Box2D(geometry))
+ THEN
+ FOR geo IN
+ SELECT quad_split_geometry(g, 0.25, 20) as geom
+ FROM (SELECT (ST_Dump(geometry)).geom::geometry(Polygon, 4326) AS g) xx
+ LOOP
+ RETURN NEXT geo.geom;
+ END LOOP;
+ ELSE
+ FOR geo IN
+ SELECT quad_split_geometry(geometry, 0.25, 20) as geom
+ LOOP
+ RETURN NEXT geo.geom;
+ END LOOP;
+ END IF;
RETURN;
END;
$$
LANGUAGE plpgsql IMMUTABLE;
+CREATE OR REPLACE FUNCTION simplify_large_polygons(geometry GEOMETRY)
+ RETURNS GEOMETRY
+ AS $$
+BEGIN
+ IF ST_GeometryType(geometry) in ('ST_Polygon','ST_MultiPolygon')
+ and ST_MemSize(geometry) > 3000000
+ THEN
+ geometry := ST_SimplifyPreserveTopology(geometry, 0.0001);
+ END IF;
+ RETURN geometry;
+END;
+$$
+LANGUAGE plpgsql IMMUTABLE;
+
CREATE OR REPLACE FUNCTION place_force_delete(placeid BIGINT)
RETURNS BOOLEAN
SELECT osm_type, osm_id, class, type FROM placex WHERE place_id = placeid INTO osmtype, osmid, pclass, ptype;
DELETE FROM import_polygon_delete where osm_type = osmtype and osm_id = osmid and class = pclass and type = ptype;
DELETE FROM import_polygon_error where osm_type = osmtype and osm_id = osmid and class = pclass and type = ptype;
- -- force delete from place/placex by making it a very small geometry
- UPDATE place set geometry = ST_SetSRID(ST_Point(0,0), 4326) where osm_type = osmtype and osm_id = osmid and class = pclass and type = ptype;
- DELETE FROM place where osm_type = osmtype and osm_id = osmid and class = pclass and type = ptype;
+ -- force delete by directly entering it into the to-be-deleted table
+ INSERT INTO place_to_be_deleted (osm_type, osm_id, class, type, deferred)
+ VALUES(osmtype, osmid, pclass, ptype, false);
+ PERFORM flush_deleted_places();
RETURN TRUE;
END;
END;
$$
LANGUAGE plpgsql;
+
+CREATE OR REPLACE FUNCTION flush_deleted_places()
+ RETURNS INTEGER
+ AS $$
+BEGIN
+ -- deleting large polygons can have a massive effect on the system - require manual intervention to let them through
+ INSERT INTO import_polygon_delete (osm_type, osm_id, class, type)
+ SELECT osm_type, osm_id, class, type FROM place_to_be_deleted WHERE deferred;
+
+ -- delete from place table
+ ALTER TABLE place DISABLE TRIGGER place_before_delete;
+ DELETE FROM place USING place_to_be_deleted
+ WHERE place.osm_type = place_to_be_deleted.osm_type
+ and place.osm_id = place_to_be_deleted.osm_id
+ and place.class = place_to_be_deleted.class
+ and place.type = place_to_be_deleted.type
+ and not deferred;
+ ALTER TABLE place ENABLE TRIGGER place_before_delete;
+
+ -- Mark for delete in the placex table
+ UPDATE placex SET indexed_status = 100 FROM place_to_be_deleted
+ WHERE placex.osm_type = 'N' and place_to_be_deleted.osm_type = 'N'
+ and placex.osm_id = place_to_be_deleted.osm_id
+ and placex.class = place_to_be_deleted.class
+ and placex.type = place_to_be_deleted.type
+ and not deferred;
+ UPDATE placex SET indexed_status = 100 FROM place_to_be_deleted
+ WHERE placex.osm_type = 'W' and place_to_be_deleted.osm_type = 'W'
+ and placex.osm_id = place_to_be_deleted.osm_id
+ and placex.class = place_to_be_deleted.class
+ and placex.type = place_to_be_deleted.type
+ and not deferred;
+ UPDATE placex SET indexed_status = 100 FROM place_to_be_deleted
+ WHERE placex.osm_type = 'R' and place_to_be_deleted.osm_type = 'R'
+ and placex.osm_id = place_to_be_deleted.osm_id
+ and placex.class = place_to_be_deleted.class
+ and placex.type = place_to_be_deleted.type
+ and not deferred;
+
+ -- Mark for delete in interpolations
+ UPDATE location_property_osmline SET indexed_status = 100 FROM place_to_be_deleted
+ WHERE place_to_be_deleted.osm_type = 'W'
+ and place_to_be_deleted.class = 'place'
+ and place_to_be_deleted.type = 'houses'
+ and location_property_osmline.osm_id = place_to_be_deleted.osm_id
+ and not deferred;
+
+ -- Clear todo list.
+ TRUNCATE TABLE place_to_be_deleted;
+
+ RETURN NULL;
+END;
+$$ LANGUAGE plpgsql;
ON placex USING BTREE (parent_place_id) {{db.tablespace.search_index}}
WHERE parent_place_id IS NOT NULL;
---
+-- Used to find postcode areas after a search in location_postcode.
+CREATE INDEX IF NOT EXISTS idx_placex_postcode_areas
+ ON placex USING BTREE (country_code, postcode) {{db.tablespace.search_index}}
+ WHERE osm_type = 'R' AND class = 'boundary' AND type = 'postal_code';
+---
CREATE INDEX IF NOT EXISTS idx_placex_geometry ON placex
USING GIST (geometry) {{db.tablespace.search_index}};
+-- Index is needed during import but can be dropped as soon as a full
+-- geometry index is in place. The partial index is almost as big as the full
+-- index.
+---
+DROP INDEX IF EXISTS idx_placex_geometry_lower_rank_ways;
---
CREATE INDEX IF NOT EXISTS idx_placex_geometry_reverse_lookupPolygon
ON placex USING gist (geometry) {{db.tablespace.search_index}}
AND rank_address between 4 and 25 AND type != 'postcode'
AND name is not null AND indexed_status = 0 AND linked_place_id is null;
---
+-- used in reverse large area lookup
+CREATE INDEX IF NOT EXISTS idx_placex_geometry_reverse_lookupPlaceNode
+ ON placex USING gist (ST_Buffer(geometry, reverse_place_diameter(rank_search)))
+ {{db.tablespace.search_index}}
+ WHERE rank_address between 4 and 25 AND type != 'postcode'
+ AND name is not null AND linked_place_id is null AND osm_type = 'N';
+---
CREATE INDEX IF NOT EXISTS idx_osmline_parent_place_id
ON location_property_osmline USING BTREE (parent_place_id) {{db.tablespace.search_index}}
WHERE parent_place_id is not null;
---
DROP INDEX IF EXISTS idx_placex_geometry_address_area_candidates;
DROP INDEX IF EXISTS idx_placex_geometry_buildings;
- DROP INDEX IF EXISTS idx_placex_geometry_lower_rank_ways;
DROP INDEX IF EXISTS idx_placex_wikidata;
DROP INDEX IF EXISTS idx_placex_rank_address_sector;
DROP INDEX IF EXISTS idx_placex_rank_boundaries_sector;
---
CREATE UNIQUE INDEX IF NOT EXISTS idx_place_osm_unique
ON place USING btree(osm_id, osm_type, class, type) {{db.tablespace.address_index}};
+---
+-- Table needed for running updates with osm2pgsql on place.
+ CREATE TABLE IF NOT EXISTS place_to_be_deleted (
+ osm_type CHAR(1),
+ osm_id BIGINT,
+ class TEXT,
+ type TEXT,
+ deferred BOOLEAN
+ );
+---
+ CREATE INDEX IF NOT EXISTS idx_location_postcode_parent_place_id
+ ON location_postcode USING BTREE (parent_place_id) {{db.tablespace.address_index}};
{% endif %}
-- Indices only needed for search.
---
CREATE INDEX IF NOT EXISTS idx_search_name_centroid
ON search_name USING GIST (centroid) {{db.tablespace.search_index}};
-
- {% if postgres.has_index_non_key_column %}
----
- CREATE INDEX IF NOT EXISTS idx_placex_housenumber
- ON placex USING btree (parent_place_id)
- INCLUDE (housenumber) {{db.tablespace.search_index}}
- WHERE housenumber is not null;
----
- CREATE INDEX IF NOT EXISTS idx_osmline_parent_osm_id_with_hnr
- ON location_property_osmline USING btree(parent_place_id)
- INCLUDE (startnumber, endnumber) {{db.tablespace.search_index}}
- WHERE startnumber is not null;
- {% endif %}
+---
+ CREATE INDEX IF NOT EXISTS idx_placex_housenumber
+ ON placex USING btree (parent_place_id)
+ INCLUDE (housenumber) {{db.tablespace.search_index}}
+ WHERE housenumber is not null;
+---
+ CREATE INDEX IF NOT EXISTS idx_osmline_parent_osm_id_with_hnr
+ ON location_property_osmline USING btree(parent_place_id)
+ INCLUDE (startnumber, endnumber) {{db.tablespace.search_index}}
+ WHERE startnumber is not null;
{% endif %}
-- Usage: - POI is within building with housenumber
CREATE INDEX idx_placex_geometry_buildings ON placex
- USING {{postgres.spgist_geom}} (geometry) {{db.tablespace.address_index}}
+ USING SPGIST (geometry) {{db.tablespace.address_index}}
WHERE address is not null and rank_search = 30
and ST_GeometryType(geometry) in ('ST_Polygon','ST_MultiPolygon');
-- Usage: - linking of similar named places to boundaries
-- - linking of place nodes with same type to boundaries
--- - lookupPolygon()
CREATE INDEX idx_placex_geometry_placenode ON placex
- USING {{postgres.spgist_geom}} (geometry) {{db.tablespace.address_index}}
+ USING SPGIST (geometry) {{db.tablespace.address_index}}
WHERE osm_type = 'N' and rank_search < 26
and class = 'place' and type != 'postcode';
-- Usage: - is node part of a way?
-- - find parent of interpolation spatially
CREATE INDEX idx_placex_geometry_lower_rank_ways ON placex
- USING {{postgres.spgist_geom}} (geometry) {{db.tablespace.address_index}}
+ USING SPGIST (geometry) {{db.tablespace.address_index}}
WHERE osm_type = 'W' and rank_search >= 26;
-- Usage: - linking place nodes by wikidata tag to boundaries
DROP SEQUENCE IF EXISTS file;
CREATE SEQUENCE file start 1;
--- null table so it won't error
--- deliberately no drop - importing the table is expensive and static, if it is already there better to avoid removing it
-CREATE TABLE IF NOT EXISTS wikipedia_article (
- language text NOT NULL,
- title text NOT NULL,
- langcount integer,
- othercount integer,
- totalcount integer,
- lat double precision,
- lon double precision,
- importance double precision,
- osm_type character(1),
- osm_id bigint,
- wd_page_title text,
- instance_of text
-);
-
-CREATE TABLE IF NOT EXISTS wikipedia_redirect (
- language text,
- from_title text,
- to_title text
-);
+{% if 'wikimedia_importance' not in db.tables and 'wikipedia_article' not in db.tables %}
+-- Create dummy tables here if nothing was imported.
+CREATE TABLE wikimedia_importance (
+ language TEXT NOT NULL,
+ title TEXT NOT NULL,
+ importance double precision NOT NULL,
+ wikidata TEXT
+) {{db.tablespace.address_data}};
+{% endif %}
-- osm2pgsql does not create indexes on the middle tables for Nominatim
-- Add one for lookup of associated street relations.
-CREATE INDEX planet_osm_rels_parts_associated_idx ON planet_osm_rels USING gin(parts) WHERE tags @> ARRAY['associatedStreet'];
+{% if db.middle_db_format == '1' %}
+CREATE INDEX planet_osm_rels_parts_associated_idx ON planet_osm_rels USING gin(parts)
+ {{db.tablespace.address_index}}
+ WHERE tags @> ARRAY['associatedStreet'];
+{% else %}
+CREATE INDEX planet_osm_rels_relation_members_idx ON planet_osm_rels USING gin(planet_osm_member_ids(members, 'R'::character(1)))
+ WITH (fastupdate=off)
+ {{db.tablespace.address_index}};
+{% endif %}
-- Needed for lookups if a node is part of an interpolation.
CREATE INDEX IF NOT EXISTS idx_place_interpolations
--index only on parent_place_id
CREATE INDEX IF NOT EXISTS idx_location_property_tiger_parent_place_id_imp
ON location_property_tiger_import (parent_place_id)
-{% if postgres.has_index_non_key_column %}
INCLUDE (startnumber, endnumber, step)
-{% endif %}
{{db.tablespace.aux_index}};
CREATE UNIQUE INDEX IF NOT EXISTS idx_location_property_tiger_place_id_imp
ON location_property_tiger_import (place_id) {{db.tablespace.aux_index}};
$$ LANGUAGE SQL IMMUTABLE STRICT;
+CREATE OR REPLACE FUNCTION token_is_street_address(info JSONB)
+ RETURNS BOOLEAN
+AS $$
+ SELECT info->>'street' is not null or info->>'place' is null;
+$$ LANGUAGE SQL IMMUTABLE;
+
+
CREATE OR REPLACE FUNCTION token_has_addr_street(info JSONB)
RETURNS BOOLEAN
AS $$
- SELECT info->>'street' is not null;
+ SELECT info->>'street' is not null and info->>'street' != '{}';
$$ LANGUAGE SQL IMMUTABLE;
+++ /dev/null
--- SPDX-License-Identifier: GPL-2.0-only
---
--- This file is part of Nominatim. (https://nominatim.org)
---
--- Copyright (C) 2022 by the Nominatim developer community.
--- For a full list of authors see the git log.
-
-DROP TABLE IF EXISTS word;
-CREATE TABLE word (
- word_id INTEGER,
- word_token text NOT NULL,
- type text NOT NULL,
- word text,
- info jsonb
-) {{db.tablespace.search_data}};
-
-CREATE INDEX idx_word_word_token ON word
- USING BTREE (word_token) {{db.tablespace.search_index}};
--- Used when updating country names from the boundary relation.
-CREATE INDEX idx_word_country_names ON word
- USING btree(word) {{db.tablespace.address_index}}
- WHERE type = 'C';
--- Used when inserting new postcodes on updates.
-CREATE INDEX idx_word_postcodes ON word
- USING btree(word) {{db.tablespace.address_index}}
- WHERE type = 'P';
--- Used when inserting full words.
-CREATE INDEX idx_word_full_word ON word
- USING btree(word) {{db.tablespace.address_index}}
- WHERE type = 'W';
--- Used when inserting analyzed housenumbers (exclude old-style entries).
-CREATE INDEX idx_word_housenumbers ON word
- USING btree(word) {{db.tablespace.address_index}}
- WHERE type = 'H' and word is not null;
-
-GRANT SELECT ON word TO "{{config.DATABASE_WEBUSER}}";
-
-DROP SEQUENCE IF EXISTS seq_word;
-CREATE SEQUENCE seq_word start 1;
-GRANT SELECT ON seq_word to "{{config.DATABASE_WEBUSER}}";
+++ /dev/null
--- SPDX-License-Identifier: GPL-2.0-only
---
--- This file is part of Nominatim. (https://nominatim.org)
---
--- Copyright (C) 2022 by the Nominatim developer community.
--- For a full list of authors see the git log.
-
--- Get tokens used for searching the given place.
---
--- These are the tokens that will be saved in the search_name table.
-CREATE OR REPLACE FUNCTION token_get_name_search_tokens(info JSONB)
- RETURNS INTEGER[]
-AS $$
- SELECT (info->>'names')::INTEGER[]
-$$ LANGUAGE SQL IMMUTABLE STRICT;
-
-
--- Get tokens for matching the place name against others.
---
--- This should usually be restricted to full name tokens.
-CREATE OR REPLACE FUNCTION token_get_name_match_tokens(info JSONB)
- RETURNS INTEGER[]
-AS $$
- SELECT (info->>'names')::INTEGER[]
-$$ LANGUAGE SQL IMMUTABLE STRICT;
-
-
--- Return the housenumber tokens applicable for the place.
-CREATE OR REPLACE FUNCTION token_get_housenumber_search_tokens(info JSONB)
- RETURNS INTEGER[]
-AS $$
- SELECT (info->>'hnr_tokens')::INTEGER[]
-$$ LANGUAGE SQL IMMUTABLE STRICT;
-
-
--- Return the housenumber in the form that it can be matched during search.
-CREATE OR REPLACE FUNCTION token_normalized_housenumber(info JSONB)
- RETURNS TEXT
-AS $$
- SELECT info->>'hnr';
-$$ LANGUAGE SQL IMMUTABLE STRICT;
-
-
-CREATE OR REPLACE FUNCTION token_has_addr_street(info JSONB)
- RETURNS BOOLEAN
-AS $$
- SELECT info->>'street' is not null;
-$$ LANGUAGE SQL IMMUTABLE;
-
-
-CREATE OR REPLACE FUNCTION token_has_addr_place(info JSONB)
- RETURNS BOOLEAN
-AS $$
- SELECT info->>'place_match' is not null;
-$$ LANGUAGE SQL IMMUTABLE;
-
-
-CREATE OR REPLACE FUNCTION token_matches_street(info JSONB, street_tokens INTEGER[])
- RETURNS BOOLEAN
-AS $$
- SELECT (info->>'street')::INTEGER[] && street_tokens
-$$ LANGUAGE SQL IMMUTABLE STRICT;
-
-
-CREATE OR REPLACE FUNCTION token_matches_place(info JSONB, place_tokens INTEGER[])
- RETURNS BOOLEAN
-AS $$
- SELECT (info->>'place_match')::INTEGER[] && place_tokens
-$$ LANGUAGE SQL IMMUTABLE STRICT;
-
-
-CREATE OR REPLACE FUNCTION token_addr_place_search_tokens(info JSONB)
- RETURNS INTEGER[]
-AS $$
- SELECT (info->>'place_search')::INTEGER[]
-$$ LANGUAGE SQL IMMUTABLE STRICT;
-
-
-CREATE OR REPLACE FUNCTION token_get_address_keys(info JSONB)
- RETURNS SETOF TEXT
-AS $$
- SELECT * FROM jsonb_object_keys(info->'addr');
-$$ LANGUAGE SQL IMMUTABLE STRICT;
-
-
-CREATE OR REPLACE FUNCTION token_get_address_search_tokens(info JSONB, key TEXT)
- RETURNS INTEGER[]
-AS $$
- SELECT (info->'addr'->key->>0)::INTEGER[];
-$$ LANGUAGE SQL IMMUTABLE STRICT;
-
-
-CREATE OR REPLACE FUNCTION token_matches_address(info JSONB, key TEXT, tokens INTEGER[])
- RETURNS BOOLEAN
-AS $$
- SELECT (info->'addr'->key->>1)::INTEGER[] && tokens;
-$$ LANGUAGE SQL IMMUTABLE STRICT;
-
-
-CREATE OR REPLACE FUNCTION token_get_postcode(info JSONB)
- RETURNS TEXT
-AS $$
- SELECT info->>'postcode';
-$$ LANGUAGE SQL IMMUTABLE STRICT;
-
-
--- Return token info that should be saved permanently in the database.
-CREATE OR REPLACE FUNCTION token_strip_info(info JSONB)
- RETURNS JSONB
-AS $$
- SELECT NULL::JSONB;
-$$ LANGUAGE SQL IMMUTABLE STRICT;
-
---------------- private functions ----------------------------------------------
-
--- Functions for term normalisation and access to the 'word' table.
-
-CREATE OR REPLACE FUNCTION transliteration(text) RETURNS text
- AS '{{ modulepath }}/nominatim.so', 'transliteration'
-LANGUAGE c IMMUTABLE STRICT;
-
-
-CREATE OR REPLACE FUNCTION gettokenstring(text) RETURNS text
- AS '{{ modulepath }}/nominatim.so', 'gettokenstring'
-LANGUAGE c IMMUTABLE STRICT;
-
-
-CREATE OR REPLACE FUNCTION make_standard_name(name TEXT) RETURNS TEXT
- AS $$
-DECLARE
- o TEXT;
-BEGIN
- o := public.gettokenstring(public.transliteration(name));
- RETURN trim(substr(o,1,length(o)));
-END;
-$$
-LANGUAGE plpgsql IMMUTABLE;
-
--- returns NULL if the word is too common
-CREATE OR REPLACE FUNCTION getorcreate_word_id(lookup_word TEXT)
- RETURNS INTEGER
- AS $$
-DECLARE
- lookup_token TEXT;
- return_word_id INTEGER;
- count INTEGER;
-BEGIN
- lookup_token := trim(lookup_word);
- SELECT min(word_id), max(search_name_count) FROM word
- WHERE word_token = lookup_token and class is null and type is null
- INTO return_word_id, count;
- IF return_word_id IS NULL THEN
- return_word_id := nextval('seq_word');
- INSERT INTO word VALUES (return_word_id, lookup_token, null, null, null, null, 0);
- ELSE
- IF count > {{ max_word_freq }} THEN
- return_word_id := NULL;
- END IF;
- END IF;
- RETURN return_word_id;
-END;
-$$
-LANGUAGE plpgsql;
-
-
--- Create housenumber tokens from an OSM addr:housenumber.
--- The housnumber is split at comma and semicolon as necessary.
--- The function returns the normalized form of the housenumber suitable
--- for comparison.
-CREATE OR REPLACE FUNCTION create_housenumbers(housenumbers TEXT[],
- OUT tokens TEXT,
- OUT normtext TEXT)
- AS $$
-BEGIN
- SELECT array_to_string(array_agg(trans), ';'), array_agg(tid)::TEXT
- INTO normtext, tokens
- FROM (SELECT lookup_word as trans, getorcreate_housenumber_id(lookup_word) as tid
- FROM (SELECT make_standard_name(h) as lookup_word
- FROM unnest(housenumbers) h) x) y;
-END;
-$$ LANGUAGE plpgsql STABLE STRICT;
-
-
-CREATE OR REPLACE FUNCTION getorcreate_housenumber_id(lookup_word TEXT)
- RETURNS INTEGER
- AS $$
-DECLARE
- lookup_token TEXT;
- return_word_id INTEGER;
-BEGIN
- lookup_token := ' ' || trim(lookup_word);
- SELECT min(word_id) FROM word
- WHERE word_token = lookup_token and class='place' and type='house'
- INTO return_word_id;
- IF return_word_id IS NULL THEN
- return_word_id := nextval('seq_word');
- INSERT INTO word VALUES (return_word_id, lookup_token, null,
- 'place', 'house', null, 0);
- END IF;
- RETURN return_word_id;
-END;
-$$
-LANGUAGE plpgsql;
-
-
-CREATE OR REPLACE FUNCTION create_postcode_id(postcode TEXT)
- RETURNS BOOLEAN
- AS $$
-DECLARE
- r RECORD;
- lookup_token TEXT;
- return_word_id INTEGER;
-BEGIN
- lookup_token := ' ' || make_standard_name(postcode);
- FOR r IN
- SELECT word_id FROM word
- WHERE word_token = lookup_token and word = postcode
- and class='place' and type='postcode'
- LOOP
- RETURN false;
- END LOOP;
-
- INSERT INTO word VALUES (nextval('seq_word'), lookup_token, postcode,
- 'place', 'postcode', null, 0);
- RETURN true;
-END;
-$$
-LANGUAGE plpgsql;
-
-
-CREATE OR REPLACE FUNCTION getorcreate_name_id(lookup_word TEXT, src_word TEXT)
- RETURNS INTEGER
- AS $$
-DECLARE
- lookup_token TEXT;
- nospace_lookup_token TEXT;
- return_word_id INTEGER;
-BEGIN
- lookup_token := ' '||trim(lookup_word);
- SELECT min(word_id) FROM word
- WHERE word_token = lookup_token and class is null and type is null
- INTO return_word_id;
- IF return_word_id IS NULL THEN
- return_word_id := nextval('seq_word');
- INSERT INTO word VALUES (return_word_id, lookup_token, src_word,
- null, null, null, 0);
- END IF;
- RETURN return_word_id;
-END;
-$$
-LANGUAGE plpgsql;
-
-
--- Normalize a string and lookup its word ids (partial words).
-CREATE OR REPLACE FUNCTION addr_ids_from_name(lookup_word TEXT)
- RETURNS INTEGER[]
- AS $$
-DECLARE
- words TEXT[];
- id INTEGER;
- return_word_id INTEGER[];
- word_ids INTEGER[];
- j INTEGER;
-BEGIN
- words := string_to_array(make_standard_name(lookup_word), ' ');
- IF array_upper(words, 1) IS NOT NULL THEN
- FOR j IN 1..array_upper(words, 1) LOOP
- IF (words[j] != '') THEN
- SELECT array_agg(word_id) INTO word_ids
- FROM word
- WHERE word_token = words[j] and class is null and type is null;
-
- IF word_ids IS NULL THEN
- id := nextval('seq_word');
- INSERT INTO word VALUES (id, words[j], null, null, null, null, 0);
- return_word_id := return_word_id || id;
- ELSE
- return_word_id := array_merge(return_word_id, word_ids);
- END IF;
- END IF;
- END LOOP;
- END IF;
-
- RETURN return_word_id;
-END;
-$$
-LANGUAGE plpgsql;
-
-
--- Normalize a string and look up its name ids (full words).
-CREATE OR REPLACE FUNCTION word_ids_from_name(lookup_word TEXT)
- RETURNS INTEGER[]
- AS $$
-DECLARE
- lookup_token TEXT;
- return_word_ids INTEGER[];
-BEGIN
- lookup_token := ' '|| make_standard_name(lookup_word);
- SELECT array_agg(word_id) FROM word
- WHERE word_token = lookup_token and class is null and type is null
- INTO return_word_ids;
- RETURN return_word_ids;
-END;
-$$
-LANGUAGE plpgsql STABLE STRICT;
-
-
-CREATE OR REPLACE FUNCTION make_keywords(src HSTORE)
- RETURNS INTEGER[]
- AS $$
-DECLARE
- result INTEGER[];
- s TEXT;
- w INTEGER;
- words TEXT[];
- value TEXT;
- j INTEGER;
-BEGIN
- result := '{}'::INTEGER[];
-
- FOR value IN SELECT unnest(regexp_split_to_array(svals(src), E'[,;]')) LOOP
- -- full name
- s := make_standard_name(value);
- w := getorcreate_name_id(s, value);
-
- IF not(ARRAY[w] <@ result) THEN
- result := result || w;
- END IF;
-
- -- partial single-word terms
- words := string_to_array(s, ' ');
- IF array_upper(words, 1) IS NOT NULL THEN
- FOR j IN 1..array_upper(words, 1) LOOP
- IF (words[j] != '') THEN
- w = getorcreate_word_id(words[j]);
- IF w IS NOT NULL AND NOT (ARRAY[w] <@ result) THEN
- result := result || w;
- END IF;
- END IF;
- END LOOP;
- END IF;
-
- -- consider parts before an opening braket a full word as well
- words := regexp_split_to_array(value, E'[(]');
- IF array_upper(words, 1) > 1 THEN
- s := make_standard_name(words[1]);
- IF s != '' THEN
- w := getorcreate_name_id(s, words[1]);
- IF w IS NOT NULL AND NOT (ARRAY[w] <@ result) THEN
- result := result || w;
- END IF;
- END IF;
- END IF;
-
- s := regexp_replace(value, '市$', '');
- IF s != value THEN
- s := make_standard_name(s);
- IF s != '' THEN
- w := getorcreate_name_id(s, value);
- IF NOT (ARRAY[w] <@ result) THEN
- result := result || w;
- END IF;
- END IF;
- END IF;
-
- END LOOP;
-
- RETURN result;
-END;
-$$
-LANGUAGE plpgsql;
-
-
-CREATE OR REPLACE FUNCTION precompute_words(src TEXT)
- RETURNS INTEGER
- AS $$
-DECLARE
- s TEXT;
- w INTEGER;
- words TEXT[];
- i INTEGER;
- j INTEGER;
-BEGIN
- s := make_standard_name(src);
- w := getorcreate_name_id(s, src);
-
- w := getorcreate_word_id(s);
-
- words := string_to_array(s, ' ');
- IF array_upper(words, 1) IS NOT NULL THEN
- FOR j IN 1..array_upper(words, 1) LOOP
- IF (words[j] != '') THEN
- w := getorcreate_word_id(words[j]);
- END IF;
- END LOOP;
- END IF;
-
- words := regexp_split_to_array(src, E'[,;()]');
- IF array_upper(words, 1) != 1 THEN
- FOR j IN 1..array_upper(words, 1) LOOP
- s := make_standard_name(words[j]);
- IF s != '' THEN
- w := getorcreate_word_id(s);
- END IF;
- END LOOP;
- END IF;
-
- s := regexp_replace(src, '市$', '');
- IF s != src THEN
- s := make_standard_name(s);
- IF s != '' THEN
- w := getorcreate_name_id(s, src);
- END IF;
- END IF;
-
- RETURN 1;
-END;
-$$
-LANGUAGE plpgsql;
+++ /dev/null
--- SPDX-License-Identifier: GPL-2.0-only
---
--- This file is part of Nominatim. (https://nominatim.org)
---
--- Copyright (C) 2022 by the Nominatim developer community.
--- For a full list of authors see the git log.
-
--- Required for details lookup.
-CREATE INDEX IF NOT EXISTS idx_word_word_id
- ON word USING BTREE (word_id) {{db.tablespace.search_index}};
+++ /dev/null
--- SPDX-License-Identifier: GPL-2.0-only
---
--- This file is part of Nominatim. (https://nominatim.org)
---
--- Copyright (C) 2022 by the Nominatim developer community.
--- For a full list of authors see the git log.
-
-DROP TABLE IF EXISTS word;
-CREATE TABLE word (
- word_id INTEGER,
- word_token text NOT NULL,
- word text,
- class text,
- type text,
- country_code varchar(2),
- search_name_count INTEGER,
- operator TEXT
-) {{db.tablespace.search_data}};
-
-CREATE INDEX idx_word_word_token ON word
- USING BTREE (word_token) {{db.tablespace.search_index}};
-CREATE INDEX idx_word_word ON word
- USING BTREE (word) {{db.tablespace.search_index}} WHERE word is not null;
-GRANT SELECT ON word TO "{{config.DATABASE_WEBUSER}}";
-
-DROP SEQUENCE IF EXISTS seq_word;
-CREATE SEQUENCE seq_word start 1;
-GRANT SELECT ON seq_word to "{{config.DATABASE_WEBUSER}}";
+++ /dev/null
-# Creates and installs manual page
-
-configure_file(${CMAKE_CURRENT_SOURCE_DIR}/create-manpage.tmpl create_manpage.py)
-
-find_program(ARGPARSEMANPAGE argparse-manpage)
-
-ADD_CUSTOM_TARGET(manpage
- COMMAND ${ARGPARSEMANPAGE} --pyfile ${CMAKE_CURRENT_BINARY_DIR}/create_manpage.py
- --function get_parser --project-name Nominatim
- --url https://nominatim.org > ${CMAKE_CURRENT_SOURCE_DIR}/nominatim.1
- --author 'the Nominatim developer community'
- --author-email info@nominatim.org
-)
-
-install(FILES ${CMAKE_CURRENT_SOURCE_DIR}/nominatim.1 DESTINATION share/man/man1 )
--- /dev/null
+import sys
+import os
+from pathlib import Path
+
+sys.path.append(str(Path(__file__, '..', '..', 'src').resolve()))
+
+from nominatim_db.cli import get_set_parser
+
+def get_parser():
+ parser = get_set_parser()
+
+ return parser.parser
+++ /dev/null
-#!/usr/bin/env python3
-import sys
-import os
-
-sys.path.append('@PROJECT_SOURCE_DIR@')
-
-from nominatim.cli import get_set_parser
-
-def get_parser():
- parser = get_set_parser(phpcgi_path='@PHPCGI_BIN@')
-
- return parser.parser
nominatim
.SH SYNOPSIS
.B nominatim
-[-h] {import,freeze,replication,special-phrases,add-data,index,refresh,admin,export,serve,search,reverse,lookup,details,status} ...
+[-h] [--version] {import,freeze,replication,special-phrases,add-data,index,refresh,admin,export,convert,serve,search,reverse,lookup,details,status} ...
.SH DESCRIPTION
Command\-line tools for importing, updating, administrating and
.br
.SH OPTIONS
+.TP
+\fB\-\-version\fR
+Print Nominatim version and exit
.SS
\fBSub-commands\fR
Analyse and maintain the database.
.TP
\fBnominatim\fR \fI\,export\/\fR
- Export addresses as CSV file from the database.
+ Export places as CSV file from the database.
+.TP
+\fBnominatim\fR \fI\,convert\/\fR
+ Convert an existing database into a different format. (EXPERIMENTAL)
.TP
\fBnominatim\fR \fI\,serve\/\fR
Start a simple web server for serving the API.
Execute API details query.
.TP
\fBnominatim\fR \fI\,status\/\fR
- Execute API status query.
+
.SH OPTIONS 'nominatim import'
usage: nominatim import [-h] [-q] [-v] [--project-dir DIR] [-j NUM]
- (--osm-file FILE | --continue {load-data,indexing,db-postprocess})
+ [--osm-file FILE]
+ [--continue {import-from-file,load-data,indexing,db-postprocess}]
[--osm2pgsql-cache SIZE] [--reverse-only]
- [--no-partitions] [--no-updates] [--ignore-errors]
- [--index-noanalyse]
+ [--no-partitions] [--no-updates] [--offline]
+ [--ignore-errors] [--index-noanalyse]
+ [--prepare-database]
Create a new Nominatim database from an OSM file.
.br
OSM file to be imported (repeat for importing multiple files)
.TP
-\fB\-\-continue\fR {load\-data,indexing,db\-postprocess}
+\fB\-\-continue\fR {import\-from\-file,load\-data,indexing,db\-postprocess}
Continue an import that was interrupted
.TP
\fB\-\-no\-updates\fR
Do not keep tables that are only needed for updating the database later
+.TP
+\fB\-\-offline\fR
+Do not attempt to load any additional data from the internet
+
.TP
\fB\-\-ignore\-errors\fR
Continue import even when errors in SQL are present
\fB\-\-index\-noanalyse\fR
Do not perform analyse operations during index (expert only)
+.TP
+\fB\-\-prepare\-database\fR
+Create the database but do not import any data
+
.SH OPTIONS 'nominatim freeze'
usage: nominatim freeze [-h] [-q] [-v] [--project-dir DIR] [-j NUM]
.SH OPTIONS 'nominatim replication'
usage: nominatim replication [-h] [-q] [-v] [--project-dir DIR] [-j NUM]
[--init] [--no-update-functions]
- [--check-for-updates] [--once] [--no-index]
- [--osm2pgsql-cache SIZE]
+ [--check-for-updates] [--once] [--catch-up]
+ [--no-index] [--osm2pgsql-cache SIZE]
[--socket-timeout SOCKET_TIMEOUT]
Update the database using an online replication service.
.TP
\fB\-\-no\-update\-functions\fR
-Do not update the trigger function to support differential updates.
+Do not update the trigger function to support differential updates (EXPERT)
.TP
\fB\-\-check\-for\-updates\fR
\fB\-\-once\fR
Download and apply updates only once. When not set, updates are continuously applied
+.TP
+\fB\-\-catch\-up\fR
+Download and apply updates until no new data is available on the server
+
.TP
\fB\-\-no\-index\fR
Do not index the new data. Only usable together with \-\-once
.br
'test/testdb/full_en_phrases_test.csv'.
.br
+
+.br
+ The import can be further configured to ignore specific key/value pairs.
+.br
+ This is particularly useful when importing phrases from the wiki. The
+.br
+ default configuration excludes some very common tags like building=yes.
+.br
+ The configuration can be customized by putting a file `phrase\-settings.json`
+.br
+ with custom rules into the project directory or by using the `\-\-config`
+.br
+ option to point to another configuration file.
+.br
.br
data. See the online documentation at
.br
- https://nominatim.org/release\-docs/latest/admin/Import/#installing\-tiger\-housenumber\-data\-for\-the\-us
+ https://nominatim.org/release\-docs/latest/customize/Tiger/
.br
for more information.
.br
.SH OPTIONS 'nominatim refresh'
usage: nominatim refresh [-h] [-q] [-v] [--project-dir DIR] [-j NUM]
- [--postcodes] [--word-counts] [--address-levels]
- [--functions] [--wiki-data] [--importance]
- [--website] [--no-diff-updates]
- [--enable-debug-statements]
+ [--postcodes] [--word-tokens] [--word-counts]
+ [--address-levels] [--functions] [--wiki-data]
+ [--secondary-importance] [--importance] [--website]
+ [--data-object OBJECT] [--data-area OBJECT]
+ [--no-diff-updates] [--enable-debug-statements]
Recompute auxiliary data used by the indexing process.
.br
\fB\-\-postcodes\fR
Update postcode centroid table
+.TP
+\fB\-\-word\-tokens\fR
+Clean up search terms
+
.TP
\fB\-\-word\-counts\fR
Compute frequency of full\-word search terms
\fB\-\-wiki\-data\fR
Update Wikipedia/data importance numbers
+.TP
+\fB\-\-secondary\-importance\fR
+Update secondary importance raster data
+
.TP
\fB\-\-importance\fR
Recompute place importances (expensive!)
\fB\-\-website\fR
Refresh the directory that serves the scripts for the web API
+.TP
+\fB\-\-data\-object\fR OBJECT
+Mark the given OSM object as requiring an update (format: [NWR]<id>)
+
+.TP
+\fB\-\-data\-area\fR OBJECT
+Mark the area around the given OSM object as requiring an update (format: [NWR]<id>)
+
.TP
\fB\-\-no\-diff\-updates\fR
Do not enable code for propagating updates
.SH OPTIONS 'nominatim admin'
usage: nominatim admin [-h] [-q] [-v] [--project-dir DIR] [-j NUM]
- (--warm | --check-database | --migrate | --analyse-indexing)
+ (--warm | --check-database | --migrate | --analyse-indexing | --collect-os-info | --clean-deleted AGE)
[--search-only] [--reverse-only]
[--osm-id OSM_ID | --place-id PLACE_ID]
\fB\-\-analyse\-indexing\fR
Print performance analysis of the indexing process
+.TP
+\fB\-\-collect\-os\-info\fR
+Generate a report about the host system information
+
+.TP
+\fB\-\-clean\-deleted\fR AGE
+Clean up deleted relations
+
.TP
\fB\-\-search\-only\fR
Only pre\-warm tables for search queries
.SH OPTIONS 'nominatim export'
usage: nominatim export [-h] [-q] [-v] [--project-dir DIR] [-j NUM]
- [--output-type {continent,country,state,county,city,suburb,street,path}]
- [--output-format OUTPUT_FORMAT]
- [--output-all-postcodes] [--language LANGUAGE]
+ [--output-type {country,state,county,city,suburb,street,path}]
+ [--output-format OUTPUT_FORMAT] [--language LANGUAGE]
[--restrict-to-country COUNTRY_CODE]
[--restrict-to-osm-node ID] [--restrict-to-osm-way ID]
[--restrict-to-osm-relation ID]
- Export addresses as CSV file from the database.
+ Export places as CSV file from the database.
+.br
+
+.br
+
.br
Number of parallel threads to use
.TP
-\fB\-\-output\-type\fR {continent,country,state,county,city,suburb,street,path}
+\fB\-\-output\-type\fR {country,state,county,city,suburb,street,path}
Type of places to output (default: street)
.TP
\fB\-\-output\-format\fR \fI\,OUTPUT_FORMAT\/\fR
-Semicolon\-separated list of address types (see \-\-output\-type). Multiple ranks can be merged into one column by simply using a comma\-separated list.
-
-.TP
-\fB\-\-output\-all\-postcodes\fR
-List all postcodes for address instead of just the most likely one
+Semicolon\-separated list of address types (see \-\-output\-type). Additionally accepts: placeid,postcode
.TP
\fB\-\-language\fR \fI\,LANGUAGE\/\fR
\fB\-\-restrict\-to\-osm\-relation\fR ID
Export only children of this OSM relation
+.SH OPTIONS 'nominatim convert'
+usage: nominatim convert [-h] [-q] [-v] [--project-dir DIR] [-j NUM]
+ [--format {sqlite}] --output OUTPUT [--with-reverse]
+ [--with-search] [--with-details]
+
+ Convert an existing database into a different format. (EXPERIMENTAL)
+.br
+
+.br
+ Dump a read\-only version of the database in a different format.
+.br
+ At the moment only a SQLite database suitable for reverse lookup
+.br
+ can be created.
+.br
+
+
+
+.TP
+\fB\-\-format\fR {sqlite}
+Format of the output database (must be sqlite currently)
+
+.TP
+\fB\-\-output\fR \fI\,OUTPUT\/\fR, \fB\-o\fR \fI\,OUTPUT\/\fR
+File to write the database to.
+
+.TP
+\fB\-q\fR, \fB\-\-quiet\fR
+Print only error messages
+
+.TP
+\fB\-v\fR, \fB\-\-verbose\fR
+Increase verboseness of output
+
+.TP
+\fB\-\-project\-dir\fR DIR
+Base directory of the Nominatim installation (default:.)
+
+.TP
+\fB\-j\fR NUM, \fB\-\-threads\fR NUM
+Number of parallel threads to use
+
+.TP
+\fB\-\-with\-reverse\fR, \fB\-\-without\-reverse\fR
+Enable/disable support for reverse and lookup API (default: enabled)
+
+.TP
+\fB\-\-with\-search\fR, \fB\-\-without\-search\fR
+Enable/disable support for search API (default: disabled)
+
+.TP
+\fB\-\-with\-details\fR, \fB\-\-without\-details\fR
+Enable/disable support for details API (default: enabled)
+
.SH OPTIONS 'nominatim serve'
usage: nominatim serve [-h] [-q] [-v] [--project-dir DIR] [-j NUM]
- [--server SERVER]
+ [--server SERVER] [--engine {php,falcon,starlette}]
Start a simple web server for serving the API.
.br
.br
- This command starts the built\-in PHP webserver to serve the website
+ This command starts a built\-in webserver to serve the website
.br
from the current project directory. This webserver is only suitable
.br
for testing and development. Do not use it in production setups!
.br
+.br
+ There are different webservers available. The default 'php' engine
+.br
+ runs the classic PHP frontend. The other engines are Python servers
+.br
+ which run the new Python frontend code. This is highly experimental
+.br
+ at the moment and may not include the full API.
+.br
+
.br
By the default, the webserver can be accessed at: http://127.0.0.1:8088
.br
\fB\-\-server\fR \fI\,SERVER\/\fR
The address the server will listen to.
+.TP
+\fB\-\-engine\fR {php,falcon,starlette}
+Webserver framework to run. (default: falcon)
+
.SH OPTIONS 'nominatim search'
usage: nominatim search [-h] [-q] [-v] [--project-dir DIR] [-j NUM]
- [--query QUERY] [--street STREET] [--city CITY]
- [--county COUNTY] [--state STATE] [--country COUNTRY]
- [--postalcode POSTALCODE]
- [--format {xml,json,jsonv2,geojson,geocodejson}]
+ [--query QUERY] [--amenity AMENITY] [--street STREET]
+ [--city CITY] [--county COUNTY] [--state STATE]
+ [--country COUNTRY] [--postalcode POSTALCODE]
+ [--format {xml,geojson,geocodejson,json,jsonv2,debug}]
[--addressdetails] [--extratags] [--namedetails]
[--lang LANGS]
[--polygon-output {geojson,kml,svg,text}]
\fB\-\-query\fR \fI\,QUERY\/\fR
Free\-form query string
+.TP
+\fB\-\-amenity\fR \fI\,AMENITY\/\fR
+Structured query: name and/or type of POI
+
.TP
\fB\-\-street\fR \fI\,STREET\/\fR
Structured query: housenumber and street
Structured query: postcode
.TP
-\fB\-\-format\fR {xml,json,jsonv2,geojson,geocodejson}
+\fB\-\-format\fR {xml,geojson,geocodejson,json,jsonv2,debug}
Format of result
.TP
.SH OPTIONS 'nominatim reverse'
usage: nominatim reverse [-h] [-q] [-v] [--project-dir DIR] [-j NUM] --lat LAT
- --lon LON [--zoom ZOOM]
- [--format {xml,json,jsonv2,geojson,geocodejson}]
+ --lon LON [--zoom ZOOM] [--layer LAYER]
+ [--format {xml,geojson,geocodejson,json,jsonv2,debug}]
[--addressdetails] [--extratags] [--namedetails]
[--lang LANGS]
[--polygon-output {geojson,kml,svg,text}]
Level of detail required for the address
.TP
-\fB\-\-format\fR {xml,json,jsonv2,geojson,geocodejson}
+\fB\-\-layer\fR LAYER
+Restrict results to the given layer(s) (may be repeated)
+
+.TP
+\fB\-\-format\fR {xml,geojson,geocodejson,json,jsonv2,debug}
Format of result
.TP
.SH OPTIONS 'nominatim lookup'
usage: nominatim lookup [-h] [-q] [-v] [--project-dir DIR] [-j NUM] --id OSMID
- [--format {xml,json,jsonv2,geojson,geocodejson}]
+ [--format {xml,geojson,geocodejson,json,jsonv2,debug}]
[--addressdetails] [--extratags] [--namedetails]
[--lang LANGS]
[--polygon-output {geojson,kml,svg,text}]
OSM id to lookup in format <NRW><id> (may be repeated)
.TP
-\fB\-\-format\fR {xml,json,jsonv2,geojson,geocodejson}
+\fB\-\-format\fR {xml,geojson,geocodejson,json,jsonv2,debug}
Format of result
.TP
-site_name: Nominatim Documentation
-theme: readthedocs
-docs_dir: ${CMAKE_CURRENT_BINARY_DIR}
+site_name: Nominatim Manual
+theme:
+ font: false
+ name: material
+ features:
+ - navigation.tabs
+ - toc.integrate
+ plugins:
+ - privacy
+copyright: Copyright © Nominatim developer community
+docs_dir: docs
site_url: https://nominatim.org
repo_url: https://github.com/openstreetmap/Nominatim
nav:
- 'Basic Installation': 'admin/Installation.md'
- 'Import' : 'admin/Import.md'
- 'Update' : 'admin/Update.md'
- - 'Deploy' : 'admin/Deployment.md'
+ - 'Deploy' : 'admin/Deployment-Python.md'
- 'Nominatim UI' : 'admin/Setup-Nominatim-UI.md'
- 'Advanced Installations' : 'admin/Advanced-Installations.md'
- 'Maintenance' : 'admin/Maintenance.md'
- 'Migration from older Versions' : 'admin/Migration.md'
- 'Troubleshooting' : 'admin/Faq.md'
+ - 'Installation on Ubuntu 22' : 'admin/Install-on-Ubuntu-22.md'
+ - 'Installation on Ubuntu 24' : 'admin/Install-on-Ubuntu-24.md'
- 'Customization Guide':
- 'Overview': 'customize/Overview.md'
- 'Import Styles': 'customize/Import-Styles.md'
- 'Configuration Settings': 'customize/Settings.md'
+ - 'API Result Formatting': 'customize/Result-Formatting.md'
- 'Per-Country Data': 'customize/Country-Settings.md'
- 'Place Ranking' : 'customize/Ranking.md'
- 'Importance' : 'customize/Importance.md'
- 'Special Phrases': 'customize/Special-Phrases.md'
- 'External data: US housenumbers from TIGER': 'customize/Tiger.md'
- 'External data: Postcodes': 'customize/Postcodes.md'
+ - 'Conversion to SQLite': 'customize/SQLite.md'
+ - 'Library Guide':
+ - 'Getting Started': 'library/Getting-Started.md'
+ - 'Nominatim API class': 'library/NominatimAPI.md'
+ - 'Configuration': 'library/Configuration.md'
+ - 'Input Parameter Types': 'library/Input-Parameter-Types.md'
+ - 'Result Handling': 'library/Result-Handling.md'
+ - 'Low-level DB Access': 'library/Low-Level-DB-Access.md'
- 'Developers Guide':
- 'Architecture Overview' : 'develop/overview.md'
- 'Database Layout' : 'develop/Database-Layout.md'
- 'Setup for Development' : 'develop/Development-Environment.md'
- 'Testing' : 'develop/Testing.md'
- 'External Data Sources': 'develop/data-sources.md'
- - 'Appendix':
- - 'Installation on Ubuntu 18' : 'appendix/Install-on-Ubuntu-18.md'
- - 'Installation on Ubuntu 20' : 'appendix/Install-on-Ubuntu-20.md'
- - 'Installation on Ubuntu 22' : 'appendix/Install-on-Ubuntu-22.md'
markdown_extensions:
- codehilite
- admonition
+ - pymdownx.superfences
+ - pymdownx.tabbed:
+ alternate_style: true
- def_list
- toc:
- permalink:
+ toc_depth: 4
+ permalink: 🔗
extra_css: [extra.css, styles.css]
+exclude_docs: |
+ mk_install_instructions.py
+site_dir: site-html
plugins:
- search
- mkdocstrings:
handlers:
- python-legacy:
- rendering:
- show_source: false
- show_signature_annotations: false
+ python:
+ paths: ["src"]
+ options:
+ show_source: False
+ show_bases: False
+ - gen-files:
+ scripts:
+ - docs/mk_install_instructions.py
+++ /dev/null
-# just use the pgxs makefile
-
-foreach(suffix ${PostgreSQL_ADDITIONAL_VERSIONS} "14" "13" "12" "11" "10" "9.6")
- list(APPEND PG_CONFIG_HINTS
- "/usr/pgsql-${suffix}/bin")
-endforeach()
-
-find_program(PG_CONFIG pg_config HINTS ${PG_CONFIG_HINTS})
-
-
-
-execute_process(COMMAND ${PG_CONFIG} --pgxs
- OUTPUT_VARIABLE PGXS
- OUTPUT_STRIP_TRAILING_WHITESPACE)
-
-if (NOT EXISTS "${PGXS}")
-message(FATAL_ERROR "Postgresql server package not found.")
-endif()
-
-ADD_CUSTOM_COMMAND( OUTPUT ${CMAKE_CURRENT_SOURCE_DIR}/dummy
- COMMAND PGXS=${PGXS} PG_CONFIG=${PG_CONFIG} MODSRCDIR=${CMAKE_CURRENT_SOURCE_DIR} $(MAKE) -f ${CMAKE_CURRENT_SOURCE_DIR}/Makefile
- COMMENT "Running external makefile ${PGXS}"
- )
-
-ADD_CUSTOM_TARGET( nominatim_lib ALL
- DEPENDS ${CMAKE_CURRENT_SOURCE_DIR}/dummy
- )
-
+++ /dev/null
-MODULES = nominatim
-PG_CPPFLAGS = -I$(MODSRCDIR)
-include $(PGXS)
-
-VPATH = $(MODSRCDIR)
-
-all:
- chmod 755 nominatim.so
-
-install:
- @echo Library does not need to be installed.
+++ /dev/null
-/**
- * SPDX-License-Identifier: GPL-2.0-only
- *
- * This file is part of Nominatim. (https://nominatim.org)
- *
- * Copyright (C) 2022 by the Nominatim developer community.
- * For a full list of authors see the git log.
- */
-#include "postgres.h"
-#include "fmgr.h"
-#include "mb/pg_wchar.h"
-#include <utfasciitable.h>
-
-#ifdef PG_MODULE_MAGIC
-PG_MODULE_MAGIC;
-#endif
-
-Datum transliteration( PG_FUNCTION_ARGS );
-Datum gettokenstring( PG_FUNCTION_ARGS );
-void str_replace(char* buffer, int* len, int* changes, char* from, int fromlen, char* to, int tolen, int);
-void str_dupspaces(char* buffer);
-
-PG_FUNCTION_INFO_V1( transliteration );
-Datum
-transliteration( PG_FUNCTION_ARGS )
-{
- static char * ascii = UTFASCII;
- static uint16 asciilookup[65536] = UTFASCIILOOKUP;
- char * asciipos;
-
- text *source;
- unsigned char *sourcedata;
- int sourcedatalength;
-
- unsigned int c1,c2,c3,c4;
- unsigned int * wchardata;
- unsigned int * wchardatastart;
-
- text *result;
- unsigned char *resultdata;
- int resultdatalength;
- int iLen;
-
- if (GetDatabaseEncoding() != PG_UTF8)
- {
- ereport(ERROR,
- (errcode(ERRCODE_FEATURE_NOT_SUPPORTED),
- errmsg("requires UTF8 database encoding")));
- }
-
- if (PG_ARGISNULL(0))
- {
- PG_RETURN_NULL();
- }
-
- // The original string
- source = PG_GETARG_TEXT_P(0);
- sourcedata = (unsigned char *)VARDATA(source);
- sourcedatalength = VARSIZE(source) - VARHDRSZ;
-
- // Intermediate wchar version of string
- wchardatastart = wchardata = (unsigned int *)palloc((sourcedatalength+1)*sizeof(int));
-
- // Based on pg_utf2wchar_with_len from wchar.c
- // Postgresql strings are not zero terminalted
- while (sourcedatalength > 0)
- {
- if ((*sourcedata & 0x80) == 0)
- {
- *wchardata = *sourcedata++;
- wchardata++;
- sourcedatalength--;
- }
- else if ((*sourcedata & 0xe0) == 0xc0)
- {
- if (sourcedatalength < 2) break;
- c1 = *sourcedata++ & 0x1f;
- c2 = *sourcedata++ & 0x3f;
- *wchardata = (c1 << 6) | c2;
- if (*wchardata < 65536) wchardata++;
- sourcedatalength -= 2;
- }
- else if ((*sourcedata & 0xf0) == 0xe0)
- {
- if (sourcedatalength < 3) break;
- c1 = *sourcedata++ & 0x0f;
- c2 = *sourcedata++ & 0x3f;
- c3 = *sourcedata++ & 0x3f;
- *wchardata = (c1 << 12) | (c2 << 6) | c3;
- if (*wchardata < 65536) wchardata++;
- sourcedatalength -= 3;
- }
- else if ((*sourcedata & 0xf8) == 0xf0)
- {
- if (sourcedatalength < 4) break;
- c1 = *sourcedata++ & 0x07;
- c2 = *sourcedata++ & 0x3f;
- c3 = *sourcedata++ & 0x3f;
- c4 = *sourcedata++ & 0x3f;
- *wchardata = (c1 << 18) | (c2 << 12) | (c3 << 6) | c4;
- if (*wchardata < 65536) wchardata++;
- sourcedatalength -= 4;
- }
- else if ((*sourcedata & 0xfc) == 0xf8)
- {
- // table does not extend beyond 4 char long, just skip
- if (sourcedatalength < 5) break;
- sourcedatalength -= 5;
- sourcedata += 5;
- }
- else if ((*sourcedata & 0xfe) == 0xfc)
- {
- // table does not extend beyond 4 char long, just skip
- if (sourcedatalength < 6) break;
- sourcedatalength -= 6;
- sourcedata += 6;
- }
- else
- {
- // assume lenngth 1, silently drop bogus characters
- sourcedatalength--;
- sourcedata += 1;
- }
- }
- *wchardata = 0;
-
- // calc the length of transliteration string
- resultdatalength = 0;
- wchardata = wchardatastart;
- while(*wchardata)
- {
- if (*(asciilookup + *wchardata) > 0) resultdatalength += *(ascii + *(asciilookup + *wchardata));
- wchardata++;
- }
-
- // allocate & create the result
- result = (text *)palloc(resultdatalength + VARHDRSZ);
- SET_VARSIZE(result, resultdatalength + VARHDRSZ);
- resultdata = (unsigned char *)VARDATA(result);
-
- wchardata = wchardatastart;
- while(*wchardata)
- {
- if (*(asciilookup + *wchardata) > 0)
- {
- asciipos = ascii + *(asciilookup + *wchardata);
- for(iLen = *asciipos; iLen > 0; iLen--)
- {
- asciipos++;
- *resultdata = *asciipos;
- resultdata++;
- }
- }
- /*else
- {
- ereport( WARNING, ( errcode( ERRCODE_SUCCESSFUL_COMPLETION ),
- errmsg( "missing char: %i\n", *wchardata )));
-
- }*/
- wchardata++;
- }
-
- pfree(wchardatastart);
-
- PG_RETURN_TEXT_P(result);
-}
-
-// Set isspace=1 if the replacement _only_ adds a space before the search string. I.e. to == " " + from
-void str_replace(char* buffer, int* len, int* changes, char* from, int fromlen, char* to, int tolen, int isspace)
-{
- char *p;
-
- // Search string is too long to be present
- if (fromlen > *len) return;
-
- p = strstr(buffer, from);
- while(p)
- {
- if (!isspace || (p > buffer && *(p-1) != ' '))
- {
- (*changes)++;
- if (tolen != fromlen) memmove(p+tolen, p+fromlen, *len-(p-buffer)+1);
- memcpy(p, to, tolen);
- *len += tolen - fromlen;
- }
- p = strstr(p+1, from);
- }
-}
-
-void str_dupspaces(char* buffer)
-{
- char *out;
- int wasspace;
-
- out = buffer;
- wasspace = 0;
- while(*buffer)
- {
- if (wasspace && *buffer != ' ') wasspace = 0;
- if (!wasspace)
- {
- *out = *buffer;
- out++;
- wasspace = (*buffer == ' ');
- }
- buffer++;
- }
- *out = 0;
-}
-
-PG_FUNCTION_INFO_V1( gettokenstring );
-Datum
-gettokenstring( PG_FUNCTION_ARGS )
-{
- text *source;
- unsigned char *sourcedata;
- int sourcedatalength;
-
- char * buffer;
- int len;
- int changes;
-
- text *result;
-
- if (GetDatabaseEncoding() != PG_UTF8)
- {
- ereport(ERROR,
- (errcode(ERRCODE_FEATURE_NOT_SUPPORTED),
- errmsg("requires UTF8 database encoding")));
- }
-
- if (PG_ARGISNULL(0))
- {
- PG_RETURN_NULL();
- }
-
- // The original string
- source = PG_GETARG_TEXT_P(0);
- sourcedata = (unsigned char *)VARDATA(source);
- sourcedatalength = VARSIZE(source) - VARHDRSZ;
-
- // Buffer for doing the replace in - string could get slightly longer (double is massive overkill)
- buffer = (char *)palloc((sourcedatalength*2)*sizeof(char));
- memcpy(buffer+1, sourcedata, sourcedatalength);
- buffer[0] = 32;
- buffer[sourcedatalength+1] = 32;
- buffer[sourcedatalength+2] = 0;
- len = sourcedatalength+3;
-
- changes = 1;
- str_dupspaces(buffer);
- while(changes)
- {
- changes = 0;
- #include <tokenstringreplacements.inc>
- str_dupspaces(buffer);
- }
-
- // 'and' in various languages
- str_replace(buffer, &len, &changes, " and ", 5, " ", 1, 0);
- str_replace(buffer, &len, &changes, " und ", 5, " ", 1, 0);
- str_replace(buffer, &len, &changes, " en ", 4, " ", 1, 0);
- str_replace(buffer, &len, &changes, " et ", 4, " ", 1, 0);
- str_replace(buffer, &len, &changes, " y ", 3, " ", 1, 0);
-
- // 'the' (and similar)
- str_replace(buffer, &len, &changes, " the ", 5, " ", 1, 0);
- str_replace(buffer, &len, &changes, " der ", 5, " ", 1, 0);
- str_replace(buffer, &len, &changes, " den ", 5, " ", 1, 0);
- str_replace(buffer, &len, &changes, " die ", 5, " ", 1, 0);
- str_replace(buffer, &len, &changes, " das ", 5, " ", 1, 0);
- str_replace(buffer, &len, &changes, " la ", 4, " ", 1, 0);
- str_replace(buffer, &len, &changes, " le ", 4, " ", 1, 0);
- str_replace(buffer, &len, &changes, " el ", 4, " ", 1, 0);
- str_replace(buffer, &len, &changes, " il ", 4, " ", 1, 0);
-
- // german
- str_replace(buffer, &len, &changes, "ae", 2, "a", 1, 0);
- str_replace(buffer, &len, &changes, "oe", 2, "o", 1, 0);
- str_replace(buffer, &len, &changes, "ue", 2, "u", 1, 0);
- str_replace(buffer, &len, &changes, "sss", 3, "ss", 2, 0);
- str_replace(buffer, &len, &changes, "ih", 2, "i", 1, 0);
- str_replace(buffer, &len, &changes, "eh", 2, "e", 1, 0);
-
- // russian
- str_replace(buffer, &len, &changes, "ie", 2, "i", 1, 0);
- str_replace(buffer, &len, &changes, "yi", 2, "i", 1, 0);
-
- // allocate & create the result
- len--;// Drop the terminating zero
- result = (text *)palloc(len + VARHDRSZ);
- SET_VARSIZE(result, len + VARHDRSZ);
- memcpy(VARDATA(result), buffer, len);
-
- pfree(buffer);
-
- PG_RETURN_TEXT_P(result);
-}
-
+++ /dev/null
-/**
- * SPDX-License-Identifier: GPL-2.0-only
- *
- * This file is part of Nominatim. (https://nominatim.org)
- *
- * Copyright (C) 2022 by the Nominatim developer community.
- * For a full list of authors see the git log.
- */
- str_replace(buffer, &len, &changes, " national wildlife refuge area ", 31, " nwra ", 6, 0);
- str_replace(buffer, &len, &changes, " national recreation area ", 26, " nra ", 5, 0);
- str_replace(buffer, &len, &changes, " air national guard base ", 25, " angb ", 6, 0);
- str_replace(buffer, &len, &changes, " zhilishchien komplieks ", 24, " zh k ", 6, 0);
- str_replace(buffer, &len, &changes, " trung tam thuong mdhi ", 23, " tttm ", 6, 0);
- str_replace(buffer, &len, &changes, " poligono industrial ", 21, " pgind ", 7, 0);
- str_replace(buffer, &len, &changes, " trung hoc pho thong ", 21, " thpt ", 6, 0);
- str_replace(buffer, &len, &changes, " onze lieve vrouw e ", 20, " olv ", 5, 0);
- str_replace(buffer, &len, &changes, " strada provinciale ", 20, " sp ", 4, 0);
- str_replace(buffer, &len, &changes, "onze lieve vrouw e ", 19, " olv ", 5, 0);
- str_replace(buffer, &len, &changes, " punto kilometrico ", 19, " pk ", 4, 0);
- str_replace(buffer, &len, &changes, " cong vien van hoa ", 19, " cvvh ", 6, 0);
- str_replace(buffer, &len, &changes, " can cu khong quan ", 19, " cckq ", 6, 0);
- str_replace(buffer, &len, &changes, "strada provinciale ", 19, " sp ", 4, 0);
- str_replace(buffer, &len, &changes, " strada regionale ", 18, " sr ", 4, 0);
- str_replace(buffer, &len, &changes, " strada comunale ", 17, " sc ", 4, 0);
- str_replace(buffer, &len, &changes, "strada regionale ", 17, " sr ", 4, 0);
- str_replace(buffer, &len, &changes, " trung hoc co so ", 17, " thcs ", 6, 0);
- str_replace(buffer, &len, &changes, " san bay quoc te ", 17, " sbqt ", 6, 0);
- str_replace(buffer, &len, &changes, " cong ty co phyn ", 17, " ctcp ", 6, 0);
- str_replace(buffer, &len, &changes, " khu cong nghiep ", 17, " kcn ", 5, 0);
- str_replace(buffer, &len, &changes, " air force base ", 16, " afb ", 5, 0);
- str_replace(buffer, &len, &changes, " strada statale ", 16, " ss ", 4, 0);
- str_replace(buffer, &len, &changes, " vien bcyo tang ", 16, " vbt ", 5, 0);
- str_replace(buffer, &len, &changes, "strada comunale ", 16, " sc ", 4, 0);
- str_replace(buffer, &len, &changes, " circunvalacion ", 16, " ccvcn ", 7, 0);
- str_replace(buffer, &len, &changes, " paseo maritimo ", 16, " psmar ", 7, 0);
- str_replace(buffer, &len, &changes, " wielkopolskie ", 15, " wlkp ", 6, 0);
- str_replace(buffer, &len, &changes, " national park ", 15, " np ", 4, 0);
- str_replace(buffer, &len, &changes, " middle school ", 15, " ms ", 4, 0);
- str_replace(buffer, &len, &changes, " international ", 15, " intl ", 6, 0);
- str_replace(buffer, &len, &changes, " burgermeister ", 15, " bgm ", 5, 0);
- str_replace(buffer, &len, &changes, " vuon quoc gia ", 15, " vqg ", 5, 0);
- str_replace(buffer, &len, &changes, " qucyng truong ", 15, " qt ", 4, 0);
- str_replace(buffer, &len, &changes, "strada statale ", 15, " ss ", 4, 0);
- str_replace(buffer, &len, &changes, " state highway ", 15, " sh ", 4, 0);
- str_replace(buffer, &len, &changes, "burgermeister ", 14, " bgm ", 5, 0);
- str_replace(buffer, &len, &changes, " right of way ", 14, " rowy ", 6, 0);
- str_replace(buffer, &len, &changes, " hauptbahnhof ", 14, " hbf ", 5, 0);
- str_replace(buffer, &len, &changes, " apartamentos ", 14, " aptos ", 7, 0);
- str_replace(buffer, &len, &changes, " wielkopolski ", 14, " wlkp ", 6, 0);
- str_replace(buffer, &len, &changes, " burgemeester ", 14, " bg ", 4, 0);
- str_replace(buffer, &len, &changes, " camino nuevo ", 14, " c n ", 5, 0);
- str_replace(buffer, &len, &changes, " camino hondo ", 14, " c h ", 5, 0);
- str_replace(buffer, &len, &changes, " urbanizacion ", 14, " urb ", 5, 0);
- str_replace(buffer, &len, &changes, " camino viejo ", 14, " c v ", 5, 0);
- str_replace(buffer, &len, &changes, " wielkopolska ", 14, " wlkp ", 6, 0);
- str_replace(buffer, &len, &changes, " wojewodztwie ", 14, " woj ", 5, 0);
- str_replace(buffer, &len, &changes, " county route ", 14, " cr ", 4, 0);
- str_replace(buffer, &len, &changes, " prolongacion ", 14, " prol ", 6, 0);
- str_replace(buffer, &len, &changes, " thoroughfare ", 14, " thor ", 6, 0);
- str_replace(buffer, &len, &changes, " san van dong ", 14, " svd ", 5, 0);
- str_replace(buffer, &len, &changes, " tong cong ty ", 14, " tct ", 5, 0);
- str_replace(buffer, &len, &changes, " khu nghi mat ", 14, " knm ", 5, 0);
- str_replace(buffer, &len, &changes, " nha thi dzu ", 13, " ntd ", 5, 0);
- str_replace(buffer, &len, &changes, " khu du lich ", 13, " kdl ", 5, 0);
- str_replace(buffer, &len, &changes, " demarcacion ", 13, " demar ", 7, 0);
- str_replace(buffer, &len, &changes, " cau ldhc bo ", 13, " clb ", 5, 0);
- str_replace(buffer, &len, &changes, " interchange ", 13, " intg ", 6, 0);
- str_replace(buffer, &len, &changes, " distributor ", 13, " dstr ", 6, 0);
- str_replace(buffer, &len, &changes, " state route ", 13, " sr ", 4, 0);
- str_replace(buffer, &len, &changes, " wojewodztwo ", 13, " woj ", 5, 0);
- str_replace(buffer, &len, &changes, " reservation ", 13, " res ", 5, 0);
- str_replace(buffer, &len, &changes, " monseigneur ", 13, " mgr ", 5, 0);
- str_replace(buffer, &len, &changes, " transversal ", 13, " trval ", 7, 0);
- str_replace(buffer, &len, &changes, " extrarradio ", 13, " extrr ", 7, 0);
- str_replace(buffer, &len, &changes, " high school ", 13, " hs ", 4, 0);
- str_replace(buffer, &len, &changes, " mazowieckie ", 13, " maz ", 5, 0);
- str_replace(buffer, &len, &changes, " residencial ", 13, " resid ", 7, 0);
- str_replace(buffer, &len, &changes, " cong truong ", 13, " ct ", 4, 0);
- str_replace(buffer, &len, &changes, " cooperativa ", 13, " coop ", 6, 0);
- str_replace(buffer, &len, &changes, " diseminado ", 12, " disem ", 7, 0);
- str_replace(buffer, &len, &changes, " barranquil ", 12, " bqllo ", 7, 0);
- str_replace(buffer, &len, &changes, " fire track ", 12, " ftrk ", 6, 0);
- str_replace(buffer, &len, &changes, " south east ", 12, " se ", 4, 0);
- str_replace(buffer, &len, &changes, " north east ", 12, " ne ", 4, 0);
- str_replace(buffer, &len, &changes, " university ", 12, " univ ", 6, 0);
- str_replace(buffer, &len, &changes, " south west ", 12, " sw ", 4, 0);
- str_replace(buffer, &len, &changes, " monasterio ", 12, " mtrio ", 7, 0);
- str_replace(buffer, &len, &changes, " vecindario ", 12, " vecin ", 7, 0);
- str_replace(buffer, &len, &changes, " carreterin ", 12, " ctrin ", 7, 0);
- str_replace(buffer, &len, &changes, " callejuela ", 12, " cjla ", 6, 0);
- str_replace(buffer, &len, &changes, " north-east ", 12, " ne ", 4, 0);
- str_replace(buffer, &len, &changes, " south-west ", 12, " sw ", 4, 0);
- str_replace(buffer, &len, &changes, " gebroeders ", 12, " gebr ", 6, 0);
- str_replace(buffer, &len, &changes, " serviceway ", 12, " swy ", 5, 0);
- str_replace(buffer, &len, &changes, " quadrangle ", 12, " qdgl ", 6, 0);
- str_replace(buffer, &len, &changes, " commandant ", 12, " cmdt ", 6, 0);
- str_replace(buffer, &len, &changes, " extramuros ", 12, " extrm ", 7, 0);
- str_replace(buffer, &len, &changes, " escalinata ", 12, " escal ", 7, 0);
- str_replace(buffer, &len, &changes, " north-west ", 12, " n ", 3, 0);
- str_replace(buffer, &len, &changes, " bulevardul ", 12, " bd ", 4, 0);
- str_replace(buffer, &len, &changes, " particular ", 12, " parti ", 7, 0);
- str_replace(buffer, &len, &changes, " mazowiecka ", 12, " maz ", 5, 0);
- str_replace(buffer, &len, &changes, " mazowiecki ", 12, " maz ", 5, 0);
- str_replace(buffer, &len, &changes, " north west ", 12, " n ", 3, 0);
- str_replace(buffer, &len, &changes, " industrial ", 12, " ind ", 5, 0);
- str_replace(buffer, &len, &changes, " costanilla ", 12, " cstan ", 7, 0);
- str_replace(buffer, &len, &changes, " khach sdhn ", 12, " ks ", 4, 0);
- str_replace(buffer, &len, &changes, " south-east ", 12, " se ", 4, 0);
- str_replace(buffer, &len, &changes, " phi truong ", 12, " pt ", 4, 0);
- str_replace(buffer, &len, &changes, " expressway ", 12, " exp ", 5, 0);
- str_replace(buffer, &len, &changes, " fondamenta ", 12, " f ta ", 6, 0);
- str_replace(buffer, &len, &changes, " apartments ", 12, " apts ", 6, 0);
- str_replace(buffer, &len, &changes, " cul de sac ", 12, " cds ", 5, 0);
- str_replace(buffer, &len, &changes, " corralillo ", 12, " crrlo ", 7, 0);
- str_replace(buffer, &len, &changes, " mitropolit ", 12, " mit ", 5, 0);
- str_replace(buffer, &len, &changes, " etorbidea ", 11, " etorb ", 7, 0);
- str_replace(buffer, &len, &changes, " ploshchad ", 11, " pl ", 4, 0);
- str_replace(buffer, &len, &changes, " cobertizo ", 11, " cbtiz ", 7, 0);
- str_replace(buffer, &len, &changes, " underpass ", 11, " upas ", 6, 0);
- str_replace(buffer, &len, &changes, " crossroad ", 11, " crd ", 5, 0);
- str_replace(buffer, &len, &changes, " fundatura ", 11, " fnd ", 5, 0);
- str_replace(buffer, &len, &changes, " foreshore ", 11, " fshr ", 6, 0);
- str_replace(buffer, &len, &changes, " parklands ", 11, " pkld ", 6, 0);
- str_replace(buffer, &len, &changes, " esplanade ", 11, " esp ", 5, 0);
- str_replace(buffer, &len, &changes, " centreway ", 11, " cnwy ", 6, 0);
- str_replace(buffer, &len, &changes, " formation ", 11, " form ", 6, 0);
- str_replace(buffer, &len, &changes, " explanada ", 11, " expla ", 7, 0);
- str_replace(buffer, &len, &changes, " viviendas ", 11, " vvdas ", 7, 0);
- str_replace(buffer, &len, &changes, " northeast ", 11, " ne ", 4, 0);
- str_replace(buffer, &len, &changes, " cong vien ", 11, " cv ", 4, 0);
- str_replace(buffer, &len, &changes, " northwest ", 11, " n ", 3, 0);
- str_replace(buffer, &len, &changes, " buildings ", 11, " bldgs ", 7, 0);
- str_replace(buffer, &len, &changes, " errepidea ", 11, " err ", 5, 0);
- str_replace(buffer, &len, &changes, " extension ", 11, " ex ", 4, 0);
- str_replace(buffer, &len, &changes, " municipal ", 11, " mun ", 5, 0);
- str_replace(buffer, &len, &changes, " southeast ", 11, " se ", 4, 0);
- str_replace(buffer, &len, &changes, " sanatorio ", 11, " sanat ", 7, 0);
- str_replace(buffer, &len, &changes, " thanh pho ", 11, " tp ", 4, 0);
- str_replace(buffer, &len, &changes, " firetrail ", 11, " fit ", 5, 0);
- str_replace(buffer, &len, &changes, " santuario ", 11, " santu ", 7, 0);
- str_replace(buffer, &len, &changes, " southwest ", 11, " sw ", 4, 0);
- str_replace(buffer, &len, &changes, " autopista ", 11, " auto ", 6, 0);
- str_replace(buffer, &len, &changes, " president ", 11, " pres ", 6, 0);
- str_replace(buffer, &len, &changes, " rinconada ", 11, " rcda ", 6, 0);
- str_replace(buffer, &len, &changes, " kardinaal ", 11, " kard ", 6, 0);
- str_replace(buffer, &len, &changes, " plazoleta ", 11, " pzta ", 6, 0);
- str_replace(buffer, &len, &changes, " duong sat ", 11, " ds ", 4, 0);
- str_replace(buffer, &len, &changes, " trung tam ", 11, " tt ", 4, 0);
- str_replace(buffer, &len, &changes, " piazzetta ", 11, " pta ", 5, 0);
- str_replace(buffer, &len, &changes, " boardwalk ", 11, " bwlk ", 6, 0);
- str_replace(buffer, &len, &changes, " bulievard ", 11, " bd ", 4, 0);
- str_replace(buffer, &len, &changes, " luitenant ", 11, " luit ", 6, 0);
- str_replace(buffer, &len, &changes, " courtyard ", 11, " ctyd ", 6, 0);
- str_replace(buffer, &len, &changes, " reservoir ", 11, " res ", 5, 0);
- str_replace(buffer, &len, &changes, " bulevardu ", 11, " bd ", 4, 0);
- str_replace(buffer, &len, &changes, " community ", 11, " comm ", 6, 0);
- str_replace(buffer, &len, &changes, " concourse ", 11, " con ", 5, 0);
- str_replace(buffer, &len, &changes, " profiesor ", 11, " prof ", 6, 0);
- str_replace(buffer, &len, &changes, " promenade ", 11, " prom ", 6, 0);
- str_replace(buffer, &len, &changes, " gienieral ", 11, " ghien ", 7, 0);
- str_replace(buffer, &len, &changes, " puistikko ", 11, " pko ", 5, 0);
- str_replace(buffer, &len, &changes, " balneario ", 11, " balnr ", 7, 0);
- str_replace(buffer, &len, &changes, " carretera ", 11, " ctra ", 6, 0);
- str_replace(buffer, &len, &changes, " ingenieur ", 11, " ir ", 4, 0);
- str_replace(buffer, &len, &changes, " boulevard ", 11, " bd ", 4, 0);
- str_replace(buffer, &len, &changes, " deviation ", 11, " devn ", 6, 0);
- str_replace(buffer, &len, &changes, " hipodromo ", 11, " hipod ", 7, 0);
- str_replace(buffer, &len, &changes, " professor ", 11, " prof ", 6, 0);
- str_replace(buffer, &len, &changes, " triangle ", 10, " tri ", 5, 0);
- str_replace(buffer, &len, &changes, " dotsient ", 10, " dots ", 6, 0);
- str_replace(buffer, &len, &changes, " boundary ", 10, " bdy ", 5, 0);
- str_replace(buffer, &len, &changes, " salizada ", 10, " s da ", 6, 0);
- str_replace(buffer, &len, &changes, " trunkway ", 10, " tkwy ", 6, 0);
- str_replace(buffer, &len, &changes, " cinturon ", 10, " cint ", 6, 0);
- str_replace(buffer, &len, &changes, "president ", 10, " pres ", 6, 0);
- str_replace(buffer, &len, &changes, " military ", 10, " mil ", 5, 0);
- str_replace(buffer, &len, &changes, " jonkheer ", 10, " jhr ", 5, 0);
- str_replace(buffer, &len, &changes, " motorway ", 10, " mwy ", 5, 0);
- str_replace(buffer, &len, &changes, " steenweg ", 10, " stwg ", 6, 0);
- str_replace(buffer, &len, &changes, " crescent ", 10, " cr ", 4, 0);
- str_replace(buffer, &len, &changes, " kanunnik ", 10, " kan ", 5, 0);
- str_replace(buffer, &len, &changes, " koningin ", 10, " kon ", 5, 0);
- str_replace(buffer, &len, &changes, " crossing ", 10, " xing ", 6, 0);
- str_replace(buffer, &len, &changes, " callejon ", 10, " cjon ", 6, 0);
- str_replace(buffer, &len, &changes, " pasadizo ", 10, " pzo ", 5, 0);
- str_replace(buffer, &len, &changes, " crossway ", 10, " cowy ", 6, 0);
- str_replace(buffer, &len, &changes, " cottages ", 10, " cotts ", 7, 0);
- str_replace(buffer, &len, &changes, " mountain ", 10, " mtn ", 5, 0);
- str_replace(buffer, &len, &changes, " business ", 10, " bus ", 5, 0);
- str_replace(buffer, &len, &changes, " pierwszy ", 10, " 1 ", 3, 0);
- str_replace(buffer, &len, &changes, " pierwsza ", 10, " 1 ", 3, 0);
- str_replace(buffer, &len, &changes, " pierwsze ", 10, " 1 ", 3, 0);
- str_replace(buffer, &len, &changes, " barriada ", 10, " barda ", 7, 0);
- str_replace(buffer, &len, &changes, " entrance ", 10, " ent ", 5, 0);
- str_replace(buffer, &len, &changes, " causeway ", 10, " cway ", 6, 0);
- str_replace(buffer, &len, &changes, " generaal ", 10, " gen ", 5, 0);
- str_replace(buffer, &len, &changes, " driveway ", 10, " dvwy ", 6, 0);
- str_replace(buffer, &len, &changes, " township ", 10, " twp ", 5, 0);
- str_replace(buffer, &len, &changes, " stazione ", 10, " staz ", 6, 0);
- str_replace(buffer, &len, &changes, " broadway ", 10, " bway ", 6, 0);
- str_replace(buffer, &len, &changes, " alleyway ", 10, " alwy ", 6, 0);
- str_replace(buffer, &len, &changes, " quadrant ", 10, " qdrt ", 6, 0);
- str_replace(buffer, &len, &changes, " apeadero ", 10, " apdro ", 7, 0);
- str_replace(buffer, &len, &changes, " arboleda ", 10, " arb ", 5, 0);
- str_replace(buffer, &len, &changes, " escalera ", 10, " esca ", 6, 0);
- str_replace(buffer, &len, &changes, " rdhp hat ", 10, " rh ", 4, 0);
- str_replace(buffer, &len, &changes, " transito ", 10, " trans ", 7, 0);
- str_replace(buffer, &len, &changes, " ddhi hoc ", 10, " dh ", 4, 0);
- str_replace(buffer, &len, &changes, " travesia ", 10, " trva ", 6, 0);
- str_replace(buffer, &len, &changes, " barranco ", 10, " branc ", 7, 0);
- str_replace(buffer, &len, &changes, " namestie ", 10, " nam ", 5, 0);
- str_replace(buffer, &len, &changes, " viaducto ", 10, " vcto ", 6, 0);
- str_replace(buffer, &len, &changes, " convento ", 10, " cnvto ", 7, 0);
- str_replace(buffer, &len, &changes, " estacion ", 10, " estcn ", 7, 0);
- str_replace(buffer, &len, &changes, "puistikko ", 10, " pko ", 5, 0);
- str_replace(buffer, &len, &changes, " precinct ", 10, " pct ", 5, 0);
- str_replace(buffer, &len, &changes, " heiligen ", 10, " hl ", 4, 0);
- str_replace(buffer, &len, &changes, " edificio ", 10, " edifc ", 7, 0);
- str_replace(buffer, &len, &changes, " prazuela ", 10, " przla ", 7, 0);
- str_replace(buffer, &len, &changes, " thi trzn ", 10, " tt ", 4, 0);
- str_replace(buffer, &len, &changes, " ridgeway ", 10, " rgwy ", 6, 0);
- str_replace(buffer, &len, &changes, " riverway ", 10, " rvwy ", 6, 0);
- str_replace(buffer, &len, &changes, " corredor ", 10, " crrdo ", 7, 0);
- str_replace(buffer, &len, &changes, " passatge ", 10, " ptge ", 6, 0);
- str_replace(buffer, &len, &changes, " junction ", 10, " jnc ", 5, 0);
- str_replace(buffer, &len, &changes, " hospital ", 10, " hosp ", 6, 0);
- str_replace(buffer, &len, &changes, " highroad ", 10, " hrd ", 5, 0);
- str_replace(buffer, &len, &changes, " torrente ", 10, " trrnt ", 7, 0);
- str_replace(buffer, &len, &changes, " avinguda ", 10, " av ", 4, 0);
- str_replace(buffer, &len, &changes, " portillo ", 10, " ptilo ", 7, 0);
- str_replace(buffer, &len, &changes, " diagonal ", 10, " diag ", 6, 0);
- str_replace(buffer, &len, &changes, " buu dien ", 10, " bd ", 4, 0);
- str_replace(buffer, &len, &changes, " alqueria ", 10, " alque ", 7, 0);
- str_replace(buffer, &len, &changes, " poligono ", 10, " polig ", 7, 0);
- str_replace(buffer, &len, &changes, " roadside ", 10, " rdsd ", 6, 0);
- str_replace(buffer, &len, &changes, " glorieta ", 10, " gta ", 5, 0);
- str_replace(buffer, &len, &changes, " fundacul ", 10, " fdc ", 5, 0);
- str_replace(buffer, &len, &changes, " cao dang ", 10, " cd ", 4, 0);
- str_replace(buffer, &len, &changes, " rosebowl ", 10, " rsbl ", 6, 0);
- str_replace(buffer, &len, &changes, " complejo ", 10, " compj ", 7, 0);
- str_replace(buffer, &len, &changes, " carretil ", 10, " crtil ", 7, 0);
- str_replace(buffer, &len, &changes, " intrarea ", 10, " int ", 5, 0);
- str_replace(buffer, &len, &changes, " gran via ", 10, " g v ", 5, 0);
- str_replace(buffer, &len, &changes, " approach ", 10, " app ", 5, 0);
- str_replace(buffer, &len, &changes, " stradela ", 10, " sdla ", 6, 0);
- str_replace(buffer, &len, &changes, " conjunto ", 10, " cjto ", 6, 0);
- str_replace(buffer, &len, &changes, " arterial ", 10, " artl ", 6, 0);
- str_replace(buffer, &len, &changes, " plazuela ", 10, " plzla ", 7, 0);
- str_replace(buffer, &len, &changes, " frontage ", 10, " frtg ", 6, 0);
- str_replace(buffer, &len, &changes, " faubourg ", 10, " fg ", 4, 0);
- str_replace(buffer, &len, &changes, " mansions ", 10, " mans ", 6, 0);
- str_replace(buffer, &len, &changes, " turnpike ", 10, " tpk ", 5, 0);
- str_replace(buffer, &len, &changes, " piazzale ", 10, " p le ", 6, 0);
- str_replace(buffer, &len, &changes, " tieu hoc ", 10, " th ", 4, 0);
- str_replace(buffer, &len, &changes, " bulevard ", 10, " bd ", 4, 0);
- str_replace(buffer, &len, &changes, " sendera ", 9, " sedra ", 7, 0);
- str_replace(buffer, &len, &changes, " cutting ", 9, " cutt ", 6, 0);
- str_replace(buffer, &len, &changes, " cantina ", 9, " canti ", 7, 0);
- str_replace(buffer, &len, &changes, " cantera ", 9, " cantr ", 7, 0);
- str_replace(buffer, &len, &changes, " rotonda ", 9, " rtda ", 6, 0);
- str_replace(buffer, &len, &changes, " pasillo ", 9, " psllo ", 7, 0);
- str_replace(buffer, &len, &changes, " landing ", 9, " ldg ", 5, 0);
- str_replace(buffer, &len, &changes, " kolonel ", 9, " kol ", 5, 0);
- str_replace(buffer, &len, &changes, " cong ty ", 9, " cty ", 5, 0);
- str_replace(buffer, &len, &changes, " fairway ", 9, " fawy ", 6, 0);
- str_replace(buffer, &len, &changes, " highway ", 9, " hwy ", 5, 0);
- str_replace(buffer, &len, &changes, " lookout ", 9, " lkt ", 5, 0);
- str_replace(buffer, &len, &changes, " meander ", 9, " mr ", 4, 0);
- str_replace(buffer, &len, &changes, " carrera ", 9, " cra ", 5, 0);
- str_replace(buffer, &len, &changes, " station ", 9, " stn ", 5, 0);
- str_replace(buffer, &len, &changes, " kapitan ", 9, " kap ", 5, 0);
- str_replace(buffer, &len, &changes, " medical ", 9, " med ", 5, 0);
- str_replace(buffer, &len, &changes, " broeder ", 9, " br ", 4, 0);
- str_replace(buffer, &len, &changes, " poblado ", 9, " pbdo ", 6, 0);
- str_replace(buffer, &len, &changes, " impasse ", 9, " imp ", 5, 0);
- str_replace(buffer, &len, &changes, " gardens ", 9, " gdn ", 5, 0);
- str_replace(buffer, &len, &changes, " nha tho ", 9, " nt ", 4, 0);
- str_replace(buffer, &len, &changes, " nha hat ", 9, " nh ", 4, 0);
- str_replace(buffer, &len, &changes, " freeway ", 9, " fwy ", 5, 0);
- str_replace(buffer, &len, &changes, " trasera ", 9, " tras ", 6, 0);
- str_replace(buffer, &len, &changes, " portico ", 9, " prtco ", 7, 0);
- str_replace(buffer, &len, &changes, " terrace ", 9, " ter ", 5, 0);
- str_replace(buffer, &len, &changes, " heights ", 9, " hts ", 5, 0);
- str_replace(buffer, &len, &changes, " camping ", 9, " campg ", 7, 0);
- str_replace(buffer, &len, &changes, " callizo ", 9, " cllzo ", 7, 0);
- str_replace(buffer, &len, &changes, " footway ", 9, " ftwy ", 6, 0);
- str_replace(buffer, &len, &changes, " calzada ", 9, " czada ", 7, 0);
- str_replace(buffer, &len, &changes, " dominee ", 9, " ds ", 4, 0);
- str_replace(buffer, &len, &changes, " meadows ", 9, " mdws ", 6, 0);
- str_replace(buffer, &len, &changes, " sendero ", 9, " send ", 6, 0);
- str_replace(buffer, &len, &changes, " osiedle ", 9, " os ", 4, 0);
- str_replace(buffer, &len, &changes, " estrada ", 9, " estda ", 7, 0);
- str_replace(buffer, &len, &changes, " avenida ", 9, " av ", 4, 0);
- str_replace(buffer, &len, &changes, " zgornji ", 9, " zg ", 4, 0);
- str_replace(buffer, &len, &changes, " zgornje ", 9, " zg ", 4, 0);
- str_replace(buffer, &len, &changes, " zgornja ", 9, " zg ", 4, 0);
- str_replace(buffer, &len, &changes, " arrabal ", 9, " arral ", 7, 0);
- str_replace(buffer, &len, &changes, " espalda ", 9, " eslda ", 7, 0);
- str_replace(buffer, &len, &changes, " entrada ", 9, " entd ", 6, 0);
- str_replace(buffer, &len, &changes, " kleiner ", 9, " kl ", 4, 0);
- str_replace(buffer, &len, &changes, " kleines ", 9, " kl ", 4, 0);
- str_replace(buffer, &len, &changes, " viaduct ", 9, " via ", 5, 0);
- str_replace(buffer, &len, &changes, " roadway ", 9, " rdwy ", 6, 0);
- str_replace(buffer, &len, &changes, " strasse ", 9, " st ", 4, 0);
- str_replace(buffer, &len, &changes, " spodnje ", 9, " sp ", 4, 0);
- str_replace(buffer, &len, &changes, " spodnji ", 9, " sp ", 4, 0);
- str_replace(buffer, &len, &changes, " spodnja ", 9, " sp ", 4, 0);
- str_replace(buffer, &len, &changes, " fabrica ", 9, " fca ", 5, 0);
- str_replace(buffer, &len, &changes, " muntele ", 9, " mt ", 4, 0);
- str_replace(buffer, &len, &changes, " maantee ", 9, " mt ", 4, 0);
- str_replace(buffer, &len, &changes, " srednje ", 9, " sr ", 4, 0);
- str_replace(buffer, &len, &changes, " unterer ", 9, " u ", 3, 0);
- str_replace(buffer, &len, &changes, " unteres ", 9, " u ", 3, 0);
- str_replace(buffer, &len, &changes, " plateau ", 9, " plat ", 6, 0);
- str_replace(buffer, &len, &changes, " srednji ", 9, " sr ", 4, 0);
- str_replace(buffer, &len, &changes, " empresa ", 9, " empr ", 6, 0);
- str_replace(buffer, &len, &changes, " angosta ", 9, " angta ", 7, 0);
- str_replace(buffer, &len, &changes, " costera ", 9, " coste ", 7, 0);
- str_replace(buffer, &len, &changes, " tinh lo ", 9, " tl ", 4, 0);
- str_replace(buffer, &len, &changes, " quoc lo ", 9, " ql ", 4, 0);
- str_replace(buffer, &len, &changes, " auf der ", 9, " a d ", 5, 0);
- str_replace(buffer, &len, &changes, " bulvari ", 9, " bl ", 4, 0);
- str_replace(buffer, &len, &changes, " ddhi lo ", 9, " dl ", 4, 0);
- str_replace(buffer, &len, &changes, " namesti ", 9, " nam ", 5, 0);
- str_replace(buffer, &len, &changes, " passeig ", 9, " pg ", 4, 0);
- str_replace(buffer, &len, &changes, " carrero ", 9, " cro ", 5, 0);
- str_replace(buffer, &len, &changes, " cortijo ", 9, " crtjo ", 7, 0);
- str_replace(buffer, &len, &changes, " san bay ", 9, " sb ", 4, 0);
- str_replace(buffer, &len, &changes, " riviera ", 9, " rvra ", 6, 0);
- str_replace(buffer, &len, &changes, " caddesi ", 9, " cd ", 4, 0);
- str_replace(buffer, &len, &changes, " andador ", 9, " andad ", 7, 0);
- str_replace(buffer, &len, &changes, " walkway ", 9, " wkwy ", 6, 0);
- str_replace(buffer, &len, &changes, " granden ", 9, " gr ", 4, 0);
- str_replace(buffer, &len, &changes, " grosser ", 9, " gr ", 4, 0);
- str_replace(buffer, &len, &changes, " grosses ", 9, " gr ", 4, 0);
- str_replace(buffer, &len, &changes, " reserve ", 9, " res ", 5, 0);
- str_replace(buffer, &len, &changes, " alameda ", 9, " alam ", 6, 0);
- str_replace(buffer, &len, &changes, " retreat ", 9, " rtt ", 5, 0);
- str_replace(buffer, &len, &changes, " acequia ", 9, " aceq ", 6, 0);
- str_replace(buffer, &len, &changes, " platsen ", 9, " pl ", 4, 0);
- str_replace(buffer, &len, &changes, " bahnhof ", 9, " bf ", 4, 0);
- str_replace(buffer, &len, &changes, " autovia ", 9, " autov ", 7, 0);
- str_replace(buffer, &len, &changes, " srednja ", 9, " sr ", 4, 0);
- str_replace(buffer, &len, &changes, " galeria ", 9, " gale ", 6, 0);
- str_replace(buffer, &len, &changes, " circuit ", 9, " cct ", 5, 0);
- str_replace(buffer, &len, &changes, " svingen ", 9, " sv ", 4, 0);
- str_replace(buffer, &len, &changes, " plassen ", 9, " pl ", 4, 0);
- str_replace(buffer, &len, &changes, " mirador ", 9, " mrdor ", 7, 0);
- str_replace(buffer, &len, &changes, " laneway ", 9, " lnwy ", 6, 0);
- str_replace(buffer, &len, &changes, " kolonia ", 9, " kol ", 5, 0);
- str_replace(buffer, &len, &changes, " outlook ", 9, " otlk ", 6, 0);
- str_replace(buffer, &len, &changes, " caravan ", 9, " cvn ", 5, 0);
- str_replace(buffer, &len, &changes, " osiedlu ", 9, " os ", 4, 0);
- str_replace(buffer, &len, &changes, " palacio ", 9, " palac ", 7, 0);
- str_replace(buffer, &len, &changes, " pantano ", 9, " pant ", 6, 0);
- str_replace(buffer, &len, &changes, " partida ", 9, " ptda ", 6, 0);
- str_replace(buffer, &len, &changes, " calleja ", 9, " cllja ", 7, 0);
- str_replace(buffer, &len, &changes, " mevrouw ", 9, " mevr ", 6, 0);
- str_replace(buffer, &len, &changes, " meester ", 9, " mr ", 4, 0);
- str_replace(buffer, &len, &changes, " pastoor ", 9, " past ", 6, 0);
- str_replace(buffer, &len, &changes, " prinses ", 9, " pr ", 4, 0);
- str_replace(buffer, &len, &changes, " bulevar ", 9, " bd ", 4, 0);
- str_replace(buffer, &len, &changes, " tollway ", 9, " tlwy ", 6, 0);
- str_replace(buffer, &len, &changes, "steenweg ", 9, " stwg ", 6, 0);
- str_replace(buffer, &len, &changes, " caserio ", 9, " csrio ", 7, 0);
- str_replace(buffer, &len, &changes, " mercado ", 9, " merc ", 6, 0);
- str_replace(buffer, &len, &changes, " alejach ", 9, " al ", 4, 0);
- str_replace(buffer, &len, &changes, " kvartal ", 9, " kv ", 4, 0);
- str_replace(buffer, &len, &changes, " parkway ", 9, " pwy ", 5, 0);
- str_replace(buffer, &len, &changes, " passage ", 9, " ps ", 4, 0);
- str_replace(buffer, &len, &changes, " pathway ", 9, " pway ", 6, 0);
- str_replace(buffer, &len, &changes, " splaiul ", 9, " sp ", 4, 0);
- str_replace(buffer, &len, &changes, " soseaua ", 9, " sos ", 5, 0);
- str_replace(buffer, &len, &changes, " colonia ", 9, " col ", 5, 0);
- str_replace(buffer, &len, &changes, " wielkie ", 9, " wlk ", 5, 0);
- str_replace(buffer, &len, &changes, " trzecie ", 9, " 3 ", 3, 0);
- str_replace(buffer, &len, &changes, " llanura ", 9, " llnra ", 7, 0);
- str_replace(buffer, &len, &changes, " malecon ", 9, " malec ", 7, 0);
- str_replace(buffer, &len, &changes, " trzecia ", 9, " 3 ", 3, 0);
- str_replace(buffer, &len, &changes, " trailer ", 9, " trlr ", 6, 0);
- str_replace(buffer, &len, &changes, " cuadra ", 8, " cuadr ", 7, 0);
- str_replace(buffer, &len, &changes, " cty cp ", 8, " ctcp ", 6, 0);
- str_replace(buffer, &len, &changes, " paraje ", 8, " praje ", 7, 0);
- str_replace(buffer, &len, &changes, " parque ", 8, " pque ", 6, 0);
- str_replace(buffer, &len, &changes, " piazza ", 8, " p za ", 6, 0);
- str_replace(buffer, &len, &changes, " puerta ", 8, " pta ", 5, 0);
- str_replace(buffer, &len, &changes, " little ", 8, " lt ", 4, 0);
- str_replace(buffer, &len, &changes, " pueblo ", 8, " pblo ", 6, 0);
- str_replace(buffer, &len, &changes, " puente ", 8, " pnte ", 6, 0);
- str_replace(buffer, &len, &changes, " jardin ", 8, " jdin ", 6, 0);
- str_replace(buffer, &len, &changes, " granja ", 8, " granj ", 7, 0);
- str_replace(buffer, &len, &changes, " market ", 8, " mkt ", 5, 0);
- str_replace(buffer, &len, &changes, " pasaje ", 8, " psaje ", 7, 0);
- str_replace(buffer, &len, &changes, " rotary ", 8, " rty ", 5, 0);
- str_replace(buffer, &len, &changes, " corral ", 8, " crral ", 7, 0);
- str_replace(buffer, &len, &changes, " siding ", 8, " sdng ", 6, 0);
- str_replace(buffer, &len, &changes, " nucleo ", 8, " ncleo ", 7, 0);
- str_replace(buffer, &len, &changes, " muelle ", 8, " muell ", 7, 0);
- str_replace(buffer, &len, &changes, " carril ", 8, " crril ", 7, 0);
- str_replace(buffer, &len, &changes, " portal ", 8, " prtal ", 7, 0);
- str_replace(buffer, &len, &changes, " ramble ", 8, " rmbl ", 6, 0);
- str_replace(buffer, &len, &changes, " pocket ", 8, " pkt ", 5, 0);
- str_replace(buffer, &len, &changes, " chalet ", 8, " chlet ", 7, 0);
- str_replace(buffer, &len, &changes, " canton ", 8, " cant ", 6, 0);
- str_replace(buffer, &len, &changes, " ladera ", 8, " ldera ", 7, 0);
- str_replace(buffer, &len, &changes, " parade ", 8, " pde ", 5, 0);
- str_replace(buffer, &len, &changes, " dehesa ", 8, " dhsa ", 6, 0);
- str_replace(buffer, &len, &changes, " museum ", 8, " mus ", 5, 0);
- str_replace(buffer, &len, &changes, " middle ", 8, " mid ", 5, 0);
- str_replace(buffer, &len, &changes, " cuesta ", 8, " custa ", 7, 0);
- str_replace(buffer, &len, &changes, " gracht ", 8, " gr ", 4, 0);
- str_replace(buffer, &len, &changes, " virful ", 8, " vf ", 4, 0);
- str_replace(buffer, &len, &changes, " m tele ", 8, " mt ", 4, 0);
- str_replace(buffer, &len, &changes, " varful ", 8, " vf ", 4, 0);
- str_replace(buffer, &len, &changes, " str la ", 8, " sdla ", 6, 0);
- str_replace(buffer, &len, &changes, " arcade ", 8, " arc ", 5, 0);
- str_replace(buffer, &len, &changes, " strada ", 8, " st ", 4, 0);
- str_replace(buffer, &len, &changes, " access ", 8, " accs ", 6, 0);
- str_replace(buffer, &len, &changes, " bajada ", 8, " bjada ", 7, 0);
- str_replace(buffer, &len, &changes, " veliki ", 8, " v ", 3, 0);
- str_replace(buffer, &len, &changes, "strasse ", 8, " st ", 4, 0);
- str_replace(buffer, &len, &changes, " velike ", 8, " v ", 3, 0);
- str_replace(buffer, &len, &changes, " untere ", 8, " u ", 3, 0);
- str_replace(buffer, &len, &changes, " velika ", 8, " v ", 3, 0);
- str_replace(buffer, &len, &changes, " artery ", 8, " arty ", 6, 0);
- str_replace(buffer, &len, &changes, " avenue ", 8, " av ", 4, 0);
- str_replace(buffer, &len, &changes, " miasto ", 8, " m ", 3, 0);
- str_replace(buffer, &len, &changes, " bypass ", 8, " byp ", 5, 0);
- str_replace(buffer, &len, &changes, " placem ", 8, " pl ", 4, 0);
- str_replace(buffer, &len, &changes, " barrio ", 8, " bo ", 4, 0);
- str_replace(buffer, &len, &changes, " center ", 8, " ctr ", 5, 0);
- str_replace(buffer, &len, &changes, " bldngs ", 8, " bldgs ", 7, 0);
- str_replace(buffer, &len, &changes, " puerto ", 8, " pto ", 5, 0);
- str_replace(buffer, &len, &changes, " wielka ", 8, " wlk ", 5, 0);
- str_replace(buffer, &len, &changes, " tunnel ", 8, " tun ", 5, 0);
- str_replace(buffer, &len, &changes, " wielki ", 8, " wlk ", 5, 0);
- str_replace(buffer, &len, &changes, " bridge ", 8, " bri ", 5, 0);
- str_replace(buffer, &len, &changes, " trzeci ", 8, " 3 ", 3, 0);
- str_replace(buffer, &len, &changes, " veliko ", 8, " v ", 3, 0);
- str_replace(buffer, &len, &changes, " quelle ", 8, " qu ", 4, 0);
- str_replace(buffer, &len, &changes, " acceso ", 8, " acces ", 7, 0);
- str_replace(buffer, &len, &changes, " bulvar ", 8, " bl ", 4, 0);
- str_replace(buffer, &len, &changes, " sokagi ", 8, " sk ", 4, 0);
- str_replace(buffer, &len, &changes, "platsen ", 8, " pl ", 4, 0);
- str_replace(buffer, &len, &changes, " stigen ", 8, " st ", 4, 0);
- str_replace(buffer, &len, &changes, " brucke ", 8, " br ", 4, 0);
- str_replace(buffer, &len, &changes, " an der ", 8, " a d ", 5, 0);
- str_replace(buffer, &len, &changes, " thi xa ", 8, " tx ", 4, 0);
- str_replace(buffer, &len, &changes, " nordre ", 8, " ndr ", 5, 0);
- str_replace(buffer, &len, &changes, " rambla ", 8, " rbla ", 6, 0);
- str_replace(buffer, &len, &changes, " sondre ", 8, " sdr ", 5, 0);
- str_replace(buffer, &len, &changes, "quoc lo ", 8, " ql ", 4, 0);
- str_replace(buffer, &len, &changes, " phuong ", 8, " p ", 3, 0);
- str_replace(buffer, &len, &changes, " vastra ", 8, " v ", 3, 0);
- str_replace(buffer, &len, &changes, " carrer ", 8, " c ", 3, 0);
- str_replace(buffer, &len, &changes, " oberes ", 8, " o ", 3, 0);
- str_replace(buffer, &len, &changes, " raitti ", 8, " r ", 3, 0);
- str_replace(buffer, &len, &changes, " puisto ", 8, " ps ", 4, 0);
- str_replace(buffer, &len, &changes, " arroyo ", 8, " arry ", 6, 0);
- str_replace(buffer, &len, &changes, " penger ", 8, " pgr ", 5, 0);
- str_replace(buffer, &len, &changes, " oberer ", 8, " o ", 3, 0);
- str_replace(buffer, &len, &changes, " kleine ", 8, " kl ", 4, 0);
- str_replace(buffer, &len, &changes, " grosse ", 8, " gr ", 4, 0);
- str_replace(buffer, &len, &changes, "granden ", 8, " gr ", 4, 0);
- str_replace(buffer, &len, &changes, " villas ", 8, " vlls ", 6, 0);
- str_replace(buffer, &len, &changes, " taival ", 8, " tvl ", 5, 0);
- str_replace(buffer, &len, &changes, " in der ", 8, " i d ", 5, 0);
- str_replace(buffer, &len, &changes, " centre ", 8, " ctr ", 5, 0);
- str_replace(buffer, &len, &changes, " drugie ", 8, " 2 ", 3, 0);
- str_replace(buffer, &len, &changes, " dokter ", 8, " dr ", 4, 0);
- str_replace(buffer, &len, &changes, " grange ", 8, " gra ", 5, 0);
- str_replace(buffer, &len, &changes, " doctor ", 8, " dr ", 4, 0);
- str_replace(buffer, &len, &changes, " vicolo ", 8, " v lo ", 6, 0);
- str_replace(buffer, &len, &changes, " kort e ", 8, " k ", 3, 0);
- str_replace(buffer, &len, &changes, " koning ", 8, " kon ", 5, 0);
- str_replace(buffer, &len, &changes, " straat ", 8, " st ", 4, 0);
- str_replace(buffer, &len, &changes, " svieti ", 8, " sv ", 4, 0);
- str_replace(buffer, &len, &changes, " callej ", 8, " cjon ", 6, 0);
- str_replace(buffer, &len, &changes, " ground ", 8, " grnd ", 6, 0);
- str_replace(buffer, &len, &changes, " vereda ", 8, " vreda ", 7, 0);
- str_replace(buffer, &len, &changes, " chemin ", 8, " ch ", 4, 0);
- str_replace(buffer, &len, &changes, " street ", 8, " st ", 4, 0);
- str_replace(buffer, &len, &changes, " strand ", 8, " st ", 4, 0);
- str_replace(buffer, &len, &changes, " sainte ", 8, " ste ", 5, 0);
- str_replace(buffer, &len, &changes, " camino ", 8, " cno ", 5, 0);
- str_replace(buffer, &len, &changes, " garden ", 8, " gdn ", 5, 0);
- str_replace(buffer, &len, &changes, " follow ", 8, " folw ", 6, 0);
- str_replace(buffer, &len, &changes, " estate ", 8, " est ", 5, 0);
- str_replace(buffer, &len, &changes, " doktor ", 8, " d r ", 5, 0);
- str_replace(buffer, &len, &changes, " subway ", 8, " sbwy ", 6, 0);
- str_replace(buffer, &len, &changes, " ulitsa ", 8, " ul ", 4, 0);
- str_replace(buffer, &len, &changes, " square ", 8, " sq ", 4, 0);
- str_replace(buffer, &len, &changes, " towers ", 8, " twrs ", 6, 0);
- str_replace(buffer, &len, &changes, "plassen ", 8, " pl ", 4, 0);
- str_replace(buffer, &len, &changes, " county ", 8, " co ", 4, 0);
- str_replace(buffer, &len, &changes, " brazal ", 8, " brzal ", 7, 0);
- str_replace(buffer, &len, &changes, " circus ", 8, " crcs ", 6, 0);
- str_replace(buffer, &len, &changes, "svingen ", 8, " sv ", 4, 0);
- str_replace(buffer, &len, &changes, " rampla ", 8, " rampa ", 7, 0);
- str_replace(buffer, &len, &changes, " bloque ", 8, " blque ", 7, 0);
- str_replace(buffer, &len, &changes, " circle ", 8, " cir ", 5, 0);
- str_replace(buffer, &len, &changes, " island ", 8, " is ", 4, 0);
- str_replace(buffer, &len, &changes, " common ", 8, " comm ", 6, 0);
- str_replace(buffer, &len, &changes, " ribera ", 8, " rbra ", 6, 0);
- str_replace(buffer, &len, &changes, " sector ", 8, " sect ", 6, 0);
- str_replace(buffer, &len, &changes, " rincon ", 8, " rcon ", 6, 0);
- str_replace(buffer, &len, &changes, " van de ", 8, " vd ", 4, 0);
- str_replace(buffer, &len, &changes, " corner ", 8, " cnr ", 5, 0);
- str_replace(buffer, &len, &changes, " subida ", 8, " sbida ", 7, 0);
- str_replace(buffer, &len, &changes, " banda ", 7, " b ", 3, 0);
- str_replace(buffer, &len, &changes, " bulev ", 7, " bd ", 4, 0);
- str_replace(buffer, &len, &changes, " barro ", 7, " bo ", 4, 0);
- str_replace(buffer, &len, &changes, " cllon ", 7, " cjon ", 6, 0);
- str_replace(buffer, &len, &changes, " p zza ", 7, " p za ", 6, 0);
- str_replace(buffer, &len, &changes, " drugi ", 7, " 2 ", 3, 0);
- str_replace(buffer, &len, &changes, " druga ", 7, " 2 ", 3, 0);
- str_replace(buffer, &len, &changes, " placu ", 7, " pl ", 4, 0);
- str_replace(buffer, &len, &changes, " aleji ", 7, " al ", 4, 0);
- str_replace(buffer, &len, &changes, " aleja ", 7, " al ", 4, 0);
- str_replace(buffer, &len, &changes, " aleje ", 7, " al ", 4, 0);
- str_replace(buffer, &len, &changes, " stary ", 7, " st ", 4, 0);
- str_replace(buffer, &len, &changes, " stara ", 7, " st ", 4, 0);
- str_replace(buffer, &len, &changes, " dolny ", 7, " dln ", 5, 0);
- str_replace(buffer, &len, &changes, " dolna ", 7, " dln ", 5, 0);
- str_replace(buffer, &len, &changes, " gorne ", 7, " gn ", 4, 0);
- str_replace(buffer, &len, &changes, " gorna ", 7, " gn ", 4, 0);
- str_replace(buffer, &len, &changes, " stare ", 7, " st ", 4, 0);
- str_replace(buffer, &len, &changes, " gorny ", 7, " gn ", 4, 0);
- str_replace(buffer, &len, &changes, " ulicy ", 7, " ul ", 4, 0);
- str_replace(buffer, &len, &changes, " ulica ", 7, " ul ", 4, 0);
- str_replace(buffer, &len, &changes, " o l v ", 7, " olv ", 5, 0);
- str_replace(buffer, &len, &changes, " plein ", 7, " pln ", 5, 0);
- str_replace(buffer, &len, &changes, " markt ", 7, " mkt ", 5, 0);
- str_replace(buffer, &len, &changes, " lange ", 7, " l ", 3, 0);
- str_replace(buffer, &len, &changes, " viale ", 7, " v le ", 6, 0);
- str_replace(buffer, &len, &changes, "gracht ", 7, " gr ", 4, 0);
- str_replace(buffer, &len, &changes, " prins ", 7, " pr ", 4, 0);
- str_replace(buffer, &len, &changes, "straat ", 7, " st ", 4, 0);
- str_replace(buffer, &len, &changes, " plass ", 7, " pl ", 4, 0);
- str_replace(buffer, &len, &changes, " sving ", 7, " sv ", 4, 0);
- str_replace(buffer, &len, &changes, " gaten ", 7, " g ", 3, 0);
- str_replace(buffer, &len, &changes, " veien ", 7, " v ", 3, 0);
- str_replace(buffer, &len, &changes, " vliet ", 7, " vlt ", 5, 0);
- str_replace(buffer, &len, &changes, " dolne ", 7, " dln ", 5, 0);
- str_replace(buffer, &len, &changes, " b dul ", 7, " bd ", 4, 0);
- str_replace(buffer, &len, &changes, " sodra ", 7, " s ", 3, 0);
- str_replace(buffer, &len, &changes, " norra ", 7, " n ", 3, 0);
- str_replace(buffer, &len, &changes, " gamla ", 7, " gla ", 5, 0);
- str_replace(buffer, &len, &changes, " grand ", 7, " gr ", 4, 0);
- str_replace(buffer, &len, &changes, " vagen ", 7, " v ", 3, 0);
- str_replace(buffer, &len, &changes, " gatan ", 7, " g ", 3, 0);
- str_replace(buffer, &len, &changes, " ostra ", 7, " o ", 3, 0);
- str_replace(buffer, &len, &changes, "vastra ", 7, " v ", 3, 0);
- str_replace(buffer, &len, &changes, " cadde ", 7, " cd ", 4, 0);
- str_replace(buffer, &len, &changes, " duong ", 7, " d ", 3, 0);
- str_replace(buffer, &len, &changes, " sokak ", 7, " sk ", 4, 0);
- str_replace(buffer, &len, &changes, " plats ", 7, " pl ", 4, 0);
- str_replace(buffer, &len, &changes, "stigen ", 7, " st ", 4, 0);
- str_replace(buffer, &len, &changes, " vayla ", 7, " vla ", 5, 0);
- str_replace(buffer, &len, &changes, "taival ", 7, " tvl ", 5, 0);
- str_replace(buffer, &len, &changes, " sveti ", 7, " sv ", 4, 0);
- str_replace(buffer, &len, &changes, " aukio ", 7, " auk ", 5, 0);
- str_replace(buffer, &len, &changes, " sveta ", 7, " sv ", 4, 0);
- str_replace(buffer, &len, &changes, " cesta ", 7, " c ", 3, 0);
- str_replace(buffer, &len, &changes, " piata ", 7, " pta ", 5, 0);
- str_replace(buffer, &len, &changes, " aleea ", 7, " al ", 4, 0);
- str_replace(buffer, &len, &changes, " kaari ", 7, " kri ", 5, 0);
- str_replace(buffer, &len, &changes, "penger ", 7, " pgr ", 5, 0);
- str_replace(buffer, &len, &changes, " ranta ", 7, " rt ", 4, 0);
- str_replace(buffer, &len, &changes, " rinne ", 7, " rn ", 4, 0);
- str_replace(buffer, &len, &changes, "raitti ", 7, " r ", 3, 0);
- str_replace(buffer, &len, &changes, "puisto ", 7, " ps ", 4, 0);
- str_replace(buffer, &len, &changes, " polku ", 7, " p ", 3, 0);
- str_replace(buffer, &len, &changes, " porta ", 7, " pta ", 5, 0);
- str_replace(buffer, &len, &changes, " ponte ", 7, " p te ", 6, 0);
- str_replace(buffer, &len, &changes, " paseo ", 7, " po ", 4, 0);
- str_replace(buffer, &len, &changes, " fbrca ", 7, " fca ", 5, 0);
- str_replace(buffer, &len, &changes, " allee ", 7, " al ", 4, 0);
- str_replace(buffer, &len, &changes, " cours ", 7, " crs ", 5, 0);
- str_replace(buffer, &len, &changes, "sainte ", 7, " ste ", 5, 0);
- str_replace(buffer, &len, &changes, "square ", 7, " sq ", 4, 0);
- str_replace(buffer, &len, &changes, " largo ", 7, " l go ", 6, 0);
- str_replace(buffer, &len, &changes, " wharf ", 7, " whrf ", 6, 0);
- str_replace(buffer, &len, &changes, " corte ", 7, " c te ", 6, 0);
- str_replace(buffer, &len, &changes, " corso ", 7, " c so ", 6, 0);
- str_replace(buffer, &len, &changes, " campo ", 7, " c po ", 6, 0);
- str_replace(buffer, &len, &changes, " santa ", 7, " sta ", 5, 0);
- str_replace(buffer, &len, &changes, " calle ", 7, " c ", 3, 0);
- str_replace(buffer, &len, &changes, " strip ", 7, " strp ", 6, 0);
- str_replace(buffer, &len, &changes, " alley ", 7, " al ", 4, 0);
- str_replace(buffer, &len, &changes, " north ", 7, " n ", 3, 0);
- str_replace(buffer, &len, &changes, " block ", 7, " blk ", 5, 0);
- str_replace(buffer, &len, &changes, " gully ", 7, " gly ", 5, 0);
- str_replace(buffer, &len, &changes, " sielo ", 7, " s ", 3, 0);
- str_replace(buffer, &len, &changes, " brace ", 7, " br ", 4, 0);
- str_replace(buffer, &len, &changes, " ronde ", 7, " rnde ", 6, 0);
- str_replace(buffer, &len, &changes, " grove ", 7, " gr ", 4, 0);
- str_replace(buffer, &len, &changes, " break ", 7, " brk ", 5, 0);
- str_replace(buffer, &len, &changes, " roads ", 7, " rds ", 5, 0);
- str_replace(buffer, &len, &changes, " track ", 7, " trk ", 5, 0);
- str_replace(buffer, &len, &changes, " house ", 7, " ho ", 4, 0);
- str_replace(buffer, &len, &changes, " trail ", 7, " trl ", 5, 0);
- str_replace(buffer, &len, &changes, " mount ", 7, " mt ", 4, 0);
- str_replace(buffer, &len, &changes, " cross ", 7, " crss ", 6, 0);
- str_replace(buffer, &len, &changes, " beach ", 7, " bch ", 5, 0);
- str_replace(buffer, &len, &changes, " point ", 7, " pt ", 4, 0);
- str_replace(buffer, &len, &changes, " basin ", 7, " basn ", 6, 0);
- str_replace(buffer, &len, &changes, " green ", 7, " gn ", 4, 0);
- str_replace(buffer, &len, &changes, " plaza ", 7, " pl ", 4, 0);
- str_replace(buffer, &len, &changes, " lille ", 7, " ll ", 4, 0);
- str_replace(buffer, &len, &changes, " slope ", 7, " slpe ", 6, 0);
- str_replace(buffer, &len, &changes, " placa ", 7, " pl ", 4, 0);
- str_replace(buffer, &len, &changes, " place ", 7, " pl ", 4, 0);
- str_replace(buffer, &len, &changes, " shunt ", 7, " shun ", 6, 0);
- str_replace(buffer, &len, &changes, " saint ", 7, " st ", 4, 0);
- str_replace(buffer, &len, &changes, " ulice ", 7, " ul ", 4, 0);
- str_replace(buffer, &len, &changes, " amble ", 7, " ambl ", 6, 0);
- str_replace(buffer, &len, &changes, " route ", 7, " rt ", 4, 0);
- str_replace(buffer, &len, &changes, " sound ", 7, " snd ", 5, 0);
- str_replace(buffer, &len, &changes, " store ", 7, " st ", 4, 0);
- str_replace(buffer, &len, &changes, " front ", 7, " frnt ", 6, 0);
- str_replace(buffer, &len, &changes, " elbow ", 7, " elb ", 5, 0);
- str_replace(buffer, &len, &changes, " glade ", 7, " gl ", 4, 0);
- str_replace(buffer, &len, &changes, " south ", 7, " s ", 3, 0);
- str_replace(buffer, &len, &changes, " round ", 7, " rnd ", 5, 0);
- str_replace(buffer, &len, &changes, " drive ", 7, " dr ", 4, 0);
- str_replace(buffer, &len, &changes, " croft ", 7, " cft ", 5, 0);
- str_replace(buffer, &len, &changes, " platz ", 7, " pl ", 4, 0);
- str_replace(buffer, &len, &changes, " ferry ", 7, " fy ", 4, 0);
- str_replace(buffer, &len, &changes, " ridge ", 7, " rdge ", 6, 0);
- str_replace(buffer, &len, &changes, " tanav ", 7, " tn ", 4, 0);
- str_replace(buffer, &len, &changes, " banan ", 7, " ba ", 4, 0);
- str_replace(buffer, &len, &changes, " quays ", 7, " qys ", 5, 0);
- str_replace(buffer, &len, &changes, " sankt ", 7, " st ", 4, 0);
- str_replace(buffer, &len, &changes, " vkhod ", 7, " vkh ", 5, 0);
- str_replace(buffer, &len, &changes, " chase ", 7, " ch ", 4, 0);
- str_replace(buffer, &len, &changes, " vista ", 7, " vsta ", 6, 0);
- str_replace(buffer, &len, &changes, " rhein ", 7, " rh ", 4, 0);
- str_replace(buffer, &len, &changes, " court ", 7, " ct ", 4, 0);
- str_replace(buffer, &len, &changes, "brucke ", 7, " br ", 4, 0);
- str_replace(buffer, &len, &changes, " upper ", 7, " up ", 4, 0);
- str_replace(buffer, &len, &changes, " river ", 7, " r ", 3, 0);
- str_replace(buffer, &len, &changes, " range ", 7, " rnge ", 6, 0);
- str_replace(buffer, &len, &changes, " lower ", 7, " lr ", 4, 0);
- str_replace(buffer, &len, &changes, " kalea ", 7, " k ", 3, 0);
- str_replace(buffer, &len, &changes, " crest ", 7, " crst ", 6, 0);
- str_replace(buffer, &len, &changes, " obere ", 7, " o ", 3, 0);
- str_replace(buffer, &len, &changes, " manor ", 7, " mnr ", 5, 0);
- str_replace(buffer, &len, &changes, " byway ", 7, " bywy ", 6, 0);
- str_replace(buffer, &len, &changes, " reach ", 7, " rch ", 5, 0);
- str_replace(buffer, &len, &changes, " copse ", 7, " cps ", 5, 0);
- str_replace(buffer, &len, &changes, "quelle ", 7, " qu ", 4, 0);
- str_replace(buffer, &len, &changes, " creek ", 7, " cr ", 4, 0);
- str_replace(buffer, &len, &changes, " close ", 7, " c ", 3, 0);
- str_replace(buffer, &len, &changes, " fort ", 6, " ft ", 4, 0);
- str_replace(buffer, &len, &changes, " apch ", 6, " app ", 5, 0);
- str_replace(buffer, &len, &changes, " mont ", 6, " mt ", 4, 0);
- str_replace(buffer, &len, &changes, " bdul ", 6, " bd ", 4, 0);
- str_replace(buffer, &len, &changes, "saint ", 6, " st ", 4, 0);
- str_replace(buffer, &len, &changes, " back ", 6, " bk ", 4, 0);
- str_replace(buffer, &len, &changes, " c le ", 6, " c ", 3, 0);
- str_replace(buffer, &len, &changes, "place ", 6, " pl ", 4, 0);
- str_replace(buffer, &len, &changes, " frwy ", 6, " fwy ", 5, 0);
- str_replace(buffer, &len, &changes, " quai ", 6, " qu ", 4, 0);
- str_replace(buffer, &len, &changes, " ally ", 6, " al ", 4, 0);
- str_replace(buffer, &len, &changes, " m te ", 6, " mt ", 4, 0);
- str_replace(buffer, &len, &changes, " lane ", 6, " ln ", 4, 0);
- str_replace(buffer, &len, &changes, "aukio ", 6, " auk ", 5, 0);
- str_replace(buffer, &len, &changes, " loop ", 6, " lp ", 4, 0);
- str_replace(buffer, &len, &changes, " line ", 6, " ln ", 4, 0);
- str_replace(buffer, &len, &changes, " alue ", 6, " al ", 4, 0);
- str_replace(buffer, &len, &changes, " link ", 6, " lk ", 4, 0);
- str_replace(buffer, &len, &changes, " glde ", 6, " gl ", 4, 0);
- str_replace(buffer, &len, &changes, " alea ", 6, " al ", 4, 0);
- str_replace(buffer, &len, &changes, " gate ", 6, " g ", 3, 0);
- str_replace(buffer, &len, &changes, " intr ", 6, " int ", 5, 0);
- str_replace(buffer, &len, &changes, " gdns ", 6, " gdn ", 5, 0);
- str_replace(buffer, &len, &changes, " hird ", 6, " hrd ", 5, 0);
- str_replace(buffer, &len, &changes, " varf ", 6, " vf ", 4, 0);
- str_replace(buffer, &len, &changes, " virf ", 6, " vf ", 4, 0);
- str_replace(buffer, &len, &changes, " hgts ", 6, " hts ", 5, 0);
- str_replace(buffer, &len, &changes, " expy ", 6, " exp ", 5, 0);
- str_replace(buffer, &len, &changes, "markt ", 6, " mkt ", 5, 0);
- str_replace(buffer, &len, &changes, " bypa ", 6, " byp ", 5, 0);
- str_replace(buffer, &len, &changes, "o l v ", 6, " olv ", 5, 0);
- str_replace(buffer, &len, &changes, " cres ", 6, " cr ", 4, 0);
- str_replace(buffer, &len, &changes, " bdwy ", 6, " bway ", 6, 0);
- str_replace(buffer, &len, &changes, " csac ", 6, " cds ", 5, 0);
- str_replace(buffer, &len, &changes, " nowy ", 6, " n ", 3, 0);
- str_replace(buffer, &len, &changes, " laan ", 6, " ln ", 4, 0);
- str_replace(buffer, &len, &changes, " crsg ", 6, " xing ", 6, 0);
- str_replace(buffer, &len, &changes, "vliet ", 6, " vlt ", 5, 0);
- str_replace(buffer, &len, &changes, " city ", 6, " cty ", 5, 0);
- str_replace(buffer, &len, &changes, "sving ", 6, " sv ", 4, 0);
- str_replace(buffer, &len, &changes, "plass ", 6, " pl ", 4, 0);
- str_replace(buffer, &len, &changes, "gaten ", 6, " g ", 3, 0);
- str_replace(buffer, &len, &changes, "veien ", 6, " v ", 3, 0);
- str_replace(buffer, &len, &changes, " gata ", 6, " g ", 3, 0);
- str_replace(buffer, &len, &changes, " sint ", 6, " st ", 4, 0);
- str_replace(buffer, &len, &changes, " caus ", 6, " cway ", 6, 0);
- str_replace(buffer, &len, &changes, " cove ", 6, " cv ", 4, 0);
- str_replace(buffer, &len, &changes, "plein ", 6, " pln ", 5, 0);
- str_replace(buffer, &len, &changes, " cswy ", 6, " cway ", 6, 0);
- str_replace(buffer, &len, &changes, " plac ", 6, " pl ", 4, 0);
- str_replace(buffer, &len, &changes, " nowa ", 6, " n ", 3, 0);
- str_replace(buffer, &len, &changes, " kolo ", 6, " k ", 3, 0);
- str_replace(buffer, &len, &changes, " katu ", 6, " k ", 3, 0);
- str_replace(buffer, &len, &changes, " duze ", 6, " dz ", 4, 0);
- str_replace(buffer, &len, &changes, " blvd ", 6, " bd ", 4, 0);
- str_replace(buffer, &len, &changes, " p ta ", 6, " pta ", 5, 0);
- str_replace(buffer, &len, &changes, " maly ", 6, " ml ", 4, 0);
- str_replace(buffer, &len, &changes, " mala ", 6, " ml ", 4, 0);
- str_replace(buffer, &len, &changes, " bdge ", 6, " bri ", 5, 0);
- str_replace(buffer, &len, &changes, " nowe ", 6, " n ", 3, 0);
- str_replace(buffer, &len, &changes, " brdg ", 6, " bri ", 5, 0);
- str_replace(buffer, &len, &changes, " male ", 6, " ml ", 4, 0);
- str_replace(buffer, &len, &changes, " drwy ", 6, " dvwy ", 6, 0);
- str_replace(buffer, &len, &changes, " duza ", 6, " dz ", 4, 0);
- str_replace(buffer, &len, &changes, " utca ", 6, " u ", 3, 0);
- str_replace(buffer, &len, &changes, " east ", 6, " e ", 3, 0);
- str_replace(buffer, &len, &changes, " duzy ", 6, " dz ", 4, 0);
- str_replace(buffer, &len, &changes, "kaari ", 6, " kri ", 5, 0);
- str_replace(buffer, &len, &changes, " quan ", 6, " q ", 3, 0);
- str_replace(buffer, &len, &changes, " svwy ", 6, " swy ", 5, 0);
- str_replace(buffer, &len, &changes, " shwy ", 6, " sh ", 4, 0);
- str_replace(buffer, &len, &changes, " road ", 6, " rd ", 4, 0);
- str_replace(buffer, &len, &changes, "sankt ", 6, " st ", 4, 0);
- str_replace(buffer, &len, &changes, " quay ", 6, " qy ", 4, 0);
- str_replace(buffer, &len, &changes, "plats ", 6, " pl ", 4, 0);
- str_replace(buffer, &len, &changes, " rise ", 6, " ri ", 4, 0);
- str_replace(buffer, &len, &changes, " berg ", 6, " bg ", 4, 0);
- str_replace(buffer, &len, &changes, " tcty ", 6, " tct ", 5, 0);
- str_replace(buffer, &len, &changes, " viad ", 6, " via ", 5, 0);
- str_replace(buffer, &len, &changes, " view ", 6, " vw ", 4, 0);
- str_replace(buffer, &len, &changes, " vdct ", 6, " via ", 5, 0);
- str_replace(buffer, &len, &changes, " vale ", 6, " v ", 3, 0);
- str_replace(buffer, &len, &changes, " avda ", 6, " av ", 4, 0);
- str_replace(buffer, &len, &changes, " grad ", 6, " ghr ", 5, 0);
- str_replace(buffer, &len, &changes, " walk ", 6, " wlk ", 5, 0);
- str_replace(buffer, &len, &changes, " west ", 6, " w ", 3, 0);
- str_replace(buffer, &len, &changes, " yard ", 6, " yd ", 4, 0);
- str_replace(buffer, &len, &changes, " blok ", 6, " bl ", 4, 0);
- str_replace(buffer, &len, &changes, " terr ", 6, " ter ", 5, 0);
- str_replace(buffer, &len, &changes, " cmno ", 6, " cno ", 5, 0);
- str_replace(buffer, &len, &changes, " stra ", 6, " st ", 4, 0);
- str_replace(buffer, &len, &changes, " thfr ", 6, " thor ", 6, 0);
- str_replace(buffer, &len, &changes, " turn ", 6, " tn ", 4, 0);
- str_replace(buffer, &len, &changes, " tpke ", 6, " tpk ", 5, 0);
- str_replace(buffer, &len, &changes, " burg ", 6, " bg ", 4, 0);
- str_replace(buffer, &len, &changes, "vayla ", 6, " vla ", 5, 0);
- str_replace(buffer, &len, &changes, "vagen ", 6, " v ", 3, 0);
- str_replace(buffer, &len, &changes, " tori ", 6, " tr ", 4, 0);
- str_replace(buffer, &len, &changes, "gatan ", 6, " g ", 3, 0);
- str_replace(buffer, &len, &changes, "grand ", 6, " gr ", 4, 0);
- str_replace(buffer, &len, &changes, " pass ", 6, " ps ", 4, 0);
- str_replace(buffer, &len, &changes, " pkwy ", 6, " pwy ", 5, 0);
- str_replace(buffer, &len, &changes, " park ", 6, " pk ", 4, 0);
- str_replace(buffer, &len, &changes, "rinne ", 6, " rn ", 4, 0);
- str_replace(buffer, &len, &changes, " mtwy ", 6, " mwy ", 5, 0);
- str_replace(buffer, &len, &changes, " mndr ", 6, " mr ", 4, 0);
- str_replace(buffer, &len, &changes, " kyla ", 6, " kl ", 4, 0);
- str_replace(buffer, &len, &changes, " kuja ", 6, " kj ", 4, 0);
- str_replace(buffer, &len, &changes, "platz ", 6, " pl ", 4, 0);
- str_replace(buffer, &len, &changes, "ranta ", 6, " rt ", 4, 0);
- str_replace(buffer, &len, &changes, " mile ", 6, " mi ", 4, 0);
- str_replace(buffer, &len, &changes, " pfad ", 6, " p ", 3, 0);
- str_replace(buffer, &len, &changes, " mews ", 6, " m ", 3, 0);
- str_replace(buffer, &len, &changes, "polku ", 6, " p ", 3, 0);
- str_replace(buffer, &len, &changes, " psge ", 6, " ps ", 4, 0);
- str_replace(buffer, &len, &changes, " plza ", 6, " pl ", 4, 0);
- str_replace(buffer, &len, &changes, "ostra ", 6, " o ", 3, 0);
- str_replace(buffer, &len, &changes, "gamla ", 6, " gla ", 5, 0);
- str_replace(buffer, &len, &changes, " stig ", 6, " st ", 4, 0);
- str_replace(buffer, &len, &changes, "norra ", 6, " n ", 3, 0);
- str_replace(buffer, &len, &changes, "sodra ", 6, " s ", 3, 0);
- str_replace(buffer, &len, &changes, " pike ", 6, " pk ", 4, 0);
- str_replace(buffer, &len, &changes, " dorf ", 6, " df ", 4, 0);
- str_replace(buffer, &len, &changes, " piaz ", 6, " p za ", 6, 0);
- str_replace(buffer, &len, &changes, " phwy ", 6, " pway ", 6, 0);
- str_replace(buffer, &len, &changes, "pfad ", 5, " p ", 3, 0);
- str_replace(buffer, &len, &changes, " mnt ", 5, " mt ", 4, 0);
- str_replace(buffer, &len, &changes, "gata ", 5, " g ", 3, 0);
- str_replace(buffer, &len, &changes, " bhf ", 5, " bf ", 4, 0);
- str_replace(buffer, &len, &changes, " bad ", 5, " b ", 3, 0);
- str_replace(buffer, &len, &changes, "gate ", 5, " g ", 3, 0);
- str_replace(buffer, &len, &changes, " zum ", 5, " z ", 3, 0);
- str_replace(buffer, &len, &changes, "stig ", 5, " st ", 4, 0);
- str_replace(buffer, &len, &changes, " blv ", 5, " bd ", 4, 0);
- str_replace(buffer, &len, &changes, "kuja ", 5, " kj ", 4, 0);
- str_replace(buffer, &len, &changes, " bul ", 5, " bd ", 4, 0);
- str_replace(buffer, &len, &changes, " str ", 5, " st ", 4, 0);
- str_replace(buffer, &len, &changes, "alue ", 5, " al ", 4, 0);
- str_replace(buffer, &len, &changes, " cen ", 5, " ctr ", 5, 0);
- str_replace(buffer, &len, &changes, " ave ", 5, " av ", 4, 0);
- str_replace(buffer, &len, &changes, "kyla ", 5, " kl ", 4, 0);
- str_replace(buffer, &len, &changes, " ale ", 5, " al ", 4, 0);
- str_replace(buffer, &len, &changes, " spl ", 5, " sp ", 4, 0);
- str_replace(buffer, &len, &changes, " all ", 5, " al ", 4, 0);
- str_replace(buffer, &len, &changes, " k s ", 5, " ks ", 4, 0);
- str_replace(buffer, &len, &changes, " aly ", 5, " al ", 4, 0);
- str_replace(buffer, &len, &changes, "dorf ", 5, " df ", 4, 0);
- str_replace(buffer, &len, &changes, " bvd ", 5, " bd ", 4, 0);
- str_replace(buffer, &len, &changes, " vag ", 5, " v ", 3, 0);
- str_replace(buffer, &len, &changes, " iii ", 5, " 3 ", 3, 0);
- str_replace(buffer, &len, &changes, " tie ", 5, " t ", 3, 0);
- str_replace(buffer, &len, &changes, " sok ", 5, " sk ", 4, 0);
- str_replace(buffer, &len, &changes, "burg ", 5, " bg ", 4, 0);
- str_replace(buffer, &len, &changes, "katu ", 5, " k ", 3, 0);
- str_replace(buffer, &len, &changes, "berg ", 5, " bg ", 4, 0);
- str_replace(buffer, &len, &changes, "tori ", 5, " tr ", 4, 0);
- str_replace(buffer, &len, &changes, " kte ", 5, " k ", 3, 0);
- str_replace(buffer, &len, &changes, " gro ", 5, " gr ", 4, 0);
- str_replace(buffer, &len, &changes, " grn ", 5, " gn ", 4, 0);
- str_replace(buffer, &len, &changes, " gld ", 5, " gl ", 4, 0);
- str_replace(buffer, &len, &changes, " san ", 5, " s ", 3, 0);
- str_replace(buffer, &len, &changes, " hse ", 5, " ho ", 4, 0);
- str_replace(buffer, &len, &changes, " gte ", 5, " g ", 3, 0);
- str_replace(buffer, &len, &changes, " rte ", 5, " rt ", 4, 0);
- str_replace(buffer, &len, &changes, " rue ", 5, " r ", 3, 0);
- str_replace(buffer, &len, &changes, " che ", 5, " ch ", 4, 0);
- str_replace(buffer, &len, &changes, " pas ", 5, " ps ", 4, 0);
- str_replace(buffer, &len, &changes, " plz ", 5, " pl ", 4, 0);
- str_replace(buffer, &len, &changes, " pnt ", 5, " pt ", 4, 0);
- str_replace(buffer, &len, &changes, " pky ", 5, " pwy ", 5, 0);
- str_replace(buffer, &len, &changes, " pza ", 5, " pl ", 4, 0);
- str_replace(buffer, &len, &changes, " rvr ", 5, " r ", 3, 0);
- str_replace(buffer, &len, &changes, " riv ", 5, " r ", 3, 0);
- str_replace(buffer, &len, &changes, " lit ", 5, " lt ", 4, 0);
- str_replace(buffer, &len, &changes, " p k ", 5, " pk ", 4, 0);
- str_replace(buffer, &len, &changes, " lwr ", 5, " lr ", 4, 0);
- str_replace(buffer, &len, &changes, " low ", 5, " lr ", 4, 0);
- str_replace(buffer, &len, &changes, " sth ", 5, " s ", 3, 0);
- str_replace(buffer, &len, &changes, " crk ", 5, " cr ", 4, 0);
- str_replace(buffer, &len, &changes, "pres ", 5, " pres ", 6, 1);
- str_replace(buffer, &len, &changes, "laan ", 5, " ln ", 4, 0);
- str_replace(buffer, &len, &changes, " bda ", 5, " b ", 3, 0);
- str_replace(buffer, &len, &changes, " vei ", 5, " v ", 3, 0);
- str_replace(buffer, &len, &changes, " via ", 5, " v ", 3, 0);
- str_replace(buffer, &len, &changes, " way ", 5, " wy ", 4, 0);
- str_replace(buffer, &len, &changes, " upr ", 5, " up ", 4, 0);
- str_replace(buffer, &len, &changes, " avd ", 5, " av ", 4, 0);
- str_replace(buffer, &len, &changes, " crt ", 5, " ct ", 4, 0);
- str_replace(buffer, &len, &changes, "stwg ", 5, " stwg ", 6, 1);
- str_replace(buffer, &len, &changes, "sint ", 5, " st ", 4, 0);
- str_replace(buffer, &len, &changes, " v d ", 5, " vd ", 4, 0);
- str_replace(buffer, &len, &changes, " van ", 5, " v ", 3, 0);
- str_replace(buffer, &len, &changes, " drv ", 5, " dr ", 4, 0);
- str_replace(buffer, &len, &changes, " tce ", 5, " ter ", 5, 0);
- str_replace(buffer, &len, &changes, " va ", 4, " v ", 3, 0);
- str_replace(buffer, &len, &changes, " oa ", 4, " o ", 3, 0);
- str_replace(buffer, &len, &changes, " sa ", 4, " s ", 3, 0);
- str_replace(buffer, &len, &changes, " na ", 4, " n ", 3, 0);
- str_replace(buffer, &len, &changes, "bgm ", 4, " bgm ", 5, 1);
- str_replace(buffer, &len, &changes, " nw ", 4, " n ", 3, 0);
- str_replace(buffer, &len, &changes, "vag ", 4, " v ", 3, 0);
- str_replace(buffer, &len, &changes, " im ", 4, " 1 ", 3, 0);
- str_replace(buffer, &len, &changes, "vla ", 4, " vla ", 5, 1);
- str_replace(buffer, &len, &changes, "gla ", 4, " gla ", 5, 1);
- str_replace(buffer, &len, &changes, " am ", 4, " a ", 3, 0);
- str_replace(buffer, &len, &changes, " ph ", 4, " p ", 3, 0);
- str_replace(buffer, &len, &changes, "rue ", 4, " r ", 3, 0);
- str_replace(buffer, &len, &changes, " ga ", 4, " g ", 3, 0);
- str_replace(buffer, &len, &changes, "ste ", 4, " ste ", 5, 1);
- str_replace(buffer, &len, &changes, "str ", 4, " st ", 4, 0);
- str_replace(buffer, &len, &changes, " cl ", 4, " c ", 3, 0);
- str_replace(buffer, &len, &changes, " vn ", 4, " v ", 3, 0);
- str_replace(buffer, &len, &changes, " gt ", 4, " g ", 3, 0);
- str_replace(buffer, &len, &changes, "vei ", 4, " v ", 3, 0);
- str_replace(buffer, &len, &changes, "vlt ", 4, " vlt ", 5, 1);
- str_replace(buffer, &len, &changes, " ce ", 4, " cv ", 4, 0);
- str_replace(buffer, &len, &changes, " ii ", 4, " 2 ", 3, 0);
- str_replace(buffer, &len, &changes, "pln ", 4, " pln ", 5, 1);
- str_replace(buffer, &len, &changes, "olv ", 4, " olv ", 5, 1);
- str_replace(buffer, &len, &changes, "mkt ", 4, " mkt ", 5, 1);
- str_replace(buffer, &len, &changes, "tvl ", 4, " tvl ", 5, 1);
- str_replace(buffer, &len, &changes, " ob ", 4, " o ", 3, 0);
- str_replace(buffer, &len, &changes, "pgr ", 4, " pgr ", 5, 1);
- str_replace(buffer, &len, &changes, " in ", 4, " 1 ", 3, 0);
- str_replace(buffer, &len, &changes, " mw ", 4, " m ", 3, 0);
- str_replace(buffer, &len, &changes, "kri ", 4, " kri ", 5, 1);
- str_replace(buffer, &len, &changes, "pko ", 4, " pko ", 5, 1);
- str_replace(buffer, &len, &changes, "auk ", 4, " auk ", 5, 1);
- str_replace(buffer, &len, &changes, "tie ", 4, " t ", 3, 0);
- str_replace(buffer, &len, &changes, " i ", 3, " 1 ", 3, 0);
+++ /dev/null
-/**
- * SPDX-License-Identifier: GPL-2.0-only
- *
- * This file is part of Nominatim. (https://nominatim.org)
- *
- * Copyright (C) 2022 by the Nominatim developer community.
- * For a full list of authors see the git log.
- */
-#define UTFASCII " \x00""\x01"" \x01""0\x01""1\x01""2\x01""3\x01""4\x01""5\x01""6\x01""7\x01""8\x01""9\x01""a\x01""b\x01""c\x01""d\x01""e\x01""f\x01""g\x01""h\x01""i\x01""j\x01""k\x01""l\x01""m\x01""n\x01""o\x01""p\x01""q\x01""r\x01""s\x01""t\x01""u\x01""v\x01""w\x01""x\x01""y\x01""z\x02""ps\x02""ss\x03""deg\x01""-\x02""14\x02""12\x02""34\x02""ae\x02""th\x02""ij\x02""ng\x02""oe\x02""hv\x02""oi\x02""yr\x02""sh\x02""zh\x02""ts\x02""dz\x02""lj\x02""nj\x02""ou\x02""db\x02""qp\x04""stop\x02""lz\x02""tc\x02""fn\x02""ls\x02""ww\x0a""extra-high\x04""high\x03""mid\x04""tone\x09""extra-low\x03""yin\x04""yang\x04""down\x02""up\x04""left\x05""right\x04""ring\x06""middle\x05""tilde\x06""raised\x05""begin\x03""end\x05""shelf\x05""below\x04""heta\x05""sampi\x0a""pamphylian\x02""ks\x02""ph\x02""kh\x05""koppa\x02""st\x02""sp\x02""ch\x02""ti\x03""sho\x03""san\x02""ie\x02""io\x02""dj\x02""gj\x02""yi\x03""tsh\x02""kj\x03""dzh\x04""shch\x04""hard\x02""iu\x02""ia\x02""gh\x02""ot\x04""1000\x06""100000\x07""1000000\x03""tts\x02""el\x02""en\x02""em\x08""palochka\x03""ghe\x02""ha\x02""de\x03""dje\x03""zje\x04""dzje\x03""lje\x03""nje\x03""sje\x03""tje\x02""ze\x03""lha\x03""rha\x03""yae\x02""qa\x02""we\x05""aleut\x02""rh\x02""ew\x04""alef\x04""ayin\x02""oy\x05""sanah\x05""safha\x05""misra\x0b""sallallahou\x06""alayhe\x0b""rahmatullah\x04""radi\x09""takhallus\x05""hamza\x03""teh\x02""dh\x03""ain\x05""keheh\x05""farsi\x02""an\x02""un\x02""in\x05""small\x03""dot\x03""beh\x03""qaf\x02""tt\x03""tth\x02""bh\x03""hah\x02""ny\x02""dy\x03""cch\x02""dd\x02""dt\x03""ddh\x02""rr\x02""hy\x02""yu\x03""yeh\x03""sad\x03""dal\x03""reh\x06""sindhi\x03""heh\x05""alaph\x02""yh\x07""persian\x07""sogdian\x04""seen\x03""feh\x04""meem\x04""noon\x03""lam\x03""waw\x03""kaf\x05""alifu\x02""hh\x04""ainu\x02""aa\x02""ee\x02""oo\x02""ey\x02""oa\x03""naa\x0a""dagbasinna\x02""ba\x02""pa\x02""ta\x02""ja\x03""cha\x02""da\x02""ra\x03""rra\x02""sa\x03""gba\x02""fa\x02""ka\x02""la\x02""na\x02""ma\x03""nya\x02""wa\x02""ya\x04""jo
na\x0b""candrabindu\x08""anusvara\x07""visarga\x02""ii\x02""uu\x06""candra\x02""ai\x02""au\x03""kha\x02""ga\x03""gha\x03""nga\x02""ca\x03""jha\x03""tta\x04""ttha\x03""dda\x04""ddha\x03""nna\x03""tha\x03""dha\x04""nnna\x03""pha\x03""bha\x03""lla\x04""llla\x02""va\x03""sha\x03""ssa\x05""nukta\x08""avagraha\x06""virama\x06""udatta\x08""anudatta\x04""khha\x04""ghha\x02""za\x05""dddha\x03""yya\x02""ll\x03""gga\x03""jja\x04""ddda\x03""bba\x06""khanda\x02""on\x02""jh\x02""nn\x03""nnn\x03""lll\x03""aum\x05""udaat\x03""khh\x03""ghh\x04""dddh\x02""yy\x06""yakash\x02""rs\x02""bb\x03""geo\x03""tsa\x03""dza\x05""tuumu\x0b""jihvamuliya\x0b""upadhmaniya\x06""chillu\x08""iruyanna\x06""eyanna\x0a""alpapraana\x07""yayanna\x07""rayanna\x07""dantaja\x09""muurdhaja\x0a""aela-pilla\x05""ketti\x04""diga\x0c""gaetta-pilla\x07""kombuva\x05""kombu\x0b""gayanukitta\x02""ko\x03""kho\x02""yo\x03""tho\x02""so\x07""phinthu\x0b""lakkhangyao\x03""mai\x08""nikhahit\x08""yamakkan\x07""fongman\x0a""angkhankhu\x06""khomut\x03""aae\x02""no\x03""nng\x03""jny\x03""nyj\x04""nndd\x02""nd\x02""mb\x02""lo\x02""om\x04""half\x02""am\x02""ue\x03""uue\x03""yar\x02""ao\x04""tsha\x04""dzha\x03""zha\x02""-a\x04""kssa\x0a""fixed-form\x03""kka\x04""rjes\x04""rnam\x03""sna\x03""lci\x04""mchu\x03""gru\x02""ei\x02""ay\x04""rdel\x02""hn\x02""hm\x04""nnya\x04""shan\x03""mon\x04""tall\x04""asat\x06""medial\x05""great\x03""ssh\x03""kss\x07""western\x07""eastern\x05""rumai\x03""rae\x04""char\x04""jhan\x03""hae\x02""he\x03""hie\x03""har\x03""hoe\x03""ban\x03""gan\x03""don\x03""vin\x03""zen\x03""tan\x03""kan\x03""las\x03""man\x03""nar\x03""par\x04""zhar\x03""tar\x04""phar\x04""khar\x04""ghan\x03""qar\x04""shin\x04""chin\x03""can\x03""jil\x03""cil\x03""xan\x02""fi\x02""yn\x05""elifi\x08""georgian\x03""nny\x06""filler\x02""eo\x03""yeo\x02""ye\x03""wae\x03""weo\x02""wi\x02""eu\x03""a-o\x03""a-u\x04""ya-o\x05""ya-yo\x04""eo-o\x04""eo-u\x05""eo-eu\x05""yeo-o\x05""yeo-u\x04""o-eo\x03""o-e\x04""o-ye\x03""o-o\x03""o-u\x05""yo-ya\x06""y
o-yae\x06""yo-yeo\x04""yo-o\x04""yo-i\x03""u-a\x04""u-ae\x07""u-eo-eu\x04""u-ye\x03""u-u\x04""yu-a\x05""yu-eo\x04""yu-e\x06""yu-yeo\x05""yu-ye\x04""yu-u\x04""yu-i\x04""eu-u\x05""eu-eu\x04""yi-u\x03""i-a\x04""i-ya\x03""i-o\x03""i-u\x04""i-eu\x07""i-araea\x05""araea\x08""araea-eo\x02""xh\x0c""nieun-tikeut\x0a""nieun-sios\x0d""nieun-pansios\x0d""nieun-thieuth\x0d""tikeut-kiyeok\x0c""tikeut-rieul\x11""rieul-kiyeok-sios\x0b""rieul-nieun\x0c""rieul-tikeut\x12""rieul-tikeut-hieuh\x0b""hieuh-mieum\x0b""hieuh-pieup\x0b""yeorinhieuh\x02""gg\x03""laa\x02""jj\x02""nb\x02""dg\x02""rn\x02""mn\x02""bg\x02""bn\x03""sza\x02""bs\x03""bsg\x03""bst\x03""bsb\x03""bss\x03""bsj\x02""bj\x02""bc\x02""bt\x02""bp\x03""bbn\x02""sg\x02""sn\x02""sd\x02""sr\x02""sm\x02""sb\x03""sbg\x03""sss\x02""sj\x02""sc\x02""sk\x04""shee\x03""she\x04""shwa\x03""qoa\x03""qha\x03""qhu\x02""ck\x04""qhee\x03""qhe\x02""pb\x02""pn\x04""qhwi\x05""qhwaa\x05""qhwee\x04""qhwe\x04""u-eo\x03""u-i\x02""gs\x02""nh\x02""lg\x02""lm\x02""lb\x02""lt\x02""lp\x02""lh\x02""gl\x03""gsg\x02""ns\x02""nz\x02""nt\x02""tl\x03""lgs\x02""ln\x02""ld\x03""lth\x03""lmg\x03""lms\x03""lbs\x03""lbh\x03""rnp\x03""lss\x02""lk\x02""lq\x02""mg\x02""ml\x02""ms\x03""mss\x02""mz\x02""mc\x02""mh\x02""bl\x02""sl\x02""hl\x02""hb\x03""ddi\x04""ddaa\x04""ddee\x03""dde\x03""ddo\x04""ddwa\x02""hu\x02""hi\x03""haa\x03""hee\x02""ho\x03""jwa\x02""lu\x02""li\x03""lee\x02""le\x03""lwa\x03""hha\x03""hhu\x03""hhi\x04""hhaa\x04""hhee\x03""hhe\x03""hho\x04""hhwa\x02""mu\x02""mi\x03""maa\x03""mee\x02""me\x02""mo\x03""mwa\x03""szu\x03""szi\x04""szaa\x04""szee\x03""sze\x03""szo\x04""szwa\x02""ru\x02""ri\x03""raa\x03""ree\x02""re\x02""ro\x03""rwa\x02""su\x02""si\x03""saa\x03""see\x02""se\x03""swa\x03""shu\x03""shi\x04""shaa\x02""qu\x02""qi\x03""qaa\x03""qee\x02""qe\x02""qo\x04""tzoa\x03""qwa\x02""fu\x03""qwi\x04""qwaa\x04""qwee\x03""qwe\x02""fo\x03""fwa\x03""qhi\x04""qhaa\x03""qho\x03""pwa\x04""qhwa\x03""mya\x02""bu\x02""bi\x03""baa\x03""bee\x02""be\x02""bo\x03""bwa\x02"
"vu\x02""vi\x03""vaa\x03""vee\x02""ve\x02""vo\x03""vwa\x02""tu\x03""taa\x03""tee\x02""te\x02""to\x03""twa\x02""cu\x02""ci\x03""caa\x03""cee\x02""ce\x02""co\x03""cwa\x02""xa\x02""xu\x02""xi\x03""xaa\x03""xee\x02""xe\x02""xo\x03""bwe\x03""xwa\x03""fwi\x03""xwi\x04""xwaa\x04""xwee\x03""xwe\x04""pwee\x03""pwe\x02""nu\x02""ni\x03""nee\x02""ne\x03""nwa\x03""nyu\x03""nyi\x04""nyaa\x04""nyee\x03""nye\x03""nyo\x04""nywa\x02""ku\x02""ki\x03""kaa\x03""kee\x02""ke\x03""kwa\x03""kwi\x04""kwaa\x04""kwee\x03""kwe\x03""kxa\x03""kxu\x03""kxi\x04""kxaa\x04""kxee\x03""kxe\x03""kxo\x03""hna\x04""kxwa\x04""kxwi\x05""kxwaa\x05""kxwee\x04""kxwe\x03""qua\x03""que\x02""wu\x03""waa\x03""wee\x02""wo\x02""sv\x02""di\x02""zu\x02""zi\x03""zaa\x03""zee\x02""zo\x03""zwa\x03""zhu\x03""zhi\x04""zhaa\x04""zhee\x03""zhe\x03""zho\x04""zhwa\x03""yaa\x03""yee\x02""du\x03""daa\x03""dee\x02""do\x03""dwa\x03""ddu\x02""ju\x02""ji\x03""jaa\x03""jee\x02""je\x02""jo\x02""gu\x02""gi\x03""gaa\x03""gee\x02""ge\x02""go\x03""gwa\x03""gwi\x04""gwaa\x04""gwee\x03""gwe\x03""ggu\x03""ggi\x04""ggaa\x04""ggee\x03""gge\x03""ggo\x03""thu\x03""thi\x04""thaa\x04""thee\x03""the\x04""thwa\x03""chu\x03""chi\x04""chaa\x04""chee\x03""che\x03""cho\x04""chwa\x03""phu\x03""phi\x04""phaa\x04""phee\x03""phe\x03""pho\x04""phwa\x03""tsu\x03""tsi\x04""tsaa\x04""tsee\x03""tse\x03""tso\x04""tswa\x03""tza\x03""tzu\x03""tzi\x04""tzaa\x04""tzee\x03""tze\x03""tzo\x03""faa\x03""fee\x02""fe\x02""pu\x02""pi\x03""paa\x03""pee\x02""pe\x02""po\x03""rya\x03""fya\x02""gv\x02""lv\x03""nah\x02""nv\x03""qui\x03""quo\x03""quu\x03""quv\x02""dv\x03""dla\x03""tla\x03""tle\x03""tli\x03""tlo\x03""tlu\x03""tlv\x03""tsv\x02""wv\x02""yv\x03""aai\x03""wii\x03""woo\x04""paai\x03""pii\x03""poo\x03""pwi\x04""pwii\x03""pwo\x04""pwoo\x04""pwaa\x04""taai\x03""tii\x03""too\x03""twe\x03""twi\x04""twii\x03""two\x04""twoo\x04""twaa\x03""tte\x03""tti\x03""tto\x04""kaai\x03""kii\x03""koo\x04""kwii\x03""kwo\x04""kwoo\x02""kw\x03""keh\x03""kih\x03""koh\x03""kah\x04""caai\x03""ci
i\x03""coo\x03""cwe\x03""cwi\x04""cwii\x03""cwo\x04""cwoo\x04""cwaa\x04""maai\x03""mii\x03""moo\x03""mwe\x03""mwi\x04""mwii\x03""mwo\x04""mwoo\x04""mwaa\x04""naai\x03""nii\x03""noo\x03""nwe\x04""nwaa\x04""laai\x03""lii\x03""loo\x03""lwe\x03""lwi\x04""lwii\x03""lwo\x04""lwoo\x04""lwaa\x04""saai\x03""sii\x03""soo\x03""swe\x03""swi\x04""swii\x03""swo\x04""swoo\x04""swaa\x02""sw\x03""skw\x04""spwa\x04""stwa\x04""skwa\x04""scwa\x04""shii\x04""shoo\x04""shwe\x04""shwi\x05""shwii\x04""shwo\x05""shwoo\x05""shwaa\x04""yaai\x03""yii\x03""yoo\x03""ywe\x03""ywi\x04""ywii\x03""ywo\x04""ywoo\x03""ywa\x04""ywaa\x04""raai\x03""rii\x03""roo\x04""rwaa\x04""faai\x03""fii\x03""foo\x04""fwaa\x04""thii\x04""thoo\x05""thwaa\x04""tthe\x04""tthi\x04""ttho\x03""tye\x03""tyi\x03""tyo\x03""tya\x03""hii\x03""hoo\x02""hk\x04""qaai\x03""qii\x03""qoo\x04""tlhe\x04""tlhi\x04""tlho\x04""tlha\x05""ngaai\x03""ngi\x04""ngii\x03""ngo\x04""ngoo\x04""ngaa\x03""lhi\x04""lhii\x03""lho\x04""lhoo\x04""lhaa\x03""ghu\x03""gho\x04""ghee\x03""ghi\x03""hwu\x03""hwo\x03""hwe\x04""hwee\x03""hwi\x03""hwa\x03""ttu\x04""ttee\x03""khu\x03""khe\x04""khee\x03""khi\x03""kku\x03""kko\x03""kke\x04""kkee\x03""kki\x02""kk\x03""jju\x03""jjo\x03""jje\x04""jjee\x03""jji\x03""dlu\x03""dlo\x03""dle\x04""dlee\x03""dli\x03""lhu\x03""lhe\x04""lhee\x04""tlhu\x05""tlhee\x04""tlee\x03""dzu\x03""dzo\x03""dze\x04""dzee\x03""dzi\x04""ttsu\x04""ttso\x04""ttse\x05""ttsee\x04""ttsi\x04""ttsa\x03""qai\x04""ngai\x04""nngi\x05""nngii\x04""nngo\x05""nngoo\x04""nnga\x05""nngaa\x03""sso\x02""ac\x03""ear\x03""ior\x08""boundary\x03""ang\x03""zra\x04""todo\x04""sibe\x06""manchu\x02""uk\x03""uuv\x02""ry\x03""ryy\x02""ly\x03""lyy\x02""ua\x02""kr\x03""yan\x09""mukphreng\x09""kemphreng\x04""sa-i\x02""eh\x03""aue\x05""tone-\x03""kva\x03""xva\x05""vowel\x03""aay\x02""uy\x03""oay\x03""uey\x02""iy\x05""final\x03""lae\x04""laev\x04""ngka\x03""mpa\x03""nra\x04""nyca\x03""ulu\x05""cecek\x06""surang\x05""bisah\x05""akara\x05""ikara\x05""ukara\x05""ekara\x06""aikar
a\x05""okara\x07""rerekan\x06""tedung\x04""suku\x06""taling\x05""pepet\x04""khot\x04""tzir\x02""ef\x03""zal\x06""asyura\x08""panyecek\x09""panglayar\x09""pangwisad\x09""pamingkal\x08""panyakra\x07""panyiku\x08""panghulu\x07""panyuku\x0a""panaelaeng\x08""panolong\x07""pamepet\x0a""paneuleung\x07""pamaaeh\x03""sya\x03""kla\x03""gla\x03""pla\x03""fla\x03""bla\x03""mla\x03""hla\x07""nyin-do\x04""kang\x03""ran\x02""at\x02""ag\x02""al\x03""aak\x03""aaj\x03""aam\x03""aaw\x02""is\x02""ih\x03""iny\x02""ir\x02""uc\x02""ud\x03""unn\x02""ep\x03""edd\x03""err\x03""ott\x02""ob\x02""ov\x02""oh\x07""capital\x04""open\x08""sideways\x03""top\x06""bottom\x06""voiced\x06""turned\x05""alpha\x05""schwa\x03""eng\x04""beta\x05""greek\x05""delta\x05""gamma\x03""rho\x08""cyrillic\x07""insular\x04""iota\x07""upsilon\x03""esh\x03""ezh\x03""eth\x08""reversed\x07""dotless\x06""script\x06""barred\x05""theta\x09""flattened\x02""av\x02""zr\x02""jy\x02""cy\x0c""middle-welsh\x07""epsilon\x03""eta\x07""omicron\x05""omega\x03"" ha\x03"" ga\x03"" zi\x04"" pai\x05"" yong\x05"" bing\x03""tie\x02""et\x03"" xi\x06"" zheng\x06"" chong\x05"" ping\x05"" shan\x06""shapes\x05"" xian\x04"" qia\x05"" jiao\x04"" jue\x04"" hui\x03"" li\x03"" mo\x04"" jin\x05"" zhuo\x04"" shu\x03"" ji\x03"" lu\x03"" le\x04"" you\x04"" sui\x04"" lan\x05"" peng\x03"" bi\x04"" nen\x04"" xia\x04"" zao\x03"" ti\x04"" jie\x04"" nao\x04"" shi\x04"" hua\x05"" lian\x05"" jian\x05"" beng\x06"" jiang\x05"" xing\x04"" bie\x04"" zai\x05"" chou\x04"" sou\x05"" niao\x04"" die\x06"" huang\x04"" dun\x03"" yi\x04"" tuo\x05"" jing\x04"" dai\x04"" cha\x04"" fen\x02""pp\x04"" wan\x04"" sao\x04"" xiu\x04"" gao\x04"" xue\x05"" weng\x03""ecu\x02""cl\x02""cr\x02""ff\x03""mil\x03""pts\x02""dr\x03"" fu\x04"" kou\x04"" chu\x04"" zhe\x03""iii\x02""iv\x03""vii\x04""viii\x02""ix\x03""xii\x05"" tian\x04"" suo\x04"" she\x06"" zhuan\x05"" tang\x06"" zhuai\x04"" yao\x03"" tu\x03"" mi\x05"" zhen\x04"" xie\x04"" lei\x04"" gai\x05"" juan\x05""above\x04"" qiu\x05"" 
ding\x04"" que\x03""and\x03"" ao\x04"" mei\x03"" ge\x04""with\x03"" qu\x04"" hou\x03""azu\x04""buky\x04""vede\x07""glagoli\x05""dobro\x05""yestu\x07""zhivete\x05""dzelo\x06""zemlja\x04""izhe\x07""initial\x06""djervi\x04""kako\x07""ljudije\x07""myslite\x05""nashi\x03""onu\x06""pokoji\x05""ritsi\x05""slovo\x06""tvrido\x03""uku\x05""fritu\x04""heru\x03""otu\x04""shta\x06""chrivi\x04""yeru\x04""yeri\x04""yati\x07""spidery\x03""yus\x07""iotated\x03""big\x04""fita\x07""izhitsa\x07""shtapic\x0a""trokutasti\x08""latinate\x08""tailless\x04""alfa\x04""vida\x05""dalda\x03""eie\x03""sou\x04""zata\x04""hate\x06""thethe\x05""iauda\x04""kapa\x05""laula\x03""ksi\x04""sima\x03""tau\x03""psi\x03""oou\x09""dialect-p\x03""old\x0d""cryptogrammic\x07""crossed\x08""akhmimic\x08""l-shaped\x03""yab\x04""yabh\x03""yag\x05""yaghh\x06""berber\x03""yaj\x03""yad\x04""yadh\x04""yadd\x05""yaddh\x03""yey\x03""yaf\x03""yak\x06""tuareg\x05""yakhh\x03""yah\x04""yahh\x04""yakh\x03""yaq\x04""yazh\x07""ahaggar\x03""yal\x03""yam\x03""yap\x04""yarr\x04""yagh\x04""ayer\x03""yas\x04""yass\x04""yash\x03""yat\x04""yath\x04""yach\x04""yatt\x03""yav\x03""yaw\x03""yay\x03""yaz\x0a""tawellemet\x04""yazz\x0d""labialization\x03""loa\x03""moa\x03""roa\x03""soa\x04""shoa\x03""boa\x03""toa\x03""coa\x03""noa\x04""nyoa\x03""zoa\x03""doa\x04""ddoa\x03""joa\x04""thoa\x04""choa\x04""phoa\x03""poa\x04""ggwa\x04""ggwi\x05""ggwee\x04""ggwe\x03""ssu\x03""ssi\x04""ssaa\x04""ssee\x03""sse\x03""cca\x03""ccu\x03""cci\x04""ccaa\x04""ccee\x03""cce\x03""cco\x03""zza\x03""zzu\x03""zzi\x04""zzaa\x04""zzee\x03""zze\x03""zzo\x04""ccha\x04""cchu\x04""cchi\x05""cchaa\x05""cchee\x04""cche\x04""ccho\x03""qya\x03""qyu\x03""qyi\x04""qyaa\x04""qyee\x03""qye\x03""qyo\x03""kya\x03""kyu\x03""kyi\x04""kyaa\x04""kyee\x03""kye\x03""kyo\x03""xya\x03""xyu\x03""xyi\x04""xyaa\x04""xyee\x03""xye\x03""xyo\x03""gya\x03""gyu\x03""gyi\x04""gyaa\x04""gyee\x03""gye\x03""gyo\x02""er\x02""es\x05""shcha\x05""es-te\x05""djerv\x09""monograph\x08""iotified\x06""little
\x04""full\x08""surround\x08""overlaid\x02""gn\x06""kiyeok\x0b""ssangkiyeok\x0b""kiyeok-sios\x05""nieun\x0b""nieun-cieuc\x0b""nieun-hieuh\x06""tikeut\x0b""ssangtikeut\x05""rieul\x0c""rieul-kiyeok\x0b""rieul-mieum\x0b""rieul-pieup\x0a""rieul-sios\x0d""rieul-thieuth\x0d""rieul-phieuph\x0b""rieul-hieuh\x05""mieum\x05""pieup\x0a""ssangpieup\x0a""pieup-sios\x04""sios\x09""ssangsios\x05""ieung\x05""cieuc\x0a""ssangcieuc\x07""chieuch\x07""khieukh\x07""thieuth\x07""phieuph\x05""hieuh\x0a""ssangnieun\x10""rieul-pieup-sios\x0d""rieul-pansios\x11""rieul-yeorinhieuh\x0b""mieum-pieup\x0a""mieum-sios\x0d""mieum-pansios\x0d""kapyeounmieum\x0c""pieup-kiyeok\x0c""pieup-tikeut\x11""pieup-sios-kiyeok\x11""pieup-sios-tikeut\x0b""pieup-cieuc\x0d""pieup-thieuth\x0d""kapyeounpieup\x12""kapyeounssangpieup\x0b""sios-kiyeok\x0a""sios-nieun\x0b""sios-tikeut\x0a""sios-pieup\x0a""sios-cieuc\x07""pansios\x0a""ssangieung\x08""yesieung\x0d""yesieung-sios\x10""yesieung-pansios\x0f""kapyeounphieuph\x0a""ssanghieuh\x06""araeae\x03""enn\x03""onn\x03""ann\x03""inn\x02""im\x03""ngg\x04""ainn\x04""aunn\x03""ong\x04""innn\x05""ojeon\x06""chamko\x05""jueui\x04"" kua\x03"" wu\x04"" yin\x03"" si\x03"" ye\x04"" nuo\x03"" xu\x06"" xiong\x04"" liu\x04"" lin\x06"" xiang\x04"" xin\x04"" pan\x03"" ma\x05"" qian\x06"" zhong\x02"" n\x06"" cheng\x05"" fang\x04"" zuo\x05"" zhou\x05"" dong\x03"" su\x06"" jiong\x05"" wang\x04"" zhu\x05"" long\x05"" ying\x05"" miao\x03"" yu\x04"" luo\x05"" chai\x04"" hun\x04"" rao\x04"" han\x04"" tai\x03"" ai\x04"" jun\x02"" l\x05"" xiao\x05"" tiao\x04"" zha\x03"" ku\x03"" er\x05"" nang\x03"" qi\x04"" chi\x03"" mu\x03"" se\x06"" qiong\x03"" sa\x03"" pu\x03"" ta\x03"" ou\x05"" mian\x04"" wen\x05"" diao\x04"" mie\x05"" quan\x04"" cai\x06"" liang\x03"" gu\x04"" mao\x04"" gua\x04"" man\x05"" chui\x05"" huan\x05"" gong\x04"" nan\x05"" dian\x04"" yan\x03"" ci\x05"" lang\x03"" he\x04"" tou\x05"" pian\x02"" e\x04"" qie\x04"" rui\x05"" chan\x04"" dan\x04"" duo\x04"" fei\x05"" bang\x03"" ba\x05"" 
kuai\x05"" shen\x03"" pi\x05"" yang\x04"" bei\x04"" che\x05"" suan\x05"" heng\x04"" gui\x04"" lou\x04"" sun\x04"" zou\x04"" zhi\x04"" jia\x03"" hu\x03"" la\x03"" ke\x04"" wei\x05"" zhao\x04"" kui\x04"" fan\x06"" zhang\x05"" song\x04"" nei\x05"" chen\x04"" guo\x03"" ng\x03"" fa\x04"" hao\x04"" pou\x05"" hong\x04"" tun\x03"" bo\x04"" nie\x04"" wai\x05"" shou\x05"" ling\x04"" lun\x05"" chun\x04"" rou\x03"" ze\x06"" sheng\x04"" bai\x04"" gou\x03"" na\x03"" cu\x04"" kuo\x04"" lao\x04"" huo\x04"" sai\x05"" rong\x03"" ju\x04"" pao\x04"" can\x05"" nian\x05"" xuan\x04"" qin\x03"" bu\x05"" zang\x05"" mang\x04"" dui\x04"" bao\x06"" chang\x04"" gun\x05"" liao\x03"" da\x05"" meng\x05"" qiao\x05"" rang\x04"" yun\x04"" tao\x04"" lai\x04"" ban\x05"" chuo\x03"" nu\x04"" ran\x04"" sha\x04"" dou\x03"" po\x05"" tong\x06"" qiang\x04"" xun\x05"" pang\x04"" cao\x03"" an\x04"" mai\x04"" yue\x05"" huai\x04"" zan\x04"" hai\x05"" luan\x05"" ning\x03"" ya\x05"" ming\x04"" zui\x04"" cui\x03"" de\x05"" bian\x04"" nou\x04"" tui\x05"" zhan\x04"" cen\x04"" min\x03"" zu\x03"" ni\x04"" cuo\x04"" pei\x05"" gang\x05"" yuan\x05"" biao\x04"" dao\x04"" jiu\x04"" run\x03"" wo\x05"" cuan\x04"" ren\x04"" kai\x04"" men\x07"" chuang\x05"" feng\x05"" zhai\x03"" di\x04"" ben\x05"" zong\x05"" ceng\x05"" hang\x04"" nin\x05"" kong\x04"" lie\x06"" kuang\x04"" san\x03"" te\x05"" shun\x03"" ce\x04"" ang\x03"" ru\x07"" shuang\x05"" guai\x03"" wa\x05"" shai\x05"" tuan\x05"" piao\x04"" kun\x04"" qun\x06"" chuai\x05"" shao\x05"" duan\x04"" gen\x06"" guang\x04"" cou\x05"" nuan\x05"" reng\x04"" mou\x04"" nai\x05"" guan\x04"" hen\x06"" chuan\x05"" kuan\x05"" qing\x04"" pin\x05"" kang\x03"" du\x05"" neng\x04"" tan\x05"" cang\x05"" chao\x05"" nong\x04"" kan\x04"" ken\x05"" ting\x04"" gan\x04"" niu\x05"" ruan\x05"" cong\x05"" zeng\x05"" shui\x05"" geng\x05"" shuo\x05"" zuan\x05"" zhui\x03"" en\x05"" leng\x04"" cun\x03"" ne\x04"" bin\x04"" ruo\x04"" kao\x05"" dang\x05"" teng\x03"" ri\x05"" deng\x03"" za\x06"" niang\x03"" 
ca\x05"" sang\x05"" keng\x06"" shuai\x04"" pie\x04"" tie\x06"" shuan\x05"" chua\x04"" zen\x06"" shang\x03"" pa\x04"" fou\x04"" diu\x03"" fo\x03"" ka\x04"" lia\x04"" zun\x05"" seng\x05"" zhun\x06"" zhuen\x05"" shua\x02"" a\x04"" pen\x02"" m\x04"" gem\x03"" yo\x03"" re\x04"" dia\x04""inch\x06""gallon\x04""giga\x06""guinea\x08""kilogram\x08""kilowatt\x07""gramton\x06""koruna\x08""shilling\x05""dozen\x04""desi\x06""dollar\x07""percent\x08""building\x05""farad\x05""franc\x07""hectare\x04""peso\x07""pfennig\x05""point\x03""hon\x05""micro\x04""mile\x04""mark\x06""micron\x05""rupee\x05""ruble\x03""rem\x08""roentgen\x05""meiji\x02""gb\x03""cal\x02""pf\x09""microgram\x02""hz\x03""khz\x02""mm\x03""ms2\x03""kpa\x03""gpa\x05""rads2\x02""mv\x02""nw\x02""cc\x02""cd\x02""gy\x03""mol\x07"" zhuang\x04"" zei\x02"" t\x05"" zhua\x04"" sen\x04"" hei\x04"" hal\x06"" ppwun\x04"" nay\x04"" yai\x06"" sasou\x04"" kes\x05"" saai\x05"" haai\x03"" so\x07"" akutsu\x05"" gake\x05"" gomi\x04"" ama\x04"" sho\x04"" ten\x04"" gei\x03"" ki\x04"" lue\x04"" miu\x05"" moku\x06"" tochi\x06"" kasei\x07"" kunugi\x06"" hazou\x08"" katsura\x05"" tamo\x0a"" shitamizu\x07"" shibui\x05"" tani\x05"" suei\x05"" diou\x08"" oozutsu\x0d"" tsumekanmuri\x04"" swu\x0c"" deshiguramu\x0b"" miriguramu\x0b"" hekutogura\x07"" tatamu\x04"" nue\x07"" utsubo\x02"" o\x04"" sik\x07"" sasara\x05"" yana\x03""bup\x05"" hata\x03""pap\x04""purx\x05"" kuji\x08"" shinshi\x04""nbap\x05"" kume\x04""nbyx\x09"" nukamiso\x03"" ro\x04""hmyx\x05""hmyrx\x07"" sukumo\x06"" kouji\x05"" kinu\x05"" wata\x04"" sok\x05"" kase\x06"" yingl\x07"" kasuri\x05"" nawa\x07"" odoshi\x05"" horo\x04"" sem\x05"" jung\x03"" un\x04""zzyr\x08"" kaakeru\x04""ssyt\x04""zhux\x09"" yashinau\x03""jyt\x03""qie\x04""njup\x04""nyuo\x08"" shikato\x03""xie\x0a"" 
tsuraneru\x03""een\x04""ween\x04""bhee\x04""mbee\x04""kpee\x05""mgbee\x04""gbee\x04""dhee\x05""dhhee\x04""ndee\x04""njee\x05""nggee\x03""hin\x03""win\x03""bhi\x03""mbi\x03""kpi\x04""mgbi\x03""gbi\x03""dhi\x04""dhhi\x03""ndi\x03""nji\x04""nggi\x04""ngan\x03""han\x03""wan\x03""mba\x04""kpan\x04""mgba\x04""dhha\x03""nda\x03""nja\x04""ngga\x03""oon\x04""woon\x04""bhoo\x03""boo\x04""mboo\x04""kpoo\x05""mgboo\x04""gboo\x03""voo\x04""dhoo\x05""dhhoo\x03""doo\x04""ndoo\x03""zoo\x04""zhoo\x03""joo\x04""njoo\x05""nggoo\x03""goo\x04""nyoo\x03""hun\x03""wun\x03""bhu\x03""mbu\x03""kpu\x04""mgbu\x03""gbu\x03""dhu\x04""dhhu\x03""ndu\x03""nju\x04""nggu\x04""ngon\x03""won\x03""bho\x03""mbo\x03""kpo\x04""mgbo\x03""gbo\x04""gbon\x03""dho\x04""dhho\x03""ndo\x03""njo\x04""nggo\x04""ngen\x03""hen\x03""wen\x03""bhe\x03""mbe\x03""kpe\x04""kpen\x04""mgbe\x03""gbe\x04""gben\x03""dhe\x04""dhhe\x03""nde\x04""ngge\x05""nggen\x03""gen\x0a""lengthener\x05""ndole\x06""zemlya\x05""broad\x07""neutral\x06""closed\x07""blended\x04""soft\x09""monocular\x09""binocular\x06""double\x0b""multiocular\x03""dwe\x04""dzwe\x04""zhwe\x04""dzze\x04""tswe\x04""tsse\x04""tche\x07""chinese\x06""dotted\x09""left-stem\x05""lower\x08""inverted\x06""stress\x0d""egyptological\x04""heng\x02""tz\x08""tresillo\x09""cuatrillo\x06""broken\x03""rum\x02""vy\x0a""visigothic\x05""thorn\x04""vend\x03""con\x02""us\x03""dum\x03""lum\x03""mum\x03""num\x03""tum\x02""um\x0a""circumflex\x05""colon\x06""equals\x08""saltillo\x08""dvisvara\x07""hasanta\x03""jho\x04""ddho\x03""rro\x09""alternate\x09""voiceless\x09""aspirated\x05""haaru\x03""hta\x04""shya\x04""nyja\x02""ea\x04""ngue\x04""chha\x04""nhue\x03""nha\x04""nhja\x03""nue\x03""ppa\x03""mue\x0b"" obiyaakasu\x04"" noy\x05"" tara\x07"" yadoru\x07"" hesaki\x04""gyon\x05"" sori\x07"" yofune\x05"" susa\x06"" usagi\x04"" nuc\x0b"" kutabireru\x05"" yaji\x07"" sonoko\x04"" hie\x04""nyan\x05"" hagi\x04"" ebi\x09"" kamakiri\x03""dab\x0a"" kamishimo\x05"" yuki\x04"" ena\x06"" hitoe\x08"" 
chihaya\x07"" tasuki\x08"" yasashi\x03""ren\x03""roe\x07"" segare\x06"" nerau\x07"" utsuke\x03""rim\x09"" shitsuke\x07"" yagate\x07"" suberu\x04"" sip\x03"" ip\x07"" totemo\x04"" kep\x05"" sako\x07"" appare\x06"" otoko\x0b"" sakenomoto\x09"" ishiyumi\x07"" habaki\x06"" irori\x06"" ngaak\x08"" kasugai\x06"" pyeng\x04""byun\x07"" kazari\x05"" yari\x05"" yuru\x07"" phwung\x04""song\x05"" tomo\x07"" kohaze\x03"" on\x07"" oroshi\x05"" shuu\x04"" eri\x07"" namazu\x05"" todo\x07"" kajika\x03""yon\x05"" bora\x05"" mate\x05"" gori\x05"" ugui\x06"" asari\x0a"" subashiri\x09"" kazunoko\x07"" shachi\x06"" dojou\x08"" sukesou\x08"" muroaji\x07"" haraka\x02"" z\x09"" hatahata\x04"" eso\x05"" kyou\x07"" shiira\x06"" mutsu\x04"" nio\x05"" yiao\x06"" shigi\x08"" chidori\x05"" toki\x08"" ikaruga\x07"" kakesu\x06"" isuka\x0c"" kikuitadaki\x08"" tsugumi\x04""jjog\x04""jjon\x04""jjol\x04""jjom\x04""jjob\x04""jjos\x05""jjong\x04""jjoc\x04""jjwa\x05""jjwag\x05""jjwal\x06""jjwass\x05""jjwae\x02""it\x02""ip\x03""iet\x03""iex\x03""iep\x02""ax\x02""ap\x03""uox\x02""uo\x03""uop\x02""ox\x02""op\x02""ex\x03""bit\x03""bix\x03""bip\x04""biet\x04""biex\x03""bie\x04""biep\x03""bat\x03""bax\x03""bap\x04""buox\x03""buo\x04""buop\x03""bot\x03""box\x03""bop\x03""bex\x03""bep\x03""but\x03""bux\x04""burx\x03""bur\x03""byt\x03""byx\x02""by\x03""byp\x04""byrx\x03""byr\x03""pit\x03""pix\x03""pip\x04""piex\x03""pie\x04""piep\x03""pat\x03""pax\x04""puox\x03""puo\x04""puop\x03""pot\x03""pox\x03""pop\x03""put\x03""pux\x03""pup\x03""pur\x03""pyt\x03""pyx\x02""py\x03""pyp\x04""pyrx\x03""pyr\x04""bbit\x04""bbix\x03""bbi\x04""bbip\x05""bbiet\x05""bbiex\x04""bbie\x05""bbiep\x04""bbat\x04""bbax\x04""bbap\x05""bbuox\x04""bbuo\x05""bbuop\x04""bbot\x04""bbox\x03""bbo\x04""bbop\x04""bbex\x03""bbe\x04""bbep\x04""bbut\x04""bbux\x03""bbu\x04""bbup\x05""bburx\x04""bbur\x04""bbyt\x04""bbyx\x03""bby\x04""bbyp\x04""nbit\x04""nbix\x03""nbi\x04""nbip\x05""nbiex\x04""nbie\x05""nbiep\x04""nbat\x04""nbax\x03""nba\x04""nbot\x04""nbox\
x03""nbo\x04""nbop\x04""nbut\x04""nbux\x03""nbu\x04""nbup\x05""nburx\x04""nbur\x04""nbyt\x03""nby\x04""nbyp\x05""nbyrx\x04""nbyr\x04""hmit\x04""hmix\x03""hmi\x04""hmip\x05""hmiex\x04""hmie\x05""hmiep\x04""hmat\x04""hmax\x03""hma\x04""hmap\x05""hmuox\x04""hmuo\x05""hmuop\x04""hmot\x04""hmox\x03""hmo\x04""hmop\x04""hmut\x04""hmux\x03""hmu\x04""hmup\x05""hmurx\x04""hmur\x03""hmy\x04""hmyp\x04""hmyr\x03""mit\x03""mix\x03""mip\x04""miex\x03""mie\x04""miep\x03""mat\x03""max\x03""map\x04""muot\x04""muox\x03""muo\x04""muop\x03""mot\x03""mox\x03""mop\x03""mex\x03""mut\x03""mux\x03""mup\x04""murx\x03""mur\x03""myt\x03""myx\x02""my\x03""myp\x03""fit\x03""fix\x03""fip\x03""fat\x03""fax\x03""fap\x03""fox\x03""fop\x03""fut\x03""fux\x03""fup\x04""furx\x03""fur\x03""fyt\x03""fyx\x02""fy\x03""fyp\x03""vit\x03""vix\x03""vip\x04""viet\x04""viex\x03""vie\x04""viep\x03""vat\x03""vax\x03""vap\x03""vot\x03""vox\x03""vop\x03""vex\x03""vep\x03""vut\x03""vux\x03""vup\x04""vurx\x03""vur\x03""vyt\x03""vyx\x03""vyp\x04""vyrx\x03""vyr\x03""dit\x03""dix\x03""dip\x04""diex\x03""die\x04""diep\x03""dat\x03""dax\x03""dap\x04""duox\x03""duo\x03""dox\x03""dop\x03""dex\x03""dep\x03""dut\x03""dux\x03""dup\x04""durx\x03""dur\x03""tit\x03""tix\x03""tip\x04""tiex\x04""tiep\x03""tat\x03""tax\x03""tap\x04""tuot\x04""tuox\x03""tuo\x04""tuop\x03""tot\x03""tox\x03""tex\x03""tep\x03""tut\x03""tux\x03""tup\x04""turx\x03""tur\x04""ddit\x04""ddix\x04""ddip\x05""ddiex\x04""ddie\x05""ddiep\x04""ddat\x04""ddax\x04""ddap\x05""dduox\x04""dduo\x05""dduop\x04""ddot\x04""ddox\x04""ddop\x04""ddex\x04""ddep\x04""ddut\x04""ddux\x04""ddup\x05""ddurx\x04""ddur\x04""ndit\x04""ndix\x04""ndip\x05""ndiex\x04""ndie\x04""ndat\x04""ndax\x04""ndap\x04""ndot\x04""ndox\x04""ndop\x04""ndex\x04""ndep\x04""ndut\x04""ndux\x04""ndup\x05""ndurx\x04""ndur\x04""hnit\x04""hnix\x03""hni\x04""hnip\x05""hniet\x05""hniex\x04""hnie\x05""hniep\x04""hnat\x04""hnax\x04""hnap\x05""hnuox\x04""hnuo\x04""hnot\x04""hnox\x04""hnop\x04""hnex\x03""hne\x04""hnep\x
04""hnut\x03""nit\x03""nix\x03""nip\x04""niex\x03""nie\x04""niep\x03""nax\x03""nap\x04""nuox\x03""nuo\x04""nuop\x03""not\x03""nox\x03""nop\x03""nex\x03""nep\x03""nut\x03""nux\x03""nup\x04""nurx\x03""nur\x04""hlit\x04""hlix\x03""hli\x04""hlip\x05""hliex\x04""hlie\x05""hliep\x04""hlat\x04""hlax\x04""hlap\x05""hluox\x04""hluo\x05""hluop\x04""hlox\x03""hlo\x04""hlop\x04""hlex\x03""hle\x04""hlep\x04""hlut\x04""hlux\x03""hlu\x04""hlup\x05""hlurx\x04""hlur\x04""hlyt\x04""hlyx\x03""hly\x04""hlyp\x05""hlyrx\x04""hlyr\x03""lit\x03""lix\x03""lip\x04""liet\x04""liex\x03""lie\x04""liep\x03""lat\x03""lax\x03""lap\x04""luot\x04""luox\x03""luo\x04""luop\x03""lot\x03""lox\x03""lop\x03""lex\x03""lep\x03""lut\x03""lux\x03""lup\x04""lurx\x03""lur\x03""lyt\x03""lyx\x03""lyp\x04""lyrx\x03""lyr\x03""git\x03""gix\x03""gip\x04""giet\x04""giex\x03""gie\x04""giep\x03""gat\x03""gax\x03""gap\x04""guot\x04""guox\x03""guo\x04""guop\x03""got\x03""gox\x03""gop\x03""get\x03""gex\x03""gep\x03""gut\x03""gux\x03""gup\x04""gurx\x03""gur\x03""kit\x03""kix\x03""kip\x04""kiex\x03""kie\x04""kiep\x03""kat\x03""kax\x03""kap\x04""kuox\x03""kuo\x04""kuop\x03""kot\x03""kox\x03""kop\x03""ket\x03""kex\x03""kep\x03""kut\x03""kux\x03""kup\x04""kurx\x03""kur\x04""ggit\x04""ggix\x05""ggiex\x04""ggie\x05""ggiep\x04""ggat\x04""ggax\x04""ggap\x05""gguot\x05""gguox\x04""gguo\x05""gguop\x04""ggot\x04""ggox\x04""ggop\x04""gget\x04""ggex\x04""ggep\x04""ggut\x04""ggux\x04""ggup\x05""ggurx\x04""ggur\x05""mgiex\x04""mgie\x04""mgat\x04""mgax\x03""mga\x04""mgap\x05""mguox\x04""mguo\x05""mguop\x04""mgot\x04""mgox\x03""mgo\x04""mgop\x04""mgex\x03""mge\x04""mgep\x04""mgut\x04""mgux\x03""mgu\x04""mgup\x05""mgurx\x04""mgur\x04""hxit\x04""hxix\x03""hxi\x04""hxip\x05""hxiet\x05""hxiex\x04""hxie\x05""hxiep\x04""hxat\x04""hxax\x03""hxa\x04""hxap\x05""hxuot\x05""hxuox\x04""hxuo\x05""hxuop\x04""hxot\x04""hxox\x03""hxo\x04""hxop\x04""hxex\x03""hxe\x04""hxep\x05""ngiex\x04""ngie\x05""ngiep\x04""ngat\x04""ngax\x04""ngap\x05""nguot\x05""nguox\x
04""nguo\x04""ngot\x04""ngox\x04""ngop\x04""ngex\x03""nge\x04""ngep\x03""hit\x04""hiex\x03""hat\x03""hax\x03""hap\x04""huot\x04""huox\x03""huo\x04""huop\x03""hot\x03""hox\x03""hop\x03""hex\x03""hep\x03""wat\x03""wax\x03""wap\x04""wuox\x03""wuo\x04""wuop\x03""wox\x03""wop\x03""wex\x03""wep\x03""zit\x03""zix\x03""zip\x04""ziex\x03""zie\x04""ziep\x03""zat\x03""zax\x03""zap\x04""zuox\x03""zuo\x04""zuop\x03""zot\x03""zox\x03""zop\x03""zex\x03""zep\x03""zut\x03""zux\x03""zup\x04""zurx\x03""zur\x03""zyt\x03""zyx\x02""zy\x03""zyp\x04""zyrx\x03""zyr\x03""cit\x03""cix\x03""cip\x04""ciet\x04""ciex\x03""cie\x04""ciep\x03""cat\x03""cax\x03""cap\x04""cuox\x03""cuo\x04""cuop\x03""cot\x03""cox\x03""cop\x03""cex\x03""cep\x03""cut\x03""cux\x03""cup\x04""curx\x03""cur\x03""cyt\x03""cyx\x03""cyp\x04""cyrx\x03""cyr\x04""zzit\x04""zzix\x04""zzip\x05""zziet\x05""zziex\x04""zzie\x05""zziep\x04""zzat\x04""zzax\x04""zzap\x04""zzox\x04""zzop\x04""zzex\x04""zzep\x04""zzux\x04""zzup\x05""zzurx\x04""zzur\x04""zzyt\x04""zzyx\x03""zzy\x04""zzyp\x05""zzyrx\x04""nzit\x04""nzix\x03""nzi\x04""nzip\x05""nziex\x04""nzie\x05""nziep\x04""nzat\x04""nzax\x03""nza\x04""nzap\x05""nzuox\x04""nzuo\x04""nzox\x04""nzop\x04""nzex\x03""nze\x04""nzux\x03""nzu\x04""nzup\x05""nzurx\x04""nzur\x04""nzyt\x04""nzyx\x03""nzy\x04""nzyp\x05""nzyrx\x04""nzyr\x03""sit\x03""six\x03""sip\x04""siex\x03""sie\x04""siep\x03""sat\x03""sax\x03""sap\x04""suox\x03""suo\x04""suop\x03""sot\x03""sox\x03""sop\x03""sex\x03""sep\x03""sut\x03""sux\x03""sup\x04""surx\x03""sur\x03""syt\x03""syx\x02""sy\x03""syp\x04""syrx\x03""syr\x04""ssit\x04""ssix\x04""ssip\x05""ssiex\x04""ssie\x05""ssiep\x04""ssat\x04""ssax\x04""ssap\x04""ssot\x04""ssox\x04""ssop\x04""ssex\x04""ssep\x04""ssut\x04""ssux\x04""ssup\x04""ssyx\x03""ssy\x04""ssyp\x05""ssyrx\x04""ssyr\x04""zhat\x04""zhax\x04""zhap\x05""zhuox\x04""zhuo\x05""zhuop\x04""zhot\x04""zhox\x04""zhop\x04""zhet\x04""zhex\x04""zhep\x04""zhut\x04""zhup\x05""zhurx\x04""zhur\x04""zhyt\x04""zhyx\x03""zhy\x04""zhyp
\x05""zhyrx\x04""zhyr\x04""chat\x04""chax\x04""chap\x05""chuot\x05""chuox\x04""chuo\x05""chuop\x04""chot\x04""chox\x04""chop\x04""chet\x04""chex\x04""chep\x04""chux\x04""chup\x05""churx\x04""chur\x04""chyt\x04""chyx\x03""chy\x04""chyp\x05""chyrx\x04""chyr\x04""rrax\x05""rruox\x04""rruo\x04""rrot\x04""rrox\x04""rrop\x04""rret\x04""rrex\x03""rre\x04""rrep\x04""rrut\x04""rrux\x03""rru\x04""rrup\x05""rrurx\x04""rrur\x04""rryt\x04""rryx\x03""rry\x04""rryp\x05""rryrx\x04""rryr\x04""nrat\x04""nrax\x04""nrap\x04""nrox\x03""nro\x04""nrop\x04""nret\x04""nrex\x03""nre\x04""nrep\x04""nrut\x04""nrux\x03""nru\x04""nrup\x05""nrurx\x04""nrur\x04""nryt\x04""nryx\x03""nry\x04""nryp\x05""nryrx\x04""nryr\x04""shat\x04""shax\x04""shap\x05""shuox\x04""shuo\x05""shuop\x04""shot\x04""shox\x04""shop\x04""shet\x04""shex\x04""shep\x04""shut\x04""shux\x04""shup\x05""shurx\x04""shur\x04""shyt\x04""shyx\x03""shy\x04""shyp\x05""shyrx\x04""shyr\x03""rat\x03""rax\x03""rap\x04""ruox\x03""ruo\x04""ruop\x03""rot\x03""rox\x03""rop\x03""rex\x03""rep\x03""rut\x03""rux\x03""rup\x04""rurx\x03""rur\x03""ryt\x03""ryx\x03""ryp\x04""ryrx\x03""ryr\x03""jit\x03""jix\x03""jip\x04""jiet\x04""jiex\x03""jie\x04""jiep\x04""juot\x04""juox\x03""juo\x04""juop\x03""jot\x03""jox\x03""jop\x03""jut\x03""jux\x03""jup\x04""jurx\x03""jur\x03""jyx\x03""jyp\x04""jyrx\x03""jyr\x03""qit\x03""qix\x03""qip\x04""qiet\x04""qiex\x04""qiep\x04""quot\x04""quox\x04""quop\x03""qot\x03""qox\x03""qop\x03""qut\x03""qux\x03""qup\x04""qurx\x03""qur\x03""qyt\x03""qyx\x02""qy\x03""qyp\x04""qyrx\x03""qyr\x04""jjit\x04""jjix\x04""jjip\x05""jjiet\x05""jjiex\x04""jjie\x05""jjiep\x05""jjuox\x04""jjuo\x05""jjuop\x04""jjot\x04""jjox\x04""jjop\x04""jjut\x04""jjux\x04""jjup\x05""jjurx\x04""jjur\x04""jjyt\x04""jjyx\x03""jjy\x04""jjyp\x04""njit\x04""njix\x04""njip\x05""njiet\x05""njiex\x04""njie\x05""njiep\x05""njuox\x04""njuo\x04""njot\x04""njox\x04""njop\x04""njux\x05""njurx\x04""njur\x04""njyt\x04""njyx\x03""njy\x04""njyp\x05""njyrx\x04""njyr\x04""nyit\x
04""nyix\x04""nyip\x05""nyiet\x05""nyiex\x04""nyie\x05""nyiep\x05""nyuox\x05""nyuop\x04""nyot\x04""nyox\x04""nyop\x04""nyut\x04""nyux\x04""nyup\x03""xit\x03""xix\x03""xip\x04""xiet\x04""xiex\x04""xiep\x04""xuox\x03""xuo\x03""xot\x03""xox\x03""xop\x03""xyt\x03""xyx\x02""xy\x03""xyp\x04""xyrx\x03""xyr\x03""yit\x03""yix\x03""yip\x04""yiet\x04""yiex\x03""yie\x04""yiep\x04""yuot\x04""yuox\x03""yuo\x04""yuop\x03""yot\x03""yox\x03""yop\x03""yut\x03""yux\x03""yup\x04""yurx\x03""yur\x03""yyt\x03""yyx\x03""yyp\x04""yyrx\x03""yyr\x03""kug\x03""kun\x03""kul\x03""kum\x03""kub\x03""kus\x04""kung\x04""kweo\x05""kweon\x05""kweol\x06""kweong\x05""kweng\x04""kwig\x04""kwin\x04""kwil\x04""kwim\x04""kwib\x04""kwis\x05""kwing\x04""kyun\x04""kyul\x04""kyum\x03""keu\x04""keug\x04""keun\x04""keul\x04""keum\x04""keub\x05""keung\x03""kig\x03""kin\x03""kil\x03""kim\x03""kib\x03""kis\x04""king\x03""tag\x03""tal\x04""talg\x03""tam\x03""tab\x03""tas\x04""tass\x04""tang\x03""tae\x04""taeg\x04""taen\x04""tael\x04""taem\x04""taeb\x04""taes\x05""taess\x05""taeng\x05""tyang\x03""teo\x04""teog\x04""teon\x04""teol\x05""teolm\x04""teom\x04""teob\x04""teos\x05""teoss\x05""teong\x03""teg\x03""ten\x03""tel\x03""tem\x03""teb\x03""tes\x04""teng\x04""tyeo\x05""tyeon\x06""tyeoss\x04""tyen\x03""tog\x03""ton\x03""tol\x03""tom\x03""tob\x03""tos\x04""tong\x04""twan\x04""twae\x03""toe\x04""toen\x04""toes\x05""toeng\x03""tug\x03""tun\x03""tul\x03""tub\x03""tus\x04""tung\x04""tweo\x06""tweoss\x04""twig\x04""twin\x04""twil\x04""twim\x04""twib\x05""twing\x03""tyu\x04""tyun\x04""tyul\x04""tyum\x05""tyung\x03""teu\x04""teug\x04""teun\x04""teud\x04""teul\x05""teulm\x04""teum\x04""teub\x04""teus\x04""tyin\x04""tyil\x04""tyim\x04""tyib\x03""tig\x03""tin\x03""til\x03""tim\x03""tib\x03""tis\x04""ting\x03""pag\x04""pagg\x03""pan\x03""pal\x04""palm\x03""pam\x03""pab\x03""pas\x04""pass\x04""pang\x03""pae\x04""paeg\x04""paen\x04""pael\x04""paem\x04""paeb\x04""paes\x05""paess\x05""paeng\x03""pya\x04""pyag\x03""peo\x04""peog\x04""p
eon\x04""peol\x04""peom\x04""peob\x04""peos\x05""peoss\x05""peong\x03""peg\x03""pen\x03""pel\x03""pem\x03""peb\x03""pes\x04""peng\x04""pyeo\x05""pyeon\x05""pyeol\x05""pyeom\x05""pyeob\x06""pyeoss\x06""pyeong\x03""pye\x04""pyel\x04""pyeb\x04""pyes\x03""pog\x03""pon\x03""pol\x03""pom\x03""pob\x03""pos\x04""pong\x05""pwang\x03""poe\x04""poen\x03""pyo\x04""pyon\x04""pyol\x04""pyob\x04""pyos\x03""pug\x03""pun\x03""pud\x03""pul\x04""pulm\x03""pum\x03""pub\x03""pus\x04""pung\x04""pweo\x06""pweong\x04""pwin\x04""pwil\x04""pwim\x04""pwis\x03""pyu\x04""pyun\x04""pyul\x04""pyum\x04""pyus\x05""pyung\x03""peu\x04""peun\x04""peul\x04""peum\x04""peub\x04""peus\x03""pig\x03""pin\x03""pil\x03""pim\x03""pib\x03""pis\x04""ping\x03""hag\x03""hal\x04""halt\x03""ham\x03""hab\x03""has\x04""hang\x04""haeg\x04""haen\x04""hael\x04""haem\x04""haeb\x04""haes\x05""haess\x05""haeng\x03""hya\x05""hyang\x03""heo\x04""heog\x04""heon\x04""heol\x05""heolm\x04""heom\x04""heob\x04""heos\x05""heong\x03""heg\x03""hel\x03""hem\x03""heb\x03""hes\x04""hyeo\x05""hyeog\x05""hyeon\x05""hyeol\x05""hyeom\x05""hyeob\x05""hyeos\x06""hyeoss\x06""hyeong\x03""hye\x04""hyen\x04""hyel\x04""hyeb\x03""hog\x03""hol\x04""holt\x03""hom\x03""hob\x03""hos\x04""hong\x04""hwag\x04""hwan\x04""hwal\x04""hwas\x05""hwang\x04""hwae\x05""hwaeg\x05""hwaen\x05""hwaes\x06""hwaeng\x04""hoeg\x04""hoen\x04""hoel\x04""hoeb\x04""hoes\x05""hoeng\x03""hyo\x04""hyon\x04""hyol\x04""hyob\x04""hyos\x03""hug\x03""hul\x04""hult\x03""hum\x03""hus\x04""hung\x04""hweo\x05""hweon\x05""hweol\x05""hweom\x06""hweong\x04""hweg\x03""gag\x04""gagg\x04""gags\x04""ganj\x04""ganh\x03""gad\x03""gal\x04""galg\x04""galm\x04""galb\x04""gals\x04""galt\x04""galp\x04""galh\x03""gam\x03""gab\x04""gabs\x03""gas\x04""gass\x04""gang\x03""gaj\x03""gac\x03""gak\x03""gah\x03""gae\x04""gaeg\x05""gaegg\x05""gaegs\x04""gaen\x05""gaenj\x05""gaenh\x04""gaed\x04""gael\x05""gaelg\x05""gaelm\x05""gaelb\x05""gaels\x05""gaelt\x05""gaelp\x05""gaelh\x04""gaem\x04""gaeb\x05""gaebs\x04""ga
es\x05""gaess\x05""gaeng\x04""gaej\x04""gaec\x04""gaek\x04""gaet\x04""gaep\x04""gaeh\x04""gyag\x05""gyagg\x05""gyags\x04""gyan\x05""gyanj\x05""gyanh\x04""gyad\x04""gyal\x05""gyalg\x05""gyalm\x05""gyalb\x05""gyals\x05""gyalt\x05""gyalp\x05""gyalh\x04""gyam\x04""gyab\x05""gyabs\x04""gyas\x05""gyass\x05""gyang\x04""gyaj\x04""gyac\x04""gyak\x04""gyat\x04""gyap\x04""gyah\x04""gyae\x05""gyaeg\x06""gyaegg\x06""gyaegs\x05""gyaen\x06""gyaenj\x06""gyaenh\x05""gyaed\x05""gyael\x06""gyaelg\x06""gyaelm\x06""gyaelb\x06""gyaels\x06""gyaelt\x06""gyaelp\x06""gyaelh\x05""gyaem\x05""gyaeb\x06""gyaebs\x05""gyaes\x06""gyaess\x06""gyaeng\x05""gyaej\x05""gyaec\x05""gyaek\x05""gyaet\x05""gyaep\x05""gyaeh\x04""geog\x05""geogg\x05""geogs\x04""geon\x05""geonj\x05""geonh\x04""geod\x04""geol\x05""geolg\x05""geolm\x05""geolb\x05""geols\x05""geolt\x05""geolp\x05""geolh\x04""geom\x04""geob\x05""geobs\x04""geos\x05""geoss\x05""geong\x04""geoj\x04""geoc\x04""geok\x04""geot\x04""geop\x04""geoh\x03""geg\x04""gegg\x04""gegs\x04""genj\x04""genh\x03""ged\x03""gel\x04""gelg\x04""gelm\x04""gelb\x04""gels\x04""gelt\x04""gelp\x04""gelh\x03""gem\x03""geb\x04""gebs\x03""ges\x04""gess\x04""geng\x03""gej\x03""gec\x03""gek\x03""geh\x04""gyeo\x05""gyeog\x06""gyeogg\x06""gyeogs\x05""gyeon\x06""gyeonj\x06""gyeonh\x05""gyeod\x05""gyeol\x06""gyeolg\x06""gyeolm\x06""gyeolb\x06""gyeols\x06""gyeolt\x06""gyeolp\x06""gyeolh\x05""gyeom\x05""gyeob\x06""gyeobs\x05""gyeos\x06""gyeoss\x06""gyeong\x05""gyeoj\x05""gyeoc\x05""gyeok\x05""gyeot\x05""gyeop\x05""gyeoh\x04""gyeg\x05""gyegg\x05""gyegs\x04""gyen\x05""gyenj\x05""gyenh\x04""gyed\x04""gyel\x05""gyelg\x05""gyelm\x05""gyelb\x05""gyels\x05""gyelt\x05""gyelp\x05""gyelh\x04""gyem\x04""gyeb\x05""gyebs\x04""gyes\x05""gyess\x05""gyeng\x04""gyej\x04""gyec\x04""gyek\x04""gyet\x04""gyep\x04""gyeh\x03""gog\x04""gogg\x04""gogs\x03""gon\x04""gonj\x04""gonh\x03""god\x03""gol\x04""golg\x04""golm\x04""golb\x04""gols\x04""golt\x04""golp\x04""golh\x03""gom\x03""gob\x04""gobs\x03""gos\x04""gos
s\x04""gong\x03""goj\x03""goc\x03""gok\x03""goh\x04""gwag\x05""gwagg\x05""gwags\x04""gwan\x05""gwanj\x05""gwanh\x04""gwad\x04""gwal\x05""gwalg\x05""gwalm\x05""gwalb\x05""gwals\x05""gwalt\x05""gwalp\x05""gwalh\x04""gwam\x04""gwab\x05""gwabs\x04""gwas\x05""gwass\x05""gwang\x04""gwaj\x04""gwac\x04""gwak\x04""gwat\x04""gwap\x04""gwah\x04""gwae\x05""gwaeg\x06""gwaegg\x06""gwaegs\x05""gwaen\x06""gwaenj\x06""gwaenh\x05""gwaed\x05""gwael\x06""gwaelg\x06""gwaelm\x06""gwaelb\x06""gwaels\x06""gwaelt\x06""gwaelp\x06""gwaelh\x05""gwaem\x05""gwaeb\x06""gwaebs\x05""gwaes\x06""gwaess\x06""gwaeng\x05""gwaej\x05""gwaec\x05""gwaek\x05""gwaet\x05""gwaep\x05""gwaeh\x03""goe\x04""goeg\x05""goegg\x05""goegs\x04""goen\x05""goenj\x05""goenh\x04""goed\x04""goel\x05""goelg\x05""goelm\x05""goelb\x05""goels\x05""goelt\x05""goelp\x05""goelh\x04""goem\x04""goeb\x05""goebs\x04""goes\x05""goess\x05""goeng\x04""goej\x04""goec\x04""goek\x04""goet\x04""goep\x04""goeh\x04""gyog\x05""gyogg\x05""gyogs\x05""gyonj\x05""gyonh\x04""gyod\x04""gyol\x05""gyolg\x05""gyolm\x05""gyolb\x05""gyols\x05""gyolt\x05""gyolp\x05""gyolh\x04""gyom\x04""gyob\x05""gyobs\x04""gyos\x05""gyoss\x05""gyong\x04""gyoj\x04""gyoc\x04""gyok\x04""gyot\x04""gyop\x04""gyoh\x03""gug\x04""gugg\x04""gugs\x03""gun\x04""gunj\x04""gunh\x03""gud\x03""gul\x04""gulg\x04""gulm\x04""gulb\x04""guls\x04""gult\x04""gulp\x04""gulh\x03""gum\x03""gub\x04""gubs\x03""gus\x04""guss\x04""gung\x03""guj\x03""guc\x03""guk\x03""guh\x04""gweo\x05""gweog\x06""gweogg\x06""gweogs\x05""gweon\x06""gweonj\x06""gweonh\x05""gweod\x05""gweol\x06""gweolg\x06""gweolm\x06""gweolb\x06""gweols\x06""gweolt\x06""gweolp\x06""gweolh\x05""gweom\x05""gweob\x06""gweobs\x05""gweos\x06""gweoss\x06""gweong\x05""gweoj\x05""gweoc\x05""gweok\x05""gweot\x05""gweop\x05""gweoh\x04""gweg\x05""gwegg\x05""gwegs\x04""gwen\x05""gwenj\x05""gwenh\x04""gwed\x04""gwel\x05""gwelg\x05""gwelm\x05""gwelb\x05""gwels\x05""gwelt\x05""gwelp\x05""gwelh\x04""gwem\x04""gweb\x05""gwebs\x04""gwes\x05""gwess\x05""gw
eng\x04""gwej\x04""gwec\x04""gwek\x04""gwet\x04""gwep\x04""gweh\x04""gwig\x05""gwigg\x05""gwigs\x04""gwin\x05""gwinj\x05""gwinh\x04""gwid\x04""gwil\x05""gwilg\x05""gwilm\x05""gwilb\x05""gwils\x05""gwilt\x05""gwilp\x05""gwilh\x04""gwim\x04""gwib\x05""gwibs\x04""gwis\x05""gwiss\x05""gwing\x04""gwij\x04""gwic\x04""gwik\x04""gwit\x04""gwip\x04""gwih\x04""gyug\x05""gyugg\x05""gyugs\x04""gyun\x05""gyunj\x05""gyunh\x04""gyud\x04""gyul\x05""gyulg\x05""gyulm\x05""gyulb\x05""gyuls\x05""gyult\x05""gyulp\x05""gyulh\x04""gyum\x04""gyub\x05""gyubs\x04""gyus\x05""gyuss\x05""gyung\x04""gyuj\x04""gyuc\x04""gyuk\x04""gyut\x04""gyup\x04""gyuh\x03""geu\x04""geug\x05""geugg\x05""geugs\x04""geun\x05""geunj\x05""geunh\x04""geud\x04""geul\x05""geulg\x05""geulm\x05""geulb\x05""geuls\x05""geult\x05""geulp\x05""geulh\x04""geum\x04""geub\x05""geubs\x04""geus\x04""geuj\x04""geuc\x04""geuk\x04""geut\x04""geup\x04""geuh\x04""gyig\x05""gyigg\x05""gyigs\x04""gyin\x05""gyinj\x05""gyinh\x04""gyid\x04""gyil\x05""gyilg\x05""gyilm\x05""gyilb\x05""gyils\x05""gyilt\x05""gyilp\x05""gyilh\x04""gyim\x04""gyib\x05""gyibs\x04""gyis\x05""gyiss\x05""gying\x04""gyij\x04""gyic\x04""gyik\x04""gyit\x04""gyip\x04""gyih\x03""gig\x04""gigg\x04""gigs\x03""gin\x04""ginj\x04""ginh\x03""gid\x03""gil\x04""gilg\x04""gilm\x04""gilb\x04""gils\x04""gilt\x04""gilp\x04""gilh\x03""gim\x03""gib\x04""gibs\x03""gis\x04""giss\x04""ging\x03""gij\x03""gic\x03""gik\x03""gih\x04""ggag\x05""ggagg\x05""ggags\x04""ggan\x05""gganj\x05""gganh\x04""ggad\x04""ggal\x05""ggalg\x05""ggalm\x05""ggalb\x05""ggals\x05""ggalt\x05""ggalp\x05""ggalh\x04""ggam\x04""ggab\x05""ggabs\x04""ggas\x05""ggass\x05""ggang\x04""ggaj\x04""ggac\x04""ggak\x04""ggah\x04""ggae\x05""ggaeg\x06""ggaegg\x06""ggaegs\x05""ggaen\x06""ggaenj\x06""ggaenh\x05""ggaed\x05""ggael\x06""ggaelg\x06""ggaelm\x06""ggaelb\x06""ggaels\x06""ggaelt\x06""ggaelp\x06""ggaelh\x05""ggaem\x05""ggaeb\x06""ggaebs\x05""ggaes\x06""ggaess\x06""ggaeng\x05""ggaej\x05""ggaec\x05""ggaek\x05""ggaet\x05""ggaep\
x05""ggaeh\x04""ggya\x05""ggyag\x06""ggyagg\x06""ggyags\x05""ggyan\x06""ggyanj\x06""ggyanh\x05""ggyad\x05""ggyal\x06""ggyalg\x06""ggyalm\x06""ggyalb\x06""ggyals\x06""ggyalt\x06""ggyalp\x06""ggyalh\x05""ggyam\x05""ggyab\x06""ggyabs\x05""ggyas\x06""ggyass\x06""ggyang\x05""ggyaj\x05""ggyac\x05""ggyak\x05""ggyat\x05""ggyap\x05""ggyah\x05""ggyae\x06""ggyaeg\x07""ggyaegg\x07""ggyaegs\x06""ggyaen\x07""ggyaenj\x07""ggyaenh\x06""ggyaed\x06""ggyael\x07""ggyaelg\x07""ggyaelm\x07""ggyaelb\x07""ggyaels\x07""ggyaelt\x07""ggyaelp\x07""ggyaelh\x06""ggyaem\x06""ggyaeb\x07""ggyaebs\x06""ggyaes\x07""ggyaess\x07""ggyaeng\x06""ggyaej\x06""ggyaec\x06""ggyaek\x06""ggyaet\x06""ggyaep\x06""ggyaeh\x04""ggeo\x05""ggeog\x06""ggeogg\x06""ggeogs\x05""ggeon\x06""ggeonj\x06""ggeonh\x05""ggeod\x05""ggeol\x06""ggeolg\x06""ggeolm\x06""ggeolb\x06""ggeols\x06""ggeolt\x06""ggeolp\x06""ggeolh\x05""ggeom\x05""ggeob\x06""ggeobs\x05""ggeos\x06""ggeoss\x06""ggeong\x05""ggeoj\x05""ggeoc\x05""ggeok\x05""ggeot\x05""ggeop\x05""ggeoh\x04""ggeg\x05""ggegg\x05""ggegs\x04""ggen\x05""ggenj\x05""ggenh\x04""gged\x04""ggel\x05""ggelg\x05""ggelm\x05""ggelb\x05""ggels\x05""ggelt\x05""ggelp\x05""ggelh\x04""ggem\x04""ggeb\x05""ggebs\x04""gges\x05""ggess\x05""ggeng\x04""ggej\x04""ggec\x04""ggek\x04""ggeh\x05""ggyeo\x06""ggyeog\x07""ggyeogg\x07""ggyeogs\x06""ggyeon\x07""ggyeonj\x07""ggyeonh\x06""ggyeod\x06""ggyeol\x07""ggyeolg\x07""ggyeolm\x07""ggyeolb\x07""ggyeols\x07""ggyeolt\x07""ggyeolp\x07""ggyeolh\x06""ggyeom\x06""ggyeob\x07""ggyeobs\x06""ggyeos\x07""ggyeoss\x07""ggyeong\x06""ggyeoj\x06""ggyeoc\x06""ggyeok\x06""ggyeot\x06""ggyeop\x06""ggyeoh\x04""ggye\x05""ggyeg\x06""ggyegg\x06""ggyegs\x05""ggyen\x06""ggyenj\x06""ggyenh\x05""ggyed\x05""ggyel\x06""ggyelg\x06""ggyelm\x06""ggyelb\x06""ggyels\x06""ggyelt\x06""ggyelp\x06""ggyelh\x05""ggyem\x05""ggyeb\x06""ggyebs\x05""ggyes\x06""ggyess\x06""ggyeng\x05""ggyej\x05""ggyec\x05""ggyek\x05""ggyet\x05""ggyep\x05""ggyeh\x04""ggog\x05""ggogg\x05""ggogs\x04""ggon\x05""ggonj\x05""ggonh\
x04""ggod\x04""ggol\x05""ggolg\x05""ggolm\x05""ggolb\x05""ggols\x05""ggolt\x05""ggolp\x05""ggolh\x04""ggom\x04""ggob\x05""ggobs\x04""ggos\x05""ggoss\x05""ggong\x04""ggoj\x04""ggoc\x04""ggok\x04""ggoh\x05""ggwag\x06""ggwagg\x06""ggwags\x05""ggwan\x06""ggwanj\x06""ggwanh\x05""ggwad\x05""ggwal\x06""ggwalg\x06""ggwalm\x06""ggwalb\x06""ggwals\x06""ggwalt\x06""ggwalp\x06""ggwalh\x05""ggwam\x05""ggwab\x06""ggwabs\x05""ggwas\x06""ggwass\x06""ggwang\x05""ggwaj\x05""ggwac\x05""ggwak\x05""ggwat\x05""ggwap\x05""ggwah\x05""ggwae\x06""ggwaeg\x07""ggwaegg\x07""ggwaegs\x06""ggwaen\x07""ggwaenj\x07""ggwaenh\x06""ggwaed\x06""ggwael\x07""ggwaelg\x07""ggwaelm\x07""ggwaelb\x07""ggwaels\x07""ggwaelt\x07""ggwaelp\x07""ggwaelh\x06""ggwaem\x06""ggwaeb\x07""ggwaebs\x06""ggwaes\x07""ggwaess\x07""ggwaeng\x06""ggwaej\x06""ggwaec\x06""ggwaek\x06""ggwaet\x06""ggwaep\x06""ggwaeh\x04""ggoe\x05""ggoeg\x06""ggoegg\x06""ggoegs\x05""ggoen\x06""ggoenj\x06""ggoenh\x05""ggoed\x05""ggoel\x06""ggoelg\x06""ggoelm\x06""ggoelb\x06""ggoels\x06""ggoelt\x06""ggoelp\x06""ggoelh\x05""ggoem\x05""ggoeb\x06""ggoebs\x05""ggoes\x06""ggoess\x06""ggoeng\x05""ggoej\x05""ggoec\x05""ggoek\x05""ggoet\x05""ggoep\x05""ggoeh\x04""ggyo\x05""ggyog\x06""ggyogg\x06""ggyogs\x05""ggyon\x06""ggyonj\x06""ggyonh\x05""ggyod\x05""ggyol\x06""ggyolg\x06""ggyolm\x06""ggyolb\x06""ggyols\x06""ggyolt\x06""ggyolp\x06""ggyolh\x05""ggyom\x05""ggyob\x06""ggyobs\x05""ggyos\x06""ggyoss\x06""ggyong\x05""ggyoj\x05""ggyoc\x05""ggyok\x05""ggyot\x05""ggyop\x05""ggyoh\x04""ggug\x05""ggugg\x05""ggugs\x04""ggun\x05""ggunj\x05""ggunh\x04""ggud\x04""ggul\x05""ggulg\x05""ggulm\x05""ggulb\x05""gguls\x05""ggult\x05""ggulp\x05""ggulh\x04""ggum\x04""ggub\x05""ggubs\x04""ggus\x05""gguss\x05""ggung\x04""gguj\x04""gguc\x04""gguk\x04""gguh\x05""ggweo\x06""ggweog\x07""ggweogg\x07""ggweogs\x06""ggweon\x07""ggweonj\x07""ggweonh\x06""ggweod\x06""ggweol\x07""ggweolg\x07""ggweolm\x07""ggweolb\x07""ggweols\x07""ggweolt\x07""ggweolp\x07""ggweolh\x06""ggweom\x06""ggweob\x07""ggw
eobs\x06""ggweos\x07""ggweoss\x07""ggweong\x06""ggweoj\x06""ggweoc\x06""ggweok\x06""ggweot\x06""ggweop\x06""ggweoh\x05""ggweg\x06""ggwegg\x06""ggwegs\x05""ggwen\x06""ggwenj\x06""ggwenh\x05""ggwed\x05""ggwel\x06""ggwelg\x06""ggwelm\x06""ggwelb\x06""ggwels\x06""ggwelt\x06""ggwelp\x06""ggwelh\x05""ggwem\x05""ggweb\x06""ggwebs\x05""ggwes\x06""ggwess\x06""ggweng\x05""ggwej\x05""ggwec\x05""ggwek\x05""ggwet\x05""ggwep\x05""ggweh\x05""ggwig\x06""ggwigg\x06""ggwigs\x05""ggwin\x06""ggwinj\x06""ggwinh\x05""ggwid\x05""ggwil\x06""ggwilg\x06""ggwilm\x06""ggwilb\x06""ggwils\x06""ggwilt\x06""ggwilp\x06""ggwilh\x05""ggwim\x05""ggwib\x06""ggwibs\x05""ggwis\x06""ggwiss\x06""ggwing\x05""ggwij\x05""ggwic\x05""ggwik\x05""ggwit\x05""ggwip\x05""ggwih\x04""ggyu\x05""ggyug\x06""ggyugg\x06""ggyugs\x05""ggyun\x06""ggyunj\x06""ggyunh\x05""ggyud\x05""ggyul\x06""ggyulg\x06""ggyulm\x06""ggyulb\x06""ggyuls\x06""ggyult\x06""ggyulp\x06""ggyulh\x05""ggyum\x05""ggyub\x06""ggyubs\x05""ggyus\x06""ggyuss\x06""ggyung\x05""ggyuj\x05""ggyuc\x05""ggyuk\x05""ggyut\x05""ggyup\x05""ggyuh\x04""ggeu\x05""ggeug\x06""ggeugg\x06""ggeugs\x05""ggeun\x06""ggeunj\x06""ggeunh\x05""ggeud\x05""ggeul\x06""ggeulg\x06""ggeulm\x06""ggeulb\x06""ggeuls\x06""ggeult\x06""ggeulp\x06""ggeulh\x05""ggeum\x05""ggeub\x06""ggeubs\x05""ggeus\x06""ggeuss\x06""ggeung\x05""ggeuj\x05""ggeuc\x05""ggeuk\x05""ggeut\x05""ggeup\x05""ggeuh\x04""ggyi\x05""ggyig\x06""ggyigg\x06""ggyigs\x05""ggyin\x06""ggyinj\x06""ggyinh\x05""ggyid\x05""ggyil\x06""ggyilg\x06""ggyilm\x06""ggyilb\x06""ggyils\x06""ggyilt\x06""ggyilp\x06""ggyilh\x05""ggyim\x05""ggyib\x06""ggyibs\x05""ggyis\x06""ggyiss\x06""ggying\x05""ggyij\x05""ggyic\x05""ggyik\x05""ggyit\x05""ggyip\x05""ggyih\x04""ggig\x05""ggigg\x05""ggigs\x04""ggin\x05""gginj\x05""gginh\x04""ggid\x04""ggil\x05""ggilg\x05""ggilm\x05""ggilb\x05""ggils\x05""ggilt\x05""ggilp\x05""ggilh\x04""ggim\x04""ggib\x05""ggibs\x04""ggis\x05""ggiss\x05""gging\x04""ggij\x04""ggic\x04""ggik\x04""ggip\x04""ggih\x03""nag\x04""nagg\x04""na
gs\x03""nan\x04""nanj\x04""nanh\x03""nad\x03""nal\x04""nalg\x04""nalm\x04""nalb\x04""nals\x04""nalt\x04""nalp\x04""nalh\x03""nam\x03""nab\x04""nabs\x03""nas\x04""nass\x04""nang\x03""naj\x03""nac\x03""nak\x03""nat\x03""nae\x04""naeg\x05""naegg\x05""naegs\x04""naen\x05""naenj\x05""naenh\x04""naed\x04""nael\x05""naelg\x05""naelm\x05""naelb\x05""naels\x05""naelt\x05""naelp\x05""naelh\x04""naem\x04""naeb\x05""naebs\x04""naes\x05""naess\x05""naeng\x04""naej\x04""naec\x04""naek\x04""naet\x04""naep\x04""naeh\x04""nyag\x05""nyagg\x05""nyags\x05""nyanj\x05""nyanh\x04""nyad\x04""nyal\x05""nyalg\x05""nyalm\x05""nyalb\x05""nyals\x05""nyalt\x05""nyalp\x05""nyalh\x04""nyam\x04""nyab\x05""nyabs\x04""nyas\x05""nyass\x05""nyang\x04""nyaj\x04""nyac\x04""nyak\x04""nyat\x04""nyap\x04""nyah\x04""nyae\x05""nyaeg\x06""nyaegg\x06""nyaegs\x05""nyaen\x06""nyaenj\x06""nyaenh\x05""nyaed\x05""nyael\x06""nyaelg\x06""nyaelm\x06""nyaelb\x06""nyaels\x06""nyaelt\x06""nyaelp\x06""nyaelh\x05""nyaem\x05""nyaeb\x06""nyaebs\x05""nyaes\x06""nyaess\x06""nyaeng\x05""nyaej\x05""nyaec\x05""nyaek\x05""nyaet\x05""nyaep\x05""nyaeh\x03""neo\x04""neog\x05""neogg\x05""neogs\x04""neon\x05""neonj\x05""neonh\x04""neod\x04""neol\x05""neolg\x05""neolm\x05""neolb\x05""neols\x05""neolt\x05""neolp\x05""neolh\x04""neom\x04""neob\x05""neobs\x04""neos\x05""neoss\x05""neong\x04""neoj\x04""neoc\x04""neok\x04""neot\x04""neop\x04""neoh\x03""neg\x04""negg\x04""negs\x03""nen\x04""nenj\x04""nenh\x03""ned\x03""nel\x04""nelg\x04""nelm\x04""nelb\x04""nels\x04""nelt\x04""nelp\x04""nelh\x03""nem\x03""neb\x04""nebs\x03""nes\x04""ness\x04""neng\x03""nej\x03""nec\x03""nek\x03""net\x03""neh\x04""nyeo\x05""nyeog\x06""nyeogg\x06""nyeogs\x05""nyeon\x06""nyeonj\x06""nyeonh\x05""nyeod\x05""nyeol\x06""nyeolg\x06""nyeolm\x06""nyeolb\x06""nyeols\x06""nyeolt\x06""nyeolp\x06""nyeolh\x05""nyeom\x05""nyeob\x06""nyeobs\x05""nyeos\x06""nyeoss\x06""nyeong\x05""nyeoj\x05""nyeoc\x05""nyeok\x05""nyeot\x05""nyeop\x05""nyeoh\x04""nyeg\x05""nyegg\x05""nyegs\x04""
nyen\x05""nyenj\x05""nyenh\x04""nyed\x04""nyel\x05""nyelg\x05""nyelm\x05""nyelb\x05""nyels\x05""nyelt\x05""nyelp\x05""nyelh\x04""nyem\x04""nyeb\x05""nyebs\x04""nyes\x05""nyess\x05""nyeng\x04""nyej\x04""nyec\x04""nyek\x04""nyet\x04""nyep\x04""nyeh\x03""nog\x04""nogg\x04""nogs\x03""non\x04""nonj\x04""nonh\x03""nod\x03""nol\x04""nolg\x04""nolm\x04""nolb\x04""nols\x04""nolt\x04""nolp\x04""nolh\x03""nom\x03""nob\x04""nobs\x03""nos\x04""noss\x04""nong\x03""noj\x03""noc\x03""nok\x03""noh\x04""nwag\x05""nwagg\x05""nwags\x04""nwan\x05""nwanj\x05""nwanh\x04""nwad\x04""nwal\x05""nwalg\x05""nwalm\x05""nwalb\x05""nwals\x05""nwalt\x05""nwalp\x05""nwalh\x04""nwam\x04""nwab\x05""nwabs\x04""nwas\x05""nwass\x05""nwang\x04""nwaj\x04""nwac\x04""nwak\x04""nwat\x04""nwap\x04""nwah\x04""nwae\x05""nwaeg\x06""nwaegg\x06""nwaegs\x05""nwaen\x06""nwaenj\x06""nwaenh\x05""nwaed\x05""nwael\x06""nwaelg\x06""nwaelm\x06""nwaelb\x06""nwaels\x06""nwaelt\x06""nwaelp\x06""nwaelh\x05""nwaem\x05""nwaeb\x06""nwaebs\x05""nwaes\x06""nwaess\x06""nwaeng\x05""nwaej\x05""nwaec\x05""nwaek\x05""nwaet\x05""nwaep\x05""nwaeh\x03""noe\x04""noeg\x05""noegg\x05""noegs\x04""noen\x05""noenj\x05""noenh\x04""noed\x04""noel\x05""noelg\x05""noelm\x05""noelb\x05""noels\x05""noelt\x05""noelp\x05""noelh\x04""noem\x04""noeb\x05""noebs\x04""noes\x05""noess\x05""noeng\x04""noej\x04""noec\x04""noek\x04""noet\x04""noep\x04""noeh\x04""nyog\x05""nyogg\x05""nyogs\x04""nyon\x05""nyonj\x05""nyonh\x04""nyod\x04""nyol\x05""nyolg\x05""nyolm\x05""nyolb\x05""nyols\x05""nyolt\x05""nyolp\x05""nyolh\x04""nyom\x04""nyob\x05""nyobs\x04""nyos\x05""nyoss\x05""nyong\x04""nyoj\x04""nyoc\x04""nyok\x04""nyoh\x03""nug\x04""nugg\x04""nugs\x03""nun\x04""nunj\x04""nunh\x03""nud\x03""nul\x04""nulg\x04""nulm\x04""nulb\x04""nuls\x04""nult\x04""nulp\x04""nulh\x03""nub\x04""nubs\x03""nus\x04""nuss\x04""nung\x03""nuj\x03""nuc\x03""nuk\x03""nuh\x04""nweo\x05""nweog\x06""nweogg\x06""nweogs\x05""nweon\x06""nweonj\x06""nweonh\x05""nweod\x05""nweol\x06""nweolg\x06""nwe
olm\x06""nweolb\x06""nweols\x06""nweolt\x06""nweolp\x06""nweolh\x05""nweom\x05""nweob\x06""nweobs\x05""nweos\x06""nweoss\x06""nweong\x05""nweoj\x05""nweoc\x05""nweok\x05""nweot\x05""nweop\x05""nweoh\x04""nweg\x05""nwegg\x05""nwegs\x04""nwen\x05""nwenj\x05""nwenh\x04""nwed\x04""nwel\x05""nwelg\x05""nwelm\x05""nwelb\x05""nwels\x05""nwelt\x05""nwelp\x05""nwelh\x04""nwem\x04""nweb\x05""nwebs\x04""nwes\x05""nwess\x05""nweng\x04""nwej\x04""nwec\x04""nwek\x04""nwet\x04""nwep\x04""nweh\x03""nwi\x04""nwig\x05""nwigg\x05""nwigs\x04""nwin\x05""nwinj\x05""nwinh\x04""nwid\x04""nwil\x05""nwilg\x05""nwilm\x05""nwilb\x05""nwils\x05""nwilt\x05""nwilp\x05""nwilh\x04""nwim\x04""nwib\x05""nwibs\x04""nwis\x05""nwiss\x05""nwing\x04""nwij\x04""nwic\x04""nwik\x04""nwit\x04""nwip\x04""nwih\x04""nyug\x05""nyugg\x05""nyugs\x04""nyun\x05""nyunj\x05""nyunh\x04""nyud\x04""nyul\x05""nyulg\x05""nyulm\x05""nyulb\x05""nyuls\x05""nyult\x05""nyulp\x05""nyulh\x04""nyum\x04""nyub\x05""nyubs\x04""nyus\x05""nyuss\x05""nyung\x04""nyuj\x04""nyuc\x04""nyuk\x04""nyuh\x03""neu\x04""neug\x05""neugg\x05""neugs\x04""neun\x05""neunj\x05""neunh\x04""neud\x04""neul\x05""neulg\x05""neulm\x05""neulb\x05""neuls\x05""neult\x05""neulp\x05""neulh\x04""neum\x04""neub\x05""neubs\x04""neus\x05""neuss\x05""neung\x04""neuj\x04""neuc\x04""neuk\x04""neut\x04""neup\x04""neuh\x04""nyig\x05""nyigg\x05""nyigs\x04""nyin\x05""nyinj\x05""nyinh\x04""nyid\x04""nyil\x05""nyilg\x05""nyilm\x05""nyilb\x05""nyils\x05""nyilt\x05""nyilp\x05""nyilh\x04""nyim\x04""nyib\x05""nyibs\x04""nyis\x05""nyiss\x05""nying\x04""nyij\x04""nyic\x04""nyik\x04""nyih\x03""nig\x04""nigg\x04""nigs\x03""nin\x04""ninj\x04""ninh\x03""nid\x03""nil\x04""nilg\x04""nilm\x04""nilb\x04""nils\x04""nilt\x04""nilp\x04""nilh\x03""nim\x03""nib\x04""nibs\x03""nis\x04""niss\x04""ning\x03""nij\x03""nic\x03""nik\x03""nih\x03""dag\x04""dagg\x04""dags\x03""dan\x04""danj\x04""danh\x03""dad\x04""dalg\x04""dalm\x04""dalb\x04""dals\x04""dalt\x04""dalp\x04""dalh\x03""dam\x04""dabs\x03""das
\x04""dass\x04""dang\x03""daj\x03""dac\x03""dak\x03""dah\x03""dae\x04""daeg\x05""daegg\x05""daegs\x04""daen\x05""daenj\x05""daenh\x04""daed\x04""dael\x05""daelg\x05""daelm\x05""daelb\x05""daels\x05""daelt\x05""daelp\x05""daelh\x04""daem\x04""daeb\x05""daebs\x04""daes\x05""daess\x05""daeng\x04""daej\x04""daec\x04""daek\x04""daet\x04""daep\x04""daeh\x03""dya\x04""dyag\x05""dyagg\x05""dyags\x04""dyan\x05""dyanj\x05""dyanh\x04""dyad\x04""dyal\x05""dyalg\x05""dyalm\x05""dyalb\x05""dyals\x05""dyalt\x05""dyalp\x05""dyalh\x04""dyam\x04""dyab\x05""dyabs\x04""dyas\x05""dyass\x05""dyang\x04""dyaj\x04""dyac\x04""dyak\x04""dyat\x04""dyap\x04""dyah\x04""dyae\x05""dyaeg\x06""dyaegg\x06""dyaegs\x05""dyaen\x06""dyaenj\x06""dyaenh\x05""dyaed\x05""dyael\x06""dyaelg\x06""dyaelm\x06""dyaelb\x06""dyaels\x06""dyaelt\x06""dyaelp\x06""dyaelh\x05""dyaem\x05""dyaeb\x06""dyaebs\x05""dyaes\x06""dyaess\x06""dyaeng\x05""dyaej\x05""dyaec\x05""dyaek\x05""dyaet\x05""dyaep\x05""dyaeh\x03""deo\x04""deog\x05""deogg\x05""deogs\x04""deon\x05""deonj\x05""deonh\x04""deod\x04""deol\x05""deolg\x05""deolm\x05""deolb\x05""deols\x05""deolt\x05""deolp\x05""deolh\x04""deom\x04""deob\x05""deobs\x04""deos\x05""deoss\x05""deong\x04""deoj\x04""deoc\x04""deok\x04""deot\x04""deop\x04""deoh\x04""degg\x04""degs\x03""den\x04""denj\x04""denh\x03""ded\x03""del\x04""delg\x04""delm\x04""delb\x04""dels\x04""delt\x04""delp\x04""delh\x03""dem\x03""deb\x04""debs\x03""des\x04""dess\x04""deng\x03""dej\x03""dec\x03""dek\x03""det\x03""deh\x04""dyeo\x05""dyeog\x06""dyeogg\x06""dyeogs\x05""dyeon\x06""dyeonj\x06""dyeonh\x05""dyeod\x05""dyeol\x06""dyeolg\x06""dyeolm\x06""dyeolb\x06""dyeols\x06""dyeolt\x06""dyeolp\x06""dyeolh\x05""dyeom\x05""dyeob\x06""dyeobs\x05""dyeos\x06""dyeoss\x06""dyeong\x05""dyeoj\x05""dyeoc\x05""dyeok\x05""dyeot\x05""dyeop\x05""dyeoh\x03""dye\x04""dyeg\x05""dyegg\x05""dyegs\x04""dyen\x05""dyenj\x05""dyenh\x04""dyed\x04""dyel\x05""dyelg\x05""dyelm\x05""dyelb\x05""dyels\x05""dyelt\x05""dyelp\x05""dyelh\x04""dyem\x04
""dyeb\x05""dyebs\x04""dyes\x05""dyess\x05""dyeng\x04""dyej\x04""dyec\x04""dyek\x04""dyet\x04""dyep\x04""dyeh\x03""dog\x04""dogg\x04""dogs\x04""donj\x04""donh\x03""dod\x03""dol\x04""dolg\x04""dolm\x04""dolb\x04""dols\x04""dolt\x04""dolp\x04""dolh\x03""dom\x03""dob\x04""dobs\x03""dos\x04""doss\x04""dong\x03""doj\x03""doc\x03""dok\x03""doh\x04""dwag\x05""dwagg\x05""dwags\x04""dwan\x05""dwanj\x05""dwanh\x04""dwad\x04""dwal\x05""dwalg\x05""dwalm\x05""dwalb\x05""dwals\x05""dwalt\x05""dwalp\x05""dwalh\x04""dwam\x04""dwab\x05""dwabs\x04""dwas\x05""dwass\x05""dwang\x04""dwaj\x04""dwac\x04""dwak\x04""dwat\x04""dwap\x04""dwah\x04""dwae\x05""dwaeg\x06""dwaegg\x06""dwaegs"
-#define UTFASCIILOOKUP {1,1,1,1,1,1,1,1,1,2,2,2,2,2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,1,1,1,1,1,2,1,2,2,1,1,2,2,2,2,4,6,8,10,12,14,16,18,20,22,2,2,1,1,1,1,1,24,26,28,30,32,34,36,38,40,42,44,46,48,50,52,54,56,58,60,62,64,66,68,70,72,74,1,2,1,1,1,1,24,26,28,30,32,34,36,38,40,42,44,46,48,50,52,54,56,58,60,62,64,66,68,70,72,74,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,1,28,76,1,72,1,79,1,28,24,1,1,2,58,2,82,86,8,10,1,64,54,1,2,6,52,1,88,91,94,1,24,24,24,24,24,24,97,28,32,32,32,32,40,40,40,40,30,50,52,52,52,52,52,70,52,64,64,64,64,64,100,79,24,24,24,24,24,24,97,28,32,32,32,32,40,40,40,40,30,50,52,52,52,52,52,2,52,64,64,64,64,72,100,72,24,24,24,24,24,24,28,28,28,28,28,28,28,28,30,30,30,30,32,32,32,32,32,32,32,32,32,32,36,36,36,36,36,36,36,36,38,38,38,38,40,40,40,40,40,40,40,40,40,40,103,103,42,42,44,44,44,46,46,46,46,46,46,46,46,46,46,50,50,50,50,50,50,50,106,106,52,52,52,52,52,52,109,109,58,58,58,58,58,58,60,60,60,60,60,60,60,60,62,62,62,62,62,62,64,64,64,64,64,64,64,64,64,64,64,64,68,68,72,72,72,74,74,74,74,74,74,60,26,26,26,26,16,16,52,28,28,30,30,30,30,30,10,1,32,34,34,36,36,112,40,40,44,44,46,46,68,50,50,52,52,52,115,115,54,54,118,8,8,121,121,62,62,62,62,64,64,72,66,72,72,74,74,124,124,124,124,8,14,14,127,68,1,1,1,1,130,130,130,133,133,133,136,136,136,24,24,40,40,52,52,64,64,64,64,64,64,64,64,64,64,1,24,24,24,24,97,97,36,36,36,36,44,44,52,52,52,52,124,124,42,130,30,130,36,36,112,68,50,50,24,24,97,97,52,52,24,24,24,24,32,32,32,32,40,40,40,40,52,52,52,52,58,58,58,58,64,64,64,64,60,60,62,62,72,72,38,38,50,30,139,139,74,74,24,24,32,32,52,52,52,52,52,52,52,52,72,72,46,50,62,42,142,145,24,28,28,46,62,60,74,148,148,26,64,66,32,32,42,42,56,56,58,58,72,72,24,24,24,26,52,28,30,30,32,1,1,32,32,32,32,42,36,36,36,36,64,72,38,38,40,40,40,46,46,46,153,68,68,48,50,50,50,52,109,52,34,58,58,58,58,58,58,58,58,58,60,60,42,60,60,62,62,64,64,66,1,68,72,72,74,74,74,74,1,1,1,28,1,26,32,36,38,42,44,46,56,1,1,130,130,130,127,127,156,159,162,153,165,1,38,
38,44,38,42,58,58,58,58,68,72,1,1,1,1,1,1,1,1,1,1,1,1,66,1,66,1,2,2,2,2,1,2,2,2,2,1,1,1,66,1,2,66,2,1,2,1,1,58,70,36,46,60,70,1,168,179,184,188,193,203,207,66,1,1,212,217,220,225,231,236,236,236,243,249,256,262,256,262,266,266,220,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,272,272,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,272,1,1,1,1,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,24,32,40,52,64,28,30,38,48,58,62,66,70,278,278,283,283,1,2,289,289,0,0,1,0,0,0,1,0,0,0,0,0,1,1,24,2,32,32,40,0,52,0,64,52,40,24,26,36,30,32,74,32,100,40,44,46,48,50,300,52,54,58,0,60,62,64,303,306,76,52,40,64,24,32,32,40,64,24,26,36,30,32,74,32,100,40,44,46,48,50,70,52,54,58,60,60,62,64,303,306,76,52,40,64,52,64,52,0,26,100,64,64,64,303,54,2,309,309,315,315,68,68,56,56,318,318,121,121,34,34,306,306,38,38,36,36,321,321,324,324,44,58,28,42,0,0,0,327,327,0,331,331,0,0,0,0,335,338,341,344,335,130,40,347,42,133,136,350,354,40,64,357,24,26,66,36,30,335,124,74,40,40,44,46,48,50,52,54,58,60,62,64,34,306,127,321,121,361,366,72,1,32,371,374,24,26,66,36,30,335,124,74,40,40,44,46,48,50,52,54,58,60,62,64,34,306,127,321,121,361,366,72,1,32,371,374,335,338,341,344,335,130,40,347,42,133,136,350,354,40,64,357,52,52,32,32,335,335,32,32,335,335,52,52,338,338,300,300,76,76,34,34,72,72,72,72,64,64,52,52,52,52,380,380,56,56,383,1,1,1,1,0,388,395,40,40,1,1,58,58,36,36,36,36,36,36,124,124,74,74,44,44,44,44,44,44,44,44,50,50,106,106,54,54,306,306,60,60,62,62,64,64,64,64,306,306,403,403,321,321,321,321,38,38,321,321,321,321,1,124,124,44,44,407,407,50,50,410,410,321,321,413,413,416,24,24,24,24,97,97,335,335,1,1,1,1,124,124,74,74,130,130,40,40,40,40,52,52,52,52,52,52,32,32,64,64,64,64,64,64,321,321,425,425,72,72,425,425,429,429,429,429,432,432,435,435,439,439,443,443,448,448,452,452,456,456,460,460,464,464,407,407,467,467,471,471,475,475,479,479,482,482,485,485,407,407,410,410,0,0,0,0,0,0,0,0,0,0,0,0,0,24,26,36,30,32,74,32,32,62,124,40,46,306,127,44,38,130,377,321,48
,72,50,121,52,321,54,42,491,60,66,62,58,127,68,54,44,52,34,0,0,220,0,0,0,0,0,0,0,24,26,36,30,32,74,32,32,62,124,40,46,306,127,44,38,130,377,321,48,72,50,121,52,321,54,42,491,60,66,62,58,127,68,54,44,52,34,494,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,32,24,52,40,32,32,24,24,52,0,64,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,497,26,36,30,38,66,74,306,62,72,44,44,46,48,48,50,50,60,502,54,54,127,127,56,58,121,62,0,0,0,0,0,66,507,40,0,0,0,0,0,0,0,0,0,0,0,0,0,0,510,0,516,0,0,0,0,0,0,0,0,0,0,0,522,528,540,547,559,564,0,497,0,0,0,0,0,0,0,0,0,0,574,24,497,68,497,72,497,26,580,62,100,42,38,306,30,584,58,74,60,121,60,30,62,74,587,36,591,591,597,597,597,0,34,56,44,46,48,50,38,68,497,72,603,606,609,24,64,40,68,0,0,0,0,0,0,0,0,66,612,618,0,0,0,0,6,8,10,12,14,16,18,20,22,0,0,0,0,622,626,497,497,497,497,179,179,179,64,179,630,633,26,62,62,54,100,637,640,38,644,647,38,321,650,654,30,30,657,584,660,30,30,30,664,58,58,58,58,58,58,42,58,60,60,60,60,60,62,377,34,34,34,66,34,303,56,56,306,44,44,44,106,44,36,36,50,36,36,36,46,46,46,46,50,50,50,50,50,38,321,667,38,38,580,68,109,109,64,670,670,68,66,72,72,72,68,32,673,72,72,0,97,677,626,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,681,685,0,6,8,10,12,14,16,18,20,22,121,30,377,689,689,696,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,700,700,26,36,36,30,30,38,68,74,38,62,62,72,706,44,46,48,50,60,60,32,54,54,60,56,58,121,62,709,709,709,24,24,24,24,24,24,32,32,32,32,40,40,64,64,64,52,0,0,0,0,0,70,56,0,0,0,0,0,0,717,717,717,622,622,622,622,622,622,622,640,640,681,681,685,725,587,587,587,730,730,591,591,591,734,734,739,739,739,744,685,685,725,640,640,725,685,640,497,497,597,597,597,748,748,673,673,640,725,725,752,38,121,50,58,26,46,44,756,66,48,34,584,100,46,36,644,60,30,74,62,72,54,42,321,630,762,306,100,74,121,60,30,62,74,765,377,56,68,24,770,40,773,64,776,32,779,52,782,0,785,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,6,8,10,12,14,16,18,20,22,24,773,40,32,64,776,52,789,50,800,803,806,809,812,816,819,822,826,829,833,836,839,842,845,84
8,842,429,852,855,848,858,858,858,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,863,875,884,24,24,770,40,892,64,895,58,46,898,32,32,905,898,52,52,908,836,911,915,918,922,926,812,809,929,848,933,937,942,946,951,806,955,816,959,842,963,803,968,800,972,845,855,819,822,839,976,980,985,988,992,826,429,0,0,996,1002,770,40,892,64,895,58,664,898,32,32,905,898,52,52,908,1011,0,0,0,1018,1025,0,0,0,0,0,479,1034,1039,1044,1047,471,833,1053,664,1057,46,1057,0,0,0,6,8,10,12,14,16,18,20,22,0,179,898,0,0,0,0,0,0,0,0,1060,1064,148,1068,1073,0,863,875,884,0,24,770,40,892,64,895,58,46,0,0,32,905,0,0,52,908,836,911,915,918,922,926,812,809,929,848,933,937,942,946,951,806,955,816,959,842,0,803,968,800,972,845,855,819,0,839,0,0,0,988,992,826,429,0,0,996,1002,770,40,892,64,895,58,664,0,0,32,905,0,0,52,908,1011,1077,0,0,0,0,0,0,0,0,0,0,0,0,0,822,471,0,1053,664,1057,46,1057,0,0,0,6,8,10,12,14,16,18,20,22,819,819,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,50,50,38,0,24,770,40,892,64,895,58,46,410,32,32,905,1084,52,52,908,44,306,36,377,106,28,321,42,1087,644,630,633,654,660,1090,62,100,30,584,50,1093,54,303,26,637,48,72,58,664,46,46,1097,66,121,79,60,38,0,0,996,0,770,40,892,64,895,58,664,410,32,32,905,1084,52,52,908,1011,0,0,1101,1105,0,0,0,0,0,0,56,1111,1115,74,1119,491,34,1124,664,1057,46,1057,0,0,0,6,8,10,12,14,16,18,20,22,0,0,0,0,0,1127,0,0,0,0,0,0,0,0,0,0,0,50,50,38,0,24,770,40,892,64,895,58,664,0,0,32,905,0,0,52,908,44,306,36,377,106,28,321,42,1087,644,630,633,654,660,1090,62,100,30,584,50,0,54,303,2
6,637,48,72,58,0,46,976,0,985,121,79,60,38,0,0,996,1002,770,40,892,64,895,58,664,898,0,32,905,898,0,52,908,1011,0,0,0,0,0,0,0,0,0,0,0,0,0,0,664,491,0,1124,664,1057,46,1057,0,0,0,6,8,10,12,14,16,18,20,22,58,58,1134,1134,0,0,0,0,0,0,0,0,0,0,0,0,0,863,50,884,0,24,770,40,892,64,895,58,46,0,0,773,905,0,0,776,908,44,306,36,377,106,28,321,42,1087,644,630,633,654,660,1090,62,100,30,584,50,0,54,303,26,1137,48,72,58,0,46,1057,0,66,121,992,60,38,0,0,996,1002,770,40,892,64,895,58,664,0,0,773,905,0,0,776,908,1011,0,0,0,0,0,0,0,0,0,0,0,1111,1115,74,664,471,34,1053,664,1057,46,1057,0,0,0,6,8,10,12,14,16,18,20,22,50,38,0,0,1140,0,0,0,0,0,0,0,0,0,0,0,0,50,50,38,0,24,770,40,892,64,895,58,0,410,32,32,905,1084,52,52,908,44,306,36,377,106,28,321,42,1087,644,630,633,654,660,1090,62,100,30,584,50,963,54,303,26,637,48,855,58,822,46,1057,980,66,121,79,60,38,0,0,0,0,770,40,892,64,895,58,664,410,32,32,905,1084,52,52,908,1011,0,0,1101,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,664,0,0,0,0,0,0,6,8,10,12,14,16,18,20,22,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,50,50,38,0,24,770,40,892,64,895,58,46,0,32,32,905,0,52,52,908,44,306,36,377,106,28,321,42,1087,644,630,633,654,660,1090,62,100,30,584,50,0,54,303,26,637,48,72,58,822,46,1057,0,985,121,79,60,38,0,0,0,1002,770,40,892,64,895,58,664,0,32,32,905,0,52,52,908,1011,0,0,0,0,0,0,0,0,0,0,1144,1148,0,0,664,491,0,1124,664,1057,46,1057,0,0,0,6,8,10,12,14,16,18,20,22,0,0,0,0,0,0,0,0,0,6,8,10,6,8,10,1152,0,0,50,38,0,24,770,40,892,64,895,58,46,0,32,773,905,0,52,776,908,44,911,915,918,106,28,812,42,929,644,630,937,942,946,1090,62,955,816,959,50,1093,54,968,800,972,48,72,58,664,46,1057,1097,66,988,79,60,38,0,0,996,1002,770,40,892,64,895,58,664,0,32,773,905,0,52,776,908,1011,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,833,0,664,1057,46,1057,0,0,0,6,8,10,12,14,16,18,20,22,0,1158,1170,0,0,0,0,0,0,0,0,0,0,0,0,0,0,50,50,38,0,24,770,40,892,64,895,58,46,0,32,773,905,0,52,776,908,44,306,36,377,106,28,321,42,1087,644,630,633,654,660,1090,62,100,30,584,50,0,54,303,26,637,48,72,58,664,46,1057,980,66,1
21,79,60,38,0,0,0,1002,770,40,892,64,895,58,664,0,32,773,905,0,52,776,908,1011,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,664,1057,46,1057,0,0,0,6,8,10,12,14,16,18,20,22,0,0,0,0,0,0,0,0,0,0,1182,1182,1182,1182,1182,1182,0,0,50,38,0,24,770,40,892,64,895,58,46,1189,32,773,905,1198,52,776,908,44,306,36,377,106,28,321,42,1087,644,630,633,654,660,1090,62,100,30,584,50,1205,54,303,26,637,48,72,58,664,46,1057,1205,66,121,79,60,38,1216,1224,0,1232,770,40,892,64,895,58,664,1240,32,773,905,0,52,776,908,0,0,1250,1261,1267,1261,1267,1261,0,1267,0,1272,1285,1267,1293,1285,1285,1097,1299,664,1057,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1267,1267,0,0,0,0,0,0,0,0,0,0,0,0,0,1311,50,38,1314,24,770,40,892,64,895,58,46,1318,32,773,905,1321,52,776,908,44,306,36,377,106,28,321,42,1087,644,630,633,654,660,1090,62,100,30,584,50,1325,54,303,26,637,48,72,58,664,46,1057,1097,66,121,79,60,38,1328,0,0,0,770,40,892,64,895,58,905,1336,32,773,905,1348,52,776,908,1352,1361,1370,0,6,8,10,12,14,16,18,20,22,1378,1389,0,0,0,0,664,1057,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1311,50,38,1314,24,770,97,1396,40,892,64,895,58,664,46,1057,32,773,905,52,776,908,1321,0,1400,44,306,36,377,106,1403,28,321,42,1087,644,1407,1411,630,633,654,660,1090,1415,62,100,30,584,50,770,1420,54,303,26,637,48,1423,72,58,1426,46,0,0,66,121,79,60,38,1057,34,0,0,0,0,0,0,0,0,770,97,1396,40,892,64,14,895,18,58,32,773,905,52,776,908,46,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,664,1057,0,0,0,0,0,0,0,0,0,0,0,0,1429,44,306,306,306,306,306,106,650,321,321,321,321,72,30,62,100,100,100,50,30,62,100,100,100,50,26,54,303,34,303,34,303,48,72,58,58,46,46,68,60,60,60,38,46,1432,38,1432,24,24,770,1437,40,892,1440,1443,64,895,0,0,0,0,1447,637,32,97,52,905,905,1451,812,809,0,848,933,937,942,48,951,806,955,816,959,842,803,968,800,972,845,1144,1454,1148,1459,852,1464,1044,1468,855,819,839,988,992,826,429,24,1471,1476,1487,822,0,0,0,0,770,40,892,64,895,58,664,46,1057,32,773,52,776,1491,1496,40,44,306,1501,306,0,1505,106,321,1509,60,1514,0,644,0,0,
836,911,915,918,30,38,100,100,0,50,26,54,303,34,303,34,955,48,72,58,803,46,800,68,845,1144,60,38,1459,852,1464,1044,24,855,770,1437,40,892,72,1124,64,895,1476,52,46,644,0,0,32,1518,52,1521,905,0,0,0,0,0,0,0,0,48,1524,1524,0,0,0,0,0,0,0,0,0,0,0,0,1529,1532,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1101,911,915,918,922,926,812,809,929,848,1535,933,937,942,946,951,806,955,816,959,842,803,968,800,972,845,855,819,839,852,826,429,976,24,1540,40,892,64,895,32,1545,52,908,1549,770,40,892,64,895,32,905,1545,1545,32,875,618,884,1011,1554,1559,1559,1559,1559,1566,44,306,36,377,106,28,321,42,20,644,630,633,654,660,1090,62,100,30,584,50,54,303,26,637,48,127,350,130,357,68,124,74,1545,72,58,46,121,1572,60,38,24,1576,58,1580,1580,1580,1588,1588,1588,770,40,892,64,895,58,664,46,1057,32,773,52,776,48,38,40,892,1540,1540,1540,1540,1540,1540,1540,1540,1540,1540,1540,1540,1596,1596,44,306,36,377,106,28,321,42,20,644,630,633,654,660,1090,62,100,30,584,50,54,303,26,637,48,127,350,130,357,68,124,74,1602,72,58,46,121,79,60,38,24,1576,68,72,58,1606,70,1611,1616,1620,1623,482,1627,1631,0,0,0,0,0,0,0,0,0,0,603,1635,1639,1643,410,1647,1651,1655,609,1659,1663,1667,1671,1084,1675,1679,1602,331,1684,606,1688,1693,1698,1703,1707,1712,1717,1721,1725,1606,1729,1611,1616,1620,1623,482,1627,1631,1733,1736,1739,1639,587,0,1745,0,0,0,44,306,36,377,106,28,321,42,1087,644,1754,630,633,654,660,1090,630,100,30,584,50,54,303,26,637,48,72,58,46,68,60,38,1057,24,0,40,892,64,895,32,0,52,908,0,770,40,892,64,895,32,905,0,0,0,50,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,50,58,46,32,121,79,58,664,46,1057,58,664,46,1057,0,0,0,0,0,0,1758,24,97,855,475,1765,32,1768,1772,52,852,1775,109,1318,64,1779,482,1783,670,1786,347,40,1789,1793,1797,1802,1808,1813,1818,1824,1830,1836,1841,1845,1850,1854,1858,1864,1871,1878,1883,1888,1892,1897,1905,1910,1914,1919,1925,1930,1937,1943,1948,1953,1958,1964,1969,1973,1978,1982,1986,1991,1999,2005,24,26,36,30,32,66,74,62,40,44,46,48,50,52,54,124,58,60,62,64,54,
44,36,56,121,321,28,74,28,321,70,42,38,32,72,68,2014,109,2017,2030,2041,2055,2069,2083,2096,2114,2126,2139,24,26,36,30,32,66,74,62,40,44,46,48,50,52,54,124,58,60,62,64,54,44,36,56,121,321,28,74,28,321,70,42,38,32,72,68,2014,109,34,2158,2170,2182,0,0,0,0,0,0,36,2194,50,30,654,58,48,26,1137,60,79,2197,42,2201,28,44,62,54,38,106,1090,1420,2204,2207,2210,664,491,2210,1423,2213,2216,2219,2222,2226,2229,2233,2237,2241,2245,2249,2252,2255,2258,2219,2261,2265,2268,2271,2274,2277,2280,2283,2287,60,2291,2294,2297,315,318,121,2300,2305,327,2309,74,36,30,48,26,60,74,2314,42,28,62,54,50,42,0,0,2318,2322,2326,321,2329,2334,2338,2341,762,56,2344,2349,2355,2361,0,0,800,24,97,855,475,1765,32,1768,1772,52,852,1775,109,1318,64,1779,482,1783,670,1786,347,40,1789,1793,1797,1802,1808,1813,1818,1824,1830,1836,1841,1845,1850,1854,1858,1864,1871,1878,1883,1888,1892,1897,1905,1910,1914,1919,1925,1930,1937,1943,1948,1953,1958,1964,1969,1973,1978,1982,1986,1982,64,2366,1910,2371,895,770,773,32,52,852,36,2194,2375,50,136,2378,30,46,2381,2384,2387,162,2390,2393,2396,48,26,2226,60,79,106,42,28,44,62,54,38,2399,2402,106,1420,2406,2409,2412,2207,2415,2418,2422,2425,2428,1057,2432,2436,2440,2444,2448,2452,153,2456,2459,2462,2465,1423,2468,2471,2475,2478,2481,2213,2484,2258,303,2341,2265,2271,2487,2280,74,36,79,1318,306,50,2406,2409,2338,2341,1529,2490,1532,2493,56,2496,2500,2505,2510,2514,2518,429,2523,2526,2529,2533,1620,2537,2540,839,2544,2547,2197,2550,2554,1426,2557,2561,2565,2569,2573,2578,2583,2587,2591,845,2596,2599,2602,2606,2610,2613,2616,2222,2620,2624,2628,2633,2638,2642,2646,819,2651,2654,2657,2661,2665,2668,2671,826,2675,2678,2681,2685,2689,1325,2692,988,2696,2700,2704,2300,2305,327,2309,479,2709,2712,2715,2719,2723,2726,2729,2734,2738,2741,2745,2750,2755,2759,2762,2318,2322,2766,2770,2329,2334,2775,2779,2783,2788,2344,2349,2355,2361,0,0,800,2792,2795,2798,2802,2806,2809,2812,985,2816,2819,2822,2826,2830,2833,2836,806,2840,324,2843,2847,2851,2854,2857,926,2861,2864,2867,2871,2875,2878,2
881,2885,2888,2891,2894,2898,2902,2905,2908,2912,2916,2920,2924,2929,2934,2938,2943,842,2947,2950,785,2953,2957,1400,2960,848,2964,2968,2972,2977,2982,2986,2990,24,32,40,52,64,66,915,836,836,2995,2998,3001,3005,3009,1311,2526,3012,2523,3016,3020,3025,3030,1426,2544,3034,3038,3042,3046,3051,3056,3060,3064,3068,2957,3073,3078,3084,3090,3095,3099,852,3103,1783,3106,3110,482,3114,2678,1325,2675,3117,816,806,432,2851,3120,1044,3123,3126,3129,3133,464,3137,3140,1464,3144,3148,3152,3157,3162,3166,3170,855,670,347,3175,3179,1772,1318,855,816,3183,3120,3186,3190,432,3194,3197,942,3201,2496,2500,2505,2510,2514,2518,809,3205,3208,3211,3215,3219,3222,2540,915,3225,3228,3231,3235,3239,3242,0,3245,0,3249,3253,3258,3263,0,0,1060,3267,3271,3275,3280,3285,3289,0,955,3293,3297,3301,3306,3311,1321,3315,812,3320,3324,3328,3333,3338,3342,3346,968,3351,3355,3359,3364,3369,3373,3377,1144,3382,3386,3390,3395,3400,3404,3408,3413,3417,3421,3425,3430,3435,3439,0,833,2738,1733,3443,3447,3451,2759,2762,803,3454,3457,3460,3464,3468,3471,2779,3474,2788,3478,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,24,32,40,52,64,66,915,836,3239,3228,3242,3225,3482,429,1620,2526,2537,2523,112,839,2554,2547,1426,2544,3485,845,2610,2599,2613,2596,842,3064,3488,2957,2950,1400,2947,3492,3095,3099,3495,3499,3503,3507,826,60,2689,2678,1325,2675,3117,816,806,432,2851,3120,324,3194,3183,3511,3514,3518,3522,3526,3530,3534,3538,1144,3400,3386,3404,3382,3542,852,482,1783,3114,3103,3546,855,1772,347,1318,670,3549,0,0,0,0,0,0,0,0,0,0,0,0,32,3552,40,892,52,776,776,773,40,24,770,482,482,1783,1783,3556,3556,3114,3114,3560,3560,3560,852,852,3106,3106,3106,905,68,0,62,44,121,60,50,68,50,0,68,28,0,46,410,609,1084,603,3468,3564,3457,3569,3471,3573,3573,2533,2526,803,3460,2943,2943,3577,3577,3581,3581,3586,3586,3590,3590,2779,2779,3595,3595,3595,54,54,38,2851,3600,324,3605,2854,3609,3609,3190,3120,806,2843,3613,3613,3617,3617,3621,3621,362
6,3626,3630,3630,2857,2857,3635,3635,3635,62,3640,3644,3648,933,3009,3652,2998,3657,1311,3661,3661,836,3001,3030,3030,3016,3016,3665,3665,3670,3670,3674,3674,3012,3012,3020,3020,3020,44,3679,3682,3686,3690,3694,2875,3698,2864,3703,2878,3707,3707,926,2867,3711,3711,3715,3715,3719,3719,3724,3724,3728,3728,2881,2881,3733,3733,3733,28,100,2610,3738,2599,3743,2613,3747,3747,845,2602,3751,3751,3755,3755,3759,3759,3764,3764,3768,3768,2616,2616,3773,3773,3773,48,48,2481,48,48,2957,3778,2950,3783,1400,3787,3787,842,785,3791,3791,2960,2960,3795,3795,3795,50,106,2378,2554,3800,2547,3805,1426,3809,3809,839,2197,3813,3813,3817,3817,3821,3821,3826,3826,3830,3830,2557,2557,3835,3835,46,46,46,2689,3840,2678,3845,1325,3849,3849,826,2681,3853,3853,3857,3857,3861,3861,3866,3866,3870,3870,2692,2692,3875,3875,3875,60,60,3880,60,2297,3883,3880,3887,3892,3897,3902,2305,2700,3907,327,3912,988,2704,3917,3917,3922,3922,3927,3927,3933,3933,3938,3938,2309,2309,3944,3944,121,1772,3950,347,3955,1318,3959,3959,855,3175,3963,3963,3967,3967,3971,3971,3976,3976,3980,3980,3985,3985,3989,3989,3989,72,72,72,347,2665,2665,2554,3994,2654,3999,2668,4003,1426,819,2657,839,4007,4007,58,58,58,3451,4012,1733,4017,2759,4021,833,3443,4025,4025,34,3311,3311,3297,3297,4030,4030,1321,4035,955,3301,4040,4040,100,4046,4051,4056,937,633,4061,4065,4069,4073,1620,2526,4077,2537,4081,429,2529,38,38,4085,4088,2712,4093,2726,4097,479,2715,56,4101,4106,4111,4116,2665,2654,2668,819,4121,4127,4131,4136,4140,922,4145,106,1403,2305,2700,327,988,3311,3297,1321,955,100,4150,4154,4159,4163,467,4168,2396,3311,3297,4030,1321,4035,955,3301,100,26,32,40,52,24,482,1783,3114,852,2957,2950,1400,842,3009,2998,1311,836,1620,2526,2537,429,4173,4177,425,4181,4186,918,2651,2668,2665,2661,2654,819,3103,3114,482,3110,1783,852,4190,4194,4198,4202,4207,4211,3293,1321,3311,3306,3297,955,4215,3648,3640,4219,3644,933,3454,3471,3468,3464,3457,803,54,3225,3242,3239,3235,3228,915,4224,1314,4228,4232,4237,911,4241,4245,4249,4253,4258,1487,4262,2947,140
0,2957,2953,2950,842,2596,2613,2610,2606,2599,845,670,1318,1772,3179,347,855,3205,3205,3222,3219,3215,3208,3208,809,4265,4269,4273,4277,4282,1064,2544,1426,2554,2550,2547,839,4286,4290,4294,4298,4303,3514,4307,4159,4311,4315,4150,467,4320,4111,4101,4325,4106,4116,3534,3530,3522,4331,3526,3518,3123,3137,464,3133,3126,1044,74,74,4336,4340,4344,4348,4353,1148,2675,1325,2689,2685,2678,826,2696,327,2305,2300,2700,988,121,3382,3404,3400,3395,3386,1144,3320,3342,3338,3333,3324,812,4357,4362,4367,4372,4378,4383,70,839,4388,4392,4397,4402,4408,4413,4419,4424,0,0,0,0,0,0,0,0,0,836,26,46,34,60,50,38,30,62,28,56,48,36,106,74,58,24,52,64,32,40,321,100,303,54,70,54,1426,2833,988,4430,826,34,66,64,118,72,68,100,100,24,52,4434,97,52,52,52,109,1084,58,44,28,44,36,106,36,36,68,38,38,38,38,50,50,50,40,32,42,36,97,24,1765,54,74,60,60,60,28,74,62,62,30,26,26,54,54,32,48,48,48,46,46,106,106,30,52,4437,4441,2709,2709,2709,60,118,118,118,56,70,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,4445,0,0,0,0,0,0,0,0,0,6,8,10,12,14,16,18,20,22,0,0,0,0,0,0,24,32,40,52,64,109,1440,773,842,4454,800,803,479,915,845,839,826,988,806,816,812,809,855,819,852,833,836,911,1144,1044,2529,4458,467,3148,3324,4462,4462,4462,4462,4462,4462,4462,4462,4462,4462,4462,4462,4462,4462,4462,4462,4462,4462,4462,4462,4462,4462,4462,4462,4462,4462,4467,4467,4467,4467,4467,4467,4467,4467,4467,4467,4467,4467,4467,4467,4467,4467,4467,4467,4467,4467,4467,4467,4472,4472,4472,4472,4472,0,0,0,0,0,0,0,0,44,306,36,377,106,28,321,42,1087,644,62,633,30,660,1090,62,100,30,584,50,54,303,26,637,48,72,58,46,66,121,79,60,38,46,56,24,770,40,892,64,4479,895,4482,4486,4489,4493,4496,32,905,776,776,908,24,770,770,40,892,72,1124,64,895,4500,109,855,335,32,97,905,776,908,48,38,24,0,0,0,58,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4503,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,836,911,915,918,922,926,812,809,929,4506,806,955,816,959,842,803,968,800,972,845,855,819,839,852,988,992,826,429,0,0,0,24,40,64,773,905,776,90
8,32,52,855,819,852,0,0,0,0,836,922,875,806,842,803,845,819,839,4510,4520,4530,0,0,0,0,3809,0,0,0,0,0,0,6,8,10,12,14,16,18,20,22,836,2885,922,1144,826,855,806,955,839,803,968,845,833,985,429,479,911,1454,842,24,40,773,4535,64,776,52,1440,32,4538,905,0,0,4542,4542,4542,4542,4542,0,0,0,0,0,0,0,0,0,0,0,179,479,179,179,179,836,2885,922,179,179,179,1144,826,855,179,179,179,806,955,842,179,179,179,803,968,845,179,179,179,833,985,839,179,179,179,429,816,800,179,179,4548,4552,0,0,0,0,0,0,4556,770,892,64,895,32,97,52,782,1440,1521,4562,4566,507,4569,4573,4577,4580,4580,4580,4580,4580,4580,4580,0,0,0,0,0,0,0,0,0,6,8,10,12,14,16,18,20,22,0,0,0,0,4586,4590,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,836,915,922,4595,803,800,845,4600,806,816,842,4604,926,809,848,4608,855,819,839,985,826,24,429,40,64,32,52,97,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4613,4613,4617,4623,4630,4636,4636,4642,4642,4648,4648,819,819,839,839,4654,4660,4667,4667,836,836,915,915,922,926,926,809,809,848,806,806,816,816,842,806,806,816,816,842,803,803,800,800,845,855,819,839,852,826,826,826,429,4673,4681,4613,4613,4688,4688,819,819,839,839,4693,4693,4693,4693,4700,4700,0,752,4706,4711,4716,2830,4719,4723,0,0,0,0,0,6,8,10,12,14,16,18,20,22,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4730,4739,4749,24,40,64,97,52,32,1786,836,479,915,922,926,809,1044,848,806,816,842,803,833,985,800,845,855,819,839,852,826,2885,429,4759,4769,4778,4786,4795,4803,4814,4823,4831,4842,0,0,0,911,4850,0,6,8,10,12,14,16,18,20,22,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,836,4854,911,915,4858,922,926,812,809,848,806,955,816,842,803,4862,968,833,4866,800,4870,845,4874,1144,1454,1148,855,819,839,429,4878,985,826,988,852,24,855,819,770,40,52,776,64,895,32,44,48,46,50,54,58,62,4882,4890,4895,996,0,0,0,0,0,0,0,0,0,6,8,10,12,14,16,18,20,22,0,0,0,933,937,942,0,6,8,10,12,14,16,18,20,22,839,4899,4902,4454,4905,2197,4908,4912,4916,4920,2547,4924,4927,4930,4934,2544,4937,4940,4943,4566,2554,4947,4950,410,4954,1426,4958,4962,4965,4968,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4971,4971,97,4971,4971,4971,4971,4971,4979,40,4971,4971,4971,4971,4971,4971,4971,4984,4984,4984,109,4971,4993,4997,4971,4971,4971,4971,4971,4984,4984,4984,4971,4971,4971,4971,5004,587,4971,4971,4971,4971,4971,4971,4971,4971,4971,4971,4971,4971,4971,4971,4971,4971,4971,4971,4971,4971,4971,4971,4971,4971,4971,4971,4971,4971,4971,24,5011,5018,5011,26,30,32,5024,4979,5011,36,5011,44,48,5030,52,4979,4993,4997,54,62,64,4984,5011,66,587,5034,5039,5045,5039,3324,40,58,64,66,5034,5051,5057,3355,3324,1440,26,30,34,48,50,54,58,58,60,62,74,36,5061,5070,100,40,5078,54,64,5083,26,30,34,36,44,46,48,50,54,58,60,5091,66,70,74,24,5018,30,32,32,4979,5024,40,52,5091,64,5095,5011,28,28,5099,5103,34,5112,5120,5011,40,5078,4971,4971,42,46,46,4971,48,5011,50,50,4971,5127,3355,60,5091,62,64,5083,4971,66,5011,74,74,74,5095,5134,0,0,0,0,0,0,0,0,0,0,58,0,0,0,0,0,0,0,0,5140,97,1451,5150,28,5070,5099,36,4971,44,46,4971,4971,50,4971,4971,58,60,60,74,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,24,24,26,26,26,26,26,26,28,28,30,30,30,30,30,30,30,30,30,30,32,32,32,32,32,32,32,32,32,32,34,34,24,32,40,52,64,52,64,773,50,106,26,54,56,36,48,46,60,121,62,30,321,42,72,58,68,34,44,911,127,74
,38,5153,2396,124,321,48,32,40,52,64,52,64,106,26,54,56,36,48,62,30,321,42,127,72,68,44,36,38,5156,644,130,32,40,4577,64,64,106,44,36,38,54,121,62,30,42,34,36,38,127,74,58,321,124,40,44,58,34,124,64,64,64,64,66,66,66,66,68,38,70,68,48,68,68,24,40,44,106,28,630,633,654,1090,62,30,54,303,79,124,74,24,62,124,377,106,28,1087,933,660,62,584,79,5159,124,74,64,72,637,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,40,40,40,40,52,52,52,52,52,52,52,52,52,52,52,52,52,52,52,52,52,52,52,52,52,52,52,52,64,64,64,64,64,64,64,64,64,64,64,64,64,64,72,72,72,72,72,72,72,72,5162,5162,5162,5162,72,72,5018,5018,5018,5018,5018,5018,5018,5018,5018,5018,5018,5018,5018,5018,5018,5018,5175,5175,5175,5175,5175,5175,0,0,5175,5175,5175,5175,5175,5175,0,0,5183,5183,5183,5183,5183,5183,5183,5183,5183,5183,5183,5183,5183,5183,5183,5183,5078,5078,5078,5078,5078,5078,5078,5078,5078,5078,5078,5078,5078,5078,5078,5078,5187,5187,5187,5187,5187,5187,0,0,5187,5187,5187,5187,5187,5187,0,0,5083,5083,5083,5083,5083,5083,5083,5083,0,5083,0,5083,0,5083,0,5083,5195,5195,5195,5195,5195,5195,5195,5195,5195,5195,5195,5195,5195,5195,5195,5195,5018,5018,5175,5175,5183,5183,5078,5078,5187,5187,5083,5083,5195,5195,0,0,5018,5018,5018,5018,5018,5018,5018,5018,5018,5018,5018,5018,5018,5018,5018,5018,5183,5183,5183,5183,5183,5183,5183,5183,5183,5183,5183,5183,5183,5183,5183,5183,5195,5195,5195,5195,5195,5195,5195,5195,5195,5195,5195,5195,5195,5195,5195,5195,5018,5018,5018,5018,5018,0,5018,5018,5018,5018,5018,5018,5018,0,0,0,0,0,5183,5183,5183,0,5183,5183,5175,5175,5183,5183,5183,0,0,0,5078,5078,5078,5078,0,0,5078,5078,5078,5078,5078,5078,0,0,0,0,5083,5083,5083,5083,5057,5057,5083,5083,5083,5083,5083,5083,5057,0,0,0,0,0,5195,5195,5195,0,5195,5195,5187,5187,5195,5195,5195,0,0,0,5201,0,2,2,2,2,5205,2,2,5209,5213,2,2,2,2,2,2,2,2,2,2,0,0,0,5218,1,1,1,1,1,1,1,1,1,1,1,0,0,0,2,0,0,5224,0,0,0,0,2,0,0,0,0,0,0,0,0,0,1,1,0,0,0,0,0,5230,0,0,2,0,0,0,0,0,0,5234,0,1,1,0,0,5237,0,0,5241,0,0,0,0
,0,0,5248,0,0,0,5255,2,2,0,0,0,0,0,0,0,0,0,0,5261,0,0,5267,5267,5274,40,0,5280,0,0,0,0,0,0,0,5285,0,0,0,50,0,0,0,0,0,0,0,0,0,0,5291,0,0,0,0,0,24,32,52,70,5024,0,0,0,0,0,0,0,0,0,5296,0,0,0,0,0,0,0,0,5301,0,0,0,0,0,0,0,0,0,5305,0,5309,0,0,0,5314,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5320,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5325,0,0,0,0,0,0,0,0,0,0,0,0,0,5329,0,0,0,0,0,0,0,0,0,0,4971,5333,0,0,0,4971,0,0,0,0,2,0,0,0,0,0,4971,0,0,0,0,5337,2,0,0,0,0,0,4971,5078,0,0,0,4971,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5342,0,0,5347,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5352,0,0,0,0,0,0,0,0,0,0,5358,0,0,0,0,0,0,0,0,0,0,5325,0,0,0,0,0,0,0,5362,0,0,0,0,0,0,0,0,28,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5367,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5372,0,0,0,0,0,0,0,0,0,5320,0,0,0,0,5377,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,2,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5381,0,0,0,0,0,0,0,5386,0,0,0,0,0,0,0,0,0,5391,0,0,0,0,0,0,0,0,0,5396,5401,5401,0,0,0,5407,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5413,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5419,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5426,5386,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5432,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5248,0,0,5437,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5442,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5448,0,0,0,0,0,0,0,0,0,0,0,0,0,5453,5459,0,0,0,0,0,0,0,5453,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5248,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5464,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5471,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5476,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5325,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,24,24,26,26,26,26,26,26,28,28,30,30,30,30,30,30,30,30,30,30,32,32,32,32,32,32,32,32,32,32,34,34,36,36,38,38,38,38,38,38,38,38,38,38,40,40,40,40,44,44,44,44,44,44,46,46,46,46,46,46,46,46,48,48,48,48,48,48,50,50,50,50,50,50,50,50,52,52,52,52,52,52,52,52,54,54,54,54,58,58,58,58,58,58,58,58,60,60,60,60,60,60,60,60,60,60,62,62,62,62,62,62,62,62,64,64,64,64,64,64,64,64,64,64,66,66,66,66,68,68,68,68,68,68,68,68,68,68,70,70,70,70,72,72,74,74,74,74,74,74,38,62,68,72,24,60,24,26,28,30,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,40,40,40,40,52,52,52,52,52,52,52,52,52,52,52,52,52,52,52,52,52,52,52,52,52,52,52,52,64,64,64,64,64,64,64,64,64,64,64,64,64,64,72,72,72,72,72,72,72,72,16,18,20,22,0,0,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,32,32,32,32,32,32,0,0,32,32,32,32,32,32,0,0,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,40,52,52,52,52,52,52,5386,0,52,52,52,52,52,52,2,5480,64,64,64,64,64,64,64,64,0,64,0,64,0,64,0,64,52,52,52,52,52,52,52,52,52,52,52,52,52,52,52,52,24,24,32,32,32,32,40,40,52,52,64,64,52,52,0,0,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,52,52,52,52,52,52,52,52,52,52,52,52,52,52,52,52,24,24,24,24,24,0,24,24,24,24,24,24,24,0,40,0,0,0,32,32,32,0,32,32,32,32,32,32,32,0,0,0,40,40,40,40,0,0,40,40,40,40,40,40,0,0,0,0,64,64,64,64,58,58,64,64,64,64,64,64,58,0,0,0,0,0,52,52,52,0,52,52,52,52,52,52,52,0,0,0,5485,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5491,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,5496,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5501,0,0,0,0,0,0,0,0,0,0,5506,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5509,0,5255,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5514,0,0,0,5519,50,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5524,0,0,0,0,0,5529,0,0,0,0,0,5534,0,0,0,5540,5544,5547,5550,46,5553,50,5557,1134,68,2406,30,1786,44,62,5561,0,0,0,0,0,0,0
,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5564,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5568,0,5573,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5578,0,0,0,0,5564,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,1,0,40,892,5583,5587,66,2819,5590,5594,5599,70,2891,5602,46,28,30,48,40,892,5583,5587,66,2819,5590,5594,5599,70,2891,5602,46,28,30,48,6,30,10,12,14,16,18,20,22,0,6,8,10,12,14,16,18,20,22,0,0,0,0,0,0,0,0,0,0,0,0,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,66,0,0,66,0,0,0,0,0,5606,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5612,0,0,0,0,5617,0,0,0,0,0,0,0,0,0,5622,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5629,0,0,0,0,5291,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5635,0,0,0,5629,0,0,0,0,5642,0,0,0,0,0,0,0,0,0,0,0,0,0,5647,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5651,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5325,5655,0,0,0,0,0,0,0,0,5285,0,0,0,5367,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,2,2,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5661,5661,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5666,0,0,0,0,5671,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,2,2,2,0,0,5676,5682,0,0,0,0,5688,0,0,0,0,0,0,0,0,0,0,5693,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5699,0,0,0,0,0,0,5291,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5476,5704,5704,0,0,0,0,0,0,0,0,0,0,0,5708,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5712,0,0,5717,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5721,5721,5721,5721,5721,5721,5721,5721,5721,5721,5721,609
,609,0,5721,5721,0,0,609,609,5721,5726,0,609,609,609,0,5464,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5693,0,0,0,0,0,0,0,0,5730,0,0,2,2,0,0,0,0,0,0,0,0,5721,0,0,0,0,0,0,0,0,0,0,5682,5682,5682,0,0,0,5721,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5721,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,272,272,272,272,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,5682,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5735,5739,5744,5749,5757,5763,5769,5777,5783,5790,5795,40,5803,5810,5815,5823,5831,5837,5841,5848,5854,5860,5867,5871,5877,5882,3468,5886,3386,5891,988,5898,5903,5908,5913,670,5921,5921,1318,5925,5933,5925,5937,5942,5950,5958,5969,0,5735,5739,5744,5749,5757,5763,5769,5777,5783,5790,5795,40,5803,5810,5815,5823,5831,5837,5841,5848,5854,5860,5867,5871,5877,5882,3468,5886,3386,5891,988,5898,5903,5908,5913,670,5921,5921,1318,5925,5933,5925,5937,5942,5950,5958,5969,0,46,46,46,54,58,24,62,38,38,44,44,74,74,5018,48,24,0,66,68,68,66,1432,1432,5978,32,58,52,4971,42,4971,0,0,5987,5987,5992,5992,5051,5051,5997,5997,6003,6003,6007,6007,6011,6011,6016,6016,6021,6021,6028,6028,6034,6034,6039,6039,2599,2599,2950,2950,6045,6045,52,52,3457,3457,2668,2668,6049,6049,6054,6054,4500,4500,1733,1733,4237,4237,6058,6058,6062,6062,6066,6066,6076,6076,6080,6080,6066,6066,6066,6066,6080,6080,6076,6076,283,283,6094,6094,6076,6076,6076,6076,6102,6102,6066,6066,6076,6076,6076,6076,6111,6111,6076,6076,6076,6076,6076,6076,
6076,6076,6076,6076,6076,6076,6076,6076,6076,6076,6076,6076,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,603,1635,1639,1643,410,1647,1651,1655,609,1659,1663,1667,1671,1084,1675,1679,1602,331,1684,606,1688,1693,1698,1703,1707,1712,1717,1721,1725,1606,1729,1611,1616,1620,1623,482,1627,1631,0,0,0,0,0,0,0,0,0,0,855,6120,6124,6129,6133,6139,6146,6150,6154,6159,6164,6170,6174,6178,6182,6189,6195,6139,6182,6199,3175,6204,6182,6209,6182,347,6213,6218,6182,6226,6230,4506,6182,6182,6234,670,1447,6238,6243,6182,6248,6253,6257,6262,6267,6271,6276,6281,6286,6290,6294,6298,6302,6313,0,0,0,0,0,0,0,0,0,6318,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,6332,6336,6340,6344,6348,6353,6357,6361,6365,6369,782,6374,6378,6382,6387,6391,6396,6401,6406,6410,6415,6420,6426,0,0,0,0,0,0,0,0,0,992,6431,6435,6439,6444,6449,4430,0,6453,6457,6461,6465,6470,6475,6479,0,6483,6487,6491,6495,6500,6505,6509,0,6513,6518,6523,6528,6534,6540,6545,0,6550,6554,6558,6562,6567,6572,6576,0,6580,6584,6588,6592,6597,6602,6606,0,6610,6614,6618,6622,6627,6632,6636,0,6640,6644,6648,6652,6657,6662,6666,0,2806,2830,425,432,3162,464,836,407,413,410,52,3468,6670,6673,2851,429,3400,3338,988,6676,5937,6682,24,335,6688,6694,6267,670,6704,6713,5933,6704,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,220,5682,220,5682,6720,6725,6725,6725,6725,6725,6725,6734,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,24,24,40,40,64,64,32,32,52,52,836,915,2998,3228,2995,3225,3009,3239,1311,3242,826,1044,2678,3126,2675,3123,2689,464,1325,3137,806,816,324,3120,2840,2840,3183,2851,432,2854,3194,842,2950,2947,2957,1400,429,800,803,2526,70,3457,2523,2792,3454,1620,2806,3468,2537,2809,3471,845,2599,2596,2610,2613,855,855,670,670,1318,1318,819,2654,2651,2665,2668,852,852,1783,482,3114,50,2816,836,3009,0,0,0,0,0,0,0,0,0,2,24,24,40,40,64,64,32,32,52,52,836,915,2998,3228,2995,3225,3009,3239,1311,3242,826,1044,2678,3126,2675,3123,2689,66,66,66,66,816,324,3120,2840,2840,3183,2851,432,2854,3194,842,2950,2947,2957,1400,429,800,803,2526,2795,3457,2523,2792,3454,1620,2806,3468,2537,2809,3471,845,2599,2596,2610,2613,855,855,670,670,1318,1318,819,2654,2651,2665,2668,852,52,1783,482,3114,50,2816,836,3009,985,2819,2830,2833,0,0,0,0,0,0,0,0,0,0,26,54,48,34,30,62,50,46,36,44,38,42,56,70,124,321,121,58,74,28,60,24,52,32,4535,905,1518,908,139,603,410,4454,5030,6670,40,64,371,66,106,6743,4927,0,0,0,6746,6753,6765,6777,6783,6795,6807,6814,6826,6832,6845,6857,6869,6880,6894,6908,6920,6926,6932,6943,6954,6959,6969,6975,6981,6992,7000,7008,7016,7024,24,97,855,475,1765,32,1768,1772,52,852,1775,109,1318,64,1779,482,1783,670,1786,347,40,0,7030,2017,2030,2041,2096,2126,7041,7058,7072,7090,7102,7113,7127,7141,7154,7167,7185,7203,7215,7229,7243,7262,7274,7285,7297,7308,7319,7327,7338,7347,7361,7378,7394,2182,1858,1864,1883,1930,1937,1948,1999,7405,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2792,3126,3208,3225,773,7412,776,7416,4934,7420,7424,4943,7428,7431,7435,7440,1437,1429,7445,7449,54,62,44,38,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2995,2678,2675,2854,2947,429,2526,2523,1620,2537,2596,819,2654,2651,2665,2668,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7454,52,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7460,7467,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,24,0,26,0,44,0,46,0,28,40,34,0,48,60,54,0,32,0,38,0,52,0,58,0,30,42,36,0,50,62,56,0,0,0,0,0,64,0,66,0,0,0,0,0,70,0,0,0,0,0,0,0,74,0,0,0,0,68,0,0,72,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5688,5606,0,0,7473,7478,7482,0,0,0,0,0,7487,0,0,0,0,0,0,0,0,0,7491,0,0,0,0,0,5442,0,0,0,0,7495,0,0,5688,0,0,0,7500,5426,0,7504,7511,7516,7521,5218,7528,5655,5491,7478,7533,0,0,7538,7542,5476,7548,7555,7558,0,0,0,0,5314,7565,5708,7478,7571,0,7576,7582,7588,5476,7592,7599,5666,5386,7605,0,0,0,0,7500,0,0,0,5381,5459,7495,7588,5476,7610,7616,5413,0,0,0,5347,7622,5476,5301,5325,7628,7632,7637,0,0,0,7643,7500,5296,7648,0,7576,0,7653,5237,7658,7663,5296,7667,7538,7672,5629,7675,7681,7687,7628,7692,7696,7700,7706,7710,7715,7653,5629,7719,0,7723,5666,7730,0,0,5296,7734,7738,5320,0,7742,7658,0,7746,7752,7757,7628,7763,7667,5453,5661,5337,0,0,5617,0,5666,5301,0,7632,0,5325,0,0,0,0,7768,0,7774,7779,7786,7790,0,7795,5342,0,0,7790,7800,0,5391,5301,0,7599,5568,7805,5655,0,0,0,5224,7811
,7582,7817,0,0,5401,7592,5329,5426,0,7823,5661,0,5358,5381,7588,0,0,0,5337,5426,7706,0,7828,5564,7632,5280,5381,0,0,7834,7839,0,7843,0,0,7849,0,5301,5396,7853,7858,0,7667,7864,7867,5476,5291,7872,5407,0,7710,5248,7710,0,7672,0,7516,5291,7588,7675,7877,0,0,7605,7883,5407,7576,7888,5661,5301,0,7710,5237,5407,0,5325,0,7893,5573,7898,5568,0,7904,7779,7908,0,7849,0,5291,5666,7914,7920,7924,7672,7930,7864,5329,0,0,7935,7495,7940,7946,7628,0,7952,5476,5274,7817,7957,0,5333,5391,0,7962,5642,5381,7967,0,5699,7482,0,7972,7977,7982,7986,5730,7990,0,5485,7663,0,7864,5573,5661,5573,7994,0,0,7811,7588,5337,0,7667,7999,7500,5391,0,0,8005,0,7849,5671,7834,5688,5476,5396,0,8010,8015,7883,7565,8022,5708,5564,8028,7849,5337,5396,0,8033,8039,8044,5396,5301,8048,8052,8057,0,7487,0,0,5333,7516,5476,5730,0,7500,5726,7696,0,0,0,0,0,0,0,8028,7994,5661,5377,8062,8068,8073,8077,7482,0,0,0,0,0,0,8082,8087,7904,7491,5325,7853,7653,7592,7582,7752,5329,5448,8039,8093,0,5606,8099,0,0,0,0,0,0,0,7491,5391,5529,5501,8104,8110,7888,8115,7864,5661,0,7864,8119,7752,7800,7982,5717,5367,7800,5358,5325,5730,7972,0,0,0,0,8126,7663,0,0,8131,7883,8126,8073,8136,5301,7675,5519,0,0,0,0,0,7582,5377,8140,8144,8149,7972,7663,5237,0,7867,0,0,0,0,5573,5325,8154,7738,7834,7500,0,8159,0,0,0,0,7491,7521,0,5367,7571,5476,7839,0,0,5274,7658,8164,5476,7972,5476,5274,8170,5325,7653,0,8174,5301,0,5347,8179,7653,7834,0,0,7834,7653,0,7710,8184,8154,0,5358,5367,5534,8190,0,5337,8196,7500,8028,5358,8052,5485,5708,5708,0,0,0,0,8170,0,7571,8201,5381,7663,8205,7839,8048,0,0,0,0,8077,7511,8211,8217,0,5358,8222,0,5573,7653,5606,8227,0,0,0,0,5564,7888,7628,7491,8005,7653,7908,0,7908,0,7610,0,8201,7710,5661,8077,7843,5476,0,7800,8015,5367,8234,0,0,5325,8239,7491,5325,7482,0,8245,5476,5661,8052,5218,7653,7877,7658,5629,7972,8222,8249,7952,7877,5666,0,5237,0,0,8255,8261,8267,0,7610,5564,0,0,7786,0,0,5396,8039,0,5524,8272,0,5261,8277,8077,5564,5524,7867,8282,0,0,5237,7500,8005,8249,8287,0,5325,8293,7675,5476,7628,5476,7834,0,8297,8052,83
02,0,5337,0,7528,5358,0,7828,0,8201,0,7487,7696,0,7790,8267,0,0,8255,0,8174,0,0,7495,5381,0,7696,7888,0,0,0,7888,0,0,7867,0,7742,5448,8179,8307,0,5352,5476,0,7571,8312,7867,8316,7528,5337,7930,7610,0,0,0,0,0,0,7738,5347,7800,8322,7576,7834,0,5329,0,5514,7746,0,7872,8048,5496,5386,0,5442,0,5320,7858,0,8005,8302,0,5274,7972,0,0,0,5401,8329,7500,5651,5296,7715,0,8334,5476,8131,5629,7706,8267,5320,5564,5476,8245,0,5401,8340,8179,8170,5329,7588,5362,5708,8345,7542,0,0,0,0,8297,7914,8349,7653,8354,7696,5708,5274,7538,0,0,5347,0,8354,7582,5534,8359,8249,5453,5509,5651,8077,5726,8365,5401,7972,5209,8370,7500,8052,8329,7972,8010,8104,8131,0,8104,8375,7605,8087,8239,5381,5661,5693,5381,8164,8211,0,5717,5642,8381,5476,7843,5218,7482,0,7588,0,7516,8387,7790,8391,8397,7628,7491,8131,5651,7667,7752,0,0,7828,7610,0,5426,8402,8255,7746,8249,8196,0,5509,8407,7663,0,8411,8417,5401,5309,0,7805,7571,8073,0,5642,8422,5325,0,8039,5325,7994,0,0,7500,8184,8267,0,7904,5578,8170,7994,5237,7706,5476,5661,7839,5688,8068,5453,7706,5325,0,0,0,7828,8149,8427,0,0,7482,8433,5325,5296,5437,5347,5386,8170,8196,5491,0,5381,7500,0,5218,8307,7710,0,8438,5464,5342,7990,8443,8052,7558,5529,8447,7710,5401,8345,7710,0,7521,7924,5396,8451,5688,8149,5564,8217,8211,7843,5480,7653,8211,8073,0,7706,7653,0,7610,0,7681,8149,7706,8365,5651,8456,8427,7521,8461,0,7706,0,5329,0,8267,7864,7768,8438,7994,7768,5320,8438,0,0,8391,5642,5291,5301,7908,8461,8467,8245,0,8149,7957,7542,5708,8473,0,8211,8479,0,5708,0,5237,5564,0,8484,8489,8316,5726,7864,0,5325,5325,5396,5285,8397,8473,8249,8126,7994,5325,5708,7628,8052,8217,8494,8447,8498,0,5301,5329,5453,5396,8277,0,7672,0,5651,7628,0,8170,0,0,8427,0,5476,0,5325,5358,0,8504,0,8010,5717,7692,5381,7622,0,0,8316,0,7839,5358,8509,5301,0,7962,7495,0,5325,8514,5274,5280,7864,7790,0,0,7853,0,8255,0,0,7478,0,8519,5377,5401,5358,0,8211,5529,8527,5666,0,5241,5573,7800,7610,0,7482,0,5241,7542,8375,8077,5476,0,5325,5325,8533,7628,8484,7811,8539,0,8093,5325,8543,7687,7839,7883,8239,5476,79
99,5274,7710,7839,7710,7834,7843,8307,7610,7877,0,8422,5496,7663,7710,0,7616,5496,7853,0,8422,5496,5642,8548,0,0,8255,5401,8196,5329,7834,0,0,5476,7877,7592,5419,0,5485,0,7582,0,5676,7653,8539,0,0,8062,0,7710,8438,5358,0,8329,5329,0,5617,5358,0,5358,0,5274,7994,5432,7696,5676,0,5655,7930,5476,7628,5726,8365,5651,8447,7487,0,0,0,5261,7658,7715,5485,8411,8164,8554,8179,0,0,0,0,0,8539,8316,7738,5426,0,7888,5237,8316,0,5377,5261,5407,7972,0,7482,0,0,7811,7548,7706,0,0,5661,5661,8115,7994,0,0,7738,8427,8381,0,0,0,5476,8504,5320,5496,5314,0,7746,5325,7565,8456,7663,8010,5708,8196,5280,7675,0,0,8255,0,8316,0,5337,0,8543,5564,5573,7605,0,5573,0,8560,8566,5291,0,5496,8571,8577,5301,7500,0,7628,8370,5301,5730,7817,7990,8467,8407,5296,0,8582,7592,8365,5564,7867,7930,5237,7839,8334,0,5237,5688,5464,0,0,5442,8589,0,8407,8407,8594,8514,8093,8087,7828,8179,5459,7935,5352,0,8170,5325,8277,5606,8467,0,7774,7706,7628,5401,0,0,0,0,7628,5325,7994,5651,8402,5661,7500,5237,5688,5296,0,7628,7867,8598,7805,7888,7957,0,8334,7658,7576,7482,0,7893,7914,8467,5476,7643,7719,7491,8438,5501,7849,0,7482,8604,8447,5708,8527,5401,8227,7877,7538,8539,0,5329,0,5476,5396,0,8422,7864,5396,7962,8447,5401,5301,5274,7834,7610,8514,5407,0,0,8411,7628,8154,7622,5442,8370,0,5333,5381,7994,5476,7811,7849,8179,5347,7482,5661,0,7632,8093,7542,8154,0,8494,0,0,5717,0,5459,5218,5325,8608,8613,5237,8617,7500,5476,7982,5325,5726,5606,0,7542,7715,0,7790,7482,5671,7904,5274,7790,7565,8387,0,8022,7994,5529,0,8625,8484,7864,5209,8402,5358,8631,0,8577,0,0,7908,0,8370,0,7605,5248,5274,8190,0,5688,8456,7952,7696,7817,7723,0,8149,5301,8033,8589,8073,8494,8057,0,7888,0,8594,7738,7972,8473,7786,0,0,5224,7972,7582,7558,7999,8028,7516,8312,5325,8438,7994,7935,8131,0,8613,0,8201,0,8005,8149,7653,7616,7972,5381,5426,5661,8329,5261,7542,5661,7588,8370,5651,7643,0,0,5296,8136,8022,8543,7511,5381,5464,5347,0,7982,8307,8154,5717,5642,8604,7952,5407,5407,5442,5309,7538,5296,8514,8179,7672,7920,7924,8170,8170,5699,0,0,8635,0,8484,5396,5
274,5661,0,7588,7893,8604,7491,0,0,0,8196,5296,8068,0,8322,5237,5476,0,8249,8641,5347,8052,7839,8533,8647,7632,5651,0,0,0,5661,8073,5296,7706,5661,0,0,8073,7542,8282,5285,5291,8653,8022,8170,7864,8077,0,5459,5459,0,7952,0,7706,7805,0,7628,8196,0,7990,5564,0,8539,5274,7952,7849,8658,7653,8316,8073,5261,5358,5329,7491,8447,8663,8589,7757,5329,7853,5401,7990,8589,5655,8663,5401,7790,0,7542,7990,8670,8255,5358,0,7482,0,5261,7588,7730,7872,5314,5329,8093,5496,0,7811,0,0,7977,8282,7982,8307,0,7957,0,5676,7990,5612,5717,5578,5693,8676,7605,7834,8334,5496,0,0,0,0,5476,0,0,5337,8234,5642,5642,5391,7817,7706,8682,0,0,5730,5651,5564,7982,8687,7883,0,0,7834,0,0,5726,0,8227,8391,0,8222,0,0,0,5274,0,0,0,7790,7843,7823,8456,8033,0,0,8694,0,7867,5491,0,8653,5459,5329,0,0,0,0,7628,7658,7877,7800,7746,7811,0,8699,7811,5730,5485,8073,5274,5301,5309,0,8211,8647,8052,7924,0,5274,7588,7994,7935,0,5309,8554,7849,0,8635,8093,0,8217,0,7734,8354,8073,0,5296,5459,7834,8170,5285,7908,8577,7628,5377,0,7478,8062,7675,8052,0,0,0,0,5464,5564,0,0,5471,0,8705,5285,0,7528,0,0,8467,5291,5396,0,7898,8711,0,0,7994,0,5712,7487,8411,5329,0,0,0,7849,5617,7672,5213,8164,5688,8577,7817,5274,5237,0,0,5453,0,0,0,5661,5666,0,8498,5314,7893,7571,5459,5325,7849,5325,0,0,0,0,0,5647,5274,7834,5629,7738,8539,5291,8608,7653,5642,8170,7872,7898,0,8077,5606,8716,0,0,5337,7746,0,0,8716,5426,7706,0,8682,8316,7696,7977,8196,7790,7864,5301,7710,0,7849,5381,5325,0,8721,5730,5671,0,5501,7719,0,5325,0,7723,7849,0,5274,5381,5396,5358,0,0,5655,0,0,5391,0,8022,7972,8543,0,0,0,7843,5358,5274,7898,5491,0,0,7920,7877,5358,7588,8154,8727,7616,8732,5419,5362,7786,7565,0,0,7738,8402,0,8407,8297,8739,7935,8245,7982,8402,5329,5676,5329,7542,8174,5655,0,5301,8340,7706,0,0,5377,8093,5726,5401,5329,5320,7817,5578,8473,5309,8745,0,0,8548,7734,5309,8473,5407,8234,0,0,0,8577,5301,7632,7914,7746,5407,8539,7930,0,5401,0,8329,8751,5699,7610,8397,0,5291,0,5617,0,5661,0,5347,8140,5476,7495,5301,8354,0,5476,0,5325,8756,5661,0,5209,7990,5296,5726,0,
0,0,8631,0,8329,0,7914,5568,7867,8302,7500,8387,8312,8443,5337,5209,5401,5309,5367,5476,7867,5651,5285,0,7710,5391,0,7482,5305,5476,0,7719,5309,7491,0,5699,7935,8375,0,5241,0,0,0,0,0,0,8402,0,8345,5519,8179,8732,7687,0,5325,8073,0,0,7843,8422,0,8093,7864,8494,5401,8762,8514,5347,7994,8676,7908,7663,5437,5296,5476,5305,5209,8543,5413,0,5358,5301,5329,7632,0,7883,0,5699,8033,0,7558,8484,5568,5325,8093,0,8670,8509,7872,8287,8766,0,7957,8222,0,0,8222,8164,0,5666,0,0,5726,0,0,7972,8772,8164,8443,7616,7790,8716,8411,0,0,5629,7653,5372,8164,0,0,7734,0,8772,0,8297,8381,8577,5459,5459,7548,0,7672,7883,0,7952,5325,8447,5476,8184,7628,7599,8039,8115,7834,8402,5274,5285,5320,5564,8456,0,0,0,0,8201,8411,7710,7730,5476,8411,0,8217,5347,0,7637,0,8190,7628,7628,0,0,0,0,7738,0,0,0,0,8170,5661,5237,5407,0,7533,7738,8190,5274,5453,0,0,0,0,0,5651,5325,8131,7752,0,7599,5337,8115,5358,5651,0,5661,8010,5476,0,5666,7616,0,5309,5617,7482,5325,0,7588,0,0,0,7599,7746,7588,5476,5437,7719,5325,7632,0,7790,7687,5342,7972,8411,5301,0,0,0,0,0,0,0,8255,8721,0,5655,0,8077,7667,5661,5642,5661,0,8766,0,0,7610,8033,5651,5699,0,8136,0,0,0,7588,5661,8073,5693,8498,0,8519,7935,7653,7883,8052,0,0,0,7914,5651,7877,8514,7653,8402,5291,7849,7893,5391,7935,7914,7555,5564,7800,0,0,0,0,5476,5442,0,0,8222,5666,7990,7828,5358,5342,5717,5358,5476,5274,8447,7616,7605,8104,8527,7500,8647,7478,8239,8777,7967,0,8411,5642,7811,5213,5448,0,8217,5485,5237,0,8039,0,0,7834,5529,5573,7946,7616,0,0,0,5401,5274,7811,0,0,5401,5261,8777,7930,5407,5320,8010,7828,0,7904,7628,0,0,7700,5666,5476,5491,0,7877,8783,0,5309,5362,0,0,0,8239,5712,8484,0,7511,7653,0,5218,5309,7710,8504,8789,0,0,8062,5606,0,0,0,0,8073,7723,0,5320,8402,5296,8783,8307,8625,7864,7994,5501,8772,0,8099,7849,5218,5296,0,7628,8548,7834,5688,7999,7592,7658,0,0,0,0,0,0,8422,7516,7592,7687,0,7849,0,7500,0,0,0,8402,8745,5305,0,0,5413,5301,0,0,7834,5717,5305,7930,5676,5459,8670,0,7478,7834,0,5291,0,7658,7653,0,7828,5325,5381,0,0,0,5661,7986,8010,8154,5237,8077,5651,8297
,8498,7482,5651,0,5291,0,8316,5509,0,5301,8670,8571,8795,8282,0,7681,0,7930,7491,7858,7877,7982,8800,0,8345,8104,7542,7930,0,5501,0,5480,5480,7571,8093,0,7952,0,5391,5730,8577,0,7487,0,7930,8504,8762,8073,7779,7839,5358,5325,8548,0,7849,5301,8467,8354,0,7877,8539,5666,5309,5248,7487,7734,5476,0,0,7811,8272,8613,7616,7616,7648,7482,5391,7482,5291,8068,8190,0,0,7867,7605,0,0,5337,0,0,5237,5391,5476,5305,0,0,7982,7675,7478,0,5485,8805,5391,8447,0,7738,0,5573,7877,8647,7757,5386,5386,8811,8131,7628,5730,0,0,0,7982,7924,0,5274,0,8164,7957,7999,8179,8239,8647,8370,8010,7653,7883,8427,0,7738,7605,8282,5407,7628,5314,5337,5301,0,0,0,7877,5401,0,0,8484,7734,5688,7817,5209,7628,0,0,8705,8816,5712,0,8484,0,7500,5255,8411,7790,0,0,0,0,5476,5337,0,5255,0,8222,5296,0,0,0,8201,8211,7986,5647,7478,5301,8093,0,5325,7667,0,7888,5291,5491,7930,0,0,0,0,0,7986,8411,5342,5647,5459,0,0,0,0,0,7888,0,0,5342,5358,5647,7719,8179,5647,7746,0,7672,0,0,8427,5358,5325,8433,0,5301,0,0,5342,0,5320,0,0,7864,0,0,0,0,7723,7632,7482,8068,7786,7628,5666,7930,8028,7858,5401,5688,5401,0,0,5301,5693,8631,7576,0,5426,8608,8010,5352,8126,5480,0,7864,8126,7706,5573,7817,8316,7653,7558,7977,7811,5426,7828,8349,7582,7864,8821,8577,8119,7742,8539,7628,8732,8164,0,5629,8827,8647,8617,5329,8316,5241,5301,7730,0,0,0,0,8625,5476,7653,5661,7632,7511,0,7883,0,0,8772,0,0,0,5337,7823,0,8461,7667,7710,5568,5509,5301,7511,8577,5367,0,8345,7628,8170,8110,8329,0,8451,8179,8833,5218,5564,8821,0,5237,5320,5285,5285,7653,8015,0,0,8839,8033,8010,5325,0,0,7786,7478,0,7867,5320,0,5480,8762,7487,8297,7715,5564,8093,5325,5519,8190,8716,0,5381,5301,8245,5325,0,7672,7914,5301,7843,8845,7482,0,8196,7867,7935,5337,8201,5464,5699,8277,0,0,7500,7898,7990,7706,0,8119,0,0,7576,5464,8422,7982,7930,0,0,0,5325,7786,0,5524,7637,7538,7605,8422,8422,5401,7843,0,0,0,5491,7663,5274,0,5237,0,8422,8179,5514,0,5381,5501,8658,0,5642,8479,7977,5666,7834,5329,8422,7616,7920,7632,5301,5432,0,7790,8126,0,0,5642,7849,8104,7982,8381,5442,5301,5629,7811,5358
,0,7935,7924,8245,5708,5529,0,0,0,8297,0,5372,5509,7738,8222,0,7834,0,7605,8387,8010,5337,0,8422,8249,5617,5309,7786,7706,8255,5285,7834,0,8795,7746,5274,8589,8136,0,7811,8816,7558,0,5291,5237,7706,8608,5712,7786,0,0,8010,5726,7877,8598,5358,7790,8851,7786,8062,7811,7632,8560,7977,7768,0,8211,8201,7786,0,7715,7663,7616,8598,7843,5381,8539,7977,0,8751,8504,7834,8762,8539,0,7843,5274,0,5426,7930,8345,5651,7706,7706,8494,5617,7628,7977,7558,5642,7616,7924,5325,5381,7653,8438,7957,8509,5642,7834,7962,7952,5464,7616,8119,5496,5401,0,8190,8732,7935,8447,5726,7622,8154,7628,7823,7982,8554,0,7542,5617,5419,5708,8349,8211,8427,8411,5285,5291,8789,5358,5391,5301,5305,8577,7763,5305,5237,7877,5726,5285,8154,0,7500,7700,8316,5730,7628,0,0,8073,8857,0,8287,0,5381,0,5426,5296,5391,0,0,0,5642,7628,7898,5381,5578,0,5617,8539,7582,7839,5564,8438,5655,5655,0,7834,7757,8062,7817,0,7672,8625,7986,8402,8048,8451,7834,0,5381,0,8039,5612,5509,5241,8077,7757,8277,7738,8402,0,8234,0,0,0,0,7746,0,8438,8170,7628,0,7999,8115,0,0,7533,7849,8131,8062,8149,7478,8287,0,5329,8140,5401,0,8255,5320,0,0,8433,0,5296,7588,8519,0,7610,0,5386,8772,7883,7994,8811,8245,5301,0,5274,7533,7986,0,5453,8359,7616,5274,5347,5305,7904,0,5564,5358,0,8154,5476,7511,0,0,5676,8154,7558,8307,7864,0,7834,8863,8762,7706,7628,7768,8154,8077,7946,8170,5617,0,0,5352,8391,8340,7957,5301,8104,0,8402,5261,0,7706,0,8277,8093,8239,8705,7628,5386,8287,7706,5476,8184,0,5407,8387,0,7805,0,0,0,5358,7883,8312,8184,7972,8783,5606,5606,8110,5476,8577,8345,7849,7723,5301,0,5209,7588,8467,8387,8762,5509,0,7582,5337,5296,5407,7872,8211,8170,0,0,8345,5342,8277,7643,8322,0,7888,0,8136,8179,5377,7500,8484,5464,7706,5381,7790,7834,0,7972,8422,0,7663,8334,8777,5629,8869,7643,7706,5573,5612,5314,8417,5647,8443,7957,7622,5301,7800,7786,8433,5396,5712,0,5401,8479,5261,7839,0,0,7972,7904,8402,5688,0,7610,0,7893,8039,7558,8484,7864,0,5291,8062,5285,8498,5642,8316,5496,5337,5320,5642,5717,7811,7843,5291,8033,0,0,7914,0,8391,8391,0,8519,8267,0,5309,82
87,0,8772,0,7723,0,7558,0,7628,7558,8316,0,8255,0,8170,5347,5476,8164,0,0,7487,0,8048,0,8249,7952,0,0,8370,8255,8287,5699,8217,5301,7904,5381,0,7632,0,8267,0,7982,7482,0,7972,5401,0,8811,5407,7576,7605,7692,8136,8217,8115,7924,7605,7817,5476,0,0,8519,8149,8504,8164,0,8136,8604,0,0,5476,5291,5358,7558,7667,5442,5296,7710,7972,7834,0,0,8099,5224,7999,7653,7628,5491,7999,7893,8302,8093,7738,0,8211,7491,8222,8005,7795,7823,5717,0,7710,0,5612,7839,7576,7658,7908,8196,0,8762,8604,7811,0,8159,5241,7542,0,0,7994,0,0,5237,8136,7734,8359,8170,0,0,0,7533,7738,7542,0,8164,7632,7982,5448,0,7734,7763,0,8851,8349,5320,8093,5666,5419,8873,7972,7757,0,8589,7982,8010,5712,5342,5407,5629,5661,0,5305,8010,5666,0,8554,8093,0,8827,8267,8249,7628,7972,7706,7883,8154,7994,8772,7719,5661,5448,8022,0,7511,5476,0,5666,5301,7893,8577,7516,5274,5642,0,5432,5274,8261,5622,0,7883,8411,8093,8062,7706,8239,8282,5651,7982,7982,0,8604,8456,7723,8391,8484,8201,5712,8589,5712,0,0,5301,7768,0,8869,7521,0,5391,0,0,5347,5464,8484,7834,0,7730,8641,5661,5578,8514,5237,7800,0,5464,8772,7675,8387,5358,7632,8010,5301,8402,5496,5442,8539,8582,5573,0,7877,5651,7542,5688,5655,0,0,0,7786,7834,7710,8625,7715,8073,7473,8845,5642,7790,7599,0,0,0,8613,5291,0,8438,5419,0,8427,7571,8354,5224,0,7576,5358,8504,7628,0,8287,7696,5476,5651,8745,0,7599,5325,8201,0,5432,8010,5642,5301,8010,5726,5564,7696,0,0,0,8154,5309,7706,8170,8277,7935,7930,8816,5476,7500,7511,8329,5564,0,8566,8805,5413,7687,0,0,0,7742,8851,8845,5629,7952,5296,7738,0,5642,0,7706,7653,7672,5651,5651,0,5329,8010,7742,5651,5381,5564,5651,5464,7588,5642,8077,5309,5401,5358,8745,5377,8093,8857,7972,7482,8479,5442,7774,5651,7834,5347,5248,0,0,8721,5617,7632,0,0,7632,7605,0,5442,5676,7592,7696,5476,7872,7774,8504,5564,5347,5342,7628,5642,7828,8093,7605,7738,5255,7542,5291,7805,8201,7786,8879,0,7653,7653,8711,7982,8062,8539,5564,8190,5651,5712,7843,7786,7999,7738,7628,8548,5301,8239,7478,5666,5325,5666,5301,0,8073,8608,8005,5480,0,0,7999,7952,0,7500,8716,8287,7888
,0,7582,7952,8073,0,7811,8190,8179,5301,8422,5464,5529,7982,8222,8297,7681,5564,8239,0,5476,5320,8312,7849,8140,0,8136,8345,8783,5329,8427,7738,0,0,0,8255,7588,0,8721,0,0,5573,0,7696,7696,8699,7706,7487,5573,0,7834,7898,8345,0,8884,8519,7904,0,5377,7653,7571,7904,5578,8631,8119,5358,7696,7605,7478,7752,7972,7576,5329,7752,8234,5688,7986,5437,5448,7746,7972,7706,8340,8647,5401,0,7610,7588,7706,8467,8527,0,5291,8539,7858,8721,8816,8504,5655,5671,7920,8772,8783,8104,0,8104,5305,5432,7706,5391,5358,5291,7487,0,5396,8136,5296,0,7696,0,8711,0,5237,7972,8504,8170,5459,5578,8670,8249,5358,7653,7628,5274,0,8766,8179,8201,0,7706,5325,5453,5329,7592,7653,5476,7774,8104,7972,5209,8245,0,5606,7576,0,8104,0,5578,0,8110,8888,5325,5476,8762,5291,5717,5325,0,0,5612,8893,7521,5464,7706,7605,8451,7710,5534,0,8898,7786,8509,8010,0,8340,7972,7877,5666,0,0,5578,7628,7952,5464,5309,0,8039,5514,8772,0,5237,7800,7888,5708,7920,7478,7663,8249,7920,8249,7924,7972,8073,7616,7994,5386,5347,7834,7877,7768,5655,7734,0,7658,7893,5320,0,8903,5496,8297,5606,7710,7738,7977,8598,5464,8239,0,0,0,5309,7864,0,5564,7888,0,7864,0,5642,8539,0,8539,8201,7800,7935,8099,7706,7715,8179,0,0,0,0,5337,0,8245,0,7588,5564,5325,5419,8340,8073,8909,7935,5564,8201,7478,0,7924,8391,8334,8211,0,8249,8340,7681,8509,8126,7675,7528,7706,0,0,8670,7946,8816,7675,8033,0,8010,7482,8608,8297,8915,8048,8010,5726,5391,7849,8411,5491,5305,8919,0,0,0,0,5496,7888,5337,8052,0,0,5274,5666,5309,7706,0,5712,0,0,0,0,7834,5476,7482,7706,5578,5237,5476,7491,7864,0,7972,7653,8287,0,8104,5224,7908,5442,0,5480,7723,0,8484,0,8140,5564,0,8249,5301,8577,7738,0,7786,7779,0,7986,7828,7839,0,0,0,5325,0,5496,7790,8762,0,7637,7872,8727,8821,0,8277,5426,0,5476,5712,0,7849,5325,0,7653,0,5301,5209,8443,5642,0,5301,7706,8811,5301,0,0,0,0,7588,5442,0,5661,7930,7500,5485,7734,8093,7521,7571,7757,8104,8745,7823,0,7672,7710,8670,7628,5396,5301,0,0,0,5301,0,0,8217,0,5476,8381,0,7982,5564,0,7558,7823,8604,0,5377,8196,8473,5342,7994,0,7719,7663,7864,5381,8739,78
93,0,7482,0,5514,8307,5296,5661,8115,8772,8227,7972,5476,5564,7864,0,7667,0,5496,5274,7800,0,5358,8093,5381,8005,7977,0,0,7843,0,7893,5329,7687,7849,0,8447,7616,7675,8909,8149,8115,8005,0,7542,8170,8647,8282,8307,7516,5651,5314,5661,7982,5651,0,8925,8827,5717,7823,7605,7834,7653,0,5476,8375,8354,8297,8093,8929,7628,7555,0,5476,7555,8196,7542,5367,5573,5309,5651,0,8136,7653,8443,5367,7834,5647,0,0,5612,7482,5248,7576,8211,8467,7555,7622,5514,5509,5301,0,8136,5391,5358,7839,7898,0,5676,7521,7952,5213,0,8329,7687,5642,0,0,0,7864,7924,7681,5337,5291,5301,0,5301,0,5325,7982,8427,5564,8227,8721,8170,8249,0,7558,8711,0,5301,0,0,0,5476,5224,0,5730,5509,7710,0,5717,7653,8073,0,7511,8179,8179,5476,8190,7834,5612,5524,5218,0,0,0,7628,0,5578,7538,0,0,8617,5309,8721,7734,7516,0,8805,0,7986,5476,0,7839,7834,5381,0,7994,5274,8381,5564,5717,0,5305,5564,8716,5274,7752,5301,8179,7763,0,8447,7637,0,7500,7555,8349,0,8795,0,8560,0,0,7628,7994,7605,0,0,5476,0,0,5564,5358,7605,5209,5320,5367,8447,0,5285,8190,0,8417,8164,5459,7730,0,0,7628,0,0,0,5329,7653,0,5476,8397,8427,7588,5509,8447,8721,5291,5413,8179,0,7888,7706,5642,7952,8699,5730,8329,5661,0,5296,0,5661,8073,7990,0,7500,8126,0,5573,0,5377,5573,7710,5453,8721,8527,5661,0,7888,5291,5296,8833,7730,7888,8093,8249,0,8039,8249,7610,0,7616,0,8721,8140,5301,8762,0,7864,0,0,0,8407,8407,5419,5401,0,8670,5237,0,7994,0,0,7849,5337,5329,8277,7742,8119,5676,7706,0,8267,0,7706,0,8873,5325,8349,8519,8184,0,5301,8093,0,8033,0,5274,7982,0,8443,5491,5491,7643,0,7935,5377,0,7495,7972,7511,7893,5285,0,5708,7516,0,8705,8272,7920,7528,5261,5661,8631,8272,0,5237,5661,7920,5642,5642,7555,8052,8566,7482,8010,7823,7710,7599,8467,5367,7576,8467,5391,5651,0,5717,8174,7893,7982,8447,7839,5651,8411,0,8136,7628,7864,7972,8566,7500,7672,5296,8329,5386,7653,7977,8307,5396,0,0,8140,5237,8022,5651,7528,7478,7723,5241,5442,5426,8484,8170,7643,5377,7800,5407,7706,8087,5666,5509,7935,8179,5381,5337,5296,7687,7588,5717,5386,5237,0,0,7710,7994,5305,8234,0,0,5372,5296,8375
,8239,8149,0,0,5280,5708,8077,5342,8349,8772,7528,5485,8345,7738,7877,7994,8641,5325,8033,7935,7500,5274,7528,0,0,0,5386,0,7834,5688,8062,8022,7667,8239,8170,0,7800,8577,0,5573,7710,7521,0,5712,5320,8604,7710,7786,7628,0,0,8239,8149,5320,5578,0,0,0,0,7864,0,8302,8548,5291,7667,0,7957,7994,0,7605,7986,0,5578,7999,0,5476,0,8447,0,0,5476,8052,8387,7811,7800,7800,5726,8149,8052,0,8514,5274,5655,5320,7571,7605,8131,8190,5476,5377,0,5309,8179,0,8201,7779,7972,5325,5509,8721,0,8745,7663,5564,7952,8131,5274,8821,7972,8473,5476,5612,5459,7952,8119,8329,8033,5617,8745,0,0,8104,8062,7582,7558,7994,5459,5320,0,5325,8925,7706,0,5564,5708,5564,8312,0,8772,7687,7935,5726,5337,7849,5730,7952,7864,5419,8267,7853,5688,0,5564,7571,7982,0,8073,0,5291,8539,5291,5564,5464,0,5218,7805,5612,7710,0,0,0,7800,8936,7706,5407,5358,0,7972,7605,5726,8427,5325,7828,0,5301,5301,7986,7768,0,5564,5496,5629,5391,8560,7867,7706,8073,8136,7853,5573,8140,8354,8539,8033,5573,5358,8211,7904,5606,8438,8577,8527,0,5688,7681,5564,8144,5407,0,0,0,5655,5688,8451,7710,8005,8577,7898,8762,7478,0,5291,5329,8227,0,5573,7779,5606,8653,8227,5291,5647,5396,7893,5358,0,5280,8494,5325,5726,8005,7982,8140,5342,0,0,5688,7920,7930,8631,5285,8164,0,8140,5459,7710,8451,8249,8190,7888,5432,5578,5573,7877,7952,8676,7967,8919,8277,8909,8354,7768,5320,8093,0,8196,5564,5617,7681,0,7663,0,7723,7757,8370,5261,8082,8427,7610,8484,5301,0,8438,8164,8354,5291,8756,8010,7706,8062,5564,5329,8062,5480,8438,5606,5676,7706,5241,5485,7817,5606,7843,7790,7482,5329,8267,8170,7920,0,5661,8411,0,0,8164,8940,7478,5496,7786,7877,5352,7800,0,0,8617,8946,5622,7877,0,8519,5342,7930,8509,0,7972,7994,8438,8093,0,8028,8093,7706,8354,0,5476,5237,8033,0,8164,8033,8789,5337,5325,8073,7565,0,0,8140,8539,0,7628,5717,7500,7672,7849,0,8126,7817,7592,0,8387,8293,5337,8022,5661,8777,5642,5320,7834,8952,8239,0,7628,8073,5342,0,7834,5666,7516,7658,8762,8354,5325,0,8267,0,0,0,8170,0,8033,0,7521,5274,0,7952,7628,5666,0,5647,8033,5426,5688,8560,0,8903,7774,8539,7834,
0,0,0,7877,0,5301,5612,7538,7538,0,5629,8456,7957,0,8451,5647,7864,8179,5381,5377,5325,8903,5285,5358,5666,5476,8104,8104,8312,5301,5437,7658,8312,5606,8170,7500,8010,0,7500,7696,8154,0,8297,8048,0,0,7779,5377,5651,0,0,8433,5712,7482,7746,5647,8005,0,0,5651,8164,8039,0,5651,8170,7920,5309,7599,5325,8249,5407,5529,8222,8811,7877,5301,5301,5688,5471,7616,8267,8033,5325,8297,0,7672,0,7952,8354,5296,7920,5496,7888,7877,0,8739,5617,5426,5534,5391,7710,7491,7653,7893,7491,7834,8857,0,7482,7888,5274,0,0,7867,7877,7653,8249,8354,8140,7542,5309,5261,7715,0,0,0,5241,7972,8104,7628,8711,5509,5442,0,7588,8959,5606,8739,8140,5342,0,5381,5407,5708,5285,7491,0,7491,7610,5372,8222,5401,0,7811,7672,7994,5274,8964,8073,5241,7605,7904,8249,5661,0,0,0,7675,5301,7687,5651,0,7491,0,0,0,5661,0,0,0,5261,0,0,5261,5291,5325,7565,0,5453,5708,5573,7478,8721,5661,8805,5661,8903,0,8772,0,5367,7500,5358,7487,8154,5241,7478,0,8489,8663,5391,7811,8144,5564,8663,5274,8196,7867,5347,0,8387,0,5699,0,8104,7972,0,8005,7542,8560,5476,8447,5241,8663,0,5391,0,7839,5291,7500,8267,0,0,5573,8479,7828,5717,5377,8062,8447,0,5301,0,5274,0,5237,8190,0,0,0,8277,0,7715,7558,5407,5358,7706,8093,8052,7898,5629,8539,5564,5274,8969,0,0,0,7734,5296,7994,5476,7491,0,7935,8052,0,0,5274,7877,7643,0,7653,7839,0,7706,8005,8110,0,0,7504,0,7982,8402,0,5699,8539,7935,0,0,7834,8239,5358,0,0,0,0,7555,8222,7616,8062,7839,5280,5377,7628,5666,8222,0,5325,5564,5274,8433,0,7719,0,0,7628,0,7663,7653,7883,5717,8539,7982,8334,0,0,8093,8349,8349,5401,0,5529,5655,8312,5564,8417,5237,8217,7883,8267,5274,7482,0,8217,5413,7982,7893,7893,7542,7930,0,0,5391,5606,8427,5407,0,5296,5564,5509,5305,8255,8239,0,7763,5717,8062,7628,7706,7888,8608,0,7904,8539,8190,8539,5358,7576,8174,8184,5476,0,7977,8245,7888,5237,7883,7681,5661,8227,8467,8721,7779,5413,0,5329,5325,8190,5320,0,5320,7982,8267,7877,0,8164,7864,0,7904,8527,0,5578,5501,8721,8201,5717,0,5464,8762,5377,8073,7542,7986,7610,7994,8427,5347,0,8136,5358,5480,5285,0,8201,8170,8312,5367,7994,5564,
7849,8010,7877,7982,8925,0,0,0,0,0,8010,5459,8062,7710,8222,7482,0,0,8073,8821,5442,7616,0,5671,0,8267,5655,8387,0,5730,8438,8456,5717,8411,0,8052,5651,8119,8682,5358,7888,8104,8976,8589,7558,8297,8982,7790,8073,8422,7920,5564,0,0,7516,0,8514,7478,7706,7972,8033,5367,7849,8940,0,5730,0,5564,7648,7643,8456,7542,0,5237,8391,8005,5717,0,5708,8589,8617,7957,5655,5296,8179,0,7516,8136,7653,8762,5309,7746,8010,7864,5386,8062,8062,5529,5529,0,5358,0,5337,5476,5529,7730,7628,5301,5301,8467,8217,8052,7867,8873,0,0,8039,8201,7994,7994,0,8345,7500,8987,7946,7924,0,5642,0,5358,0,7946,8272,7511,0,7605,0,7706,8783,5476,8307,8467,8140,0,8073,8179,7924,0,5476,8184,8670,8543,0,8282,5305,7663,8869,5617,0,7972,7924,5407,8467,8217,5377,7994,8329,7972,5476,8504,5391,7982,8884,5476,5407,5342,7616,8222,7982,7982,5661,0,7924,5401,0,8869,0,5407,7605,7616,7834,5309,8519,7883,0,7908,5476,7491,5407,8869,8381,7839,7542,5529,8073,5651,8839,5651,7779,7706,7706,8087,5358,8073,5413,5432,8447,7994,7811,8010,7706,7511,5564,8608,8608,0,7706,8658,5480,5476,8073,7858,8073,0,8190,0,0,7628,7710,5329,5476,5301,0,5453,5237,7478,0,5666,0,7999,8397,8287,0,8345,7696,7628,8873,5564,8302,7811,5573,5448,0,5358,5459,0,8539,5301,0,7653,5437,7786,7558,7957,5305,5651,8349,5708,7883,7605,5464,8010,8919,8316,0,8762,7982,7994,5325,7710,7516,0,8334,5407,8077,7632,5325,0,0,8077,5476,0,5509,8387,5280,8073,0,8093,8811,8154,8370,0,7946,8005,8433,0,7843,5358,7811,8312,7742,5407,5377,5342,0,8217,5708,5407,5305,7952,7908,8345,7538,8745,5501,0,8898,8052,7888,0,8716,0,5381,5564,8994,0,8227,8077,7800,0,7839,0,8144,0,8539,5564,7681,8443,8494,7893,7774,5352,5391,0,8110,7706,5496,7533,8073,7800,8548,7839,7952,5325,5347,0,8249,7746,7533,5329,8498,0,7511,5476,7752,5301,5301,8833,7605,7643,7914,7710,5426,7599,0,8154,7920,0,5712,7935,5712,8783,8170,8417,0,8447,8613,8093,8387,0,7706,0,0,7898,0,8115,5381,7628,7528,7930,7904,5480,0,8255,5337,8539,5381,5305,8119,5261,7706,5261,5651,7883,5476,8845,8845,7853,0,5529,5476,8805,7681,8711,7511,0,5
301,0,5329,7500,8451,7904,7511,8170,8427,8170,0,8443,5274,7972,0,0,7972,0,0,7986,0,8845,7864,7715,7548,8539,7834,0,8845,0,7843,7628,0,8136,8370,5396,8427,0,7957,7877,5459,7994,8190,5372,8438,0,0,0,0,0,5480,8433,8739,8909,8028,8149,5329,5476,5661,7834,8745,7734,5442,5274,8721,5381,8277,8249,7491,0,5301,7482,0,0,8909,7628,0,0,5496,8762,8062,0,5237,0,7706,0,8467,5325,8267,7565,0,8560,5655,7982,0,0,5381,8456,8811,8190,0,8479,8255,7839,5459,7904,7681,5509,7839,7972,8126,7478,8222,7883,7904,8316,0,0,8484,7952,7839,5337,8467,8149,8484,8998,8028,7864,7864,5426,7849,7834,5647,8201,5413,5568,7805,0,7706,8467,0,0,0,5730,5464,0,5676,8005,7864,5325,5305,5248,8222,7478,5655,7500,8245,7710,0,8827,7538,5568,7834,8179,0,7849,0,5347,8316,7628,8560,5386,5301,5501,7734,8093,5708,8190,5476,8190,8249,0,5666,7834,8222,5459,8093,5391,5285,8577,5485,8170,5377,7920,8461,5285,8359,8201,8539,7811,5642,5301,5651,0,0,0,8504,0,0,8647,5329,8093,5476,7774,5261,0,5320,5480,5305,7849,8964,5224,5352,7643,0,8039,8201,5301,7877,8126,8451,8249,5612,8322,7972,8582,5358,5708,8249,5274,0,7853,0,7994,0,0,0,8149,7877,8447,8447,5301,7582,8170,5407,5564,8302,7687,8272,5407,8789,8387,5485,8811,8539,5407,5712,8245,5407,5617,5661,5437,8211,5301,8234,7628,7738,5578,7924,8641,0,7849,7757,7994,8267,7687,5726,0,0,0,8805,7786,0,8936,5564,8964,7738,7738,5314,7653,5255,7849,0,7576,8073,7511,7555,0,8174,8539,8302,5377,7908,5377,7706,5325,7710,8994,5309,7990,5301,8170,5726,7986,7786,5280,7706,5274,5407,5391,5274,7663,5396,8170,8115,5642,0,5325,5496,8795,0,0,7834,0,0,8316,7823,8354,0,7710,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5476,5693,8898,7706,8987,5367,0,5305,8015,8589,8987,5367,5325,8201,7628,7746,5671,5442,5442,5622,7867,7920,5391,5391,5688,5224,7491,8827,7582,7487,7558,9003,5688,7779,9003,5337,7779,7834,5224,8940,8234,8484,5717,8387,8322,7548,5325,5381,8527,8721,8732,7877,7516,5314,7605,0,5509,7883,7994,7605,5
485,5301,8170,8959,5564,5476,5476,8716,0,8484,8484,5578,5642,5476,0,7972,7478,7687,7982,8048,5333,7548,5255,8334,8255,7982,8625,7558,7558,5476,7482,0,7763,8484,7706,7491,5237,7521,5671,8484,0,0,5320,0,5391,5325,7700,7977,0,5391,0,0,8349,8375,0,8613,5529,7834,5564,8302,8136,8811,0,0,0,0,8811,7710,7952,8811,8375,7516,5476,5291,5333,0,7628,5241,5391,5391,7696,5573,7628,7628,7628,8267,7982,7706,7478,5485,7487,5342,8682,8682,8387,5661,8387,7706,8387,5325,7853,7599,8756,7738,5285,8370,5476,7877,7946,7715,0,7521,5485,8805,7779,7521,5485,7491,8196,8073,5337,5661,7883,5401,7888,7994,8504,8504,5325,0,7599,5476,5391,8504,5333,5693,8115,5309,7734,5442,7904,8015,5309,5381,5224,8705,8827,9008,8589,8099,0,8777,5209,5391,7738,8015,5564,5274,5274,5480,8062,8316,8504,7542,8811,5476,8539,5491,8093,5476,8783,8227,7730,8987,5476,7715,8514,8504,7977,8783,7924,7542,7548,7920,5509,7478,5407,5381,5642,8527,8777,8504,7599,5501,8539,7565,7548,7706,8456,7628,7757,5471,7752,5476,7528,8756,5476,5325,7663,7478,5325,5564,8048,5519,5309,7930,7883,5564,5629,7548,5337,8154,5296,7628,8402,8732,8589,7994,8732,7935,8387,5274,8987,8227,8099,8777,8329,7528,7994,7605,7710,8190,8293,8073,7786,8447,8447,5661,8282,7500,8093,7576,7914,5726,7487,5413,7487,7977,7920,5476,7487,7663,5241,7828,7653,8349,7883,7605,8201,5726,5358,8670,7839,7994,8539,7605,7571,5337,7924,5377,8427,7849,5358,5480,5617,7628,5476,9008,7571,5568,8381,8316,8447,8190,5726,5218,8631,7542,0,9012,0,8456,8359,7849,8149,7521,5717,7924,8126,8048,8391,7977,7696,5224,5325,8727,8154,7952,7768,7681,5285,7839,5476,5391,5426,7914,5480,8795,7972,5671,8277,5476,7710,7473,8687,5301,7482,5391,5651,7605,7500,5337,8345,5329,8711,7696,8099,8316,5496,7710,8329,7817,7576,5476,8613,5407,5367,7977,5437,5329,0,5285,5655,8604,8255,7908,7637,8381,8789,5309,7478,5730,7592,7558,5655,7571,5442,8196,5329,8170,5320,8805,7914,5480,8073,7823,8052,8411,8422,7628,5237,8140,7864,5688,7500,8582,7692,7478,7667,5476,5564,7843,8443,8255,5301,5218,7643,5485,5274,8589,5213,7588,5564
,5237,5301,5564,5255,8222,7628,7487,5367,7528,5519,7628,5377,7935,5442,0,7834,9016,5301,8277,7487,5407,5519,5564,7849,8170,7675,5213,5407,8473,5573,7893,8527,8387,8345,7930,7628,7528,5358,5407,8227,7710,5224,8365,5642,8402,9016,5509,8277,8777,8548,5717,8721,7930,5606,5320,5320,8514,8479,8772,5291,7805,5426,5352,5629,5730,5476,7706,5377,8811,5485,5381,5342,8227,5381,7565,7972,8571,5676,8548,8170,7542,8447,8099,5314,7994,7632,8022,8873,7643,7582,5209,8543,7478,8170,8716,7774,5407,8533,7491,7972,8302,8745,0,7616,7558,5407,7834,8699,7548,8104,7977,5381,7994,7628,5224,8893,5377,7994,7858,7834,8527,5629,8494,7864,5661,7935,8119,8795,8539,7571,5496,8805,7930,7491,5464,5642,8427,5442,7834,5337,5407,7500,7687,7839,5564,5358,7972,8548,7746,5325,5476,5661,8329,7487,8676,8604,5655,7742,7853,7853,7930,8925,5329,5381,7994,5501,8227,7952,5448,7972,7588,5367,5564,8467,8164,5301,8613,8267,8131,7538,7898,7828,5629,8052,5381,5237,5261,7542,5291,8777,5573,8589,7930,7675,5218,5642,8772,5612,7924,8048,5224,7977,5491,5437,5629,0,8888,5573,7495,8179,5666,8402,5218,5372,8548,5352,8022,5708,8732,7628,8533,8694,8987,8322,5485,7710,8302,7653,8015,8745,7834,8539,5237,5329,7930,8647,5309,5401,5329,7800,7542,5274,8772,7616,7582,5622,7521,5261,8255,7592,8422,9021,7734,5237,8149,8227,8687,8239,7706,8919,7877,7994,5325,8010,5296,8732,5407,7883,5285,8484,9026,5501,5274,5291,7864,5285,5407,8316,7516,8073,7786,0,7588,5274,5419,8438,7491,5309,7977,8255,7920,8527,7576,7663,8159,5476,7667,8789,7877,5476,8903,5485,8190,7908,5407,5573,7883,5285,8302,5437,0,8888,8345,8613,7658,5442,7637,5347,8447,5309,7542,8249,7478,8381,7723,8447,8227,8577,5666,5329,8582,8222,8762,8473,8365,7972,7487,5337,8052,8033,8033,5301,8909,7994,7610,5573,7877,8261,5320,5296,5301,7632,8365,7495,5629,7834,5666,7700,7696,7478,8267,8365,8467,7504,5248,7999,7504,5274,8687,8217,7990,8217,7746,5647,8227,7696,8217,7696,7528,5647,7487,7834,7834,5391,5391,8903,7542,8307,5501,7790,7914,8307,8126,5485,5301,5464,8613,7599,8028,7768,7779,7628,7904
,7817,7511,5237,0,5347,7817,5606,8721,5426,5224,7706,8170,7828,5209,0,7924,5407,8087,5325,5476,5325,7877,7592,0,8297,8028,8467,7790,8461,8297,8604,7592,8604,5437,7795,7592,7790,7576,8711,8131,7500,7746,5651,8164,7482,5661,8795,7667,8789,5476,5651,5391,8721,8249,7548,8170,8467,8391,5568,0,5564,5661,5651,5224,7582,7658,8461,8527,5224,7982,5248,5291,7982,8582,7491,8873,7533,5564,8438,7582,5274,8577,5367,5407,5485,5320,5712,5647,7706,7786,9032,8022,5485,7779,8745,7757,8093,7582,8811,5407,7482,8694,5476,5301,8777,8391,9038,8402,7487,7888,5309,7516,7516,8381,5237,8762,5325,8010,8010,8010,8527,8170,5573,0,8527,0,0,5564,8527,5255,8527,8509,5464,8509,8811,8919,5255,5726,7504,7908,5647,5708,5573,5325,8903,7653,7653,5372,8479,7757,8479,8504,8504,8519,5501,7867,5476,5325,8795,7542,8879,5573,7752,5325,7883,5426,5396,5509,5291,5301,8354,8577,7511,8115,8461,8519,5564,5573,5726,8170,5261,8438,8093,7548,7533,5432,5381,5381,8222,5301,5261,5432,7877,5485,7795,8682,8479,8519,8005,7692,7888,7696,7972,9045,7768,5496,7839,7990,5381,7952,7839,7952,8509,7888,5325,5377,5485,7957,8682,8115,8467,8451,5529,7990,7986,7542,5496,8519,7795,5407,8451,5301,5377,7893,8057,7877,7706,8519,5209,8461,5509,8073,5325,7888,8745,7834,5314,5407,5325,8073,7834,8170,8154,8119,5407,7888,8676,7478,7795,5564,8119,5407,5717,7687,8509,8519,5676,7877,8641,5329,5301,8998,5261,8647,5568,5285,7795,8255,5291,5396,7687,5314,5401,8170,7920,7511,7952,5285,7952,5407,5407,5629,8154,5325,5407,5476,5407,7972,7877,8498,5305,5301,7605,5301,8387,7768,8282,7817,7977,7478,8349,8577,5309,8946,5661,7972,7582,7605,8293,5381,5726,8670,5476,7605,7622,5301,5485,8149,8149,5676,5568,7924,8631,7675,8711,8582,5381,8577,7849,5391,7990,5485,8052,8073,8438,7710,7843,5218,5218,7746,7990,8329,5676,8745,5329,8057,8249,8277,5333,8509,7746,7582,7500,7500,8795,7478,5476,8329,5534,8119,8149,7715,5329,8647,5391,5325,8196,8322,5285,7768,7924,5476,5291,8010,5676,8316,8170,7883,5661,8349,8329,8329,5329,5301,7935,8261,7768,8222,8670,8267,8484,8222,8131,7478,
8267,0,0,5671,5671,8222,8827,0,7504,5352,8170,8272,5717,7734,8345,8174,5564,7817,8245,8484,7723,5358,5396,7930,5386,7710,7565,8484,5476,8925,5419,8756,5419,8582,7982,5367,5726,8411,7952,7867,8205,8582,7893,7982,7853,7952,7952,5296,7883,7952,5401,5401,7940,8762,8484,5726,5237,7920,5726,5476,5280,7834,8411,8447,5726,5391,7528,7542,8184,7730,8443,8119,7478,5296,8282,5391,5237,5509,5396,5661,5509,7930,8443,5314,5661,7883,8349,7823,7883,5325,8073,8952,8201,8582,8411,8201,8427,5280,5329,5337,5329,5237,7795,8494,5661,5381,5381,7994,8608,7723,7972,7790,7482,7994,8670,5325,5699,8375,5391,5676,5661,7500,5309,5699,7478,5325,7864,8745,5237,0,7653,8427,7864,8805,5301,5578,7653,5301,8387,8387,7834,5617,7972,7687,8334,0,7849,8387,7972,8604,8334,5377,5301,5617,5730,8805,8397,8451,7893,8467,8604,8467,7521,7834,5301,5291,8302,7828,5573,8484,8196,5708,7952,7834,7487,5301,8227,5347,5301,7834,7834,8467,7487,7817,7516,5688,5726,5726,0,5666,8762,5274,5622,8589,8179,8179,8179,8179,7663,5491,5337,5496,5325,5337,8617,8010,8087,8625,7904,8048,8893,5391,5320,5314,5726,8087,8411,7500,7977,7533,5448,5524,7994,5448,5459,7872,8827,5568,7786,8170,8093,7795,8272,5568,7972,5285,7999,7904,5693,7990,7658,7710,5391,5337,5688,8312,5661,8052,7487,8772,7710,5333,7757,5325,0,8062,7763,7500,8211,7710,5717,8190,5642,5209,7849,5325,7757,8879,8316,8391,5730,5301,5647,7521,7687,5367,7491,5329,9051,7538,7742,5529,5476,7667,5442,7516,8068,7482,7893,5358,8196,8196,5381,8201,8998,7904,5471,5501,7864,7653,8805,8560,8598,7706,8062,7972,7914,7478,7478,8783,8884,5529,5237,7805,8307,7752,5730,7742,7478,5524,8387,7667,5329,7864,40,5712,7663,7706,7558,7478,5524,5564,915,2998,7710,8119,8884,8068,5564,5476,5491,7742,5301,8126,8467,7908,3123,8322,7478,1325,3137,7768,9054,7752,8447,9059,8093,8297,5337,8539,7576,3194,7576,8964,5237,2957,1400,5255,5209,7786,5209,7994,7500,7849,5386,5367,8456,2806,7675,7914,7982,8391,8245,5726,8170,9062,8925,855,7888,670,8174,5358,5564,819,2654,7687,7849,8370,8484,5218,5564,5699,7576,50,9012,7786
,9012,7571,8201,7610,7582,8381,0,7487,5274,8154,7706,7696,7864,8687,7687,5237,5476,8577,5209,7763,5651,7972,5642,5325,7576,5717,8952,8365,7675,7990,5296,826,8359,8272,5274,2675,8190,5519,8082,7834,8149,5476,7663,8751,7914,8316,8062,7504,7710,8631,5201,5437,842,2950,5213,7521,7663,8727,8582,8387,8245,7675,5358,8354,2792,5396,1620,7908,7888,2537,5325,8789,8711,9067,8052,8467,7610,855,8211,5717,7864,7710,8670,5301,8136,8443,2665,7692,7675,852,1783,8073,5578,7687,2816,836,3009,5333,5342,2830,8201,7653,7946,8845,8851,5717,5337,7834,7786,7786,8126,7653,5612,8104,5476,7663,7977,5647,5274,36,5301,5237,5629,7571,5688,124,7478,121,8387,8307,28,60,8196,7538,32,4535,905,1518,8149,139,5612,5372,7811,5030,8302,5325,64,371,8527,106,7982,7706,8087,7994,9045,8227,7696,5301,8322,8345,5381,9067,8184,7628,2381,8277,8302,162,5480,7982,7663,7576,26,8800,5314,5314,8987,8539,7946,5347,9051,7675,7521,8068,7478,7752,8402,8302,7982,7706,32,8272,7883,7883,852,1775,109,1318,8287,7849,8387,7706,670,8456,7779,5274,7920,8302,7986,8115,2409,2418,8994,2440,7719,5622,8077,2468,2475,2213,2216,7768,2229,8073,5693,7843,7675,2261,2265,7710,5377,8345,8484,7883,7990,5218,7994,7823,5261,762,56,7986,5381,5730,7653,5459,7576,7637,8082,9071,7628,7482,8365,5642,8494,7746,7982,8267,8732,5296,7811,7811,5237,7849,5325,8005,2792,7994,8302,7500,5464,8762,8077,8190,7779,7628,8940,7710,8255,7834,7883,7440,8179,5301,9067,7687,54,7622,7616,9054,0,8005,5237,7628,5381,7957,7692,5514,8154,5377,5642,7849,9051,5519,8322,7719,5218,7588,8062,5661,5476,5612,7538,5496,8370,7990,7738,8940,5606,8613,5448,8631,5325,8334,7478,5274,5391,5717,5209,5381,7632,5534,8631,7487,7710,8052,5612,7977,8370,5612,8196,8077,7849,0,8159,8044,5717,8136,9075,7663,0,8316,5358,5708,5708,5401,8402,5578,5305,5448,5448,8772,8539,7706,5285,5248,5285,8509,8772,8589,8340,7977,7663,7675,8647,7957,5205,7786,7675,7982,5296,8039,7742,5274,8115,8227,7500,8312,8407,7538,7538,7982,5666,8762,5205,5629,7491,5413,7616,0,5285,5651,7675,5396,8349,8297,7571,5352,8149,767
5,5325,7605,8783,8005,8397,7675,7487,8052,5564,8239,8255,5237,5519,8772,8772,5305,8329,7864,9021,8010,7710,5296,8365,8519,8140,7883,7628,8068,7558,5285,7491,5237,7706,8052,5401,7500,8919,5296,7482,7734,5291,8196,8329,8077,5329,7487,7834,7616,8245,7883,7628,7576,5309,8789,8354,5296,7706,7864,5372,5476,5391,5285,8467,7663,5218,5291,7908,7628,9054,8479,5717,7528,5471,8903,0,8159,7920,7920,7482,8397,8381,8539,5347,7738,8154,8613,8052,5367,8387,7888,5237,5442,5325,5309,8052,5377,8227,0,0,8936,5377,5329,5296,8073,5337,8077,7482,7982,5305,5464,5578,5301,7511,0,7700,7675,5305,7834,5301,5329,7610,5564,7883,8033,8751,7920,7521,8154,5305,5237,7888,7692,7834,7877,7616,8261,7828,7986,7738,7675,5285,8287,7811,8154,5622,8077,7675,8936,5301,7877,7637,5301,5476,7632,7700,8365,7588,5237,0,5407,24,7605,64,8077,7700,836,2998,7994,5296,1311,5688,7487,2675,5407,5296,7528,7482,7823,8641,8641,5471,8756,2947,7592,1400,429,8827,7982,1620,8467,5337,2599,8653,8827,7994,5647,7994,8099,819,8658,2651,8093,7786,852,1783,8039,5647,5337,8039,7482,7643,7734,7628,9080,8467,8099,7768,7628,8745,8039,8732,7994,9085,7768,9092,9097,8467,8467,7864,9104,5647,9113,8641,9122,5296,5476,8467,9130,8375,5647,8387,9137,8805,8119,9146,9152,9157,8387,5437,7994,5717,9164,7478,7952,7920,5476,8539,9172,9181,5655,5314,9187,9193,9201,9206,8582,8227,7706,5034,9214,5325,9220,7972,7972,8282,9224,9230,8196,9235,7667,9240,8068,7565,5501,8543,8772,8795,7920,7571,8946,5358,9247,9253,9259,9263,7908,8539,5485,5407,8772,5301,7904,7478,5501,8863,8312,7533,5629,8653,5726,8772,7972,5480,8811,5255,7828,7795,8447,7658,7920,7592,7924,9008,5708,7511,5688,7715,7990,8131,5529,7904,7710,7935,8093,9272,5564,7982,842,7805,7986,7610,7610,5329,9278,9281,8174,9285,5426,7582,9288,2462,5329,9298,9301,5666,5671,7482,5730,2465,7999,5564,8687,5642,7888,9305,7952,5496,7924,7482,8048,8131,8467,5459,5661,8800,7592,9308,7864,9312,7828,9316,7478,7473,9320,76,2406,8509,2468,5386,8345,5426,9326,7811,7898,8456,9329,5476,7482,3679,7500,7805,8433,8845,7663,9332
,9335,5699,5218,142,9338,429,8349,7843,4262,7558,7834,5309,2422,5666,8577,8201,7558,9341,303,5391,8329,8039,2274,7491,8184,8539,7628,8201,8387,5676,5342,7920,7558,5509,8170,8099,5241,8571,5248,7582,5491,8772,8345,7774,5320,5413,8795,7972,7888,5476,7972,5476,8456,5325,9032,7706,5514,8170,8447,7692,7990,5629,8653,8447,5407,8217,5309,8461,7628,7864,5352,7786,5647,8873,0,8387,7542,0,8345,0,7888,5386,5647,7558,7482,7643,5358,5401,8039,5459,5622,5730,8222,8222,7628,8539,7790,5381,8821,7864,8845,8795,8548,7628,5464,7864,5642,7834,8222,5325,5712,8227,8762,5480,7482,8527,7548,5381,5655,8527,8461,8732,5407,0,0,7521,5464,8873,8676,0,8190,5325,5325,7908,7616,7738,7558,5218,8509,7588,7588,5391,5651,7738,5534,7558,5647,5629,5699,7548,5301,5352,7898,8159,8205,8217,5606,7478,7558,8329,5717,5655,7663,7817,7834,8795,5606,8467,7752,5661,7511,0,7843,8227,5352,5413,8033,8140,5329,7742,7542,5712,5305,5622,8617,5320,7957,7710,7800,8473,5485,7706,5320,8539,8015,8795,5218,7828,8033,7972,5237,8039,8322,5309,8539,8987,7715,8402,7834,7738,8833,7706,8322,7779,0,8863,8255,8833,7500,5261,5261,7904,7734,7908,7582,8010,5699,5305,5471,5471,5471,8539,8119,7888,7888,8772,8919,7478,5501,5464,8772,8245,7491,0,0,7628,8322,5325,8255,8800,5476,7920,5358,7828,5419,7491,5218,8073,8772,5347,8170,8359,8903,8261,7542,8329,5347,5237,7849,7663,8387,8479,8052,8821,0,5666,8582,5329,7834,8772,7994,8359,7610,7610,7872,5301,7516,8261,0,8329,7834,5666,7904,0,5391,8504,0,9345,9345,8119,5476,8349,7990,7605,9345,7982,7982,8653,5476,7982,7500,8653,8087,8211,9021,8087,5476,7972,7786,5573,5419,8527,7930,0,8411,5342,8658,8093,5564,7571,5367,7504,0,5386,5367,8005,5237,8082,8467,7790,7588,7888,7888,7491,8745,0,8131,8131,7706,8249,8249,7482,8154,8033,8245,8115,5606,7658,5564,8625,5642,7924,8560,5524,5391,8543,7658,7853,7834,5358,5476,7473,7977,7888,0,8582,8267,7977,8994,8869,5401,7811,8539,7834,8174,7768,7706,8716,8527,5661,5501,7828,0,8005,7967,7811,7706,8509,7687,8543,5476,5419,8272,8205,8543,5237,7521,7893,7757,8329,8946,7828
,5708,5617,5534,7533,5708,7478,5708,5419,5401,7888,8267,5419,5391,5501,8154,5358,5401,7888,8293,8293,5693,8716,7542,5407,7738,8484,7823,5496,8052,5274,8010,5325,8851,8613,7893,7599,8062,9345,5564,7538,7883,8504,5564,5485,7834,5661,7752,7548,8994,8762,5325,8946,7548,5642,5309,8267,7622,8456,0,8354,9345,8816,7834,8136,7528,5501,5358,7628,5480,8527,8467,7565,7478,7628,7952,8762,7904,8447,7576,5314,7999,8245,8716,8467,7853,8190,7972,7864,5712,5305,7706,5358,7914,7867,7864,7849,7500,8048,5241,8438,8282,7715,5564,8093,5209,5209,5391,8297,5261,7924,7800,5381,7786,7487,5426,7994,5209,8170,5261,8751,8504,5642,8316,5419,5320,5325,5671,8987,8144,5676,5285,8131,7715,5407,5407,5476,8184,7972,5325,5325,5274,7946,8687,7667,7473,7834,8391,8577,8456,7834,5337,7834,5496,7914,7482,7710,7952,7768,5209,8022,7994,8062,8631,7957,8387,7648,5285,8375,5255,5274,8670,5301,7558,7675,8211,0,5612,7478,7994,7990,8277,8287,5693,8929,5426,7823,7628,7495,8456,8028,5676,7914,7972,7653,8539,9345,7864,8751,8422,7653,7746,7478,7834,7478,5237,7834,7628,7487,7628,8631,0,5274,8170,5726,8839,7706,5274,8863,7582,8227,5329,7663,7864,7864,7957,7746,8827,8057,8170,8312,7774,5693,5509,8473,7675,5320,7706,5296,5564,7864,8494,8772,7893,0,5381,5606,8447,7768,5485,7643,5485,7542,7828,5426,7982,8631,8277,5358,7482,5442,8287,5564,5485,8099,7834,5347,8653,7482,8387,0,5301,7828,5274,0,5396,7616,7877,7914,8805,8903,5642,7478,7823,8893,7977,7853,7500,7628,7994,5377,8110,5712,7883,8821,8196,0,7478,7542,8104,7790,5564,5381,8676,5237,7548,5712,5464,7746,8345,7616,8190,0,7994,5712,8467,5655,5688,5377,5661,5480,5401,7790,8297,7487,7858,7994,8631,8484,7982,5708,0,0,7500,7853,7952,7967,5642,7920,5237,8467,7616,8164,8613,7710,7511,5712,7533,5708,7538,8131,8005,8196,7977,5514,5655,8467,5496,5218,8391,7616,5325,7588,5453,5274,8272,8334,7843,5386,8222,7663,7920,8751,5476,8647,7628,5666,8190,7800,5476,8015,8756,5218,8447,5301,8539,7952,7834,5309,5622,8227,8604,7653,5362,8149,5305,5578,7982,7982,5708,5362,8322,0,8959,7786,7478,5285,54
80,8427,7790,5274,5274,5305,8239,5401,5396,7952,8919,7972,7500,0,5396,5237,5296,7648,5237,7834,7877,5285,5712,8010,8010,5274,5476,7994,5285,5564,5391,5358,5261,5342,8322,5401,7811,0,5453,7582,5476,8179,7663,8929,8766,7538,7681,5442,5309,7839,7628,8751,0,7500,8716,7834,7658,7616,8179,5453,0,7616,7746,0,7538,7914,5426,8447,8762,7511,8467,5347,7834,8617,8093,5285,8929,5347,5274,7616,8617,8952,7768,5651,5301,8375,7834,7605,5347,5209,5381,5291,5291,8571,8267,5209,5209,8879,7962,5564,7930,5209,7675,7528,8249,7487,7658,8222,5325,7786,8293,5529,0,5622,8370,8375,7962,8359,7763,8827,7542,5320,7877,8387,5209,8447,5564,5209,5301,5529,8073,8613,8277,8077,8077,7616,8375,7746,7605,8164,7738,7952,8533,7723,7628,8087,8345,5647,8022,5509,8110,5642,8062,5476,5485,9032,5651,7605,8903,8062,8548,8721,7576,5693,5509,5476,8222,5391,5391,5248,7914,7990,8190,5391,5337,7811,5476,7681,5391,5274,7817,7558,8658,7817,7675,5437,7687,8222,8370,7834,7675,7977,7914,8033,8164,5464,5651,5568,8739,8888,7588,7774,8365,5325,8467,5325,7482,5651,5568,8745,5699,5655,5407,5564,8381,5224,7811,5712,8196,7653,7628,5391,8381,8196,8381,7972,7628,8222,8739,8381,8196,5305,5496,8170,7795,8196,7982,7478,8239,5391,7605,8533,7914,7994,5661,8739,5296,8239,7667,7811,5476,5476,8222,8196,5248,8222,8527,8879,8217,7487,8329,8479,7672,8217,8087,8312,8527,5622,5564,5617,7990,5419,5419,5622,7994,9021,8329,5320,8217,8479,7675,5325,8670,7696,7696,7696,5205,5407,5320,8033,8987,8987,0,5205,8227,8239,5274,5274,0,7599,7599,5337,8239,8239,5642,8211,7599,7599,7599,5205,5642,7888,8005,7548,8484,8811,7786,8811,8422,8811,8811,5391,7482,7710,8898,8447,5309,7994,5453,8170,7920,8554,5237,5358,8170,5381,5606,5726,5377,5381,7478,7757,5391,5391,5255,5325,5661,8033,5237,8447,8427,5237,0,7800,7864,7957,5255,5377,7893,5320,5661,5647,5329,5329,5237,8554,5329,8170,5661,8170,5291,8239,5291,5320,5237,7935,8068,8447,5261,0,5274,5301,5529,0,0,7610,5476,7706,8504,7478,7653,7914,7628,5573,5342,7706,0,8354,8282,5642,8608,8387,7478,5381,7864,5325,7542,5501,8
467,7706,8433,7542,7706,5496,5381,5726,8461,5274,5708,5347,8479,7904,7571,7571,7924,8170,8461,7990,8131,5529,7930,5301,7681,8170,7834,5564,5519,7977,8093,5480,8456,5337,5491,8582,8354,5726,7982,8312,8438,8345,7681,8093,7710,0,7582,0,8005,5519,7790,8316,5529,5476,0,7849,7990,7632,7864,5564,8329,5459,5329,8345,7696,5671,7768,8316,5476,7715,5391,8345,7994,7982,7972,5651,5301,5325,8316,7994,5337,0,5367,5301,5642,5285,5241,8375,5285,7864,7864,7628,7491,8201,8255,8658,8527,8527,5386,5301,5337,5274,8062,8479,7914,7558,5647,8845,7667,8052,5367,7482,7628,7843,8795,8149,8277,5274,5699,8571,5248,5248,7738,0,5396,8170,8277,7706,8438,8653,8653,8443,7786,8402,8387,8387,8461,8099,8099,8873,5291,7888,5241,8039,7482,7582,7653,5241,7994,5642,7920,7834,8022,5381,5413,8443,5291,7582,8427,7786,7482,0,8115,5464,7628,7994,7924,8527,5688,5471,5377,5476,7972,5391,5437,5642,7864,7605,8795,5329,7834,5712,8811,5325,5325,7811,8805,8119,5712,7542,7478,7628,8548,5347,5291,7834,7834,7994,8548,5496,5342,8164,0,8196,7628,0,7957,5647,8217,5237,5534,8777,8903,8062,5381,7663,7511,7478,8022,8255,5209,7994,5413,7828,8451,7542,5218,8077,8451,5325,0,0,8022,8548,5419,8239,0,7877,5459,8433,5693,5647,7957,8015,8427,8427,5708,8340,5726,8322,8397,8397,8479,8479,5237,7628,8073,7610,7521,8554,8073,8196,5285,7834,8149,8427,7516,8239,8239,5309,8919,7888,9021,5285,7952,5642,8255,5642,5291,8427,5476,5529,5386,7491,7491,5476,7864,5274,5325,5661,7990,5237,8539,5708,8397,0,8447,8164,8479,8093,8925,7628,8354,7482,0,5381,5301,5342,7610,7610,7828,7616,5237,8170,7877,7616,8005,7834,7994,5386,7768,8783,8498,8375,7828,7828,0,7834,7834,7834,5386,7834,8732,7952,8732,7576,5464,5485,8329,8783,8783,8577,7817,7571,8255,8170,7817,0,7478,0,0,7637,5688,5688,5325,5476,7487,7904,7972,7999,7521,5476,5309,8329,5676,0,8329,5309,5564,8925,5358,5391,8201,5693,8952,8010,8077,5391,5501,8994,7972,5237,7982,7883,7994,8015,5629,5491,7538,8456,8994,8964,5564,5401,7972,7576,8073,7972,8539,5305,5476,5476,5255,5280,5676,8613,8952,5491,5241,8839,8255,
5655,5391,8658,5237,7898,5491,7952,5442,5255,8015,8302,5509,5491,7994,8227,8302,7706,8115,8039,7790,8762,5730,5241,7500,5651,7994,8494,5564,5476,7898,5255,0,7817,7533,5464,8479,5651,7977,8909,5296,7548,5261,7800,7715,8473,8039,8115,7715,7898,8015,7592,7877,5564,7972,7982,8010,8519,5358,0,0,5651,8255,7877,5501,8249,7898,5442,7763,5573,5381,5274,5347,8811,5255,8184,7542,5224,5224,5426,8811,5642,7811,5337,5337,5325,7834,7920,8805,8115,8687,9345,5305,8745,5358,8196,5471,8519,7952,8387,8126,5381,7500,5329,7478,0,7692,7616,8539,8174,7828,8387,7622,8845,7839,5564,8316,8334,7893,7521,5476,7972,7681,7972,5519,8762,7571,7675,5647,7952,7692,8334,8805,5337,8201,5693,7558,8277,7930,5325,8345,5320,8756,5218,5480,8022,5320,8745,7628,7628,7622,5448,8604,7521,7893,8484,7849,5296,7511,8302,5401,7843,5448,5407,8057,8745,8484,8484,8196,5708,8144,7957,7482,8239,5491,5329,5476,5573,7877,5647,7487,7528,7622,8227,7478,7893,8687,0,7908,5358,8322,5661,7516,7516,8239,5329,0,7616,5274,8805,5218,5301,8805,7482,8329,7834,8805,8539,8312,5407,5296,8716,5296,7817,8184,8509,8411,5476,7706,8789,5501,8170,7834,5476,8205,5358,5476,5476,7696,8589,5391,7696,5391,5391,7817,7757,7482,7982,5564,8062,7478,8422,7710,5419,7904,7914,8539,8015,5291,8272,5564,8539,5651,5274,7982,8783,8293,5485,5655,5476,5651,7768,5509,8670,8893,8190,5485,5471,8015,5419,8322,5352,7883,8322,5358,5358,5617,7883,5407,8131,0,8048,5358,5568,0,5432,7675,7883,8144,8322,8062,5651,8144,5509,5291,5325,5325,7952,8903,5329,5329,8641,5296,7972,5296,5296,5476,5476,5476,5476,8154,8154,5261,5426,7752,8316,7834,7834,7628,7710,7774,8473,7757,8888,5352,5218,8647,8015,7616,7710,7710,5314,5480,5325,8334,7548,5476,7599,7935,5358,7710,8093,5564,7599,5241,8140,7599,5485,5491,5237,8329,8727,7924,8359,5329,5730,8631,7558,7972,7500,5485,5647,8827,0,8277,8827,8407,5213,5237,0,7706,8227,7972,8827,7576,8277,7628,5661,5381,5407,7710,7977,8411,5464,5564,8329,7994,8334,5642,7994,5237,5241,8647,7710,8407,5241,5241,5432,8407,5248,7935,5285,7994,5285,5296,5712,7610,
7521,8222,5726,7528,5320,5358,5476,5333,8504,8479,5693,5671,5325,8504,8504,7877,8772,8594,8594,8811,7706,5391,8879,7972,7599,8211,5237,8010,7616,5606,8438,8438,7548,5248,7478,5325,7478,5237,7491,5337,5509,8827,7548,7908,7628,8411,7972,7706,8402,8033,7658,8068,7542,8184,7643,7504,8816,7599,5274,7528,8756,7982,8509,5501,8359,7658,8022,7478,7742,8227,8519,8170,5476,8222,8783,8438,8456,7571,8982,7924,5568,8282,8293,5386,5241,8994,8201,8964,7786,7982,8170,8245,5401,7487,5442,8539,5491,5476,5647,5337,5564,5325,5352,5426,8467,8447,8625,5564,5237,5358,5337,7867,8190,8827,5224,5464,7500,5573,7920,5237,5237,8772,0,8548,8217,0,0,5476,7710,8504,8329,5391,5237,8149,7946,8582,7715,7972,5661,5401,7681,5464,5459,8052,8571,7952,7946,5237,7675,5320,0,7473,5688,7924,5296,5296,7710,7977,5476,7504,8625,7516,5296,5209,7500,7710,7521,8293,8727,8869,7990,8316,5606,7817,7768,5237,5280,8354,5352,8800,8407,5296,7864,0,8316,7834,8509,8604,5386,8267,8211,5218,5218,8467,7920,8653,8255,8354,7628,7628,5381,5237,5578,7516,5377,7653,8052,7867,5377,8201,5476,7542,5296,5237,7930,7800,5476,7946,8022,7768,7558,5296,7478,7478,5337,5301,7779,7811,8827,5476,8354,5301,8566,5386,7864,5699,8190,7542,7478,8438,8827,7893,7930,7888,8402,8227,8514,5301,5325,8721,8721,5426,8479,7706,8571,5606,8099,5237,8795,8653,8447,8745,5442,5471,8039,7877,7779,5509,8467,5309,5325,7516,7628,8154,7849,7768,8772,5377,5377,8077,7599,8287,8201,7643,5237,5629,7528,7994,5296,7864,7872,8548,5407,5218,7828,8170,8179,7558,8407,7930,7867,8179,7883,8721,7888,5386,8267,7521,8863,5459,5464,8104,7723,9071,5426,8604,8411,7643,8548,5377,8255,5442,7930,8190,7994,5717,7542,7994,7628,7628,5358,8190,7811,8438,5358,5476,7746,5218,8509,8903,7482,7864,8033,8711,7990,7990,7628,7663,7867,7834,7495,8811,8267,8548,8159,8873,5501,0,8005,8005,5699,7817,8267,7588,7588,7706,5642,8022,5464,5325,7786,8170,8519,8447,5661,8509,5241,5218,8340,7962,7914,8073,8509,8467,5661,7643,5218,7924,5301,5514,8272,7482,7839,7500,7542,7658,5464,8267,7914,8391,0,5617,8827,8647,5
305,7715,8039,7710,8179,8179,8179,8402,8438,8594,8015,8316,5708,8617,7800,8721,5699,5372,8484,5296,8509,5401,7742,8022,5309,7482,5329,8987,7994,8641,7800,7542,5617,5218,8745,8756,8539,7972,7957,5676,7706,7706,7628,5255,8239,8827,5337,5248,7972,8316,7558,7706,5726,5352,7930,5432,8104,5285,8833,7710,5401,5255,8005,5296,8255,7558,7482,7482,5237,5237,7883,8772,7888,8217,8217,7588,5291,8604,7675,8010,5501,8149,8149,5248,7653,7706,5274,8438,5485,8239,7478,8179,5291,8140,5274,8772,8119,7920,5476,5573,5274,5386,7883,8772,5485,8022,7653,5285,8082,7811,7582,8196,8196,5726,8340,8800,5661,7616,5708,7790,5476,7516,7719,7667,8359,8514,5347,7663,7516,7834,7795,5367,7710,7628,7482,5491,8249,7663,8249,8217,7706,5305,5347,8514,5442,7972,7495,7495,7834,7924,8073,7972,8582,8582,5337,5564,7511,7763,7558,0,7877,8249,5347,8359,8190,8261,7877,5325,8170,7811,5617,5476,5401,7823,5651,5629,5291,8461,8461,8461,5717,8354,7478,5407,7500,5320,8164,5237,7558,8494,5381,5717,5407,8322,8154,8322,8427,7582,7706,7977,5459,9353,7977,5325,5391,8795,5325,8005,5671,8919,8427,8519,5717,5407,5381,7628,5407,7834,5329,5237,8427,5237,5237,8287,5491,5726,7982,7982,7982,7864,5391,5301,7790,7982,5301,7565,5612,8411,7828,7592,8987,5476,5476,5261,7982,7893,7834,8087,9358,7774,7687,5688,5333,8201,7904,8245,8705,5564,0,5437,5480,8015,7757,8756,7628,7692,7653,7914,5496,5476,7786,5568,7478,5480,7542,7972,8504,8144,8514,5514,7924,8816,8282,7935,7648,5237,7542,8282,7977,7628,5564,5708,5237,7920,7972,5209,7864,5471,7999,7558,5325,7834,8582,8411,8783,8170,7752,7982,8354,5291,7904,8196,5655,5241,8267,5509,8293,5476,5320,9361,8057,7853,8307,8756,5578,8057,5564,8174,7904,5708,8115,8641,5568,8099,8322,0,7982,8222,5224,7972,5352,8772,7734,7920,7658,5642,5655,7687,7924,8222,7849,8447,5476,8539,7710,7920,8925,5305,5712,7914,8387,5442,5726,8438,5573,7977,5564,8427,7605,7883,7637,7715,8184,7986,5564,8174,8282,5213,8093,8136,8625,7542,8170,5480,7904,5480,5480,5708,8170,5314,7533,7999,8126,8126,8539,8447,8170,8144,7610,5407,0,5218,534
7,8381,8073,8115,7542,8727,7795,5391,5381,5241,8566,7817,7817,7768,8969,8879,8365,8898,7710,5661,8604,5296,8751,5635,5391,8136,8073,7710,7795,7972,8144,7888,7888,7972,7867,8345,8789,5655,5717,5285,7692,7582,8613,7681,8577,7687,5329,5459,8631,5291,0,8170,7972,8375,8387,9361,7738,5661,5386,8903,5285,5241,5325,5296,8329,0,7663,5480,7495,8451,8073,8845,5377,5655,7558,5612,5612,8946,5712,7610,8170,5352,5407,5476,8805,5261,7495,5509,5661,5496,8527,5285,7478,7667,8484,8316,8653,8154,5647,5314,8057,5333,7904,7653,8670,8077,5676,8115,8022,7491,5291,8201,7811,8201,9021,5476,8533,7672,5448,5480,8149,7962,7898,5407,7811,8479,0,5509,8196,5352,5617,8577,8438,8514,5564,8126,8170,8479,8494,7663,5676,8354,8548,8033,7805,5381,5647,8543,8136,8184,7495,8443,8494,5237,5274,7558,7828,5514,8099,8745,8461,7888,8087,7757,8057,8539,8015,8234,5325,8272,5280,7706,5213,5320,7542,8093,5476,8387,5291,5241,7779,7795,5476,8154,5261,5241,5329,7774,8772,7935,5224,5381,5377,8571,8422,7834,8451,7967,8170,5606,7542,8800,8126,8087,5381,5329,8039,0,0,7972,7883,0,5274,5514,8721,5352,8467,7495,5407,5655,8484,5407,7628,7834,8005,7823,8062,8110,7920,7994,8159,7967,8190,7622,5377,8077,5496,5391,8548,5655,5476,8598,7946,8411,7924,7811,7834,8857,8345,7500,8387,8494,7990,8663,5325,5377,7986,7986,7558,8509,8484,8484,5647,5381,5296,8845,5248,8851,5617,5661,8467,7542,7491,5496,7687,7930,5642,0,0,5347,7752,8196,7877,5717,7957,8548,8845,5285,8131,8196,5218,5699,5442,7710,8427,7962,7962,8073,5573,8164,5413,8451,5514,7990,5642,8479,7972,8293,5661,5407,5448,5688,5524,5274,8851,8940,5309,7763,7864,7805,7495,5261,7738,5381,5629,7533,8282,8245,5301,8272,7982,7972,8631,5367,7542,7752,8322,5606,5655,7864,5237,7495,7768,5496,7687,5717,7478,8869,5617,8756,5617,5320,8126,5642,8888,5448,8772,7730,7877,5612,8239,5248,8519,8039,5224,8527,8952,8539,7706,0,8533,5401,5629,7710,8721,5329,7632,7957,8548,5671,7982,7687,8519,5629,5396,8402,8716,5305,5419,7952,7616,7972,5708,7972,8077,7800,5261,5568,5320,5612,8641,5285,5305,5305,5578,5274,
8946,8647,5419,7482,8131,7542,5329,5325,7616,5291,8959,8959,8149,5471,5274,8821,8005,8365,5476,8329,7558,7558,7730,5386,7946,7487,7542,5464,8245,9021,8184,7516,5241,5296,9345,5285,5325,8340,7883,7883,7935,8073,7935,5291,7675,8239,8543,5564,8255,8073,8451,5314,5622,5480,7734,8196,5471,8184,0,5661,5329,5285,8498,7738,7653,8255,9361,5407,8811,5218,5666,8144,5329,5261,5314,8115,7734,8287,5325,8903,5612,8340,8745,5485,7811,5381,8196,7908,7883,5237,5717,7920,8073,5708,8170,7491,0,0,5448,5651,5325,7658,5314,8479,5426,5347,8936,8170,7491,8613,7491,7491,8447,7982,5325,8888,8381,5717,7972,5381,8144,5305,5407,5661,8577,8772,8126,5448,5329,5329,7648,7972,7533,7924,5666,7730,5320,8365,8184,5274,7667,8154,5301,7986,7653,7616,5329,7610,7542,7542,8365,7542,5347,8589,7616,5712,8261,7877,0,8498,5237,5617,7632,7667,5651,5301,8365,5329,8772,8857,5301,7828,8631,8903,5285,5291,5347,5301,7700,7972,7952,7952,7706,7528,7734,5342,8087,8898,5337,5671,5476,7817,8811,8282,7565,5241,8073,7828,5568,8438,7478,7786,7849,8604,7675,5651,5573,5717,8539,7500,5285,8438,8033,8484,5655,7888,7628,7710,5708,8126,7500,5285,7888,5401,8077,5358,8227,7828,7888,5476,8811,8589,7990,7834,5471,7706,8307,7675,7888,5285,5485,7924,5367,8438,5320,7663,8255,7663,5241,8539,5655,5564,5320,8239,5726,7504,5237,5285,0,5285,5314,5476,5401,5358,5301,7675,7675,7752,5529,7706,7706,8533,8888,5291,8533,0,7893,8282,8282,5347,7628,5347,7994,8307,8119,8239,7977,7982,5661,7977,7628,5655,5285,8494,7853,5573,5309,7710,7482,5564,8322,8427,5726,5314,8427,8676,5314,7487,7528,5314,5314,8196,7516,5314,5573,8676,7605,7565,5661,8560,7628,5391,8456,5337,0,8334,7706,8427,7790,5329,8456,7920,7511,5564,7565,8190,5485,5485,8447,8443,7999,5476,7511,8670,5407,0,5476,7706,7972,8010,8647,8010,8427,8625,5342,7628,7478,5325,5325,5325,8154,8915,7883,8484,7972,5372,5661,7681,8329,7500,7500,7500,8811,7653,7658,8539,7500,7877,5391,8582,7924,5391,7599,8438,8438,8068,8104,7478,8267,7930,8608,8115,8282,5381,8653,8119,7982,7565,8052,7952,8227,8190,8391,7643,5501
,8196,7982,5476,5237,7528,7834,8115,7565,8772,7914,8170,7924,8365,5224,5426,7616,8190,8456,5655,8093,8104,8052,5712,7571,5305,8411,7500,7643,7999,8548,5391,5391,7628,7893,5459,7790,8447,8227,7752,7582,7663,5224,8608,7576,7610,5274,8582,7681,8783,5391,5464,5464,8190,8005,7500,5285,5309,7972,5309,8987,8316,8062,7834,5671,7521,8635,7675,7491,8267,5296,7653,7653,7667,5509,5274,8653,7576,5237,7558,8119,8201,5578,5578,7478,7653,5296,8052,8033,5509,5606,5314,8397,7576,7734,5485,5237,5261,5476,5237,8745,7706,5485,7952,5655,5476,7972,8345,5509,7516,7779,8227,7599,7675,8365,0,8190,8190,5476,5367,8267,5296,5564,8438,8005,7849,7616,8762,7994,5320,8745,7790,7823,5407,8699,8345,7924,8104,5642,5612,5309,8391,5285,8509,5524,5534,8227,7706,8052,7834,5301,7663,5325,7952,8514,8365,5661,8052,7715,5305,8827,8447,8015,5296,8222,7653,8190,8732,8239,5274,7883,5485,8959,7516,8068,5237,5476,5325,5464,7658,7491,7491,5301,8772,8316,7675,7893,8196,7999,8052,5476,7521,5426,9367,5285,8222,5485,7834,7663,7491,8613,5320,8249,8329,5642,7734,5301,8033,8582,5459,0,7834,8154,5329,5237,8164,7610,7700,7632,8375,8635,5629,7834,5573,8354,8354,5726,5476,8845,7491,7982,7849,5320,8340,8340,0,7800,8554,8554,5377,8397,8179,7500,5296,7482,7867,5501,7920,8354,5337,8821,5352,8282,5564,8093,7893,5726,0,8293,7681,8851,5655,7843,7843,5676,8391,5464,7599,8068,7999,5325,7706,7616,8548,7599,8316,7843,0,8249,7610,7715,8919,7994,5305,8543,7687,7605,7605,0,7605,8504,7904,8312,7888,7888,8479,5301,5688,5325,8484,5358,5519,8805,7839,8302,0,8925,7768,7542,7628,8811,7478,5496,5261,8329,8010,7478,5209,5301,5426,7774,8879,8504,8670,5480,8539,8015,8211,7710,5476,7786,7817,8762,5476,7706,5320,8461,7681,0,0,0,8277,0,8211,7924,7538,7622,7487,8467,8560,7893,7930,5381,7582,5524,5642,5274,5573,8658,8994,5320,5396,7528,5442,7605,5442,8022,8282,8022,5325,8354,5309,8131,5325,7790,7920,5358,7599,8608,7565,5501,5476,5564,7823,5237,7982,8387,8307,8329,5655,5642,7516,7872,7864,5712,7999,8039,7972,8827,8267,0,8307,5320,5372,0,5301,0,5407,7558,0
,8322,8527,7823,7675,5274,7692,5255,5476,5237,7972,8625,7675,7977,7977,8131,5564,5305,5476,7491,7491,5391,8077,5358,7888,5476,8093,5224,8447,7986,7849,7533,8010,7548,5491,7839,7924,5564,8073,8711,8811,7706,8297,8110,7790,7999,8022,5578,5367,5337,7914,8170,5480,7571,7823,8381,5218,8539,7972,7687,5496,7883,7786,0,8484,5708,5564,5407,8073,7888,7990,8716,7605,5358,7511,7637,7687,7487,7605,8456,5391,8625,5496,5642,5291,8484,5391,7972,7511,5712,0,8164,7687,0,8473,8427,5381,7610,7582,5329,0,5301,5347,5218,5320,8329,8969,7706,5655,7706,5301,5476,7521,5655,5301,7588,7795,8795,5224,8504,7675,8073,8504,5224,5209,5442,5476,5381,7500,7605,5407,8397,7696,7696,5337,8048,7817,8898,8149,8427,5301,0,7924,7849,8682,7972,7710,5717,5437,8375,5564,5381,8560,7952,8272,8687,7994,8582,8613,8345,8345,5676,5476,5314,7692,7972,7723,8316,8940,8940,7811,5381,8484,5529,7888,8863,7628,8365,0,7616,0,0,8427,8387,5386,5655,8903,7706,8255,5396,7908,5419,9345,8329,5612,8302,5655,7930,8805,7795,5485,8073,8543,5564,7872,8316,5291,5237,7843,7511,8527,7706,7752,7667,8811,8140,7779,5688,8805,5337,5712,7898,7610,5352,9345,8539,8190,5647,5372,5708,7786,5358,8539,7653,5209,7972,8504,7930,8845,5407,7811,5509,7495,7977,7681,5325,7675,5329,7811,8670,8433,5501,8022,8249,7478,5301,5301,8307,8433,7616,5612,8170,5377,5381,8653,5314,5320,7877,8010,7994,5485,5301,5224,0,0,8272,7972,8277,5401,5407,5314,8093,5301,7706,5224,9032,8827,7542,7746,7706,7706,7774,8234,7877,8594,7893,5213,7898,8057,7643,8548,7558,5372,5325,5301,5352,7628,7628,7786,7643,7582,5629,8461,7599,8539,5237,8010,7558,8427,7706,8467,7834,7628,7768,5476,9367,8504,7805,8873,7706,5314,5564,7990,8277,7967,7967,5314,8721,5501,5501,8033,7723,8077,5509,8039,5329,8052,5381,5476,5442,8170,8170,7558,7571,7779,8322,7972,8863,8387,8170,7930,5285,5314,5209,8888,5352,5693,5573,0,0,0,5407,7952,5237,8762,7542,0,0,0,7632,7972,0,0,0,0,5352,8427,0,5480,9367,7888,7491,8998,7994,7994,8676,7977,8548,5407,5476,7914,5237,7834,7834,8732,8427,8104,7628,7849,7687,8494,7858,5358,56
42,8154,7500,8893,7924,7986,7834,8543,7643,8005,5381,8005,7487,8527,5661,5480,7972,5407,7715,7790,5573,7982,7982,5401,8873,8805,7823,7628,5337,5712,8022,8190,8190,7616,5655,7858,7491,5325,5381,7491,5573,8598,7628,8694,7994,5712,8539,5325,5381,8509,5688,7616,8110,7946,7957,5333,0,0,8751,0,5671,8772,5347,8267,7628,8033,5329,8170,0,0,0,5661,7977,5476,8427,5564,8716,5651,7843,8164,7786,5407,8170,7738,5642,5655,7898,8302,8467,5209,8391,7588,7977,5642,5381,5464,8811,7893,7687,7542,7538,7962,8467,5661,8164,5391,7972,8402,8267,8805,7511,8164,5629,5699,8533,7487,8119,7738,7990,5237,7786,7706,8898,5524,7962,7533,8272,5717,8329,7828,8417,5325,8851,8131,7805,8322,5496,7542,8359,5712,7500,8461,5524,5314,5480,0,7924,7828,7977,5407,8397,0,0,8888,7605,0,5237,7706,5401,5296,5218,7542,8039,5671,5671,8641,5396,8140,9367,8402,5413,5337,7982,5419,7982,7811,8005,5476,8077,5524,8756,7952,7952,8340,7800,5309,8539,9345,5333,7843,8033,8827,5301,5519,8745,8617,8010,8316,8721,5325,5612,5666,5329,7779,5651,7957,8783,7588,7990,5320,5629,8473,5329,8484,5320,7687,5320,8015,8514,5305,5453,7924,7681,5352,7605,8302,5237,7768,7946,5407,8827,0,0,8322,0,7616,7696,7528,7972,8255,8397,8827,7734,5320,5396,8005,5655,9021,8354,8427,5237,8329,7828,8048,8811,5305,7478,8255,5386,7516,7511,8255,5274,8489,8010,8427,5480,8149,8267,8598,8422,7558,5629,8249,8170,7558,7588,5291,5291,8772,5296,5325,7495,7521,5480,8381,7872,7605,8519,8833,5501,7723,8297,7946,8433,7786,7511,8149,5524,5573,0,0,0,0,5325,8307,0,5329,0,0,8467,7738,5320,5419,8772,7516,8789,7482,5237,5342,5261,8397,8190,7558,8811,8170,8397,5476,8196,7734,7834,5666,8527,5296,8903,5325,5342,8073,5358,5693,5573,9361,7908,5325,5381,7977,8745,5578,5407,8322,8479,5476,8473,8022,5617,7516,0,5496,8249,7482,8272,7658,7746,7706,0,8888,8154,5325,7542,5651,8381,5476,5524,5407,7482,7696,8745,7834,7706,5651,7999,7952,8104,5325,8005,8312,8919,5573,0,7746,5337,7972,8687,7542,5666,5666,7730,5329,5301,8498,7672,7763,5296,7742,5329,5381,5524,8762,8467,5301,7893,5314,5448,5401,0
,5573,0,7605,5329,7834,5301,7605,8033,5381,7864,7588,8359,8077,7628,7610,8277,0,5274,0,8170,7675,8093,7616,5407,7482,5337,7616,7521,8789,8073,7877,5347,8170,8617,5617,7994,8827,7768,5726,0,0,7628,7632,5301,8365,8375,8903,5291,0,5347,5347,7605,5666,5301,7904,7700,7628,8093,0,7542,7839,7811,7528,7628,7628,7542,7742,7500,8783,5573,7710,8509,5476,5291,5237,7500,5367,7628,7908,7843,8739,8851,5237,7663,5476,7706,7982,7710,8196,8739,8795,8739,8795,8732,8302,0,7482,7528,5661,7628,7542,7675,5476,5717,7478,8772,5309,7742,7982,5377,7811,7500,9054,5237,7675,7500,5237,0,5401,5573,5476,8795,7628,8287,7811,7972,5241,7839,8201,7478,7706,8201,8201,8082,8170,7542,7710,7719,7710,7719,7548,5342,5342,5301,8451,7628,5301,7952,5491,5491,7487,5407,5578,5305,5305,5642,5305,8140,7924,5606,8119,5491,8987,7500,8329,5320,8179,5291,8647,5280,5688,7588,8745,8267,5401,5476,8998,7972,7491,8179,7643,7883,5325,7491,0,8267,7752,5442,8888,5377,5309,8987,7482,7757,8140,5296,8498,5476,7883,8762,5419,5401,8888,8762,0,5407,5320,7742,8676,7605,7482,8745,5476,8302,5699,7990,5642,7667,7828,5296,5296,7786,5699,5325,5476,7742,5296,8676,5476,7675,7478,8721,7715,5712,5712,7663,7571,8762,7628,5358,5358,5358,7920,7920,5358,7877,7790,0,0,7734,0,7977,8427,8159,7715,5480,8329,7696,8164,5274,8170,7715,8052,5688,8307,0,8772,8456,8170,7888,8402,5358,8589,0,7790,5342,7628,7628,5480,7849,5407,7738,8589,5329,7715,5301,8316,8164,8227,7734,7632,8427,5514,8427,8249,7632,5726,5459,5391,8539,8438,5291,8211,7706,8959,8716,7706,8479,5274,8732,5501,8915,8028,0,5564,7914,7582,8745,7706,7482,5237,8370,7924,8345,8387,7990,8745,8387,7582,7883,5329,8745,7924,8267,8267,8839,8589,5241,5224,5218,8903,0,5333,8447,8068,8010,7952,8805,7972,5688,8888,8115,7746,8498,5296,7757,5476,5496,5314,8732,5509,8010,5491,5237,5480,8211,5688,7706,5261,5213,7653,7542,7478,7478,8329,7487,8613,7817,5419,7710,7478,0,0,5629,7972,7710,7542,5651,7628,7599,8745,5485,7872,7667,8062,7658,7768,5325,8411,8411,8811,7752,7548,7565,7504,5291,8560,0,7706,5501,7500,7500,81
96,5476,8494,8267,8467,8560,7834,8033,8033,7883,5337,5471,7982,8154,7867,7715,8110,5712,7738,7746,7478,5248,5606,5358,8302,7972,8456,7533,8863,8925,8131,7511,5712,8115,8527,7742,5301,8099,8777,8527,7994,7982,5305,5712,5320,8170,8365,5480,5480,5480,7849,5301,5651,5476,8048,7893,5337,5606,7972,7999,7786,8427,7834,7487,8582,7592,8170,5661,5688,5476,7977,7548,7768,8073,5296,5651,8543,5314,5573,5333,5337,7786,8062,8811,8048,7790,7487,7982,5255,7839,8010,7710,7588,8381,7558,8093,8174,8073,7706,7487,8447,8170,8354,7605,8119,5666,8190,5529,5564,7533,8438,7658,7924,5325,5218,8721,5413,5529,7610,5329,0,8073,5661,8312,8115,5485,7482,7576,5325,5476,5296,5296,8397,7558,7482,7994,5730,5407,7924,8577,7487,5325,7696,5426,5564,7730,5612,7972,7482,7478,5237,8898,7605,5419,7632,0,8345,7582,5476,8711,5666,5476,5651,7768,5309,5305,7994,7675,5661,8062,7500,8851,8582,8272,7867,8170,7696,7576,8613,5255,8329,7504,7972,8687,7811,8391,8154,8631,5280,5213,7478,5726,7511,5476,7977,5485,7542,5419,5285,7558,5391,5314,8604,0,7908,5325,7511,7877,7643,7982,8789,8329,5309,8577,5688,7994,5578,7667,7653,7898,8211,5314,5337,5237,8073,8307,5509,8062,5476,7734,7616,5347,8052,7843,7653,5301,8845,5564,7478,5401,8104,8527,5476,7628,8316,8149,8370,5309,7977,5248,5534,5712,5342,7558,8456,5274,7914,5647,8653,8751,8077,7653,5485,7675,5617,8184,5647,5218,7675,5274,8805,7864,7588,8068,5676,8433,5377,5301,8839,7487,5666,8839,8272,8762,8149,8277,5401,7994,8494,8267,7811,8539,0,8489,5407,8015,7719,5564,8721,5426,8087,8969,8387,8287,8015,7491,8571,8494,7653,5480,7582,7849,8494,8170,8811,7779,7643,7738,5314,7828,7867,8407,5676,5209,5237,5642,7706,7786,8039,7653,7516,5629,7576,5352,8052,8227,5320,7706,7565,7710,5329,5386,8170,8272,8827,5666,7972,5352,7893,8022,5606,7920,7883,7628,8447,7628,5329,8811,5651,5485,8093,8099,7482,8402,5726,8359,7628,8184,7914,8647,8104,8631,8467,8277,7643,8745,7834,7542,5606,7622,7972,7482,5651,8543,8467,7752,9071,7893,8745,8467,7990,5325,5617,8467,0,5329,5209,8762,0,5407,8438,7920,0,7628,846
7,7914,7914,8110,7811,7605,5407,8699,7628,5688,8805,5726,8762,8527,7687,8073,8494,8494,8539,7994,7752,8613,5661,8604,7994,5717,8461,7834,8062,8190,5651,7990,7790,7616,7834,5337,8062,7622,5426,5712,5437,7643,8716,8005,5391,7864,5213,5712,5401,7706,7706,5712,5606,8694,7994,8179,8641,7746,5296,5305,7500,5325,9054,5407,5407,7982,8527,7521,5476,7482,8427,5391,5381,7558,5464,8772,7628,5358,8438,5391,5647,8119,5218,5726,7548,0,8484,5285,0,7482,5629,7610,8154,8467,7823,8282,5337,7768,7805,7779,7877,7834,8104,8077,5209,5509,5391,7800,7616,0,8005,0,5407,7500,7672,7952,5671,0,0,8312,5309,7952,5629,8467,5612,8467,5401,5642,8249,9032,8119,7990,7658,8245,8631,7511,8131,5514,8391,7687,5391,5476,8099,7538,7734,7994,5301,7774,7478,5237,7752,8322,8115,5391,7588,5476,5655,5448,8267,5519,7482,8164,7643,7588,7588,8447,7738,5391,8613,7994,7533,5573,5573,8334,5534,8777,7763,7849,7828,8052,5464,5237,5209,8539,7972,7616,5564,5381,5396,5717,5209,8272,8909,5342,5358,5285,5296,8234,7482,5524,7610,7972,7834,5617,7800,7616,8104,5329,5347,8375,0,8888,8772,7628,5448,7982,5358,8473,7972,5419,5568,7914,8987,8539,5651,5708,5329,7982,7982,5337,7877,8010,5218,8234,7800,8745,7628,8647,5325,8387,5285,7706,5237,5325,5329,5329,7610,5309,8039,8827,7957,7972,5671,8322,5301,7834,8340,5285,8827,8658,8641,7742,8909,7491,5237,5651,5629,5305,8987,7653,5401,5347,8631,5301,7542,8527,8190,5476,7800,5209,8211,8756,5666,5352,5320,8015,8015,5248,7500,7811,8144,5407,7834,8519,8239,8402,5377,7924,5419,8827,7616,8062,8329,5320,8721,7616,7675,0,0,7500,5401,7972,7994,7920,5291,5285,8312,8903,5296,5381,7478,8994,5325,7533,7952,7675,7542,7542,5237,5329,5237,8190,5471,5464,8438,8489,7588,8239,5655,7548,5476,8539,5509,7883,8772,8783,8329,8005,0,8670,5647,7605,8589,9372,5358,5261,7877,7877,5320,8316,7734,7516,7994,7719,7719,7558,7592,7558,5396,5285,8149,7935,8811,8879,7946,7487,5320,5352,7653,8267,7511,8062,5564,8052,7849,5274,5407,5261,5237,0,0,5347,0,7628,7516,8438,5372,8903,5509,8115,5661,7628,5301,5391,5529,8093,7800,5209,52
18,7908,8179,5401,7828,7491,5708,7811,5655,7877,7800,7883,7883,5476,5342,7920,8170,7738,8196,5325,5314,5401,8789,8039,5309,5501,7719,5325,5342,5296,5573,7738,8022,5693,0,7605,8277,8888,5401,5651,5391,5320,5651,8381,7616,7616,8249,5309,7706,7920,5325,8052,8613,8397,8494,8272,7482,7482,8217,7839,8154,5485,5347,7667,7663,7734,5314,7994,8888,7786,7542,5426,0,8144,7893,0,0,5407,7994,7632,8365,5329,5301,5337,7924,5329,7487,5381,7616,8762,7599,5296,5661,7533,7914,8473,7877,5305,7511,5407,7734,7719,7558,7786,8888,8154,5274,5329,8196,7653,7616,5218,5301,5485,7675,7616,5342,7994,5661,8359,8052,7605,7610,8277,8217,8010,7982,8277,0,0,7616,5651,5325,5401,5407,7616,5501,7516,5476,5407,8354,7877,5491,8261,5407,5347,8010,8617,8467,5314,8527,5617,5666,5347,8827,5726,5218,7542,8048,8721,5699,7834,8052,0,7730,8365,8375,7834,5301,5651,5261,8772,8903,5285,7877,0,8052,7904,7605,5347,5347,7700,5509,8375,8329,5274,7834,8811,7834,7628,8154,7487,7763,8687,8919,5296,7675,7675,0,8062,8093,5372,5622,8484,7687,5661,7710,5314,5437,5437,8179,7924,7706,7548,5501,8816,7592,7752,8312,5476,5329,7805,7920,8509,7533,7834,8509,8334,7715,8783,8239,7952,8756,8068,8687,7528,7972,8687,8687,7994,8322,0,8245,5367,5241,7605,7990,7999,5564,7904,7888,7888,8093,5314,8190,8170,8772,8174,7592,8174,7658,7658,5224,7924,8316,7653,7605,7687,7828,7994,5391,5401,7710,5464,0,7982,8851,5347,5485,5285,7500,5426,7768,8577,7811,7924,7675,5519,5274,7482,7478,7576,5642,5391,7994,8316,5529,5437,8509,8062,7632,5367,7605,8190,5241,8312,7834,5296,8687,5578,5296,8898,0,8010,8670,7491,5296,0,5629,5309,9071,0,5237,5564,7592,7935,7734,5485,5314,8805,5509,8370,5352,7843,5261,7982,8527,7710,8164,7982,0,5320,7849,8329,7692,5291,7675,5237,7834,7653,9345,7667,8539,5661,5325,7478,0,0,7653,7834,7811,8514,8170,5442,7930,5501,7516,8653,7643,8068,5237,8402,7478,8062,8170,5564,8494,5285,8827,8527,5255,7723,8893,5237,7723,7528,5314,7834,7834,5476,5291,7628,8461,8297,7920,7786,0,8119,8227,8670,0,0,0,0,8033,7849,8005,7548,8676,5367,5296,8527,5401,819
0,5426,5464,5285,5407,5358,7616,7605,7994,8641,5606,5237,8699,8699,7877,7834,7592,7592,7628,5712,8302,7994,7491,7528,7723,8110,5712,7811,7500,7999,7994,8010,5688,5342,7924,8577,7605,0,5524,7795,8222,7982,8267,5367,0,0,8411,8131,8422,5629,8783,5261,8869,8073,5464,5661,5237,7478,5237,8267,7849,7849,5237,8267,7504,8716,5261,0,5642,8329,5651,5401,7616,7478,8164,0,0,8322,7511,5237,5358,8473,8548,5329,5407,8087,5476,7957,8527,5342,5476,8316,5291,8548,8267,7982,5476,7972,5708,7994,8239,7653,7742,9071,7592,7800,0,8987,8498,8833,5407,5237,5237,5237,5476,7675,7710,5464,7877,7491,7542,8297,7834,5274,8255,9021,8919,5471,7914,5285,5501,7487,8239,7628,7516,8316,8670,5501,8010,7834,8329,5347,5712,5629,5476,5485,8514,0,0,7616,7628,5476,5529,5347,7658,5372,8179,5342,5237,5699,8827,5401,5296,7605,5661,8093,7994,5476,5661,7999,5296,0,0,5347,8613,5274,8898,8329,5309,5442,5442,5642,7849,5347,8473,8164,8987,5305,8222,8893,8015,8589,5708,5367,8582,8851,7628,7746,5329,5442,5442,7834,5237,7920,5391,5391,5688,8261,8354,5347,7582,5291,8316,8721,5688,7779,5651,5629,7779,7834,5224,8093,8498,7628,9361,8387,8994,5241,5325,5381,8467,7663,7994,7877,7516,5291,5564,7491,7904,5459,7491,5642,8443,8617,7696,8322,8519,5717,8205,8716,8322,8484,8322,7858,8282,7533,8670,5407,5213,8762,7982,7853,5333,7548,5459,7898,8255,7982,5337,7558,8762,8387,7558,8816,7763,8751,8484,8711,5480,7715,8149,8504,9377,7565,7790,7715,5391,7478,7834,7977,7930,5391,5407,7786,5337,8375,8119,7715,8539,7542,7768,7768,5209,8594,5237,8211,8946,7542,7478,7786,5237,5301,5301,7516,5325,8461,7972,8543,7768,8489,8762,8170,7977,5407,8527,7858,7990,8170,8898,5573,7478,7930,7632,5381,7538,8682,7994,5301,8387,8316,8387,5419,7853,5301,8762,8577,7920,8647,8222,5237,5442,7994,8005,7521,7768,8010,7904,8010,5688,7491,7774,8073,8345,5381,9345,8687,7538,5337,8756,8504,5325,8387,7599,7811,9345,8267,8582,8816,8539,8745,7548,5442,7930,7920,8170,8447,8119,8174,5367,9008,7982,8093,7893,8777,8447,5391,5337,8131,8354,8170,7883,8312,7786,8316,8381,7811,8727,5
285,7849,7999,5325,5476,5261,8227,8164,8087,8316,8149,8762,5367,5391,5396,5241,7628,7962,7628,5358,7478,5237,5676,5301,5367,7482,7940,7843,7930,7972,7834,8302,5301,8456,5274,7757,5471,7752,5642,7904,8756,8447,8473,7482,7478,5237,5407,8048,8653,7834,8039,7883,5651,8227,5476,7972,5241,5296,8249,7774,8140,5617,7994,8732,7632,7982,8548,8987,7994,8527,8494,8329,5426,7605,7790,7994,8467,5274,8641,8387,5386,5661,7977,5730,8411,5337,5337,5712,7687,5642,7962,8073,7977,5396,8467,5448,7663,8467,5491,7628,5391,7883,7605,8201,5655,8519,8670,7800,5485,5419,7715,8015,7877,5708,5377,8427,7849,5358,5291,5358,5358,5464,7734,7571,7628,8316,5642,8239,8851,7675,5218,8631,5237,5717,5676,8762,5296,7908,5274,5661,7738,5274,8329,8381,8048,8154,8417,7696,8577,5386,8687,8087,7952,7738,5274,5651,8261,7811,5386,7632,5274,7706,5291,8190,7622,5209,5329,5329,7628,7588,7599,7482,5391,5693,7605,7500,5325,8062,8539,7768,8811,8484,7628,5496,7710,8329,7538,7576,7478,8613,7752,5381,8387,5501,5329,5413,8354,5291,8604,5291,5509,7637,5712,7883,7920,7994,7811,5274,7558,8093,5491,5476,8345,5255,7828,5564,8190,5237,8073,7839,7823,7977,8670,8312,7628,7990,8297,7864,7914,5476,8443,7977,8438,5261,7511,5358,5655,5655,5291,8048,7610,5309,5285,5407,8589,8687,5274,7576,7817,7834,5519,7924,7500,7632,7487,7605,7528,5519,8329,5377,7696,5442,5642,5367,7946,7952,5248,7500,8282,8456,5564,7849,8170,7643,5213,7864,7558,5377,7893,7478,8387,7667,7930,7930,8805,5274,8287,7653,8190,5224,5688,7768,7843,5301,5519,5564,7511,7491,5237,8093,5301,5606,5401,5612,8514,8479,8772,5291,7805,8427,5352,5629,5730,8170,8033,5377,7752,5485,5381,7828,5573,5314,8456,7558,7982,7706,8548,8653,8227,7706,5413,5509,5329,8827,8721,7834,7757,7930,7516,8196,7920,8994,8716,7774,5407,8048,7491,7723,8302,8745,7643,7628,7790,5712,7834,8190,7548,5426,7977,5381,7994,5655,7994,8893,7811,5381,8732,5407,5622,7924,8494,7768,5367,7935,8467,8795,5386,7982,7616,7628,7930,7872,7719,7511,8427,8164,5612,5642,5407,7500,5309,7839,7616,7538,7972,8548,5629,5301,7843,7952,5
655,8322,8451,8604,7999,5642,7663,7853,5647,8925,5329,5622,8827,5309,5476,8402,8827,7972,5301,5367,5612,5688,8190,5708,5401,7800,8015,7538,7898,7616,5629,5329,5381,8919,5261,8833,8329,8777,8903,7516,7930,7723,5218,5464,8772,7734,5485,8010,5309,7511,5325,5437,5485,7663,5358,8179,5726,5372,8903,8402,8234,5372,5296,7811,7719,5342,8732,7628,7628,5309,5329,8888,5485,7752,8397,7653,5237,5325,8190,8821,8154,5671,5666,8647,5301,5401,5329,7800,7542,7723,5578,7616,7582,5622,7610,5261,8255,8365,8422,7616,5651,7521,5237,8721,8479,8365,7811,7795,8073,5459,8222,7982,7972,8647,8282,8261,5301,8631,9026,5419,7542,5291,7864,7565,5407,5534,7742,8073,7786,0,7588,8093,5476,5255,7839,5309,7977,8255,7710,8527,8903,8249,8057,8863,7667,8411,7576,5655,8903,5485,7616,7706,5407,5573,8539,7742,8249,5622,5352,7516,8833,7478,7920,7883,5534,7616,7834,8811,5491,7914,7478,5606,7653,8227,8119,8577,8119,7877,7877,8222,8119,7588,8365,5218,8952,5329,5564,5218,5413,8527,8381,5606,5337,7977,7914,8261,7828,5296,7823,7828,5255,8805,5396,8805,7768,5209,8249,5358,8267,8365,8467,7511,8227,7715,8267,8010,5564,8845,5606,5381,5381,7768,7994,7696,5606,7696,7528,7533,7487,7834,7834,5391,7511,8543,5655,5573,7715,7790,8604,8307,8126,5358,8245,7972,5329,7706,5329,7768,7779,8010,5396,7628,7628,5237,7667,5476,7817,7628,5459,5442,5224,8903,8287,5325,5509,9382,7924,5407,8641,5666,5325,5496,7877,7592,8641,7516,8028,5419,5442,8461,5459,5459,7920,8077,5437,5320,5320,7972,5476,8519,8131,5693,5358,5381,8239,7817,5717,8484,7667,8789,5261,5651,8293,5301,7924,8033,5337,7904,5381,5568,7972,5564,5661,5358,5476,5301,7658,8519,8527,7605,8174,7920,8811,7990,7839,5661,8873,7883,5655,8048,7972,8909,8170,5325,7893,5726,7828,7977,5274,5496,5224,9032,5241,5218,5485,7768,5248,8316,5476,8509,7994,5296,7888,7924,5301,7972,8727,8387,5712,8307,5485,7675,8316,7516,8211,7920,7675,7940,7734,5301,7972,8451,8170,7478,8302,8149,8087,7811,5274,8527,5352,8527,8721,8772,7893,7538,7516,7710,5325,7828,8345,7710,5358,7930,8438,7786,8217,7864,7994,7628,8402
,8479,7605,8504,7883,5501,7867,5325,7628,5730,8527,7986,7924,7752,5647,7628,7795,5396,7811,7692,7977,7482,5476,5329,5514,5291,7710,5237,8721,5476,7752,5325,8519,8282,5666,7511,7637,8087,8293,7828,8245,8959,8772,7877,8473,7795,8140,7632,5476,8548,5442,8015,8533,7972,9045,5699,5496,7957,5329,5305,5309,7482,7616,5464,7888,8239,7610,5485,7957,8149,5274,7893,7883,5529,7990,7834,8282,5496,8519,8625,5407,7628,7994,5476,7893,7920,5666,5301,5320,7883,8461,7828,8073,8277,8959,5325,7710,7924,5274,5381,5241,7834,5301,8154,8277,5407,7828,5274,7616,7482,5726,5218,8772,7828,7687,8509,8375,8073,7877,7952,8312,8048,8919,8048,8126,8126,7867,5358,5372,5372,7790,8407,5314,5381,5464,7952,7839,8093,5524,7952,5407,5285,5352,5524,7663,7864,8052,5407,5358,5509,8498,5305,5237,7663,7592,8052,5464,8052,7817,7977,7478,8349,8577,8312,8946,5285,7972,7582,7605,5301,8647,5726,5285,7920,8811,8174,7576,7667,8149,8879,5699,5568,7786,7667,7667,7576,7687,5381,8427,8762,8438,7990,5485,7628,7930,7999,7548,9054,5218,7616,7849,5476,8329,5676,7849,8608,8427,7834,5407,7849,7628,8005,7582,5671,8479,7533,7478,5688,8119,8479,5329,8427,8249,5329,5309,5391,5407,7533,8721,8345,5329,7924,7576,8903,8010,7786,5301,7715,7558,8811,7500,8211,8211,7972,7706,7935,5606,7521,5471,7528,5237,7533,8222,5471,7478,8267,8119,9389,8267,7746,7533,8827,7622,7883,5712,7790,8795,5274,7742,8345,8174,5241,5642,7914,8154,7723,5655,5396,7930,5386,7710,8119,5712,5476,7605,5655,5655,7746,8582,8467,5459,5476,5209,5209,8783,7687,8190,7893,7982,7853,5342,7952,5651,7883,5476,7696,8391,8190,7710,8582,5676,8711,5655,7681,7924,7834,5305,7548,8447,7999,5241,7528,7667,8670,7653,7811,8539,7478,5296,5676,7864,5509,5274,5237,8653,8277,5407,5261,5314,7643,5509,8093,7823,7883,8577,9394,5485,5241,8582,8277,5342,5676,8839,5342,8762,5329,5358,7715,7643,8447,5329,5476,5381,7774,7576,7628,7643,7482,7994,8670,5325,5699,8375,5391,5676,5661,8762,5309,5699,5377,8005,5524,7872,8711,7500,8048,7752,7622,5442,5301,5651,5534,5568,8387,8033,7990,5448,5367,7723,8334,8391
,7800,8387,7972,8015,8334,7757,5301,5305,8598,8827,7957,7710,7800,8647,7558,5325,8249,7834,5301,8959,8302,8255,7734,7605,8919,7914,8598,8239,7487,5301,8795,7491,7500,8316,8711,7516,8005,5274,7516,7663,5296,8427,5407,7786,8762,5726,5622,8589,5514,8179,8329,8179,7663,8249,5337,8888,7746,5337,8582,5291,8190,7746,8154,8048,8893,7610,8721,7800,5726,5573,5629,8795,7605,7790,5309,5524,7628,5448,8604,7872,5391,5476,7786,7972,8093,7914,7616,8170,7972,5285,8451,8676,7663,5285,8833,8154,8126,5391,5688,8312,5325,5209,8811,7478,5480,7692,8322,5237,8010,8582,8903,7538,8302,7883,5717,5301,5564,5209,7849,8154,7757,8879,7706,8795,5381,5501,7864,8387,7920,5578,7834,5342,5622,7935,5471,5529,7834,7667,8527,8048,8068,7687,5726,7628,7632,5480,5381,8539,8533,5655,7663,7893,7715,7605,5301,8411,8293,5255,5352,8093,8174,5333,8312,8073,8884,7914,8925,7495,5301,7610,8316,7742,5301,5524,5573,7667,5329,7605,5717,8154,7663,5386,7977,5329,7994,5564,7632,8800,5426,8119,8316,8068,5237,5476,5491,8851,5367,8255,8467,7908,8255,8322,8946,7675,5699,7768,7843,7752,7628,9059,5367,8211,5337,8539,7935,7935,7864,7511,7616,8211,5699,7834,8302,8653,7628,7994,7500,7849,5386,5407,8456,8022,7675,8946,5352,8391,8863,8571,8554,7706,8925,8745,7516,7667,8073,5693,5564,7757,7849,7687,5329,7663,5342,5699,5564,7930,7576,8217,7478,7706,8099,5509,7828,8461,8456,7706,8033,7487,7834,5459,5693,8762,5480,5381,7616,8411,5476,5358,7994,8851,5655,8676,5367,8903,7576,5386,5352,5407,8539,8772,5496,7473,8359,8272,5274,8190,5699,5699,7538,7834,8184,5476,7663,7839,7914,7487,5629,7898,5396,8631,7994,8940,5666,8451,5655,5367,7706,5401,7533,7994,8245,7675,5578,7990,7986,5396,9399,8234,5622,7877,5325,5708,5352,5329,5329,8795,8322,8033,8211,5666,7864,7706,5305,7706,8402,8443,8745,8287,7675,5325,8149,8073,5726,8833,8919,7904,7763,7516,5342,7828,5464,7533,8925,8255,8539,5301,5337,5285,7786,7786,8255,5471,5612,7628,5476,7849,7977,5647,5666,7811,5573,5237,5699,8903,5476,5419,7920,5372,8387,8751,7706,7663,8196,5407,7628,8821,8249,9406,8149,7779
,5612,7763,8936,5274,8582,5666,5666,7972,5301,5301,8010,5699,8087,7616,9045,7610,7696,5301,8073,8617,5381,9067,8365,7834,5391,5391,5301,8705,5617,8354,7487,7706,7738,8800,5661,5274,5274,7972,7706,7972,5413,8217,7548,8068,7478,5391,5337,7972,7681,5564,5564,5651,8443,7972,7491,5209,7571,5726,7982,7605,7914,5342,7839,7637,5651,5329,7628,7521,7986,7681,8647,7605,7952,5367,7719,5325,8077,5655,5524,7834,5309,8033,5671,8073,5693,8479,8154,8272,7706,7710,8721,8397,8484,5329,5224,5309,8479,7823,5329,5261,5578,7986,5296,5730,5237,7482,7576,8154,5655,5564,8467,7482,5274,7924,5377,5476,5712,7487,8539,5296,5314,5655,5218,5325,5325,8005,7487,7538,7738,7500,8190,8762,7628,5237,5325,7628,7877,8772,7908,5342,5301,9054,8447,8479,5301,8261,8354,7622,8365,5666,8110,7628,7628,7710,5381,8196,7849,5647,5519,7487,5642,5647,9051,5496,8811,7719,5274,5224,8062,5688,5476,5248,5501,8052,8370,7990,7622,8940,5606,5358,7972,8631,5651,8334,7478,5274,8447,5301,5337,8443,7920,7904,8631,5305,7558,8052,8196,7924,8370,7972,7972,5320,8170,5209,8154,5325,7558,8136,9075,7663,7849,8316,5209,7972,5381,5401,8762,5476,5305,5296,5448,8772,5237,7706,7843,5564,8115,8839,5329,8589,8811,7977,5377,7558,5647,8670,8839,7786,8099,5329,7786,7571,8504,9032,8115,8126,5325,7972,7972,8653,8873,7982,7990,5224,5442,5629,7628,7588,7616,9411,5476,5651,8411,5396,8349,5358,7571,5352,7548,8548,7500,7558,8479,7752,8397,7675,7487,5325,5564,5655,8255,8479,7977,5325,5524,5524,7786,7864,5342,8010,5325,5296,7715,5261,8514,7972,5325,5329,7588,5325,7616,7752,5688,7719,5401,7500,5464,5296,5325,5342,7675,8196,5285,5314,8316,7487,7834,5342,8789,7719,5296,8261,5309,8789,8354,5325,8422,7752,5372,8154,7817,5329,8473,7663,8261,5314,7908,7628,5529,8631,8484,7723,5237,7723,8571,7628,7920,5485,5642,8732,9032,5647,5347,7867,8533,5642,8411,8222,5642,5224,8631,7605,5285,8255,8052,7478,7952,5642,7972,8519,5642,7681,5285,8519,7592,7675,7482,7982,8498,8494,7883,7692,7990,9417,7500,7588,5305,8005,8307,5329,7482,8494,8631,8387,7628,8170,7723,5642,5642,7681
,8783,7628,5606,7757,8170,8239,5237,7986,8005,7675,5285,8287,8739,7610,7558,8402,7675,5372,8498,8255,7637,8307,5372,7632,7867,5301,7588,5391,9423,7542,8925,8062,7706,7542,7700,5391,5320,7994,8170,8427,7605,7487,7610,5224,5485,5485,8015,5476,7487,7667,8062,8316,8022,7592,7858,8267,5320,5485,5296,5381,5255,8676,8670,8827,7994,5647,7994,8099,5485,8795,5485,7605,7605,5333,7658,7628,5647,8811,8211,7605,7643,8762,5325,7675,7904,8099,5325,5655,7999,7962,8732,7994,8467,7982,8461,7675,8433,7920,5358,5407,5647,5647,5261,8119,5367,8539,8467,8136,7710,5647,5301,5647,8438,8222,7681,7487,5564,8387,8543,7994,8245,5209,8539,8093,8115,8293,5564,8131,8010,7977,5717,8010,5391,9427,8312,8582,5407,7723,7610,5305,8411,7977,7952,7972,8282,8329,5642,5274,5358,7723,7795,8919,5285,5309,7768,7962,8613,8048,8582,7605,8316,5426,8245,5485,8604,7908,5568,5485,5358,8635,5301,5241,7478,5501,8267,5647,8994,5301,7843,8170,8721,5407,5480,8316,5255,7828,7558,7940,5391,7920,7571,7675,8670,8805,8604,7834,5524,7908,8811,5442,7904,8461,7935,8093,7542,5564,5407,7972,8277,7986,5358,5358,5358,5717,7710,8174,7628,5407,7999,7786,7710,5241,5485,8302,7576,5671,8073,5325,7516,7940,7667,5564,7687,7786,8571,7542,5496,7667,7805,8721,8467,8467,8170,5661,8115,7867,5480,7632,7883,7675,8893,5407,7473,8411,8272,7521,8509,9435,5655,8119,5426,5391,7605,8354,8456,7904,5476,7482,7675,7500,5381,5464,8845,7663,8010,7565,5622,7858,8329,7605,8062,7867,5730,8190,7622,7542,5309,8005,5666,7957,8267,7849,9441,8201,5442,5524,7893,8893,5241,8131,8077,7542,7675,8498,7817,8334,8762,5301,5358,5314,5573,8635,7710,7605,7582,7610,5347,8345,8201,5301,5296,5358,8539,7888,7834,5352,9367,5622,5325,8647,8307,7628,7763,5622,8115,5237,8039,5476,8447,7877,5568,8140,8461,5442,5325,7952,7588,7957,7687,5329,8184,5612,8498,8345,5612,7888,8676,5647,7558,8073,7643,5358,8903,8239,7883,7828,5564,5407,8438,8005,8539,8255,8919,5464,7962,8149,8365,7675,8762,5391,7864,5642,5213,8222,5325,8811,8227,8762,5480,7834,8073,8903,5381,7990,7610,7542,5401,8073,7576,8277
,7521,5347,8873,7628,8354,8052,5655,7658,5377,7616,5442,5325,5218,8509,8909,7588,5391,8010,5448,5534,7558,5314,8909,5329,5329,5407,5480,7898,8159,8277,7610,5606,7478,5347,7542,8354,7548,5726,7817,8411,8676,8857,5301,7752,7632,7616,9447,7843,7500,5651,8539,8010,8140,5329,7914,8293,5661,5666,5274,5209,8447,8879,7710,7542,8473,5358,8282,7478,8539,8015,8110,5501,5358,8402,7972,5301,8039,8322,5309,7904,5301,7715,8170,8312,5305,8140,8184,8322,7779,7588,7681,5301,7706,7588,5261,8316,5209,8604,8354,7576,8010,9345,8126,5471,5501,5471,8539,8119,7779,5274,8772,7779,8179,8845,5301,8354,8245,7491,9452,8402,8126,8015,7516,8548,5485,8039,7920,8589,8589,5419,7491,8110,8073,5730,7500,8170,7982,5407,8365,7839,5301,5347,5564,8447,7930,8387,5519,5524,5629,5688,5666,8340,5329,7834,5651,8589,5501,5372,8756,7872,5305,7516,8261,9457,8329,7779,5419,7904,8073,5391,8504,8548,5274,7495,8641,8077,5301,7990,8539,9345,7681,5347,5651,5476,8484,5237,8653,5241,8484,9021,5325,5496,7576,8329,8354,8062,7628,7849,5509,8504,7752,7752,5688,8136,5209,7853,8816,8998,5381,5320,8104,7920,7482,8302,8062,7972,5325,5501,8267,8504,7883,5309,7588,7565,5612,8249,7482,8154,8033,5309,5564,7972,7658,5209,5442,5642,7687,5666,5237,5564,5661,7914,7853,7605,5726,5476,7605,8670,8811,7924,5564,5480,5655,5491,5314,5391,7548,5274,8443,7592,8282,8170,8716,8527,8397,5501,5485,8504,7946,5661,5381,7605,5442,7795,8126,5291,8582,8272,7839,8543,5237,8272,5661,7757,5285,7768,7828,7632,8190,5224,7533,5708,9462,8316,8164,7681,7482,5666,5419,7768,7500,5329,5459,8316,7487,5419,8293,5296,5291,7972,5407,5676,7710,7746,5655,5329,5274,5688,5320,7898,8613,7675,5509,8196,8845,5564,5377,5519,8504,5564,5237,5564,8805,5342,7548,8653,5564,5485,7982,7548,7834,5309,8267,5325,8456,5391,8548,7516,7888,5301,5329,7779,5442,7768,7628,5480,7706,9032,7565,5509,7542,5274,8087,7994,7706,8272,5509,8461,7599,8716,8863,7774,8190,8402,8099,7511,7706,8427,7930,8287,8093,7746,7706,7867,8772,5241,8234,7967,5476,5209,8093,7779,5309,7893,7872,5261,7628,8548,5381,5329
,7500,5426,7994,5209,8170,7521,5407,7990,5274,8821,7746,7706,8676,7548,8539,8438,5676,8467,5661,7715,7487,5688,8411,7811,8845,5325,7746,5274,5564,7994,7628,8131,7622,5661,5401,8456,8411,8267,7834,5377,7795,7972,7710,7952,7768,5209,8022,8467,8062,7500,7957,8387,7648,5285,7616,5309,5476,8863,8447,7898,7786,7533,7576,5407,8451,7768,8617,8267,5367,8952,8929,5426,8272,5564,8267,5655,5524,8613,7982,5437,8909,5274,7588,5655,8548,8422,7653,7774,5358,8527,7478,5301,5612,7482,5237,8548,5666,5622,7542,7800,7972,5329,5305,8647,5401,5651,8190,8548,5325,5261,5342,8010,8952,5413,5476,5514,8711,7576,8322,7643,8473,5237,5320,5519,5296,5564,5296,8255,8833,7571,7972,5261,8589,7516,7628,8010,8239,5485,9021,5407,7648,7877,7872,5519,5296,5396,5442,8287,8322,5485,8245,8119,5296,5237,7482,5407,8170,7811,5372,5274,5381,5285,8073,7877,5476,5386,5342,5476,8635,7500,5325,8888,7542,5347,7734,8329,8857,7706,5712,7883,5305,5666,5661,8857,8582,5337,7500,5666,5274,7877,5237,5329,7877,7616,7774,7521,5274,8397,8857,7632,5237,8479,5347,5688,5377,7487,8484,7628,8062,7576,5274,7849,8354,5325,5509,8582,5325,8504,7994,8267,7853,8104,7920,8302,8461,8136,8467,8548,8099,5501,7972,7752,7565,7533,5708,8816,5320,5274,8811,5661,5564,5401,8443,7914,5237,7972,7548,7576,8282,5564,5314,8670,5476,5485,5491,7898,8164,5381,7692,7648,8647,7946,5296,9462,8190,5419,7632,5291,5285,8316,8845,7675,5676,5519,5309,5342,8272,5325,5377,5325,7500,8093,5578,7500,7706,7893,8287,8015,8234,8119,7994,7746,8087,5413,5442,7790,7511,7768,8548,8427,5509,5329,8863,5209,7990,7521,5407,7746,5347,5377,7622,7706,7834,5296,7487,7888,8676,8411,5274,8131,8863,7811,8539,5329,8411,8438,8467,5309,5564,8613,5655,8527,7582,5524,7877,5301,5476,5407,8888,8647,7800,5666,7616,5612,8711,5514,5661,8239,5261,8833,5419,7542,5372,7811,5285,8857,8998,7538,8461,8998,5699,8762,7511,8073,5255,5730,8617,8093,7616,7616,8745,5367,7616,8617,8772,7768,7706,5534,7616,5666,8772,5329,8721,7599,5291,5291,7599,7653,5209,7632,5564,7962,8048,7786,7605,8170,7790,7786,8438,8461
,7904,7795,7786,5676,5564,7516,7834,7999,8397,7795,5314,7628,7972,7542,8048,7823,5320,7487,7920,7538,7511,7904,8048,5301,8783,7994,5358,5325,8833,8316,7746,5325,5676,5651,7952,7632,7920,5325,5325,8375,7924,7763,8322,7738,5712,8062,5476,5337,5501,5651,5524,7924,7786,8548,8721,7576,8093,5476,7605,8539,5519,7542,5476,5274,8164,8658,8658,7542,7811,8397,5274,5476,5274,7817,5274,7628,8845,5381,5629,8467,5237,8010,7834,5501,5261,5401,5666,8845,8417,8322,7877,7628,7817,5476,5248,5534,5501,8062,5325,7710,8402,5564,8745,9054,5476,7986,5476,7920,8093,7511,7972,5726,5237,7628,7521,5237,8196,7706,8255,7628,8222,7675,7719,8196,5419,8539,8402,7893,8272,8302,7478,7605,5407,7605,5391,7858,8548,5509,5296,5730,7849,7849,7653,5708,8647,5476,5401,5726,8222,7516,9054,8255,7487,8010,5476,5296,8190,8479,5642,8149,5622,8898,7790,5578,7706,8131,5622,7994,9021,5459,7696,9045,8821,7696,8716,5622,5666,7696,5209,8845,8783,8052,8267,8994,8987,7710,7487,8227,7977,8170,7849,5573,8149,7599,5325,5629,7742,7957,8417,8131,8334,7599,7957,5642,7888,8005,5337,5305,8811,7696,5578,8805,7491,8245,8022,8196,7710,7710,8447,7883,8062,8845,8170,7920,8077,7883,5358,8170,8093,5241,5337,5377,8239,7610,7972,8381,5391,7696,8387,5459,7795,5237,5401,8427,8119,8577,8751,7864,8170,5255,8539,8039,7752,7500,5647,8827,5329,5237,8805,5329,8170,8005,8170,8005,8827,5291,5534,5237,7935,5401,8827,5708,8119,8022,8805,8005,8077,7972,7883,8381,7706,8504,7478,8805,7610,7628,7628,5342,7487,7588,5476,7588,7487,7999,8387,8110,5476,5333,5325,7542,5501,8340,5717,8433,7811,5464,5496,8504,7675,8461,7576,8467,8762,8461,8164,8811,7571,8494,8227,7786,7972,7653,5564,7893,5301,7681,8170,5407,7565,9032,5337,8136,8560,8800,5337,7817,7628,8354,5642,5309,8312,7542,5237,7681,7893,7710,5485,7658,7914,7548,8015,5661,7914,7994,7576,5459,7883,7893,7904,8073,5726,8329,7930,7795,7658,5209,7692,7972,8316,5476,5209,5564,8334,5655,5274,7571,8456,7977,8119,7972,8222,5337,5726,7982,5301,5476,7482,7500,7924,7610,7582,9012,5329,5485,8293,7834,8658,7473,5476,8687
,5671,5717,7582,7972,7675,7914,7504,7696,7864,5426,7858,8766,5209,7628,7558,7681,7972,8277,5274,5661,8402,5661,5305,8349,5325,5396,8170,7908,7730,8205,7706,5386,5651,8789,8375,5509,8073,8461,8721,5688,5285,5485,7888,7946,8451,8577,7582,8805,5712,8104,7914,5661,7834,8397,5381,5519,8443,5480,8174,7558,7786,5564,8307,5480,5453,7628,7994,7924,7795,5688,5401,8015,8402,7972,5391,7576,7920,7864,7605,8795,5329,8005,7805,8811,5606,8028,5485,8805,7986,5476,8345,8504,7914,8287,5564,5564,8170,7893,8322,5509,5496,5342,8164,8548,5693,7478,5712,7957,5622,7972,8694,7795,7742,8903,8345,5426,5386,7628,7478,7823,8255,7548,7994,7864,8159,5647,5642,5407,7994,5285,7628,7977,8676,5358,8227,5564,5274,8447,7746,8631,8909,8422,7898,7957,5329,8631,8427,5708,7588,8863,5717,5476,8073,8239,5325,7920,7628,5524,7610,8888,8554,8227,8196,8039,7834,8663,8647,5419,5564,5629,5305,5237,5622,5329,5285,7952,5329,7972,5642,5291,5401,8316,5352,8447,7687,8239,8402,7864,7675,5325,8010,7972,5285,5261,7478,8402,8489,7521,5342,5501,7616,8772,9361,7883,7908,8789,8068,5401,5358,5218,5291,5573,5476,5676,7986,5401,5514,8005,7786,7706,5386,8888,8329,8613,8154,8205,7828,8473,7834,8739,7986,7834,5329,8154,7952,7632,5726,8205,8375,8447,8205,8033,8577,8494,8687,8205,7516,8687,5209,7478,8077,5442,5325,5688,5688,5325,8077,7972,7972,7972,7999,5459,7972,5519,7658,5655,8484,8329,7628,5496,5642,7628,5248,8201,5237,8484,7628,7628,5426,8170,8484,7528,5617,5617,7883,7994,5391,8772,5320,7538,5606,7883,7734,7734,8721,7972,7576,8732,8598,5367,7478,7576,8479,8461,5261,5476,8613,8994,5491,5241,8282,8732,8560,7565,8282,5699,7898,7548,5407,8777,8093,7605,8115,7888,8073,5274,5717,8732,7977,5329,8062,8334,5237,5730,5564,7500,8527,7994,8670,7628,5476,8805,5255,7994,7817,8249,5464,8479,5464,8087,8548,8411,7790,5459,7800,7898,5496,5476,5514,8777,8340,7957,5491,7877,5642,8316,7982,8903,8772,5329,5476,5381,5407,8255,8249,7706,5329,5329,7877,8617,8682,7779,5407,5407,7719,7834,5564,5255,7834,7834,8340,8340,5476,7811,8805,5688,7663,8716,7681,8805
,5381,5352,5509,5476,8745,5358,7763,5471,7542,7628,8387,8851,7723,5647,5367,7706,8211,5209,5296,8539,8174,7521,5358,5564,8068,7994,7478,7972,7706,5261,7752,7542,8504,8998,5568,5381,5329,7500,5325,8196,7706,8467,5501,7904,7872,7528,5325,5396,8277,7565,7478,5291,8131,7972,8267,8196,5708,5573,7790,8387,7893,7622,8560,8827,7482,5337,8411,5476,5296,7994,5301,5401,7864,5274,8227,8777,8249,7588,8484,8467,8297,8093,7658,7681,8539,7622,7723,5301,5573,7990,7715,7487,8222,8131,8438,5476,7893,8170,7920,8893,7692,7605,8447,7516,5224,5261,5688,5642,7616,8543,8062,7616,7687,7582,8170,5459,8077,8811,8539,5255,5712,5564,8716,5296,5358,8184,5564,5314,7790,8010,7867,7790,7790,7904,5209,5305,5358,5476,7710,5325,5485,7610,7696,5453,5391,7817,7616,7723,5717,8391,5301,8164,7482,8682,7542,7904,8033,8539,5519,5291,8577,7478,5325,8005,8604,5248,7839,8131,8687,8211,7710,5285,7768,5564,8670,7605,5209,5419,5296,7482,5496,8048,8164,8613,5248,8211,8316,5617,7883,7605,8329,7811,7473,7768,5671,8245,5485,5426,7768,8340,5485,7696,8345,8087,7710,8504,5407,5377,5464,5255,5301,5329,8149,5296,9345,5296,7977,7648,5358,5476,8255,5296,7706,8903,5426,8164,7643,7616,7632,7616,8329,5309,7962,7482,8349,8062,7576,5642,8015,7616,7710,8307,5564,8504,7482,7849,5358,5476,8267,8539,5647,5342,5342,7558,7599,7478,8140,5237,5485,5301,5564,7605,5305,5301,9345,5325,7888,5688,8302,5612,8033,5485,8170,5712,8249,5426,5485,7935,7528,7667,7834,8805,7757,8451,5509,7653,5337,7628,7977,7599,5337,8816,8670,5274,7843,5564,7864,5305,8334,5642,7823,7715,8795,8277,5401,5391,8494,5241,5401,8154,5337,7616,7616,7994,8104,8211,5712,7839,5509,5485,8539,5726,7582,5407,7967,7786,7986,5329,8170,7994,7667,8504,8653,7849,8772,5209,8594,8039,5564,8099,8227,5442,8022,7805,5237,8514,7774,7904,5301,5647,8073,7653,8222,8196,5676,5237,7491,8539,5381,7734,8903,5309,7999,8411,8845,5396,7786,8093,7893,5309,8345,7599,5413,7576,7834,8170,5407,7516,8772,5320,5606,8479,8359,7706,8022,8402,8272,8227,7930,8227,5476,7893,8277,7706,8249,5255,7994,7883,8302,7811
,7834,5476,5241,8994,8201,8964,8716,7982,8170,8484,8964,7632,5442,8539,8249,5476,5647,5337,7616,7616,7616,7675,7730,5688,7990,7521,5509,7628,5337,5564,5401,8190,8467,7823,8115,8494,8104,7675,7628,7858,7790,8345,7864,7632,9467,8154,7795,8504,7746,7571,7571,8170,8222,8110,7715,5661,5661,5726,5407,5564,5329,8052,9054,8527,8062,5237,5730,5320,5647,7605,5688,7521,7914,5717,5381,5485,5651,5464,7914,7734,5671,7582,7500,7542,7994,8073,7994,8994,7990,7982,8205,7977,8676,5642,7667,8827,5352,7994,5274,8005,8805,7643,5237,5391,7706,8604,5386,5642,8467,5218,5218,8467,8539,8653,8721,8354,5529,7877,8509,8005,5578,5419,7957,7994,5213,7867,5448,7482,5476,7542,5391,5237,7930,7843,8293,8249,7849,5699,7940,8467,5301,8170,5237,8334,5573,7811,5647,5476,8354,5655,7542,8443,8312,8451,8467,5573,7628,7908,7533,7734,7734,7888,8851,8227,5501,5301,5241,5407,5325,8893,8777,8869,8571,8052,7962,5655,8391,8653,7500,7511,5237,7786,8039,8164,5534,5671,8451,5391,5629,7632,8613,5612,5274,7930,5642,5377,5358,8548,7599,8287,5519,8604,5237,5347,7528,5325,5301,7864,7872,7628,5407,7616,5305,7757,7681,7790,8316,7605,5352,8179,5401,8827,5237,5255,5688,5309,8104,5459,7994,8422,7723,7628,5476,5325,8239,5358,5329,7588,8201,5442,7632,8190,7994,5717,8093,5325,8647,8234,7653,8539,7588,5329,5617,8987,7746,7763,8329,7800,8073,8539,8451,5578,9367,8190,7994,7663,5708,5651,7957,8140,7548,7774,8159,5419,5651,8827,5453,5296,7667,7482,5407,7834,5320,7482,8005,8033,7982,8302,7786,8170,8519,8205,5661,8322,8307,5401,7516,5568,7663,5358,5301,8467,5325,8329,5218,8010,8249,7742,7877,7828,8329,5285,7872,7872,5464,7628,8255,8391,5396,5407,8827,8267,8222,5337,5726,5329,7648,5296,7864,8909,8438,5291,8397,8316,8613,5501,8005,8598,7872,8387,7500,5564,5291,8903,7478,8316,7487,7675,5237,8987,7994,8641,7706,7542,8267,7962,8093,7628,5367,5218,5325,8062,7487,7706,5666,5255,8267,7628,5237,8052,8073,8052,7663,7994,5296,7994,5325,7839,7521,8375,7763,5476,8873,5419,8005,5296,8322,5401,7990,8467,8245,5377,5629,5661,5358,8427,7962,5401,8010,5693
,5381,7786,5661,8149,5407,8898,8062,7730,7528,8329,5642,8239,5448,5320,8329,8217,5274,7994,8766,5442,8349,8613,8647,7658,7706,8772,5485,5655,7696,8447,7616,5524,8827,7675,7706,8048,5407,7500,8005,5381,5708,7757,5476,5347,5309,8777,7622,7723,7867,5274,7516,7742,7795,7588,5329,5476,7500,5491,5301,5476,7986,5666,7706,8539,7972,7930,8909,5642,5305,7495,8647,8010,5448,8772,8422,7723,5337,7994,7511,5296,7558,5524,7877,8249,5301,5320,5573,7663,7516,5372,8170,8033,8277,8154,5480,7478,7872,5629,7706,7946,5329,7588,5717,8211,8267,8751,7628,5320,8164,5237,7558,5305,5381,7588,5407,8322,8077,8073,8261,5476,5274,7628,8170,9353,5401,7482,8322,7616,7610,8316,7994,8354,8093,5726,5717,8010,5651,7628,8005,5347,5325,8903,8427,5666,5666,5396,8527,7972,7994,7982,8427,8359,5301,5325,5651,7982,8359,7632,5325,8411,7828,5407,8987,5476,5666,7768,7675,7893,7834,8514,5432,7982,7982,5329,8293,5329,7487,7675,7542,5573,7982,5437,5480,5564,7500,7500,5329,7692,7628,8052,5496,8170,8039,8222,7834,8427,7542,8005,8282,5237,5320,5248,5688,7757,5325,5688,7558,5391,7542,8539,5578,7628,5564,8811,5237,8062,5296,8249,5717,5342,5367,7637,5391,5476,7538,8411,8783,7864,8994,7710,8354,5291,7752,7872,7898,5358,8354,5509,7667,7706,5320,7482,7706,5606,8467,5291,5296,8196,7706,8174,8387,7839,7715,8641,5568,5501,8560,7817,5372,5564,5224,5381,5564,7710,8307,8647,5274,5642,5655,5688,5337,7687,7849,7710,5337,7849,7653,8170,5301,5564,8297,7687,8131,7920,8073,5274,7605,7757,5432,5224,7786,8297,5726,5617,8964,8093,7786,7883,7786,7616,5301,7558,5726,8711,8170,7839,5296,5480,8211,5564,7924,8631,8577,7605,5476,5274,8144,5285,8170,8144,5255,5325,5201,5218,5476,8381,5305,7723,7867,7952,7817,7972,7800,5241,7972,7977,7648,7487,7706,5426,8577,5688,8670,5218,7977,8839,8751,5635,7864,8136,5320,8190,8527,7914,5655,5564,5274,5578,7478,5564,5301,7843,5358,5573,8467,7582,8613,7883,7834,8805,5329,8839,8631,7795,7972,8022,7893,8170,5651,9361,7706,5661,7667,7687,8249,8322,7487,5237,8099,5301,5459,7681,8272,8451,8811,7653,7628,5655,7893,7920
,7994,5471,5476,8467,8170,7768,7542,5476,8447,8745,7994,5509,8039,5509,7582,7864,8282,8539,8484,8316,7924,7616,8039,7877,8057,7986,7990,5325,7849,8805,8349,7500,7746,7628,5381,5391,8190,5464,7834,8411,8110,7994,5564,8467,5712,7994,5564,8821,5661,5337,5688,7790,5367,7616,5391,5248,5629,7605,8548,5377,5564,8467,5296,7663,5676,8762,7982,5688,5459,5301,7795,8267,8170,7823,7957,8658,8164,7616,5274,7558,7843,8334,7487,5237,8461,5237,8467,5534,8057,5448,8015,8164,5325,5325,7478,5688,7653,8196,5476,8093,5396,5629,5476,5241,8716,7849,7982,5296,7538,8391,9471,7774,7616,8909,7628,5381,5377,8571,7800,7834,8987,5578,8340,7710,8539,5708,5329,8087,7972,5629,8033,8647,5726,7920,7628,5407,7632,7957,8196,7548,7482,5419,8952,5655,5285,5509,7972,5578,7538,7538,8039,7511,7790,5237,8827,7967,8190,7622,5377,8015,8211,7521,5305,5655,5476,5688,8594,7972,7924,5352,5285,5726,5432,8239,7533,7952,5237,5325,5622,5464,7893,8149,5291,8509,5296,7482,7877,5285,5261,7648,5248,8851,5248,8329,7487,7542,7558,8903,7687,5661,5261,5476,5485,5347,7877,7706,7839,7521,5617,7632,8845,7616,7637,8196,8115,8190,5401,7605,8115,5661,8211,5661,7706,8164,5413,8249,8052,8821,8154,5314,7972,8888,5661,7763,8010,5666,5524,7986,5651,5301,8104,5301,5688,8077,5329,8762,7675,7605,7610,5301,7610,8245,7491,5413,8987,7786,5676,7616,7542,5237,8179,5726,7768,8762,5237,7800,5291,5381,7605,7687,5661,5464,8816,8456,8293,7528,7548,5642,7696,5448,7763,5237,5426,7834,8795,5248,8519,8039,8190,5320,5274,8316,7610,5381,8533,8387,7982,7994,8721,5248,7994,7957,8548,7946,5726,5476,5476,8201,5396,7628,8473,5496,5476,5261,8033,5564,8234,5501,8952,5381,8136,5568,7883,8915,7548,7548,5305,7706,5578,5274,7972,8504,8196,5309,7667,8467,5712,7637,5708,5453,8959,8959,7977,5480,5274,5491,8222,8174,5642,8329,7558,8670,8772,8170,7946,5320,5519,5655,5476,8994,8073,8539,8631,5296,8234,7972,5325,8297,7533,7883,7790,8073,7935,5568,5274,8239,5726,7930,8234,5237,8451,5314,5622,5564,5476,8196,7692,8504,5419,7977,8879,5305,5381,7738,7632,8613,7605,7952,7482,7774
,8577,8144,5329,9345,8903,8115,7734,8800,5453,5320,7977,8653,7558,5301,5676,5381,8057,7908,5476,7628,5655,7511,5688,8658,5325,5476,8201,9345,8839,8302,5325,5301,5401,5401,7692,5407,8936,7877,5358,8613,7491,8467,8093,7982,8227,5442,7888,8473,7779,8227,8456,5305,7893,8467,7632,8039,7834,8762,5237,7972,8170,7972,7533,7972,7795,7730,5320,5377,5377,5564,7667,5661,8411,5459,8653,7616,5519,7610,7849,8467,8222,8222,5564,7628,8641,7834,5296,7930,5573,5329,5237,5617,8267,8245,5651,8245,8359,8164,8772,8613,8716,7592,8631,8282,8068,7710,8940,5453,7616,5381,7542,8359,7692,5401,8222,5301,5578,5391,5329,5671,5476,5661,5274,7994,8473,5241,8073,5419,9367,8222,7521,7786,7734,8604,7675,5407,5573,5325,7883,8925,5285,8073,7521,7528,5432,7648,7628,7710,5708,7888,7952,8340,5342,8789,7877,5401,5358,5309,8903,5320,8772,5358,5347,7834,8613,7706,8307,5320,8631,5391,8126,5661,8073,8033,5320,7610,5237,5274,5347,8539,5491,5564,5320,5391,5407,7533,5476,5285,8387,5237,5314,5642,8527,8772,5301,8473,5564,7904,7849,7706,5325,5407,8888,5291,8533,7952,7893,8282,8282,5651,7628,7763,5391,7487,8427,8239,5291,5651,7681,5401,5642,7972,5285,5237,5261,7994,5237,7710,7628,5347,8322,8762,8196,5314,5325,8391,7616,8131,7528,5314,5309,8721,7516,5314,7632,5726,7605,7994,5291,8560,7628,5347,7914,8539,8721,5407,8721,8427,7952,5651,5391,8427,5347,5291,5325,5237,8539,5485,7628,8131,5309,5726,5285,8484,5309,8140,5291,7706,7972,5325,7786,7883,8397,8539,8987,5396,7768,5717,7710,5381,7952,7817,8062,5381,7643,5688,5661,7588,8447,5325,7500,7972,7687,5358,5426,7982,8987,7877,7972,5529,5573,5391,5476,5329,5291,8068,7834,5237,7834,7930,5693,5564,5688,5381,8653,8062,5325,7565,8329,7952,8062,8190,8272,7500,5381,5476,8504,8329,5237,5261,7706,5480,5325,8772,7914,7864,7924,8387,5642,8022,7616,8190,7528,5291,8093,8884,8052,5337,7571,5305,7565,7500,8783,5617,8548,5391,8068,7500,7893,5476,7588,8447,7849,7752,7582,7500,5655,7605,5241,7610,5209,5209,8427,7786,5391,5464,5459,8093,8539,7500,5301,5309,7533,5309,8811,5476,8170,5708,7687,5480
,5476,7675,7999,5255,5358,7504,5726,7904,5509,8443,8653,7576,7839,7558,5218,7500,8329,5476,7478,7653,5391,8052,5285,5391,5606,5496,8131,7952,7768,5485,5381,5396,5671,7521,7994,7914,5442,8316,5655,8427,8391,7864,5296,7516,7779,7795,7599,5413,7681,7710,5666,7605,8582,7473,8267,7628,8909,5325,7972,8504,7616,7843,7994,8582,7864,5391,7823,7883,8073,8345,5337,7946,8255,5612,5309,8391,7628,8509,7558,5381,8227,7478,7478,5524,8022,7734,5296,7952,8851,5655,8851,8762,7715,5305,8839,8447,7990,5296,8222,7653,5342,8732,8190,7893,7710,7738,5476,8136,7482,7757,7920,5325,7877,8033,9032,5325,7706,8772,8863,7994,7893,8745,5407,5241,5476,7967,5426,5314,7779,5407,7605,8052,8099,7914,8473,8359,7858,7628,5459,7500,7858,5391,8190,5391,7643,7834,7864,5329,8539,5661,5564,7700,7632,5407,8635,7628,5209,8732,5237,5296,7482,8345,5274,7823,8033,8527,7605,8340,7834,7946,8190,8554,7495,7706,8711,7491,7994,5296,8909,7967,5261,5407,8073,5337,5464,8154,5717,7616,5651,7675,5651,5237,8322,8033,8293,5377,7588,7898,7710,7542,5391,5419,8467,5661,5529,7706,5642,5642,7599,7628,8473,8827,8745,5301,5305,8919,8987,5578,9476,7687,8115,5381,5401,7957,8504,7742,8312,5237,5314,5708,5301,5309,5578,5476,7982,5419,7800,7839,7653,5396,7877,7768,7542,7719,5237,5617,8217,5241,5386,5347,7864,5209,5291,5325,9021,5285,8073,5296,5622,7715,8982,7687,5391,8255,8772,8982,7734,8119,7706,5372,8772,7681,9481,7542,9487,5285,5485,5401,8417,7538,7663,8427,7920,5296,7893,5476,5476,5261,8261,5642,7542,5573,8658,7982,7576,8052,7491,7616,7605,7628,5407,5296,8762,5325,8190,8365,5666,7914,7994,7920,5301,7599,8411,5578,7834,5476,5442,7823,5442,7982,7877,8261,7482,5347,8033,7516,5578,7811,8365,7999,8903,7972,7834,8762,7834,5325,5693,5564,8504,5325,5381,8062,8272,8261,5261,7706,7823,8329,5476,8329,5325,5476,5419,5296,7742,8170,8387,8884,7500,7864,8099,5476,8022,8527,5617,7565,5291,5241,7786,7849,5255,8443,5391,7849,7687,7588,5655,8539,7967,7839,5726,7999,8711,5476,5476,8582,5666,5391,7795,5391,5381,5296,7558,7605,7914,5396,7883,8131,7768,7952
,8329,5476,5241,5671,7521,5496,7643,7500,5708,5381,7478,7628,8255,7478,5524,5337,5296,8582,8851,8022,7663,8745,7605,7967,7495,8762,5314,7893,7990,7994,7628,8839,7914,7757,7877,7779,9032,5342,8772,5381,5476,8711,8033,5459,5464,5407,5661,8293,7491,7994,7864,7628,8190,7877,5209,8345,7834,8539,5651,7858,8795,5305,8903,7588,5661,5642,7898,5391,7542,5651,5309,7800,5578,5407,9476,8772,8982,8255,5347,7734,5291,7834,7542,8427,8033,7786,7849,8062,5367,5291,8062,7653,8062,5237,5381,8154,8239,7653,8687,7610,8307,5419,7706,5391,5301,8919,5509,5358,5320,5274,8527,8940,8940,7811,7834,5391,5573,5296,8863,7628,8068,9494,5407,7904,5730,7864,8387,7521,7811,8903,8800,5671,5726,7908,5237,8888,8052,7628,7605,7977,7930,5237,8073,7752,7811,8888,5564,8548,5501,5476,7972,8222,7637,7653,7920,8136,7667,8811,8136,5337,7757,8805,7487,5519,7811,7610,5352,7849,5305,8190,7790,5301,8447,5358,7628,7977,8641,7790,7920,5237,7930,8170,5305,5573,8772,7495,5291,7930,5655,7675,5564,7774,7817,8594,5476,8560,8249,8751,8154,8010,8772,8721,8115,7972,7696,7605,5391,8653,5209,7696,7952,7858,8411,8349,5491,8119,8582,7893,8964,5476,7710,7790,7849,5358,5329,8504,5296,5671,9032,5209,7977,7746,9353,7706,5671,8205,5407,7616,7893,5655,5617,8888,7643,5688,7558,8732,5325,7576,8277,8365,7487,8033,8987,5606,8456,8845,5274,8349,5407,5342,5564,8772,8827,8467,7972,7628,8015,8762,5309,8504,8598,8267,7706,5437,8277,8527,8277,7967,8119,7663,5622,5564,8131,8159,8115,8077,7994,8039,8033,8052,7972,8863,8473,8267,8833,8772,8365,7834,8322,5261,8863,7616,5309,7930,5285,8205,5358,8762,5320,7834,5573,8190,9501,8811,5407,7930,5655,5564,7542,7817,7774,8115,5274,8126,8015,8154,7972,8010,8772,8751,8411,8131,7605,8721,7696,5407,5358,5391,8964,7952,8582,5491,7790,7893,7849,5476,9353,7972,7977,5296,5209,8504,5329,8205,5209,5671,5309,5688,5655,8277,5617,5564,8762,5325,5320,8987,7487,8005,7487,8845,8456,8772,8277,5407,8863,5564,5622,8159,8115,7834,8365,8805,8833,5261,7616,8811,7710,8190,5617,7823,7504,5237,7491,7849,7558,5578,5367,5629,7967,7967
,5301,8484,5564,7999,8811,7706,5261,7723,8110,5274,7957,5291,9509,9516,7839,8033,5671,8772,8170,8783,7628,8033,5329,7605,8354,5726,5381,7710,5573,7795,5529,7839,7681,7888,8577,8811,5612,5407,8170,7999,5642,7482,8170,8302,8467,5629,8287,8402,7977,5726,5381,5688,8811,5377,5726,7542,5464,8255,8255,5642,5372,5377,7972,8365,8365,8443,8994,8222,7692,7990,5471,5291,5564,7738,5407,5237,7972,7730,8898,8994,7706,8354,8322,5480,7658,5476,8184,5325,8851,7904,5459,7692,5480,7977,7839,8174,7500,7605,5524,5459,7972,5564,7533,8170,5261,8073,8447,8170,5301,8682,5476,5325,5491,5274,5285,5296,7605,5622,7473,5635,7952,7723,8005,8140,7710,5329,5413,7972,7977,7681,7774,5407,8005,8255,5358,5274,7888,5325,8170,5325,5320,5647,5573,9345,8077,7675,8073,7710,8658,8711,5320,7843,5218,8316,5442,8255,5612,7738,5407,7706,8494,7994,5314,5381,5325,8077,8170,8170,8099,5329,8873,8359,8170,7710,5509,7768,5377,8073,8443,7867,5325,8140,5237,7774,8548,5352,7972,5241,7828,7972,7628,7888,7696,8104,5218,7548,8539,8827,8033,8663,5407,8005,5629,8170,5564,8443,5459,7858,8110,8048,5377,5496,7478,5407,8479,8451,5237,7738,8322,8427,7828,8427,5325,8077,7800,7511,8427,5358,5248,5329,8170,8140,5629,5491,5612,5237,8005,5325,7972,8322,8539,7800,8548,5401,5413,5372,5501,5432,8422,7946,8919,8554,7511,8010,5573,5573,5471,8073,8140,8589,5291,8307,7516,5329,8255,8255,7734,7738,5320,8498,8772,5372,7738,7482,5358,7605,8170,5573,8255,5471,5442,5325,7478,8354,8184,7516,8577,7972,5301,7972,7877,5573,8676,7994,7610,7516,5274,9361,8857,5347,5661,8261,5661,8077,5407,5726,5381,8498,8857,8022,8005,5291,7516,7914,7817,7482,8272,5726,5377,7888,7888,8888,8154,5325,7542,5651,8381,5476,5629,5407,7482,7834,8745,7834,5726,5651,8149,7952,8104,5325,5377,8312,8919,7628,7935,8387,7952,7667,7994,8354,5666,8539,8190,8010,8504,8498,5329,5320,8068,7742,5329,7864,8136,7706,7790,8821,8504,8010,5622,8062,9525,5726,5464,8539,8093,5491,5301,5655,8010,5381,8608,5352,8359,7786,7628,7610,7605,8164,7864,7904,7576,7972,5642,7990,5476,8745,5391,5255,7696,7723
,8170,5285,5347,5329,8509,7768,7576,5437,7972,5617,7779,7628,8670,7632,7811,8267,5578,5509,5564,8745,7576,8447,8093,5578,8427,7779,5209,5296,7599,8287,8039,7839,7811,5352,7542,8234,8184,7742,7500,7930,8099,7710,7779,5476,8110,5325,7500,5367,8732,8694,8658,5717,5337,8062,5320,5564,7706,5564,7710,8196,8427,7628,7752,8795,7786,5655,5367,8467,5329,8484,8783,5622,7675,7643,5717,7935,5285,8427,7734,8149,5501,8010,7516,5717,7719,8795,7811,5476,5325,8217,7696,7628,5274,8062,5666,8456,5301,5301,5329,8201,7935,8387,7952,8190,8082,8504,5622,7864,8099,8821,8062,7692,7990,5329,7576,7972,5476,7982,5655,5301,5642,8745,5391,5437,7972,5285,5642,7768,5329,5285,5578,5564,7779,8184,7930,5296,8234,7599,5291,8287,5209,8694,5564,5325,8267,5320,8456,8467,5367,8427,5329,7935,7516,7528,7786,5655,7839,7920,5442,8888,5377,7986,8987,7482,8282,8411,8411,8411,5476,8411,8762,7839,8411,8411,8033,8613,8789,5320,5655,8676,8287,7482,8745,8411,8411,5699,7990,8239,8245,7877,5296,7542,7628,5699,7706,8329,5476,8039,8349,7706,7675,7599,8721,9032,7616,5712,8267,5309,8560,8387,8010,7478,8245,7864,7811,5578,7877,5309,8467,7994,5401,7710,7935,8447,7681,7972,5476,7592,7977,8164,5491,7696,8539,8312,5688,5459,8115,8272,5320,8170,7888,5485,5296,8316,5337,5651,5413,5325,8716,5476,5381,8863,8577,8329,8422,8022,7795,8272,8334,5730,8447,7632,8427,8190,8329,8201,5337,7675,5688,7853,7605,5688,8539,8539,5647,5485,5377,8307,8479,5578,8316,8687,7478,5391,7558,7588,5372,8658,8527,5401,5612,5237,5301,7924,8345,8387,7990,8745,8387,7811,5491,5329,8745,7576,5309,8267,8287,8005,7994,5224,5476,8245,9531,7632,5358,7495,7628,8903,8805,5471,5342,8888,8732,7710,8498,7628,7757,5655,5337,8267,8732,8411,8039,7864,5367,5464,5688,8479,8245,7994,5213,5476,8131,5642,7478,7511,8329,7738,8539,5419,8467,7588,7738,7542,5629,5642,7710,8015,5708,5391,7599,7710,5485,5372,5578,5471,8539,7768,7710,8451,7516,9021,7648,7542,8190,7628,5476,7478,8239,8170,5391,5358,5642,8349,5661,5342,7811,8427,8909,7696,7622,8411,8411,7986,5471,8467,8154,7632,5301,5476
,5712,8919,7706,5218,5248,7653,7628,8211,7972,7723,7533,8582,5564,8756,8888,7565,5426,8136,7742,7914,7898,8467,8879,7994,5661,5305,7478,5320,5337,7653,7658,5688,5358,8456,5224,8670,7930,8048,8539,7967,7491,7516,7999,7952,7605,5391,7487,7628,7592,5717,5237,7972,5476,8329,5730,7768,5285,5237,5651,8543,7843,7977,7908,5241,7786,8267,7834,7558,8307,7710,5329,5564,7478,5564,5524,8052,8381,7977,8845,7667,7616,8073,5237,7930,5301,8267,8201,7675,7706,7920,8745,8039,7533,8772,7967,5255,8277,8447,8033,5337,5529,7521,7883,8170,5218,8255,5476,8762,7834,5712,8893,7930,7864,7628,5676,7628,8267,5730,8005,7521,5407,5448,8577,8391,5237,8613,5573,5209,7967,5612,7478,7482,8267,8052,5218,5358,5305,8783,5564,8239,7482,5622,7982,8255,7834,8015,8010,5309,7500,8919,5358,7528,5358,8554,7994,5241,7790,5261,7516,8312,7883,8249,7491,8340,7908,7972,8249,7967,8582,5401,8631,5280,5337,7706,7834,7877,5476,8093,7811,5237,8527,8365,5301,5337,5693,5688,5314,8456,7576,5476,8560,7643,8484,7834,8397,7790,7883,7500,7853,5578,5501,7653,7898,8267,7658,5606,5280,5480,7571,7653,7786,7588,7734,7616,5437,8391,7632,7653,5442,8845,8316,7972,5274,5419,7558,7482,5647,7675,5712,7692,7940,5666,7734,5534,5712,7834,5237,8929,7994,5329,5347,7834,8272,8456,8427,8104,7675,8397,8287,8140,8653,5377,5274,8762,7982,7500,5426,8772,8433,5377,5301,8312,7487,5448,5651,7768,5442,8451,8267,5218,8608,7687,8370,5629,5419,8647,8489,7628,5301,7719,8149,5476,5419,7734,5285,5237,8772,8312,8789,5476,5301,8170,5480,5476,8929,8613,8329,8811,7834,8093,5651,5314,8929,7528,5285,5237,5209,7834,8411,7774,5391,5337,5391,5391,5301,7548,7491,7779,5301,5309,7706,5688,5476,7757,8479,7999,5693,8312,5688,7849,5564,5655,8022,7904,7920,5564,8716,7757,5261,5329,5568,8732,5209,8010,7628,5396,7653,7817,7706,7628,8184,8539,8647,5237,5476,7637,5391,5647,5237,8293,7542,5606,5407,7920,7482,7482,7904,7565,8033,5426,7853,8354,7834,5564,5325,8136,7528,9542,5291,5471,8131,7482,7542,8282,5325,9550,8783,8816,5501,8267,8110,8196,7920,5407,8062,7628,7667,8805,5726,8762
,8527,5671,8915,8154,7658,8756,7994,7752,8613,5661,7888,5209,8447,8461,5391,8438,7786,7864,8093,7790,5476,7786,7904,7920,7628,5426,5712,8201,5337,7828,7977,5391,7864,5213,8964,8170,8427,7706,5617,8190,7999,8222,7849,5358,8119,5573,5391,8073,7605,7710,8925,5407,8316,7542,7521,5476,7511,7542,5391,5381,8354,5464,7924,8282,8073,5381,8131,5320,5241,7715,5726,7548,9556,8484,7715,7883,7482,5476,7487,8582,9012,7823,5407,5337,5426,7805,5285,7710,7696,5717,5224,5391,8711,7977,7482,7616,7576,5248,8987,8316,5305,5666,5325,5671,7500,8504,9021,7972,7723,5629,8467,5274,8467,7768,7920,8964,7605,5730,8391,7473,5642,5274,5274,5519,5514,8391,8149,5325,7920,8613,5651,5476,7482,8687,8345,9562,5337,7719,8898,7542,8375,7588,5476,5655,7653,7872,5391,8946,5688,7675,7588,5519,8447,5377,8451,8613,7994,7533,8307,5329,5712,7843,5509,7528,8267,7930,7478,7588,7628,7877,8805,8073,7653,7977,8062,8498,8527,7877,8272,7972,5342,8190,7478,7478,8234,7482,5524,5329,5426,7542,7914,7653,7616,8104,5573,5347,8170,5274,8964,8211,7734,5448,7982,7872,7558,5524,5301,8594,7914,7605,8539,5651,5708,8397,8170,8227,8467,5407,8461,5218,8272,8227,8099,7473,8093,5325,5329,5285,8322,5237,5676,8438,5329,5352,8345,7920,5274,8387,8863,5671,9051,8571,7738,8653,8762,8827,7805,5209,5241,8543,7491,8827,8658,8772,5693,7706,7542,5314,7706,7628,5309,8721,7790,8227,5606,5237,5401,8272,7786,8451,5352,5320,5329,8249,5329,5396,8473,8144,8277,8800,8519,8239,8716,5377,8365,5419,8827,7616,8062,8329,7779,8721,8514,8509,7616,8539,5401,8039,5274,8762,5647,7994,8827,5564,8110,5296,7864,8110,8033,5377,7687,8062,7924,8676,5367,5237,8946,5237,5464,7994,5464,7999,5496,7867,8239,8062,7548,5476,8711,5509,8255,5730,7853,8329,7811,7491,8670,5647,7605,5407,8022,8005,5261,8190,7888,5381,5655,7734,7548,7994,5519,7719,5712,5213,7558,5396,5285,8149,8287,7738,7898,7946,5401,5612,5237,7511,8267,7491,8417,5564,8164,5629,5612,8322,5717,8851,7805,8073,5347,7730,5358,8940,8438,5209,7478,7616,5464,7681,7511,8509,7962,8302,5448,7800,8052,5655,5655,7632,5476,8467
,5629,8077,5237,7977,7877,7538,7883,7883,5476,5342,7920,8170,7738,8196,5329,5314,7742,8443,8641,5309,5501,8190,5401,5342,5708,5573,5305,7632,5358,7994,7511,8539,8255,5401,5476,5329,5708,8946,8322,8402,7706,8249,5629,7800,5218,7877,8052,5485,8473,5320,7957,5519,8827,7610,8365,8154,5485,5347,5367,7663,8756,5314,7994,8888,7786,7542,5426,5396,5325,7734,5296,8322,8312,7516,5612,5519,8589,7558,8005,7924,5329,5386,5381,7616,5342,8010,5296,7768,7924,5629,7521,5291,5285,9021,8239,5381,8149,8217,8772,8365,5325,5274,7548,8919,8387,7616,5218,5301,5485,7675,7734,8964,7994,5661,8359,8052,7605,7610,7893,5342,5329,5676,5296,7628,5401,7616,8255,7542,5314,5666,7616,8964,7811,7491,7888,8039,8903,8170,5501,8245,7930,5476,7663,8548,5314,8527,7605,7946,8863,5325,8077,7738,8154,8745,8888,5699,7834,8381,7500,5407,5407,8375,7834,7972,5651,5301,8772,8903,8857,8582,8987,5352,7986,7605,8851,5347,5329,8473,8222,8329,5274,7834,7610,7864,5329,7528,7487,5347,8073,5407,5642,7877,7521,5407,5237,8721,8777,8077,5666,7687,5661,7710,7632,8857,8375,5372,8077,5291,5629,5320,8816,5309,5688,5476,5655,5693,7999,8312,7757,5647,7542,8732,5261,7715,8010,7757,8514,8293,8068,7637,7528,5671,8201,7658,8170,5471,8783,7548,8136,7930,8461,8282,7542,5642,8196,7667,7478,8131,8756,7565,8154,7853,8816,7904,7628,7542,5241,7542,7786,8073,7864,8312,8201,7904,8354,8857,7715,7883,7977,7828,5337,8964,8073,8093,8851,7542,7511,7768,5391,8190,5617,5358,8447,7920,7888,7478,8898,8149,7696,7994,8316,5337,7558,7977,7491,5386,5367,8903,8316,5329,7757,7482,8509,7687,7605,5274,8805,9003,8010,5396,7768,8302,7977,5642,5717,8391,5241,7719,5285,5476,7877,5248,5629,8345,7482,8613,7605,8149,7734,5261,8277,8594,5401,8946,7675,5612,5301,5241,5573,8039,5524,8964,5519,8451,5329,8527,7528,7511,8509,5407,7872,5377,7843,7542,8170,9051,8322,7888,5606,8451,7790,8543,8653,8407,7473,8653,8402,5237,7786,7632,7805,8863,5309,8827,5274,5676,8154,8057,8772,5693,5407,8170,8249,5209,7867,5291,8509,8322,8022,7864,5496,8255,7548,8676,5448,5464,7811,7663,8762,5712
,7957,8005,7893,5712,5305,5655,8527,5717,8077,5426,5676,8077,8136,7511,8052,7898,5476,7977,8888,5237,8473,5629,7800,7632,7592,5218,5485,8539,8443,8190,7491,8772,5291,8239,7734,5329,8217,5347,7734,8498,8322,8919,8154,5666,7605,5314,5401,5476,5496,8473,8267,5367,7521,8227,8411,8131,5708,5459,5726,8239,7555,8227,8514,5661,8969,5261,5237,8267,7834,5358,7653,5358,7504,8509,8756,5413,8062,8489,8589,5274,5274,5407,8438,9568,0,8322,7687,5386,5358,5352,5367,5329,8411,5358,8489,7849,8721,5717,5717,8048,5291,8062,7952,8438,5476,8653,7843,5329,8805,7653,8170,8354,8354,7800,5726,7516,8227,8635,5407,7834,8438,7834,7864,7643,7628,7752,7521,7491,7542,5726,7834,7752,8282,8345,7994,7482,8144,5699,5347,8762,8239,7628,5606,8077,7738,8509,7849,5699,8519,5347,8307,5629,8005,5629,8721,8647,8795,5237,5296,7877,7920,8903,7811,7738,8179,5342,8514,8969,5261,7834,5296,5358,7752,8519,8489,7994,5274,8062,5407,8438,8756,8514,7687,5386,7952,7752,7738,8438,5329,5642,8048,5717,7849,8653,8484,8354,7843,8893,7628,7834,8227,5237,7752,7643,7834,7864,7877,5347,5726,7834,8144,5699,5717,5606,7738,5699,8795,5347,5564,5564,5333,8217,7528,7542,7478,5476,5480,7605,7924,8093,7864,7628,8282,9577,8946,8267,8821,7972,7920,5485,7565,7924,7482,5655,5381,7491,7904,5726,8539,8443,7571,7828,7696,9051,5480,5717,8312,5224,5564,5325,5329,7610,8033,5426,7888,7957,5305,5419,5320,7888,5274,7696,5459,7628,5671,5261,5337,8255,5426,8387,7558,5358,5367,5261,8119,7972,5480,8307,8467,5655,5573,5274,7790,8077,8267,5274,8456,8456,7967,5476,5407,8099,7482,8170,7805,8033,7920,8093,8272,5274,5329,8594,5274,8211,7605,7924,8705,7786,5248,5301,5301,7628,5377,7628,7610,7994,7994,8077,8217,5342,7977,5464,5381,5342,7482,5671,7834,5296,5717,8267,7632,7994,7663,5237,7994,5325,8015,8316,5708,5419,7482,5301,7648,7516,8422,8647,8222,5342,5342,7628,5274,5442,8447,8010,5325,8010,5237,7482,7864,5296,7610,5381,5301,5301,5301,8863,8756,7972,9032,7667,7823,5476,5699,7834,7542,8387,7504,8387,5325,7786,7811,7972,8131,7667,7839,5218,8170,5573,7982,8925,76
32,7628,5442,7757,5342,7653,8154,8617,8721,5573,8925,5218,5325,5237,5442,7511,5301,7823,5325,8925,5261,7738,7628,7628,5529,8136,8762,5367,5391,7752,5501,8334,8267,5301,5358,8608,8093,5666,8345,8222,8249,7828,7843,5426,7478,7999,7500,5325,7715,5274,7675,7687,8805,5655,8456,5712,8093,7706,7482,8154,8302,7893,5534,8427,7482,8447,8548,5651,7516,5476,7582,7616,8387,8249,8617,8093,5367,8062,7482,7632,8349,8267,7511,8249,8888,7478,7994,8154,7482,5237,5476,7663,7883,8919,5274,5386,5329,7977,5730,5325,5337,5337,7904,7920,7994,7962,8073,5325,8349,8249,5448,7538,5301,8154,7663,5391,5491,7610,8093,7663,8519,5301,7800,5485,5419,7715,8015,7877,8745,5485,8052,8402,5485,5291,8745,5485,5485,7828,5485,7628,7893,5642,8898,5651,7746,9591,8174,7491,5606,5676,7491,5717,5693,5496,5407,8504,8539,8329,7478,8504,8196,5309,5529,8577,7904,7482,7730,8136,5305,8443,8245,8282,5476,5642,8272,5480,7977,8062,8174,7924,5209,5329,7977,7628,5325,5661,5688,8345,8727,7817,7904,8245,8255,8539,5509,8811,8484,7681,8255,8190,8571,7538,7738,8015,5224,7752,8170,7986,5661,8110,7898,5476,5688,8267,7849,5509,7715,8170,5407,8411,8539,5407,5274,8322,8131,7738,7930,8345,7533,5717,5564,8144,5237,7957,7839,8131,7977,5325,8312,5419,7990,8245,8154,5274,7542,8762,8631,5407,5261,7994,8504,5564,5712,5676,5717,7610,5309,7653,8227,5301,8110,8329,5617,7994,7834,5519,8272,8131,8267,7588,7605,7994,8800,8762,8222,8762,7994,8504,5367,7653,7994,8267,8272,8484,8484,5274,5661,5274,5325,7482,7864,8267,8670,7478,5352,7946,7616,8267,7930,7482,7482,7521,7982,7491,5693,8745,7533,7521,8598,7653,7500,7511,7500,5237,8022,8005,7706,8560,7628,5509,8282,5471,7828,8751,7533,8312,8093,7628,5485,5666,7849,8255,7864,7994,7994,5381,5314,8456,5476,7982,8370,8217,7858,5255,7706,5564,7977,7853,8827,8005,7977,7757,5606,7516,7616,7667,8994,7653,5485,8422,8048,8751,8277,8422,5381,7643,7628,5693,5712,7834,7653,5407,7990,5491,7592,8196,5476,8159,5377,7864,7864,7834,7643,8795,5218,5622,7834,5274,8676,5476,8467,8940,7982,7828,5419,7692,5666,8239,8647,5391,78
00,7706,7648,8052,7478,7786,5309,7542,5296,8427,7511,5629,5301,5274,8751,5329,5347,8077,7768,7491,5693,8745,7653,7521,8598,7500,7500,5509,7786,5471,7706,8282,8022,8560,7628,5329,8093,8312,5485,5381,7977,8015,7653,7616,7616,8370,5476,8751,8919,8422,7653,8329,7616,7990,5377,8239,7864,5622,7834,7864,8077,7800,7828,8940,8052,5325,8427,8613,8751,7768,8527,8473,5372,8903,5367,8427,8473,7730,7811,7658,8577,7795,8190,7628,8170,5329,7487,8087,7924,5642,5448,8509,5514,8010,7511,5237,8239,8647,8647,5301,8473,8110,5301,8239,7723,7719,8527,5342,8527,7610,5329,8473,7730,8170,7616,5448,7521,5237,8647,8473,8473,7893,8010,8073,7893,5391,7982,7972,5325,5693,8261,5480,8631,7962,7521,8068,8504,7628,7565,7710,7482,8010,9596,9609,7482,7982,5476,5476,5358,7839,9621,7511,7839,7710,7487,8222,5391,8057,8370,5255,5606,5285,5655,5224,7839,8316,7839,7521,7924,8539,7696,7834,5622,5352,8179,8073,8028,7864,8201,7667,8307,7588,7628,5491,5642,7643,5606,5391,5407,8863,5224,5274,7877,7877,8772,7893,7588,7994,8721,7864,8699,7643,7982,5464,8964,5296,5407,5730,7849,5426,7687,7994,5564,7823,7828,5255,8073,5396,5237,8005,7511,5448,5358,7491,8267,5305,7511,7800,7715,7628,5519,5309,8845,8005,5622,5381,7710,7994,5476,5325,7648,7558,5218,8272,5296,7521,8427,5501,8543,8249,7834,7715,7877,7521,8433,8857,5358,5391,7972,5325,7706,5426,8068,5237,8504,7628,7710,8010,7482,5407,5391,8222,7487,5459,5476,7696,7648,7521,5325,5509,5285,8119,5224,8073,5666,7864,7628,8028,9633,8641,7643,5274,8721,5496,8005,5459,5448,7877,8077,5305,5320,7511,5519,5309,7800,8589,5622,7700,8087,8005,8039,7521,8484,7576,8447,5358,8073,9641,5301,7893,8033,8345,7904,5564,5274,7752,7528,8402,5358,5426,7538,7628,8527,7653,8539,5480,5480,7710,8329,7605,5661,7706,7883,8915,7730,7482,7752,7972,5325,7893,5726,8073,8222,7908,5480,5476,5726,5241,5726,7592,7768,7999,8467,5352,7576,8170,7605,8293,8170,7920,8205,7977,8093,5655,7658,5564,7675,5391,5358,5480,7920,7487,7940,7734,7858,8272,7972,8164,7478,7582,8149,7768,7914,5274,7696,8370,8073,8721,7482,7632,75
38,7516,5661,5325,8170,8022,8196,8211,7779,7653,5647,8190,8422,7667,7628,7558,7528,7663,5329,8863,7576,5617,5325,8653,8272,8277,7986,7990,7706,7706,7628,7893,5514,7834,5381,5642,7478,5476,5329,7858,7542,7893,5464,5407,8154,7752,5377,8519,5367,8548,8005,8110,8087,7795,5480,8005,5448,7542,7558,7972,7511,8140,8909,5237,8340,5442,7834,8467,7967,5514,5261,5301,7972,5329,5305,5237,7632,8015,5305,5708,8179,8647,8827,5726,5358,5274,7628,7500,5396,7849,7588,7675,7516,7877,5471,8789,5480,8833,8772,5285,8964,7834,5301,8427,5485,5476,7828,7516,8888,7967,7834,7710,5329,5274,7521,5325,8617,8170,5237,7811,5301,8473,7538,7628,5480,8329,7710,5726,7828,8073,5329,8205,5391,7487,5564,8170,7967,7605,5480,8293,7977,5476,7658,7675,7538,7790,5285,5396,7632,8370,7858,7839,5301,7558,7834,5325,5285,7667,7706,7706,7990,8863,7653,5358,8179,7858,7972,5237,5514,7478,5708,7511,7542,5261,8647,7632,8827,7877,7967,5325,5285,7521,7786,5464,5301,7994,7849,8811,7920,8608,7853,5661,7667,5358,7710,5699,8539,7692,8370,8255,8131,7473,5717,8427,8845,7858,5358,7990,9012,7930,5342,7957,8073,7675,7616,7849,7839,8739,8888,5305,8608,7957,5386,8762,8205,5342,5377,8888,8739,8479,5524,5564,5688,8119,8255,5329,5372,8473,5301,8653,5377,7565,7533,8721,7790,7883,5320,7576,8048,7681,5352,5209,8048,8297,8811,8174,7920,7790,5564,7706,8164,5726,7521,5519,7795,5325,5352,9361,8670,8438,8391,5301,5391,8548,5377,5352,8022,7622,7768,5712,8598,5407,7888,7982,7986,8484,7706,5401,5655,8888,8245,5305,8589,7800,7800,9026,7500,5712,5305,7605,8789,7811,7908,8539,8888,8577,8261,8307,5209,5386,8062,5237,8307,7653,7610,5342,8484,8227,5459,7628,5301,5676,8190,7542,8582,8548,8711,5655,8987,7628,7952,5305,5325,7706,7999,8005,7643,7904,8312,5712,7500,7834,7675,7779,7628,8422,7706,7599,7779,7994,8277,7710,8647,5606,5305,5325,7500,5391,7834,8427,7628,8479,5241,5301,8277,7817,5480,7757,5342,8762,7864,5358,8302,8560,8068,5329,5476,5381,8010,7576,7628,5329,7752,7982,5426,5237,7565,5501,8136,5337,5712,8762,7849,5367,5726,7653,7920,7872,5480,7500,56
88,5255,7622,5358,5325,7994,8170,5568,8903,8033,7990,5448,8184,7723,7658,8222,5564,7687,8115,7786,5476,7757,7742,5305,5381,5320,7957,7710,7696,8345,7994,7681,7605,7482,8577,7632,8316,8255,7706,7605,7994,5285,8598,7952,5274,5717,8795,7491,7500,8316,8711,7888,7667,5274,7800,7675,5296,8427,5407,7628,7999,5578,8234,8845,5514,7478,5688,5642,5564,5509,5337,5301,8302,5301,8582,5291,8190,7746,8154,5329,5218,8447,5209,7706,8745,7521,5629,8104,5325,7757,7867,7516,7576,8851,8277,7893,8447,5476,8653,5329,8484,8227,5485,8170,8093,7967,5301,8676,7663,5285,8184,8154,8126,5391,5693,7914,5325,5391,5730,7478,5480,7692,8322,5325,8010,8582,8903,7538,8302,7883,7994,7628,8658,8110,5459,5464,5401,7834,5688,5688,5407,5501,7864,8387,5564,8159,5407,5367,5622,7982,5471,7533,7834,7752,5407,8052,5305,7687,5726,7511,7632,5391,5391,7482,8533,5655,7738,5642,7715,8164,7542,7706,7752,8893,5352,5401,5708,5333,5296,8073,5325,7681,8925,5407,5514,7800,5237,5688,8473,5325,5325,7605,7768,7605,5622,8154,8015,8756,5529,5432,5291,5726,7521,8073,5285,7834,7588,5352,9021,5261,5261,8010,5291,7516,8329,7622,5237,0,8946,7675,8721,5730,7908,8062,5514,8427,5367,8211,8119,5301,8227,7935,7864,7511,7616,5325,7500,7834,8302,5301,7628,5301,7763,5655,7521,7864,5329,8721,5301,8946,7628,7834,8863,5337,8554,5329,7565,8745,7516,8073,5255,8184,5329,7757,5407,5564,5329,8222,5730,5699,7658,7952,5381,8217,7994,7696,8099,5509,7828,8461,5325,5285,5274,8821,7521,8329,8845,5301,5401,5407,5301,5391,7681,8234,8302,5509,5655,5325,5367,8745,8093,5386,7967,7893,8653,8227,7786,8447,8184,7757,5485,8190,5391,5209,5501,5459,8184,7588,7864,7839,7511,8159,7864,5688,5564,5464,7994,8940,8411,5514,5708,7706,7738,7482,5642,7994,8267,5333,8473,5529,5432,7800,8438,5218,5622,7877,5291,5261,7516,9021,5329,8795,5301,8033,7482,5453,5476,5564,5305,8484,8201,7834,5564,7757,5325,5325,5261,8811,5391,8527,8391,8222,8467,7972,5693,8196,5564,5501,7752,5407,8539,7628,8998,5285,5291,5291,7920,7811,5655,8222,7834,8387,8154,8533,5666,7990,7742,5325,7977,8903,8093,
5419,5564,5480,7628,5301,8411,7972,8509,8467,7628,5726,7675,7972,7883,8170,5337,7786,8936,7628,7924,5666,8387,8964,7628,5301,7616,8863,7478,7696,7795,7663,7610,7834,8073,8617,5325,8577,7605,7834,5391,8062,5301,8613,8711,5717,8504,5285,5519,7576,7972,7632,5274,7972,7706,7972,7977,5325,7628,7811,5476,8201,7478,5676,7628,8073,8329,5651,5358,5237,7667,8170,5647,5485,5377,7864,7864,8582,7982,7478,5651,5329,7628,7521,7478,5255,5320,5564,8345,7999,5352,5325,5524,7930,7757,5329,5699,5407,8170,5647,8539,8467,7706,5301,7491,8863,8721,8863,8093,8119,7706,5485,5476,7972,5485,5209,8277,7582,7706,8104,8845,8170,5726,8154,5655,5325,5320,7478,7710,7924,8110,5476,5688,5377,7982,5377,7864,5381,7790,5564,8104,5647,7834,7849,8467,7858,8267,7706,7982,7616,5471,7715,8170,8772,8777,7565,5301,7616,8447,8190,5534,5391,7849,5573,5629,5367,8893,7511,5325,7786,5407,8196,7653,5209,5519,8447,5642,7834,5325,5301,5606,5568,5377,5224,8447,5647,7538,5285,5524,5606,8033,5301,5622,5578,8845,5642,5476,7742,7710,7972,8239,8164,7957,5358,8617,5314,7920,7478,5291,7482,7768,7487,5285,7924,5396,5358,7972,7588,5464,8010,5285,8239,7834,8316,8484,5274,5274,5647,8349,9021,7628,7616,5329,8641,7605,5529,5476,7920,5320,7632,7706,5476,5325,5578,7628,8427,8811,7924,5377,8381,5647,5651,8839,8249,8539,8354,7628,5666,8222,5329,7898,7610,5325,8354,7616,8721,8873,5301,8375,5453,8484,5325,8467,8391,9471,7742,8387,5237,8222,5655,7786,7582,5329,8387,7548,7924,7500,7972,5726,8467,5401,5480,7487,7972,7696,7795,5519,8479,7977,5717,8375,8062,7786,8073,5301,5676,7982,7864,7628,5274,5377,7478,5699,7622,8345,5325,7930,5352,5688,8104,8845,5476,5464,7982,5325,7864,7786,7734,5209,5712,7715,8159,5642,5342,8789,7719,5296,5407,7849,7628,7616,5578,8239,8239,5285,8484,7628,5329,8473,8427,7616,8154,5301,8721,5529,5329,8484,7723,5407,5274,8451,7628,5407,7834,8451,5329,5337,8140,5325,8473,8533,5642,7605,7667,7605,5407,5651,5651,7478,7511,8033,7478,7952,8447,7706,5329,8484,7667,5285,5301,7592,7834,7977,5651,5301,5617,7883,7516,5485,5325,7500,
7588,8140,8349,8349,5717,7482,5564,7746,8387,7628,8174,5726,5642,8711,7681,8783,7628,5726,7746,8170,8527,5564,5726,7746,7538,5305,5305,5296,7610,7967,8402,5501,5464,5464,8255,7723,8307,7853,7946,7867,5301,7946,5320,5301,8184,7710,9372,9372,5476,7542,7883,5237,7622,5305,5305,7542,5491,5573,5337,7828,5476,5367,5476,5726,7667,7834,5485,7628,5301,8903,8762,8179,7482,5726,7834,8772,8345,5655,5622,8179,5476,5712,7883,7834,8762,5485,7972,5501,5564,5564,8438,8438,8467,8140,5726,9646,8762,5325,7675,8211,5708,5432,5480,5358,8467,8387,5480,5693,5651,8716,7675,5209,7786,5358,7582,5501,8272,8467,7920,8227,5524,7706,8136,5629,7786,5320,5320,5501,7893,7752,7904,5564,8604,8316,5726,8119,5391,5337,5391,8805,7478,8184,5485,7643,8170,7834,5647,5377,5237,5274,7834,5407,5358,7610,5688,7653,7632,7478,5730,5237,5717,7687,5274,5534,7687,8789,7700,7706,8533,5325,5209,5325,5325,7706,5325,7710,5325,8033,7849,8387,8800,5661,8174,5358,5391,5209,7710,8184,8170,7681,8093,8093,5573,7768,5661,8800,8077,7653,8316,8287,8653,7628,7940,5476,8447,8451,7967,5726,8805,5274,7742,7864,8494,5476,8287,7967,7828,8267,9654,8387,7710,8033,7734,8277,8170,8093,8174,7681,5209,8800,7628,8287,5726,8494,7610,7710,5241,8334,7834,7610,5329,7817,8795,7516,7940,7738,7610,7817,8795,7952,5688,7768,7952,8354,8721,7849,8604,5661,8073,8115,7867,5480,7632,7883,7675,8893,5407,8190,8411,7962,7521,5274,5255,5655,8119,7982,5391,7605,8354,8104,5329,7478,7582,7675,5325,5381,5464,5426,5712,8010,7805,5622,7858,8527,7605,8062,7867,5730,5688,7622,7542,0,8005,9657,7957,8267,7849,5629,8354,5442,5524,7893,8893,5241,8131,8077,7542,7675,8498,7817,8334,8762,5301,5358,5314,5573,8635,7710,7605,8322,7610,5347,5407,8201,5301,5296,5358,8539,8827,7834,5352,9367,5622,5213,8647,8307,7628,7763,5622,8115,5237,8039,5476,7982,7877,5568,8140,5255,5442,5325,7952,7588,7957,7687,5329,8184,5612,8498,9662,5612,5333,8676,9670,7675,8073,5651,7487,8903,8239,7883,7828,5564,5407,8438,8005,5491,8255,8919,5464,7962,8149,8365,7675,8762,5391,8365,9676,5213,9680,5213,881
1,8170,8762,5329,7834,8073,8903,8159,7990,7610,7542,5401,8073,7576,8277,9686,5347,8005,7628,8354,8052,5655,7658,5377,5651,5442,5325,9690,9680,8909,5622,7576,8010,5448,7576,9695,5314,8909,5329,5329,5407,5480,7616,7628,8277,7610,9701,5401,5347,7542,8354,7548,5726,5401,8411,8676,8857,5301,7487,7632,7616,8354,5314,7500,5651,8539,8010,7914,5578,7914,8293,5661,5666,5274,5209,8447,8879,9710,7542,9715,5358,8282,7478,8302,8756,8110,5501,5358,8402,9721,5301,7710,9726,9736,7904,5301,8811,8170,8312,5305,8140,8184,7576,5301,7588,7681,5301,7706,7588,8062,8316,5209,8604,8354,7576,7516,9345,8126,9740,5501,5325,9745,9751,7779,5274,5564,7779,8179,8845,5301,8354,5329,8170,7706,8402,8126,8015,7516,8548,5485,8039,9759,8589,8589,5629,8411,8110,7746,5730,7500,8548,7982,5407,8365,7839,5301,5661,5564,8447,7930,7786,5519,5524,5629,5688,9751,8340,9345,5629,5651,8589,5501,5372,8756,5419,5305,8589,8589,7495,5237,7779,5419,7908,8073,7811,985,8548,5274,7495,8641,8077,5301,7571,8539,8077,7681,5347,5651,5285,8484,5237,7817,5241,8484,5337,5325,5496,7576,8329,8354,8062,7628,7849,5509,8504,7752,7752,5688,8136,5209,7853,8816,8998,5381,5320,8104,7920,7482,8302,8062,7972,5325,5501,8267,8504,7883,5309,7588,7565,5612,8402,8484,7687,9766,5309,5564,7972,7839,5209,5442,8062,7687,5666,5237,5564,5661,7914,7930,7605,5726,8093,7605,8670,8811,7924,5564,5480,5655,5491,5314,5391,7548,5274,8443,7592,8282,8170,5305,5320,8397,9772,5485,8504,7946,5661,5381,7605,5442,7795,8126,5291,8582,7982,7839,8845,8845,8272,5661,7692,5285,7768,5671,7632,8190,5224,5274,5564,9462,8316,8164,7681,7482,5666,5661,7768,7500,8099,5459,8316,7487,5419,7521,5296,5291,7972,5407,5676,7710,7746,5655,5329,7558,5688,5320,7898,8316,7675,5509,8196,8845,5519,5377,5519,5661,8062,5237,5564,8805,5342,8217,8653,5564,5485,7982,7972,7834,7592,8527,5325,9778,9783,8548,7516,7888,5301,5329,7779,5442,7768,8670,7706,7706,9032,7706,5509,7542,5274,8087,7994,7706,8272,5509,8461,7599,5413,8863,7774,8039,8402,8099,7511,7706,8427,7930,8287,8093,7746,7706,7867,8772,8548
,8234,7967,5476,5209,5426,7779,5309,7893,7872,8438,7628,8548,8010,5329,7500,9789,8015,9796,7500,7521,5407,7990,5274,8821,7746,7706,8676,7548,8539,8438,7622,8467,5661,8222,7487,5688,8411,7811,8845,8827,7746,7994,5564,7994,7628,8131,7622,5661,5401,8548,8411,8267,7482,5377,7795,7972,8267,7558,7877,5491,5367,8467,8548,7500,9804,9810,8845,9367,7616,5309,5476,8863,8447,7898,7786,7533,7576,5407,8451,7768,8617,8267,5367,8952,5237,8164,8272,5564,8267,5655,5524,8613,7982,5437,8909,5274,7588,5655,8548,8272,9818,7774,5358,8527,8140,5301,5612,7482,5237,8548,5666,5622,7542,7800,7972,5329,5305,8647,5401,5651,8190,8548,5325,5261,5342,8010,8952,5413,5476,5514,8711,7576,8322,7643,9824,5237,9829,5519,8297,8190,5296,8255,8833,7571,7972,5261,8589,7516,7628,8010,8239,8287,9021,5407,7648,7877,7872,5519,5296,5396,8857,5237,8322,9835,8245,8119,5296,5237,7719,5407,5419,7811,5372,8827,5381,5285,8073,7877,5476,5386,5342,5476,8635,7500,5325,8888,7542,5347,7734,8329,8857,7706,5352,5301,5305,5666,5661,8857,8582,5337,7500,5666,5274,7877,5568,5329,7877,7616,7774,7521,5274,8397,8857,7632,5237,8479,5347,5666,5401,7487,8484,7628,8062,7576,5274,7849,8354,5325,5509,8582,5325,8504,7994,8267,8062,8104,7920,8302,8461,8136,8504,8548,8099,5501,7972,7752,7565,7605,7482,8816,5320,5274,8811,5661,5564,5401,8443,7914,5237,7972,7548,7576,8282,5564,5314,8670,5476,5485,5491,7898,8164,5381,7692,7648,5459,7946,5296,9462,8190,5419,7632,5291,5285,8316,8845,7675,5676,5519,5237,5342,8272,5325,5377,5325,7500,8093,9839,7500,7706,7893,8287,8015,8234,8119,7994,7746,8087,5413,5442,8272,7511,7768,8548,8427,5509,5329,8863,5209,7990,7521,5407,7746,5347,5377,7622,7706,8267,5296,7487,7888,8676,8411,5274,8131,8863,7811,8539,5329,8411,8438,8467,5309,5564,8613,5655,8527,8952,5524,7877,5301,5476,5407,8888,8647,7800,5666,7616,5612,8711,5514,5661,8239,5261,8833,5419,7542,5372,7811,5285,8857,8998,5661,8461,8998,5699,8998,9844,8073,5255,5730,9853,8461,7616,7616,8745,5367,8721,9021,8772,8227,7706,5534,7616,5666,8772,5329,8721,7599,7599,8461
,7599,7653,9858,7632,5564,5651,8048,7786,7605,8170,7790,7786,8438,8461,7904,7795,5377,5676,5564,7516,7834,7999,8397,7795,5314,7628,7972,8345,8048,7823,5320,7487,7920,7538,7511,7904,8048,5301,8783,7994,5358,5325,8833,8316,7511,5325,5676,5651,7999,7632,7920,5325,5325,8375,7924,7763,8322,7738,5712,7924,5337,5337,5501,7904,5524,7924,7786,8322,8205,5524,8093,5476,7605,8539,5519,7542,5476,5274,8164,8658,8658,7542,7811,8397,5274,5476,9863,8322,5274,7628,8845,5381,5629,8467,5237,8010,5261,5501,5261,5401,5666,8845,8417,8322,7877,7628,7817,5476,5248,5534,5501,8062,7710,7710,8402,5564,5367,9054,5476,7986,5476,7920,8093,7511,7972,5726,5237,5661,7521,5237,5237,7706,8255,5296,5296,7675,7719,8062,5419,8539,8402,7893,8272,8302,7710,7605,5407,8190,5391,7858,8548,5509,5296,5730,7849,7849,7653,5708,8647,5476,5401,5726,9873,7516,9054,8255,5708,8010,5476,5296,8190,8479,5642,8149,9877,8898,7790,5578,7706,8131,8131,8131,5459,5459,7696,9045,8821,7696,8716,5622,5666,8805,5209,8845,8783,8052,8267,8994,7920,7710,7487,5573,7977,8170,7849,5573,8149,8099,5325,5629,7742,7957,8417,8131,8334,8115,7957,5325,8149,8154,5337,5305,8359,7696,5578,8805,7491,8245,8022,8196,8267,7710,7883,7883,8062,8845,7972,9881,8077,7883,5655,7935,8093,5241,5337,8631,8239,7610,7972,8381,7681,7696,8387,5459,7795,9886,5401,8052,8119,8577,8751,5485,8170,5358,8539,8039,7752,7500,5255,8827,9891,9900,8805,7628,8827,8005,9904,8005,8827,5401,5534,8005,5401,5401,8827,5708,8119,8022,8805,8005,8077,7972,7883,8381,7867,5325,8805,8805,7610,7628,7628,7999,7487,7588,5476,7588,7487,7999,7999,8110,5476,5333,5325,5688,8800,8340,5717,8539,7811,5464,5476,8504,7675,8613,7576,8467,8762,8461,8164,8811,5496,8494,8227,7786,7972,7653,5564,7893,5501,8456,8334,5407,7565,9032,5337,8136,8560,8800,8297,7817,7628,7752,5642,5309,7920,7542,5237,5237,7893,8800,5485,7658,7914,7548,8015,5661,7914,7994,7576,5459,7883,7893,7904,8073,5726,5606,7930,7795,7658,5209,7692,7972,8447,5255,5209,5564,8334,5655,5274,7571,8456,7977,8119,7972,8222,7715,5726,7982,7990,5476
,7482,7500,7924,7610,7582,9012,5329,5485,8293,7834,8334,7473,5476,8687,5671,5717,7582,7972,7675,7504,7504,7696,7864,5426,7858,8766,5209,7952,773,9915,2533,3110,9919,3464,9924,2802,9929,9934,9939,9945,3447,2826,2847,3306,9950,9955,2550,2661,3190,9961,2685,2300,3133,3157,2871,3215,9966,3179,3005,9971,3235,2606,2953,2977,40,609,2526,9977,1783,9981,3457,9985,2795,9989,9993,9997,10002,1733,2819,324,3297,10006,10010,2547,2654,3120,10015,2678,2700,3126,3148,2864,3208,10019,347,2998,10023,3228,2599,2950,2968,24,603,10028,429,10033,852,10037,803,972,800,10041,9312,10045,10050,829,833,985,806,955,959,10055,839,819,816,10060,826,988,1044,1464,926,809,10064,855,836,1659,10068,915,845,842,848,776,10073,4081,3560,10077,3573,10082,10087,10091,10096,10101,10107,4021,10112,3609,4035,10116,10121,3809,4003,10127,10131,3849,3912,10136,10140,3707,10145,10149,3959,3661,10154,10160,3747,3787,10164,64,606,2523,10169,3103,10173,3454,10177,2792,10181,10185,10189,10194,2738,2816,2840,3293,10198,10202,2544,2651,3183,10207,2675,2696,3123,3144,2861,3205,10211,670,2995,10215,3225,2596,2947,2964,52,1084,10220,2537,9220,3114,10225,3471,10229,2809,10233,10237,10241,10246,10250,2759,2833,2854,1321,10255,10259,1426,2668,3194,10264,1325,327,3137,3166,2878,3222,10268,1318,1311,10272,3242,2613,1400,2986,32,410,10277,1620,10282,482,10286,3468,10290,2806,10294,10298,10302,10307,10312,10316,3451,2830,2851,3311,10321,10325,2554,2665,432,10330,2689,2305,464,3162,2875,3219,452,1772,3009,10334,10339,3239,10345,2610,2957,2982,106,10349,0,0,0,10360,10360,10360,0,0,0,0,0,0,0,0,0,0,0,0,0,0,6,8,10,12,14,16,18,20,22,10360,10360,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,10366,10366,5777,5777,4344,4344,5078,5078,6688,6688,6694,6694,10373,10373,10379,10379,5898,5898,6704,6704,670,670,6704,6704,10387,10387,10394,10394,6704,6704,1736,1736,0,0,10402,10402,10402,10402,10402,10402,10407,10407,10417,10417,10427,10427,10434,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,10446,10446,10450,10450,10455,10455,6540,6540,10460,10460,2851,2851,3613
,3613,10465,10465,10470,10470,10475,10475,4198,4198,3917,3917,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,10480,10480,10480,10480,10480,10480,10480,10480,168,179,184,10488,193,168,179,184,10488,193,168,179,184,10495,193,618,618,618,10505,249,249,249,249,10511,10520,10520,10527,10527,10527,10527,10541,10541,10546,10546,10549,10549,10558,10558,10558,10558,4971,4971,770,770,1451,1451,908,908,5150,5150,5150,5150,1521,1521,28,28,44,44,44,44,44,44,10568,10568,46,46,52,52,52,52,776,776,54,54,54,54,54,54,56,56,56,56,58,58,10575,10575,66,66,10579,10579,10582,10582,10593,10593,10593,10593,10599,10599,5234,5234,4924,4924,10604,10604,10608,10611,10615,10619,10623,10575,4971,10627,10631,5070,5070,5070,5070,5070,5070,5070,46,46,5070,5070,5070,5070,5070,5070,10634,10645,10651,10658,10658,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,34,54,48,40,48,24,40,10667,64,32,52,10676,1311,1314,3242,4177,875,2878,3342,3222,10684,3648,4056,2514,10688,2854,1321,3194,10255,1400,3471,3373,2809,10229,2613,2668,1426,10693,1325,2537,24,40,64,32,776,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,836,911,915,922,926,812,809,848,806,955,816,842,803,968,800,845,1144,1454,1148,852,1464,1044,24,855,819,839,988,826,429,24,40,64,32,52,479,2885,833,1060,773,852,855,933,937,942,951,10697,10707,5004,10717,819,819,863,0,0,0,0,0,0,0,0,0,0,0,0,875,884,24,770,40,892,64,895,58,664,46,1057,32,773,905,52,776,908,836,911,915,918,922,926,812,809,929,848,933,937,942,946,951,806,955,816,959,842,803,968,800,972,845,855,819,839,985,988,992,826,429,976,10727,770,40,892,64,895,58,664,46,1057,32,773,905,52,776,908,1011,0,0,0,0,0,0,0,0,0,0,0,0,6,8,10,12,14,16,18,20,22,0,0,0,0,0,0,0,0,0,0,0,0,0
,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,6,8,10,12,14,16,18,20,22,836,911,915,922,826,988,1044,848,806,10733,842,803,968,845,816,800,819,855,839,852,955,429,985,926,24,109,40,776,0,0,0,0,0,0,0,0,3715,10737,836,915,922,806,816,842,803,800,845,926,809,848,826,819,839,855,852,429,10041,10068,10060,10742,24,40,64,32,905,52,908,1786,10747,106,50,58,38,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,24,40,64,32,905,52,836,911,915,918,10750,922,812,10755,809,929,10760,10765,10769,806,955,816,959,10774,842,942,803,10778,968,800,972,10782,845,1073,855,819,839,985,992,826,429,770,40,892,1518,64,109,52,905,908,1440,855,819,839,852,0,0,0,0,0,0,0,0,0,4580,4580,4580,4580,4580,4580,4580,4580,4580,4580,4580,4580,4580,4580,0,0,0,6,8,10,12,14,16,18,20,22,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7558,7681,7972,8402,5712,5661,8402,5661,5305,8349,5325,10786,0,7908,7730,8205,7706,5386,5651,8789,8
375,5509,8073,7752,8721,5688,5285,5485,8110,7946,8451,8577,5261,8805,5712,8104,7914,5661,8407,8397,8140,5519,7528,5480,8174,7558,8028,5564,8307,5480,5453,10798,7920,7786,7795,5301,5401,8015,8402,5381,7779,7576,7920,8473,8099,7858,8039,8005,7805,7883,5606,8028,5485,5381,7986,5476,8345,8504,7914,8287,5564,5564,8170,7893,8322,5509,7582,7920,8039,8548,5693,7478,5712,8821,5622,7972,8694,7795,7742,8539,8345,5426,5386,7628,8732,7823,8267,7548,8110,7864,8159,5647,5642,5407,7994,5285,7628,7977,8676,5358,8227,5564,5274,8447,7746,8631,8909,8422,7898,7542,5329,8631,5448,5629,7588,8863,5717,5476,8073,8239,5325,7920,5661,5524,5329,8888,7742,8227,5329,8039,8334,8663,8647,5419,5564,5629,5305,5237,5622,5329,5285,7616,5329,7972,10803,8104,5401,8316,5352,8447,7687,8239,8402,7952,7675,8909,8010,7972,5285,5261,7478,8402,8489,7521,5342,5501,7616,8772,9361,7883,7908,8789,8068,5401,5358,5218,5291,5573,5476,5676,7986,5401,5514,8068,7786,7706,8402,8888,8329,8613,8154,8205,5274,8473,5426,8739,7986,7834,5329,8154,8205,7632,5726,8205,8375,8447,8205,8033,7542,8494,8687,8205,7516,8687,5209,5285,8077,5442,5325,5524,5442,7746,8077,7972,7972,5717,5407,5459,7972,5519,7658,5655,8484,5274,7628,5496,5642,7628,5248,5237,5237,8484,7628,7628,5426,8170,8484,7528,5617,5617,10809,8484,5391,8772,5320,5391,5606,7883,7734,7734,8721,5396,8772,8732,8598,5367,7478,7576,8479,8461,5261,5476,0,8994,7658,8010,8282,8732,8560,7565,8282,5699,10817,7548,5407,8777,8093,7605,8115,7888,8073,5274,5717,8732,7977,5329,8062,8334,5237,0,5564,5372,8527,5301,8670,7628,7843,8805,0,7994,8073,8249,8184,8170,5464,8087,8548,8411,7790,5459,10825,7898,5496,5476,5514,8777,8340,7957,5491,10830,5642,8316,10836,8903,8772,5329,5476,5381,5407,8154,8249,7706,5329,5329,7877,8617,8682,7779,5407,5407,7719,7834,5564,5255,7834,7834,8340,8340,5476,5333,8805,5688,7663,8716,7681,5285,5381,5352,5509,5476,7637,7746,7763,8811,7542,7628,7628,8851,7723,5647,5367,7706,8211,5209,5296,5342,7972,7521,5358,5564,8068,7994,7478,7972,7706,5261,7752,7542,8504,8998,556
8,5381,5329,7500,5325,8196,7706,8467,5501,7904,7872,7528,5325,5396,5396,7565,7478,5291,8131,7972,8267,8196,5708,5573,7790,8387,7893,8705,8560,8827,7482,5337,8411,5476,10844,7994,5301,7920,7864,5274,8227,8777,8249,7588,5476,8467,8297,8093,7658,7681,8539,7622,7723,5301,5218,7990,7715,8456,8222,8131,8438,5476,5476,8170,7920,8893,7692,7605,8447,8073,5224,5261,5688,5642,5274,8543,8062,7616,7687,7582,8170,5459,8077,8811,7982,5255,5712,5564,8119,7786,5358,7994,5564,5314,7790,8010,7867,7790,7790,7904,5209,5305,5209,8539,7710,5325,5485,7610,0,5453,0,5529,7616,7723,5717,8391,5301,8164,7482,8682,7542,7637,8033,7628,5519,5209,8577,7478,5325,8005,8604,5248,7839,8131,8687,8211,7710,5285,5285,5564,7628,7605,5209,5419,5296,7482,5496,8048,8164,8613,5248,8211,8316,7548,0,7605,8329,7811,7473,7768,5671,8245,5485,5426,7768,8340,5485,7696,8345,8087,7710,8504,5407,5377,5464,5255,5301,5309,8149,5320,9345,8245,7977,7648,5358,8115,8255,5296,7706,8903,0,8164,7643,7616,7632,7616,8329,5309,7962,7482,8349,8062,7576,5642,8762,7994,5573,8307,5564,8504,7482,7849,5358,8201,8267,8539,5647,5342,5342,7558,8033,7478,5432,5237,8845,5301,5564,7605,5305,5301,9345,5325,7888,5688,8302,5612,8033,8527,8170,5712,8249,5426,5485,7935,7528,7667,7834,8805,7757,8451,5509,7653,5337,8451,7977,7599,5337,8816,8670,5274,7843,5564,7864,5305,7752,5381,7823,7715,8795,8277,5401,5391,8494,10850,5401,8154,5337,7616,7616,10857,8104,8211,8211,7839,5509,5485,8539,5726,7582,5407,7967,7786,7986,5329,8170,7994,7667,8077,8653,7849,7734,5209,5524,8039,5564,8099,8227,5442,8022,7805,8427,8514,7774,7904,5301,5647,8073,7653,8222,8196,5676,5237,8196,8539,5381,7734,8903,5309,7999,7658,8845,5396,7786,8093,7893,5309,8345,7599,5413,7576,7834,8170,5407,7516,8772,5320,5606,8479,7982,7706,7849,8402,8272,8104,7930,8227,7811,7893,8277,7706,8249,5255,7994,7883,8302,7811,7834,5476,7681,7706,5509,8604,8716,10862,5480,8484,8964,7632,0,0,8249,0,10874,0,7616,7616,7616,7675,7730,5688,7990,7521,5509,7628,7628,5564,5401,8190,8467,7823,8115,8494,8104,7675,76
28,7858,7790,8345,7864,7632,7616,8154,7795,5419,7746,7571,7571,8170,8222,8110,5237,5661,8345,5726,5407,5564,5329,5485,9054,8527,8062,8062,5730,7834,5647,7605,5209,7521,7914,5717,5381,5485,5651,5464,7914,7734,5671,7582,7576,7542,7994,8073,7994,8994,5325,7982,8205,7977,8676,5642,7667,8827,7768,7994,5274,8005,8805,7643,5237,5391,7706,5347,8548,5642,8467,5712,8267,5320,8539,5622,8721,9751,5529,7877,8509,8005,0,5419,7957,7994,5213,0,5448,7482,5391,8104,5391,8267,5655,7843,8293,8249,7849,5699,7940,8467,5301,8170,5237,8334,5573,7500,5647,7511,8494,5655,7542,8443,8312,8451,8467,5573,7628,7908,7533,7734,7734,8136,8851,5237,5501,8267,5241,5407,5325,8893,8777,8869,5651,8052,7962,5655,8391,5448,7500,7511,5237,7786,7843,8164,5534,5671,8451,5391,5629,7632,8613,5612,5274,7930,5642,7952,5358,8548,8234,8925,5519,8604,8370,5347,0,5325,5301,8179,7843,7628,0,7616,5305,7757,7681,7790,8316,7605,5352,8345,5401,8827,5237,5255,5688,5309,8104,5381,7994,8422,8340,7628,5476,5325,8239,5358,5329,7588,8201,8015,7632,5419,7800,7834,8093,5325,8647,8234,7653,8539,7588,5329,5617,8987,8539,7763,8329,7800,8073,8539,8451,5578,9367,8190,7994,7982,5708,5651,7957,8140,7548,7774,8312,5419,5651,8827,5453,5296,7667,7482,5407,7834,5320,7482,8005,8033,7982,8302,5568,7542,7538,8205,10880,8322,8307,5401,7516,5568,7663,5358,5301,7994,5325,8329,8119,8010,8249,7742,7877,7828,8329,5285,7872,7872,5666,7628,8255,5573,5396,5407,8349,8267,8222,5337,5726,5329,7648,5296,7864,8909,7893,5291,8397,8048,8613,5501,8005,8598,7872,8387,7500,5564,5291,8903,7478,8316,7487,7675,5237,7610,8267,0,7706,5407,8267,7962,8093,7628,5367,5218,5325,8062,7487,8789,5666,8190,8267,7628,5237,8052,8073,8052,7663,7994,5296,7994,5325,7839,7521,8375,7763,5476,8873,5419,8179,7914,8322,5401,7990,8467,8245,5377,5629,5661,5358,8427,7962,5401,8010,5693,5381,7786,5661,5320,5407,8898,8062,7730,7528,8329,5642,10888,5448,5320,8329,8217,8751,7994,8766,5442,8349,8613,8647,7658,7706,5372,8033,5655,7696,8447,7616,5524,8827,7675,7706,8048,5407,7500,8005,5381,8411,
7757,5651,5347,5309,8777,7622,7723,7867,5274,10893,7742,5274,7588,5329,5476,7500,5661,5301,5476,7986,5666,7675,8539,7972,7930,8909,5642,5305,7811,8647,8010,5448,8772,8422,7723,8255,7994,7511,5296,0,5524,8267,0,5301,5320,5573,7663,7516,5372,8190,8033,8277,8154,5480,7478,7872,7872,7706,7946,5329,7588,8422,8211,8267,8751,7628,8329,5325,7592,5274,5305,10898,7588,7592,0,8077,8073,8261,5476,5274,7628,8170,5401,5401,7482,8322,7616,7610,8316,7994,8354,8093,5726,5642,8010,5651,5347,8005,5347,5325,8903,9516,5666,5666,5396,8527,7972,7994,8005,8427,8359,5301,5325,5651,5666,8359,7632,5325,8005,5329,5407,8589,0,5666,7768,7675,5476,8375,8514,5432,7982,7982,5329,9641,5329,7487,7675,7542,5573,7982,7500,8451,5564,7500,7500,5329,7982,7628,8052,5285,8170,8039,8222,7834,8427,8427,8005,8282,5237,5320,5248,5688,7757,5325,5688,7558,5391,0,8539,5578,5617,7628,8811,5209,8062,5296,8249,5717,5342,5367,7637,5391,5476,7538,7521,7565,7864,8994,7710,7542,7752,7752,7872,7898,5358,8354,8354,7667,7706,8297,7482,7706,5606,8467,5291,5296,8196,7706,7548,8387,7839,7715,7599,5501,5501,8560,7817,5372,5564,8297,5381,5564,7710,8307,8647,5274,8447,8594,5688,5337,7687,5255,7710,5337,7849,7653,8170,5301,5564,8297,7687,8131,7920,8073,5274,7605,7757,5432,5224,7786,8297,5726,5617,8964,8093,7786,7883,7786,7616,5301,7558,5726,8711,5717,7839,5296,5296,8211,5564,7924,8631,8577,7605,5476,5274,8144,5285,5301,5476,5255,5325,5201,5617,5476,7599,5305,7723,7867,7952,7817,7972,7800,10904,7972,7977,7648,7487,7706,5426,8577,5688,8670,5218,7977,8839,7935,8126,7864,7653,5320,8190,8527,7914,5655,5564,5274,5578,7478,5564,5301,7843,5358,5573,8467,5337,5381,7883,7834,8805,7828,8839,5296,7795,7972,8022,7893,8170,5651,7706,7706,7628,7667,7687,8249,8322,7487,5237,8099,5301,5459,7681,8272,8653,8811,7653,7628,7898,7893,7920,7994,5471,5476,8467,7588,7768,7542,7872,8447,8745,7994,7779,8039,5509,7582,7864,8282,8539,7599,8179,7924,7616,8039,7877,0,7986,7990,5325,7849,8805,8349,7500,7746,7628,5381,5391,8190,5464,7834,8411,8110,7994,5564,8467,
5712,7994,5564,8821,5661,5337,5688,7790,5367,7616,5391,5248,5629,7605,8548,5377,5564,8467,5296,8249,7986,8762,7982,5688,5459,5301,7795,8267,8170,7823,7957,8658,8164,7616,5419,0,7843,8334,7487,5237,7839,5237,8467,5534,5401,5448,8282,8164,8164,5325,7478,5688,7653,8196,5476,5358,5396,5629,5476,8762,8716,7849,7982,5296,7538,8391,5476,7752,7616,8909,7628,8777,9423,10904,7800,0,8987,5578,8340,7710,8539,5708,5329,7994,7972,5629,8033,8647,5726,7920,7628,5407,7632,7957,8196,7548,7482,5419,8952,7752,5285,5509,7972,5578,7538,7538,8039,7511,7790,5237,8827,5301,7800,7675,10909,8015,8211,7521,5305,8397,7487,5688,8594,7972,5352,5352,5285,5726,5432,8239,7533,7952,5237,5325,5622,5464,7893,8149,5291,5291,5296,7482,7877,5285,5261,7648,7675,8711,5248,8329,7487,10919,7558,8903,5301,5661,5261,5476,5485,8245,7877,7706,7839,7521,5617,7632,8196,7616,7637,5301,8115,8190,5401,7605,8115,5661,8211,5661,7706,8164,5407,8249,8052,8821,8154,5314,5381,8888,7849,7763,8010,5666,5381,7986,5651,5301,8104,5301,5688,8077,5329,8762,7675,7605,7610,5301,7610,8527,7491,5413,8987,7786,5676,7616,0,5237,8179,5726,7768,8762,8179,7800,5291,5381,7605,7687,5661,5464,8816,8456,8293,7528,7548,5305,7696,7990,7763,5237,5426,7834,8795,8467,0,8093,8190,5320,5274,8316,7610,5381,5274,8387,7982,7994,8479,5248,7994,8479,9032,7946,5726,5476,5476,8201,8811,7628,8473,5496,5476,5261,8033,5564,8234,5501,8952,5381,8136,7548,7883,8915,7548,7548,5661,7706,5661,8297,7972,8504,8196,5309,7667,8467,5712,7637,5708,5453,5296,8297,7977,5480,8093,5491,8222,8174,5642,7571,5358,8670,8772,8170,7849,5320,5519,5655,5476,8994,8073,8539,8631,5564,8234,7972,7972,8297,7533,5476,7790,5480,8136,5568,5274,7877,5726,7930,8234,5237,8884,8073,9818,5564,5476,7710,7692,8504,5419,7977,8879,5305,5381,7696,7632,8613,7605,7952,7482,7774,8577,10923,10934,9345,8903,0,8653,8800,5453,5320,7977,8653,7558,5301,5676,7914,8057,5717,5476,7628,5655,7511,5688,8658,5325,5476,8201,9345,8839,8302,8658,5301,5401,5401,7692,5407,8998,7877,5358,8234,8272,8467,8093,7710,8227,5442,
7888,8473,7779,8227,8456,8456,7893,8467,7632,8039,7834,8762,5237,7972,8170,7706,5325,7972,7795,8800,7935,5377,5377,5564,5248,5661,8411,5459,8653,8676,5519,5519,7849,8467,8222,8222,5564,7628,8641,7834,5296,7930,5573,5329,10940,10945,8267,8245,8131,8245,8359,8164,8467,8613,8716,7592,5612,8282,8068,7710,8940,5453,7616,5381,7542,8359,7692,5401,8222,5301,5578,5391,5329,5476,5459,5661,5274,7994,8473,8340,5325,5419,9367,8222,7521,10952,7734,5407,5622,5407,8397,5325,7883,8925,8010,8073,7521,7528,5432,7648,7800,5347,5708,7888,7952,8340,5342,8789,7877,5401,5358,5309,8903,5320,8772,5358,5347,7734,8613,7972,0,5320,8631,5391,8126,5661,8073,8033,8277,7610,5237,5274,5347,5578,5491,10961,8365,5391,5407,7533,5476,8297,8387,5237,5237,5642,8527,8772,0,8473,5564,7904,7849,5325,5325,5407,8721,8411,7834,7952,5291,7858,7790,5651,5651,7763,5391,7487,8427,7632,5291,5651,7681,5401,5642,7972,7667,5237,5261,7994,5237,5606,7628,5347,7864,8762,8196,8334,5325,8391,7616,8131,5726,8427,5309,8721,8919,5407,7632,5726,5407,7994,5291,5726,7632,5347,7914,8539,8721,5407,8721,7834,7952,5651,5391,8427,5347,5291,5325,5237,8539,5606,7628,8131,5309,5726,5285,8484,5309,8140,5291,7972,8783,5325,7786,7883,8397,8539,8987,5396,7768,5717,7710,5381,7952,7817,8062,5381,7643,5688,5426,7588,8447,5325,5329,7972,7687,5358,5426,7982,8987,7817,7972,5529,5573,5237,5476,5329,5291,5237,7834,5237,7834,7834,5693,5564,5688,5688,5285,8062,5325,8010,8329,7757,8062,5496,8272,7500,5381,5476,8504,8329,7482,5261,7706,5480,5325,8329,7482,7864,5501,8387,5642,8022,7914,7482,7528,5291,7675,8884,8033,5337,7972,7504,7565,7528,8783,5617,5274,8302,8068,7500,5476,5476,7588,7710,7849,7914,7849,7500,5655,7605,5241,8131,5209,5209,8427,7786,5564,7768,5459,8093,8539,7924,5301,5386,7533,7576,8811,5476,8170,5708,7687,5480,5476,5726,7999,5255,5358,7504,5726,7904,8245,8443,8272,7605,7839,5578,5218,7500,8329,5476,5464,7849,5391,5496,5285,5391,8727,5496,8131,7952,7768,5296,5381,5396,5671,7521,7994,7914,5442,8316,5651,8427,8391,7864,5296,7834,7504,7795,76
96,5413,7681,7710,5666,7605,8582,7473,7478,7628,8909,5325,7972,8504,7588,7843,7864,8582,7864,5391,8805,7883,8073,7877,5337,7946,8255,8196,9045,8345,7628,7675,7558,5381,5274,7478,7478,5524,8022,7734,5296,5485,8851,5655,8851,8762,10969,8227,8839,5381,7990,5726,8827,7675,5342,7599,8190,7893,7710,7738,5476,8136,7482,7757,7920,8287,7877,8033,9032,5325,7706,8772,8863,7994,8170,8745,5407,5241,8115,7967,7542,5314,7779,5407,7605,8052,8099,7914,8473,8359,7858,7628,5459,7500,7858,5391,8190,5391,7643,5396,7864,7548,8539,5661,5564,7734,8805,5407,7706,7628,5209,8732,5237,5296,7482,8345,5274,7823,8033,8527,7605,7924,7834,7946,8190,5717,7495,7706,8711,7491,7994,0,8909,7967,5261,5407,8073,7692,5464,8154,5717,7616,5651,7675,5651,5237,8322,8033,9641,5377,7588,7898,7710,7542,5391,5419,8467,5661,5529,8272,5642,5642,0,7628,8473,8827,8745,5301,5305,5305,8987,5578,9476,5407,8115,5381,5401,7957,8179,7742,8721,5237,5314,5708,5708,5309,5578,5476,7982,5419,7800,8783,7653,5396,7877,7500,8833,7719,5237,5617,8217,5241,5386,5347,7864,7616,5291,5325,9021,5285,8073,5296,5622,7715,8982,7687,5391,8255,8772,8982,7734,8119,8190,5372,8772,8903,5342,7542,5325,5285,5485,5401,8417,5476,7663,8427,7920,5296,5396,5476,5476,5261,8261,8417,7542,8863,7738,7982,7576,8052,7491,7616,5407,7628,5407,5296,8762,5578,8190,8365,5666,7914,7994,7877,5301,5476,8411,5578,7834,7864,5442,7994,5442,5642,7877,8261,7482,5347,8033,8154,5578,7811,8365,5476,8903,8427,7834,8762,7834,5325,5693,5564,8504,5325,5381,8062,8272,8261,5261,7706,5480,8329,5476,8329,5325,8504,5419,5296,7742,8170,8387,8884,7500,7864,8099,7504,8022,8527,5617,7565,5291,5241,7786,7849,5255,8443,5391,7504,7687,7588,5655,8539,7967,7839,5726,7999,5358,5476,5476,8582,5666,5391,7795,5391,5381,5296,7558,7605,7914,5396,7883,8131,7768,7952,8329,5476,5241,5671,7521,5496,7643,7500,7576,5381,7478,7628,8255,7478,5524,5337,5296,8582,8851,8022,7663,8745,7605,7967,7495,8762,5314,7893,7990,7994,7628,8839,7914,7757,7877,7779,9032,5342,8772,7914,5476,8711,8033,5459,5464,5407,5661,96
41,7491,7994,7864,7628,8190,7877,5209,8345,7834,8539,5651,7858,7500,5305,8903,7588,5661,5642,7898,5391,7542,5651,5309,7800,5578,5407,9476,8772,8982,8255,5347,7734,5291,7834,7542,8427,8033,7786,7542,8062,5367,5291,8062,7653,8062,5237,5237,8154,8239,7653,8762,7610,8307,5419,7706,5391,5301,8919,5509,5358,5320,5274,8527,7972,7972,7834,7834,5391,5573,5296,8068,5476,8068,5476,5407,7904,5730,7864,8140,7521,7811,5407,8800,5671,5726,5564,5237,8888,8052,7628,7605,7977,0,5237,8073,7752,7811,8888,8539,8548,5501,5476,7972,8222,7637,7653,7920,8136,7920,8131,8136,5337,7757,5305,7487,5519,7811,8653,7849,7849,5305,7653,7790,5301,8447,5358,7628,7977,8641,7790,7920,5237,7864,8170,5305,5573,8772,7811,5291,7930,5655,8467,5564,7774,7817,8594,5476,8560,5509,8751,8154,8010,8772,8721,8115,7972,7696,7605,5391,5358,5209,7696,7952,7858,8411,8349,5491,8119,8582,7893,8964,5476,7710,7790,7849,5358,5329,8504,5296,5671,7858,5209,7977,7500,9353,5285,5671,8205,5407,7616,8329,5655,5617,8888,8888,5688,5617,8732,8205,7576,8277,8365,7487,8033,8987,5606,8456,8845,5274,8349,5407,5342,5564,8772,8827,8827,7972,5325,8015,8762,5309,7504,8598,8267,8222,5437,8277,8527,8777,5325,8119,7663,5622,5564,8131,8159,8115,8239,7994,8126,8033,5622,7972,8863,8473,8267,8833,8772,8365,7834,10978,5261,5509,7616,5309,8811,5274,8205,5358,8762,5320,7834,9259,8190,7610,8811,8205,7930,5655,5564,8467,7817,7774,8115,5274,8126,8015,8154,7972,8010,8772,8751,8411,8131,7605,8721,7696,5407,5358,5391,8964,7952,8582,5491,7790,7893,7849,5476,9353,7972,7977,5296,5209,8504,5329,8205,5209,5671,5309,5688,5655,8277,5617,5564,8762,5325,5320,8987,7487,5358,7576,8845,8456,8772,8277,8527,8863,5564,5622,8159,8115,7834,8365,8267,8833,5261,7616,8811,7710,5237,5617,7823,7504,5237,7558,7849,7558,5578,5367,5629,7967,7967,5301,8484,5564,7999,8811,7706,5261,7723,8196,5274,7839,5291,8196,7710,7839,8033,8033,5459,8170,8783,8539,7719,8427,7605,8354,5726,5381,7710,5573,7795,5529,7839,7681,7888,8577,8811,5612,8140,5237,7999,7588,7482,8170,5407,5699,5629,8287,8402
,5329,5726,8903,5688,5209,5377,5726,7710,5464,8255,8255,5642,5372,5377,10982,8365,8365,8443,8994,8222,7692,7990,5471,5291,5564,8033,5407,7565,7972,7730,8354,8994,7706,8354,8322,5480,7658,5476,8184,8093,5712,7904,5459,7692,5480,7977,7839,8174,5280,7605,8170,5459,7972,5564,7533,8170,5261,8073,8447,8170,5301,8682,5476,5325,5491,5274,5285,7888,7605,5622,7473,5635,7952,7723,8005,7521,7710,5329,5413,7972,7977,7681,7774,5407,7738,8255,5358,5274,7888,5325,8170,5325,5320,5647,5573,5485,8077,7675,8073,7710,8658,8711,5320,7843,5218,5285,5442,8255,0,7738,5407,7706,8494,7994,5314,5381,5325,8077,8170,8170,8099,5329,8873,8359,8170,7710,5509,7768,5377,8073,8443,7867,5325,8140,8548,7774,8548,5352,7972,5241,7828,7972,7628,7888,5471,8104,5218,7548,8539,5578,8033,8663,5407,7795,5629,8170,5564,8443,5459,7858,8110,7495,5377,5496,8422,5407,8479,8451,5237,7738,8322,8427,7828,5377,5325,8077,7800,7511,8427,5358,5248,5329,8239,8140,5629,5491,5612,5237,8005,5325,7972,8322,8539,7800,8548,5401,5413,5372,8184,5432,8422,8170,8919,8554,5274,8010,5573,7548,5471,8073,8140,8443,5291,5291,7516,7738,8255,8255,7734,8239,5471,8498,8582,5372,7738,5358,5358,7605,8170,5573,8255,5471,5442,5325,7478,8354,8184,7516,8577,7972,5301,7972,7877,5573,8676,7994,7610,7516,5274,7994,8857,5347,5661,8261,5661,8077,7738,5726,5381,8498,8857,5237,8005,5291,7516,7914,7817,7883,10986,5726,5377,7888,7888,7817,7843,10994,7632,7663,5325,8170,5629,11001,11009,7834,11013,8756,5726,7957,8149,5480,7972,11023,5377,8479,11023,7628,7935,8387,7952,7667,7994,8354,7528,8539,8190,8010,8504,5261,8322,5320,8068,8033,5491,7864,8136,7706,7790,8821,8504,8010,5622,8062,7982,5726,5464,8539,8093,5491,5708,5655,8010,8582,8608,5352,7930,7786,7692,8174,7605,8164,7864,7904,7576,7972,5642,7990,5476,8745,5391,5255,7696,7723,8170,5285,8687,5329,8509,7768,7576,5437,7972,5617,7779,7628,8670,5337,7811,8267,5578,5509,5564,8745,7576,8447,8093,5578,8427,7779,5209,5296,7599,8287,8039,8795,5476,5352,7542,8234,8184,7858,8721,7930,8099,5213,7779,8821,8110,5325,7924
,5274,8732,8694,8658,5717,5337,8062,5320,5564,5209,5564,7752,8543,8427,7628,7752,8272,7786,5655,5367,8467,5329,8484,8783,5622,7994,7643,10830,7935,5285,8427,7734,8149,5501,8010,7516,5717,7719,8795,7811,5476,5325,8217,7696,7628,5274,8062,5666,8456,5301,5301,5329,7516,7935,8387,7952,8190,8539,8504,5622,7864,8099,8821,8062,7692,7990,5329,7576,7972,5476,7982,5655,5301,5642,8745,5391,5437,7972,5285,7576,7768,5329,5285,5578,5564,7779,8184,7930,5296,8234,7599,7779,8287,5209,8694,5564,5325,7752,5320,8456,8467,5367,8427,5329,7935,7516,7528,7786,7839,7839,7920,8397,8411,7986,7986,7839,5529,8282,8411,8411,8411,0,8411,8282,7839,8411,8411,8033,8613,8789,8789,5655,8287,8287,11031,8705,8411,8411,11039,11044,8239,8245,7877,8811,7542,7628,7628,7706,8329,5476,8039,8349,7706,8925,7599,7977,9032,7616,5377,8267,5309,8560,8387,8010,7478,8245,7864,7811,5578,11048,5309,8467,7994,5401,7710,7935,8447,7681,7972,5476,7592,7977,8033,5491,7696,8539,8312,7599,5459,8115,8272,5320,5480,11056,5485,5296,8316,5337,5651,5413,5325,8716,5476,5381,8863,8577,8329,8422,8022,7795,8272,8334,5730,8447,5471,7592,8190,8329,8201,5337,7675,5688,7853,7605,5688,8539,8539,5647,5485,5377,8307,5476,5578,8316,8687,7478,5391,7558,7588,5372,8658,8527,5401,5612,5296,5301,11061,8277,8543,8451,5291,5413,7811,5491,5329,5337,7576,5309,7628,8287,8005,7994,5377,5476,8245,8467,7632,5358,7495,7628,8903,5342,5471,5342,7834,8732,7710,5377,7628,5391,5655,5337,8267,7864,8411,8039,7864,5367,5464,5688,8479,8245,7994,11067,5476,8131,5642,5573,7511,8329,7738,8539,7710,8467,7588,7738,7542,0,5642,8721,8015,5708,5391,8604,7710,7588,5372,5578,5471,8539,7957,7710,8451,7516,9021,7648,7542,8190,7628,5476,7478,8239,8170,5391,5358,5642,8349,5661,5342,7811,8427,8909,7696,7622,8411,8411,7986,5301,8467,5642,7632,5301,5476,8805,8919,7706,5218,5261,7653,7628,8211,8613,7723,0,8582,5564,8756,8888,7565,5426,8136,7528,7914,7898,8467,8879,8154,5661,7898,7478,8170,5337,7653,7658,5688,5358,8456,5224,8670,7930,8631,8539,7967,7491,7516,8582,7952,7605,5391,7692,
7628,5671,5717,5237,7972,5325,8329,5730,5426,5285,5237,7952,7495,7843,7977,7908,5241,11075,8267,7834,7558,8307,7710,5329,5564,7478,5564,5524,8052,7843,7977,8845,7667,7616,8073,5237,7930,5301,8267,8201,7675,7706,7920,8745,8039,7576,8772,7967,5255,8277,8447,8033,5337,8201,7521,7883,8170,5218,8255,5476,8762,7834,5712,8893,7930,7864,7628,5676,7628,8267,5730,8005,7521,7521,5448,5629,8391,5237,8613,5573,5209,7967,8170,7478,7521,8267,8052,5218,5358,5305,8783,5564,8239,7482,5622,7982,8255,7834,8015,8010,8255,7500,8919,5358,7528,5358,8554,7994,5241,7790,5261,7516,8312,7883,8249,7491,8340,7908,8527,8249,7967,8582,5401,8365,7877,5337,7706,7834,7877,8365,8093,7811,5237,8527,8365,5301,5337,5693,5688,5314,8456,7576,5476,8560,7628,8484,7834,8397,7790,7883,7500,7853,5655,5501,11082,0,8267,7658,5606,5280,5480,7571,7653,7786,7588,8312,5442,5437,8391,7632,8287,5442,5337,8316,7972,5274,5419,7558,7482,5647,7675,5712,7692,7940,5666,7734,8397,8370,7834,5237,8929,7994,5329,5347,7834,8272,8456,8427,8104,8772,8397,8287,8140,8653,5377,7746,8762,7982,7500,5426,8772,8484,8104,8267,8312,7990,5448,5651,7768,5442,8451,8267,5218,8608,7687,8370,5629,5419,8647,5261,7628,5301,5372,8149,5476,5419,7734,5285,5237,8772,8312,8789,5476,5301,8170,5285,5476,8929,8613,8329,5442,7834,8093,5651,5651,8929,7528,5285,5237,5651,7834,8411,7774,5391,5337,5391,5391,5301,7548,7491,7779,5301,5309,5309,5688,5476,7757,8479,7999,5693,8312,5688,7849,5564,5655,7972,7904,8375,5564,8716,7757,5261,8255,5568,8732,5209,8010,7628,5396,7653,7817,7706,8211,8915,8539,7487,5237,5476,7637,5391,5647,5237,8293,7542,11094,5407,7920,7491,7482,7904,7565,8033,5426,7853,8354,7834,5564,7920,8136,7528,7864,5291,5471,8131,7482,7542,8282,5325,8504,8783,8816,5501,8267,5325,8196,7920,8039,8062,7482,7667,5391,5476,7548,8077,5671,8915,8154,7658,8756,11104,11112,11119,0,7888,5209,8447,5647,5391,8438,7786,7864,8093,5224,5476,7786,7904,7920,7628,7487,7571,8201,5337,7828,7977,5655,5391,5391,8964,8170,8427,5391,5617,8190,7999,8222,7849,5358,8119,5573,5391,
8073,7605,7710,8925,8312,8316,7542,5564,8533,7511,7542,5564,5301,8354,7920,7924,8282,8073,5381,8131,5320,5241,7715,8447,8077,8539,7977,7715,7883,7914,5476,7487,8582,9012,7930,5407,8316,5426,8062,5285,7710,7696,5717,5224,5391,8711,7977,7482,7667,7576,5248,8987,8316,5305,5666,5325,7628,7500,8504,9021,7972,7723,5261,7710,5274,5426,7768,7920,8964,7605,5730,8391,7473,5642,5274,5274,5519,7667,5496,8149,5325,7920,8613,5651,5476,7482,8687,8345,9562,5337,7719,8898,7542,8375,11126,7663,7757,7653,7872,5391,8946,5688,7675,5578,5519,8205,5377,8451,7795,7817,7548,8307,5329,5712,7843,5509,7528,8267,7930,7478,7588,7628,7877,8805,8073,7653,7977,8062,8498,8527,7877,5509,7972,7487,8190,7478,7478,7681,7817,5314,9471,5426,7542,7914,7653,9471,5661,5573,5241,8170,5274,8964,8211,7734,5301,7533,7872,7558,5524,5301,8594,11135,7605,11142,5647,7511,8397,8170,8227,8467,5407,8461,7757,8272,8227,8099,7473,8093,7930,5329,5301,8322,8057,5676,8438,8397,5352,8345,7920,5274,8387,8863,5666,9051,8571,7738,8653,8762,7994,7805,5209,5241,8543,8077,8827,8658,8772,5693,7706,7542,5314,7706,7628,5309,8721,7790,8227,5606,5237,5401,8272,7786,8451,5320,5655,5329,8249,5329,5396,8473,5205,8277,8800,11147,8201,8716,5509,8365,0,8407,5274,0,8154,7779,5933,8514,8509,7616,8539,5401,8039,5274,8762,5647,7994,8827,5564,8110,5325,7864,8110,8033,5377,7687,8062,7924,8676,5367,7628,8946,5426,5464,7994,5564,7999,5496,7867,5617,8062,8005,5606,8711,8255,8255,5730,7853,8827,7811,7491,8438,5407,8676,5407,8022,8005,7982,8190,7888,5381,5655,8411,7548,5209,5519,7491,5712,5213,7663,5381,0,5712,8287,7738,7898,5367,5401,5612,5237,7511,8443,7491,8417,5534,8164,5629,5612,8322,5717,8851,7805,8073,7533,7730,5358,8940,8461,5209,7478,7616,5464,7681,7511,8509,7962,8302,5448,5509,8052,5655,5655,7632,5476,8467,5629,8077,5237,7977,5717,7538,5676,11126,11104,5612,0,0,0,8136,5329,5612,7742,8443,8641,5519,8721,8190,5401,8087,5708,7800,5305,7632,5358,7994,7511,8539,8255,8827,5476,5329,5708,8946,8322,8402,7706,8227,5629,7800,5218,7877,8527,5485,8473,53
20,7957,5519,8827,7610,8365,5407,8340,5301,5367,5237,8756,0,5413,0,0,5241,5329,5396,5325,7734,5296,8322,8312,7516,5612,5519,8589,7558,8005,7487,7511,5386,7946,8959,5342,8010,8255,7768,7924,5629,7521,5291,5285,9021,8239,5381,8149,8217,8772,8365,5325,5407,7548,8919,8387,7616,8217,5291,8417,5377,7734,8964,0,0,5693,5261,8509,5407,7893,5342,5329,5676,5296,7628,5401,5314,8255,7542,5314,5666,5358,8964,7811,7491,7888,8039,8903,8170,5501,8245,7930,5476,7663,8548,8329,7757,7605,7946,8863,5325,8077,7738,8154,8745,8888,7616,8005,8381,7500,5407,5407,11155,5496,7972,7763,5301,5666,5325,8857,8582,8987,5352,7986,8762,8851,8287,5329,8473,8222,5329,0,0,7610,7864,5329,7528,5407,5347,8073,5407,5642,7877,7521,5407,5237,8721,8777,8077,5666,8498,5726,7533,7632,8857,8375,5372,8077,5291,5629,5320,5347,5309,5688,5476,5655,5693,7999,8312,7757,5647,7542,8732,5261,5325,8010,7757,8514,8293,5237,7637,5426,5671,8201,7658,8170,5471,8783,7548,8136,7930,8461,8282,7542,5642,8196,7667,7478,8131,8756,7565,8154,7853,8816,7904,7628,7542,5241,7542,7786,8073,7864,8312,8201,7904,8354,8857,7715,7883,7977,7828,5337,8964,8073,8093,8851,7542,7511,8222,5391,8190,5617,5358,8447,7920,7888,5426,8898,8149,7696,8211,8387,5337,7558,7977,7491,5386,7972,8903,8316,5329,7757,7482,8509,7687,7605,5274,8805,9003,5274,5396,7768,8302,7977,5642,5717,8391,5241,7719,5285,5476,7877,5248,5629,8345,7482,8613,7605,8149,7734,7478,8277,8594,5401,8946,7675,5612,5301,5241,5573,8039,5524,8964,5519,8451,9471,8527,7528,7511,8509,5407,7872,5377,7843,7542,8170,9051,8322,7888,5606,8451,7790,8543,7706,8407,7473,8653,8227,5237,7786,7632,7805,8863,5309,7972,5274,5676,8154,8057,8772,5693,5407,8170,8249,5209,7867,7616,8509,8322,8022,7864,5496,8255,7548,8676,5448,5464,7811,7663,8762,5712,7957,5209,7893,5712,5305,5655,8073,5717,8077,5629,5676,8077,8136,7511,8052,7898,5476,7977,8888,8164,8473,5629,7800,7632,5413,5218,5485,8539,8443,8190,7511,8772,5291,8239,7734,5329,8217,5347,7734,8498,8322,8919,8154,5666,7811,5314,5401,5476,5496,8473,7986,7877,7521,82
27,8227,8484,5708,5459,5726,8239,5651,8227,8514,7538,8969,5261,8154,8514,7834,5358,7653,5358,8589,8509,8756,5413,8062,8489,8589,5274,5274,5407,8438,5367,11161,8307,7687,5386,5407,5352,5367,8093,8411,5358,8489,7849,8721,5717,5717,8048,5573,8062,7952,8438,7719,8653,7843,5329,8805,8302,8170,8354,8354,7877,5726,7516,8227,8635,8653,7834,8438,7834,7864,7643,7628,7752,7521,8222,7521,5726,5642,7752,8282,8345,7994,7482,8144,5699,5347,8762,0,11167,5606,8077,7738,8509,7849,5699,8519,8721,8307,7706,8005,5629,8721,8647,8795,5237,5296,7877,7920,8903,7811,7738,7752,0,8514,8969,5261,7834,7653,5358,7752,8519,8489,7994,5274,8062,5407,8438,8756,8514,7687,5386,7952,7752,7738,8438,5329,8509,8048,5717,7849,8653,8484,8354,7843,8762,7628,7834,8227,5237,7752,7643,7834,7864,7877,5347,5726,5296,8144,5699,5717,5606,7738,5699,8795,7811,5564,5564,5333,8217,7528,7542,7478,5476,5480,7482,7924,8307,7864,8119,8282,8456,8946,8267,8821,7972,7920,5485,7565,7924,7482,5655,5381,7558,7864,5726,8539,8443,7571,7828,8093,9051,5480,5480,8312,5224,5564,5325,5329,7610,8033,5426,7888,7957,5305,5419,5320,7888,5274,7696,7952,7628,5671,5261,8329,8255,5426,8104,5564,5358,5367,5261,8119,7972,7734,8307,8467,5655,5573,5274,7853,8077,8267,5274,8456,8456,7967,5476,8217,8099,7482,8170,7805,8033,7920,8093,8272,5274,5329,8119,5274,7482,7605,7924,8705,5261,5248,7834,7482,7628,5377,7628,7610,7994,7994,8077,8217,5342,8345,5464,5381,5342,7482,5671,7834,5296,5717,8267,7478,7994,7663,5237,5629,5325,8015,8479,5708,5237,7482,11175,7648,7516,8422,8919,7920,5342,5342,7628,5274,5501,8447,7696,5325,8479,5237,7482,7864,5296,7610,5237,5301,5301,5301,8863,7849,7972,9032,7667,7823,5476,5699,7834,7542,8387,7504,8387,5325,7786,7811,7972,8131,7667,7839,5218,8170,5573,7982,8925,7632,7628,5442,7757,5342,7653,8154,8617,8721,5573,8925,5218,5325,5237,5442,7511,5301,7823,5529,8925,5325,5325,7628,7628,5529,8136,8998,7719,7715,7752,5501,8334,8267,5301,5301,8608,8093,5666,8345,8222,8249,7828,8903,5426,7478,7999,7500,5325,7715,8033,7675,7687,8805,5655,
8456,5712,8093,7706,5442,8154,8302,7893,5534,8427,7482,8447,5442,8068,7516,0,7582,7616,7478,8093,8617,8093,5367,8062,7482,5305,8349,8267,7511,8249,8888,7478,7994,8154,7482,5237,5476,7663,7883,8919,5274,7628,5329,7610,5491,5325,8334,7924,7904,7920,7994,0,5237,5325,8349,8249,8249,5666,5301,8154,7663,7893,5491,7610,8093,7663,8527,5301,8222,0,7849,7849,5224,8745,8745,5485,5606,5655,5485,7558,8745,5485,5485,7828,5485,5606,7893,7893,8898,5651,7746,7746,8174,7491,5606,5296,7491,5717,5693,5496,5407,8504,8539,8762,7478,8504,8196,5309,5529,8816,7904,7482,7730,8136,5305,8443,8245,8282,5476,5642,8272,5480,7977,8062,8174,7924,11180,7482,7977,8272,5325,5661,8345,8345,8727,7817,11186,8245,8255,8805,5509,7616,5342,7681,8255,8190,8571,5413,7738,8015,5224,8144,8170,7986,5661,8110,7898,5476,5688,5688,7849,7675,7715,8170,5407,8411,8539,5407,11194,8272,8131,7738,7930,5661,7533,5717,5358,8144,5629,7957,7952,8255,5529,5325,5407,5419,7877,8245,8154,5274,7542,8762,8631,5407,5347,7994,8504,5564,5712,5676,5717,7994,8255,7653,8227,0,8110,8329,5617,7994,5717,7930,8272,8131,8267,0,5358,7994,5296,8762,8631,8762,7994,8504,5564,7653,7994,8267,8272,8484,8484,5274,5661,5274,5325,7482,8925,8267,8670,5333,5352,7946,7616,8267,5352,7482,7482,7521,7982,7491,5693,8745,7533,7521,8598,7653,7500,5476,7500,7786,8022,8005,7706,8560,7628,5509,8282,5471,8539,7883,7533,8312,8093,8604,5485,5666,7849,8255,7864,7864,7994,5381,7795,7914,5476,7914,8370,8217,7858,5255,5666,5564,7977,7853,5296,8005,7977,5333,5606,7558,7616,7667,7982,7653,5485,8422,8422,8751,8277,8422,5209,5209,7805,5693,8277,7834,7653,5407,7990,8402,7592,8196,5476,8159,5377,7864,7864,7834,7643,8795,5218,5622,7834,5274,7528,5476,8467,8940,7828,7828,5419,7692,5666,8239,8647,5476,7800,7706,7648,8052,8255,7786,8329,7542,5296,8427,8613,8062,8888,5274,8751,5329,5347,8077,7768,7491,5693,8745,7653,7521,8598,7500,7500,5509,7786,5471,7706,8282,8022,8560,7628,5329,8093,8312,5485,5381,7977,5606,7653,7616,7592,8370,5476,8751,5296,8422,7653,7616,7616,7990,5377,5218,78
64,5622,7834,7864,8077,7800,7828,8940,8052,5666,8427,8613,8751,7768,8527,8473,11198,5564,5367,8427,8473,7730,7904,7658,8577,7795,8190,8670,8170,5358,7487,7994,7924,5642,5448,8509,5514,8010,7511,5237,8239,8647,8647,7511,8473,8473,8473,8239,0,7719,8527,8473,8527,7924,8427,8473,7730,8170,7487,5448,5642,7511,8647,8473,8473,7893,8010,7893,7893,5391,5391,8179,5325,5693,7487,5480,8427,7962,7521,8068,8504,7628,5676,7710,7482,8010,8010,7962,7482,7605,5476,8533,5358,5381,8272,7511,7839,8964,7487,8222,5391,7888,8370,8504,5606,5285,7977,5224,5642,8316,7839,7521,7924,7924,7696,7834,5333,5476,8179,8073,8028,7864,8201,7667,8307,7588,7628,5391,5642,7643,8039,5391,5407,8863,5224,5274,8201,7491,8772,7893,8015,7994,8721,7864,8699,7643,7982,5464,8964,5296,5407,5730,7849,5426,5501,7994,7786,5496,8022,5629,8073,5524,5237,8005,7511,5448,8272,7491,8267,5305,5629,7800,5358,7628,5519,5309,8589,8005,5622,5261,7710,7883,5476,5325,7648,7558,5218,8272,5296,7521,8427,5501,8370,8249,7834,5305,7877,7521,7632,8857,7700,5391,5693,5325,5480,5426,8068,5237,8504,7628,7710,8010,7482,5407,5391,8222,7487,7888,5476,7696,7648,7521,7977,5333,5285,5476,5224,8073,8307,7864,7628,8028,7667,8039,7643,5274,8721,5496,8005,7786,5448,7877,7491,5305,8073,7511,5519,5309,7800,8589,5622,7700,8087,8005,8039,7521,5501,7904,8447,5358,8073,5647,7653,7893,5407,8345,7663,5564,5274,7752,7528,5501,8888,5426,7538,7628,8527,7653,8539,5480,5480,7710,8329,7605,7972,8456,7528,8915,7730,7482,7752,7972,7883,5329,5337,8073,8222,7908,5480,5476,5726,0,5726,7592,8073,7999,8467,5352,7576,8170,7605,8293,8170,7920,8205,7977,8093,5655,7658,5564,7924,5391,5358,5480,5480,7487,7511,7538,7858,8272,7972,8164,8909,7582,8329,7768,7914,7592,7696,8370,8073,7605,7482,7632,11206,7883,5661,7511,8170,8022,8196,8211,7779,7653,5647,8190,8422,7667,7864,7558,7528,7663,5329,8863,7576,5617,7858,8653,8272,8277,8548,7990,7706,7706,7834,7893,5514,7834,5381,5642,7478,7858,8827,7858,7542,7893,5464,5407,8154,7628,5377,7768,5367,8548,8005,8110,7487,7795,5480,8005,5448,7
542,7558,7972,7511,8334,8909,5237,8340,8762,7834,8467,7967,5514,5261,5301,7972,8617,5329,5237,7632,8015,5305,5708,8179,8647,8827,5726,5358,7972,7628,7500,5396,8073,7588,7675,7516,7877,5471,7511,5480,8833,8772,5285,8964,7834,7632,8427,5485,5476,7491,5480,8888,7967,7834,5352,5329,8909,7521,5325,8617,8170,5237,7811,5301,8473,7538,7628,5480,8329,7710,5726,8915,8073,5329,8205,5391,7487,5564,8170,7967,7605,5480,8293,7977,5476,7658,7675,7538,7482,5285,5396,7632,8370,7858,8473,5301,7558,7834,7528,8196,7667,7706,7706,7990,8863,8548,7588,8179,7858,7972,8005,5514,7478,5708,7511,7542,5261,8647,7632,8827,7877,7967,5325,8617,7521,7786,7994,7994,7994,7628,8811,5476,8608,7853,5661,8222,5358,7710,5377,8539,7692,8370,8255,8131,7473,5717,8422,8845,7858,5358,7990,9012,7628,5342,7957,8073,7675,8334,8073,7839,8739,8888,5305,8239,7957,5386,8762,8205,5342,5377,8888,8739,5329,5524,5524,8255,8898,8255,8149,5372,8473,8653,8653,5377,7565,5519,8297,7790,7883,8653,8888,8048,7681,5352,5209,8048,8297,5377,8174,7920,7790,5564,7696,8164,5726,7817,5519,7795,5325,5352,9361,8670,8302,5377,5301,8888,8548,5377,5352,8022,5241,7768,8548,8598,5407,7888,7982,7986,8484,7706,5401,5655,8888,5352,5305,8589,7800,7800,9026,7500,8577,7542,7542,8789,7811,7908,8381,8888,8577,8261,8307,8307,5386,8062,5237,8307,7653,8307,8307,8484,8227,7628,7628,5301,5676,5564,7542,7952,8548,7511,7952,8987,7628,7952,5712,5325,7706,5381,8005,7643,7904,8312,5712,7500,7834,7675,7779,7628,8422,7706,7599,7779,7994,5407,7710,8647,5358,5305,5325,7500,5442,7834,8427,7628,8479,8504,5325,11212,7817,5480,7757,5325,7500,7864,7864,8302,8560,8068,5305,5381,7914,8010,8467,5358,5329,7752,7982,5329,8925,7565,5501,8136,5337,11217,11225,7849,5367,5726,7653,7920,8093,5480,8073,5688,5255,5564,5358,5325,7994,8170,7757,8073,5337,8234,7920,8184,5426,7658,8222,5564,7687,8170,7786,11231,8316,11239,7738,5381,5320,5730,7521,7696,8345,7994,7681,7605,7482,8577,7632,8316,5476,7706,5224,7994,5285,8201,7952,5274,5717,5296,11243,11249,8898,11255,7888,7667,5377,7800,767
5,8925,8302,8196,7628,8028,5578,8234,8845,7588,7478,5688,8805,5564,5509,5337,5301,8302,8302,5524,8249,11261,11267,11274,11285,5218,8447,5209,7706,8745,7521,8028,8104,5325,7757,7867,7786,7576,7582,8277,7893,8447,5476,8653,5329,8484,8227,5485,8099,8093,7967,5301,8249,8548,7972,8184,11295,11303,11310,5391,7914,7643,5391,5730,5426,7605,7986,8548,5325,8411,8411,7811,7768,8115,7994,7994,7628,8658,8110,5459,5464,5401,7834,5688,5688,5407,5358,7864,7924,5564,8159,5407,5367,5480,7982,11319,8893,11328,7752,5407,8052,7478,7565,5514,7511,7538,5391,5391,7482,11336,8909,7738,5642,5717,8164,7542,7706,7752,8893,11339,5401,5708,5333,5296,8438,5325,7681,5726,5407,5514,7800,5237,5688,8473,5325,5325,7605,5419,5688,5622,5218,8015,8756,5529,5432,5291,5726,7521,8073,5285,8329,7588,5464,9021,5261,5261,8010,5291,7516,8329,7622,5237,11349,11354,5501,8721,5730,7908,9353,5514,8427,8811,7952,8119,5301,8227,11339,11360,11368,8613,5325,7500,8154,11360,5301,8577,5301,7763,5655,7521,7864,5329,8721,5301,5274,7628,8479,5325,5337,8068,5329,7565,7904,7849,8073,5255,8184,5329,5337,7687,5564,8073,8222,5730,7920,7658,7952,5381,8898,7994,7696,8316,8115,5730,7908,5325,5285,5274,8925,7521,8329,8845,5301,5401,5407,5301,5391,7681,8234,8302,5509,7667,5325,5218,8745,8093,7706,7967,7893,8653,8227,7786,8447,8184,7757,5485,7914,5391,5209,5501,5459,5358,8227,5391,7752,7994,8159,7864,5688,5564,5464,7768,5419,8411,5514,5708,7706,7738,7482,5642,7565,5407,5333,8473,5529,5432,7800,8438,5218,7994,5237,5291,5261,7516,9021,8154,8811,5301,8427,8721,5453,5476,5564,5301,8484,8201,7834,5564,7757,5325,8527,11375,8811,5391,8527,8391,8222,8467,7972,7982,8196,5564,5501,7752,5407,5391,7628,8998,11380,5291,5291,7920,7811,5655,8222,7834,8387,5241,7565,8527,7752,7742,8594,7977,8293,8093,7763,5564,5480,7752,5301,8411,7972,5717,8467,5209,5726,7675,7972,7883,8170,5337,7786,7548,7628,7924,8164,8387,8964,7628,11386,7616,8863,7478,7696,7795,7663,7972,7834,7946,5285,5325,8577,7605,8504,5476,8062,7632,8613,8711,5717,8504,5285,5519,7576,7972,763
2,11393,11402,9457,8375,7977,5325,7628,7811,5480,8201,7478,5676,7628,8073,8329,8329,5358,5237,7667,8170,5647,5485,5377,7864,7864,8582,7982,7478,7914,8277,11408,11417,5329,5255,5320,5564,8345,7999,5352,8196,7542,7930,7757,5329,5699,5407,8170,5647,8387,8467,7706,5301,7491,8863,8571,8863,8653,8119,7706,5485,5476,5476,5485,5209,8277,7582,7706,8104,8845,8170,5726,11425,11432,5325,5320,0,7710,7622,8110,8345,5688,5377,7982,5377,7864,5381,7790,5564,8104,5647,7834,7849,8467,7858,8267,5712,7982,7616,5471,7715,8170,11445,8777,7565,7786,7616,8467,8190,5534,5391,7849,5573,5629,5367,8893,7511,5325,7786,5407,9032,7653,5209,5209,8447,5642,7834,5325,5301,5606,5568,5377,5377,8447,5647,7538,5285,5524,5606,8033,5301,5622,5578,5708,5642,5476,7742,7710,7972,8239,8164,7957,5358,8617,5314,7628,7478,5291,7482,7768,7487,5285,5476,5396,5358,7616,7588,5464,8010,5285,8239,7834,8898,8484,5274,5274,5647,8349,9021,7628,7616,5329,8641,5274,5529,5476,7920,5320,7632,7706,5476,5325,5578,7628,8427,7491,7924,7920,8381,8154,5651,7616,8249,8539,8354,7628,5666,8222,5329,7849,7610,8617,8354,7616,8721,5726,5301,8375,5453,8484,5325,8467,8391,5391,7742,8387,8777,8222,5655,7786,7582,5329,8387,7675,7924,8093,7972,5726,8467,5529,5480,7487,7972,7696,7795,5519,7946,7576,5717,8375,8062,7478,8073,5301,5676,7982,7864,7628,5274,5377,7478,5699,7622,8345,8653,7930,5352,7542,8104,8845,8467,7588,7982,7849,7864,7786,5688,5209,5712,7715,8447,5642,5534,7511,5325,8447,5407,7849,5476,7616,5578,8239,8239,5285,8484,7628,5329,8190,8427,7616,8154,8249,8721,8617,5329,5309,8093,5407,5274,8451,5407,5407,7834,8451,5329,5337,8140,5325,8473,8140,8473,7605,7667,7605,5407,5651,5651,7478,7511,8033,7667,7516,8447,7706,5329,8484,7667,5485,5301,7521,7834,7977,5651,5301,5617,8015,7516,5485,5325,8093,7834,8140,8349,8349,5717,8783,5564,7746,7746,5564,8174,5726,5726,8711,5564,5274,8277,5726,7746,0,8527,5564,5726,7746,7538,5305,5305,5296,7538,7967,5362,5501,5464,5464,5309,8687,5606,7853,7946,5237,8582,7946,5320,5301,8184,7710,9372,9372,5476,7542,78
83,5237,8641,5305,5305,7542,5491,5573,5337,7828,5476,5367,7834,5726,5712,7834,5485,7628,5301,8903,8762,8179,7482,8345,7834,8772,8345,5655,5491,8179,5476,5712,7883,7834,8762,5329,7972,5501,5564,5564,8438,8438,8467,8140,5726,8783,8631,7605,7972,8211,5708,5432,5480,5358,8467,8783,5480,5693,5651,8716,5693,5209,7786,7786,7582,5501,8272,8467,7920,8227,5524,7706,8467,5629,8909,5320,5320,5501,7893,7752,7904,7757,5480,8316,5726,8119,5391,5337,5391,8805,7478,8184,5485,7643,8170,7834,5647,5377,5237,5274,7834,5666,5358,5642,5688,7653,7478,7478,5730,5237,5717,7687,5519,5534,7687,8789,7700,7706,8533,5325,5209,5325,5325,7706,5325,7710,8033,8033,7849,8387,8800,5661,8174,8451,5391,5209,7710,8184,8170,7681,8093,8093,5573,7768,5661,8800,8077,8484,5642,8287,8653,7628,5573,5476,8447,8451,7967,5726,5362,5274,7742,7864,8494,5476,8287,7967,7828,5573,5309,8387,7710,8033,7849,8800,8170,8093,8174,7681,5209,8800,7628,8287,5726,8494,7610,8334,7817,8334,7834,7610,7610,7817,8795,7738,8093,7738,7610,7817,8795,7952,5688,5432,7952,8354,7805,7849,5291,5661,7628,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4269,11454,0,0,11459,0,0,0,11464,0,0,0,0,0,0,0,11469,11474,0,11479,0,11484,0,11490,0,0,0,0,11495,11500,0,0,0,0,0,0,11506,0,0,0,0,0,0,0,0,0,0,0,11512,0,0,0,0,0,0,0,11519,0,0,0,0,0,0,0,0,0,0,0,11525,5599,40,11528,11531,11535,335,11539,4899,11543,24,11546,11549,11553,11556,380,11560,52,11563,11566,32,3103,11569,11573,2795,11577,11581,11586,11591,11595,11600,11604,800,11608,11612,11617,11621,11626,11630,2809,11634,11638,2806,11642,11646,11650,2792,9676,11654,11659,11663,11667,11671,11674,11678,11683,11687,11691,3457,11695,11699,11704,11708,11713,11717,803,9686,11721,11726,11730,11735,11739,3471,11743,11747,11751,3454,11755,9690,11759,11763,11767,11771,11774,11778,11783,11787,11792,11797,11801,11806,11812,11818,11823,11829,11834,1073,11839,11844,11850,11855,11861,11866,11871,11875,11880,11885,11889,11894,11899,11904,11908,11913,11919,11924,11929,11934,11938,11943,11948,11953,11957,11962,11968,11973,11979,
11984,11989,9710,11993,11998,12003,12007,12012,12017,12022,12026,12031,12037,12042,9721,12047,12051,12056,12062,12067,12072,12077,12081,12086,12092,12097,12103,12108,12113,12117,12122,12128,12133,12139,12144,12149,12153,12158,12163,12168,12172,12177,12183,9740,12188,12192,9745,12197,12202,12206,2599,12210,12214,12219,12223,12228,12232,845,12236,12240,12245,12250,12254,12259,12263,2613,12267,12271,2610,12275,12279,2596,12283,12287,12292,12296,12300,12304,12307,12311,12315,1733,12319,12323,12327,833,12331,12335,2759,12339,12343,12347,2738,12351,12355,12360,12364,12368,12372,12375,12379,12383,2819,12387,12391,12396,12401,12405,12410,12414,985,12418,12422,12426,2833,12430,12434,12438,12442,12446,2816,12450,12454,12459,12463,12467,10579,12471,12475,12480,12484,12488,3120,12492,12496,12501,12505,12510,12514,816,12518,12522,12527,618,12531,3194,12535,12539,432,12543,12547,12551,3183,12555,12559,12564,12568,12572,324,12576,12580,5230,12585,12590,12594,806,12598,12602,12607,12612,12616,12621,12625,2854,4993,12629,2851,12633,12637,12641,2840,12645,12649,12654,12658,12663,2496,12668,12673,12679,12684,12690,12695,942,12700,12705,12711,12716,12722,12727,2514,12732,12737,2510,12742,12747,12752,3201,12757,12762,12768,12773,12778,10015,12783,12788,12794,12799,12804,10060,12809,12814,12819,10264,12824,12829,10330,12834,12839,12844,10207,12849,12854,12860,12865,12870,12875,12879,12884,12890,12896,12901,12907,12912,3064,12917,12922,12928,12933,12938,12943,12948,12953,12957,12962,12967,12971,2950,12975,12979,12984,12988,12993,842,12997,13001,13006,13010,13015,13019,1400,13023,13027,2957,13031,13035,13039,2947,13043,13047,13052,13056,13061,13066,13070,13075,13081,13086,13092,13097,4878,13102,13107,13113,13118,13124,13129,13133,13138,13143,13147,13152,13157,13162,13166,13171,13177,13182,13187,13192,13196,13201,13207,13212,13216,2547,13220,13224,13229,13234,13238,13243,13247,839,13251,13255,13260,13265,13269,13274,13278,1426,13282,13286,2554,13290,13294,13298,2544,13302,13306,13311,13315,
13319,4493,13323,13327,13332,13336,13340,3228,13344,13348,13353,13358,13362,13367,13371,915,13375,13379,13384,13389,13393,13398,13402,3242,13406,13410,13414,3239,13418,13422,13426,3225,13430,13434,13439,13443,13447,2998,13451,13455,13460,13464,13469,13473,836,13477,13481,13486,13490,13495,13499,1311,13503,13507,13511,3009,13515,13519,13523,2995,13527,13531,13536,13540,13545,3271,13550,13556,13561,13567,13572,1060,13577,13582,13588,13594,13599,13605,13610,3289,13615,13620,13625,3285,13630,13635,13640,3267,13645,13650,13656,13661,13667,13672,13677,13682,13686,13691,13697,13702,13708,13713,13718,13722,13727,13732,13736,13741,13746,13751,13755,13760,13766,13771,13776,13781,13785,13790,13796,13802,13807,13813,13818,13823,13827,13832,13838,13844,13849,13855,13860,13865,13869,13874,13879,13883,13888,13894,13899,13905,13910,922,13915,13920,13926,13932,13937,13942,4136,13947,13952,13957,13961,13966,13970,1623,13975,13979,429,13983,13987,13992,13997,14001,14006,14010,2537,14014,14018,1620,14022,14026,14030,852,14034,14038,14043,14047,14052,3114,14056,14060,482,14064,14068,14072,3126,14076,14080,14085,14089,14094,14098,1044,14102,14106,14111,14115,14120,14124,3137,14128,14132,464,14136,14140,14144,3123,14148,14152,14157,14161,14165,14169,14172,14176,14181,14185,14189,2864,14193,14197,14202,14207,14211,14216,14220,926,14224,14228,14233,14237,14242,14246,2878,14250,14254,2875,14258,14262,14266,2861,14270,14274,14279,14283,14287,5159,14291,14295,14300,14304,14309,6491,14314,14319,14325,14331,14336,14342,14347,6483,14352,14357,6509,14362,14367,6505,14372,14377,6487,14382,14387,14393,14398,14403,14408,14412,14417,9839,14423,14428,14433,14437,14442,14448,14453,14459,14464,14469,14473,14478,14484,14489,14494,14499,14504,14508,14513,14517,14522,14528,14533,14538,14543,14547,14552,14558,14563,14567,2678,14571,14575,14580,14584,14589,14593,826,14597,14601,14606,14610,14615,14619,1325,14623,14627,2689,14631,14635,14639,2675,14643,14647,14652,14656,14660,14664,14667,14671,14676,14680,1468
5,6435,14690,14695,14701,14706,14712,14717,992,14722,14727,14732,4430,14737,14742,6449,14747,14752,14757,6431,14762,9853,14767,14772,14776,14781,14787,14792,14797,1464,14802,14807,14813,14818,14824,14829,3166,14834,14839,14844,3162,14849,14854,9858,3144,14859,14864,14870,14875,14880,14885,14889,14894,14900,14905,14910,812,14915,14920,14926,14932,14937,14943,14948,3342,14953,14958,14963,3338,14968,14973,3320,14978,14983,14989,14994,14999,15004,15008,15013,15019,15024,822,15029,15035,15040,15045,10693,15050,15055,15060,15065,15069,15074,15079,15084,15088,15093,15099,15104,15109,15114,15118,15123,15129,15134,15139,4604,15144,15149,15154,15158,15163,15168,15173,15177,15182,15187,15192,15196,15201,15207,15212,15217,15222,15226,15231,15237,15242,15247,988,15252,15257,15263,15268,15274,15279,327,15284,15289,15294,2305,15299,15304,15309,2696,15314,15319,15325,15330,15335,15340,15344,15349,15355,15360,15364,819,15368,15372,15377,15381,15386,15390,2668,15394,15398,2665,15402,15406,15410,2651,15414,15418,15423,15427,15431,4486,15435,15439,15444,15448,15452,3208,15456,15460,15465,15470,15474,15479,15484,15489,15493,15498,15502,3222,15506,15510,15514,3205,15518,15522,15527,9873,15531,5156,15535,15539,15544,15548,15552,2712,15556,15560,15565,9877,15570,15575,15580,3499,15585,15590,15594,2726,15598,15602,15606,2709,15610,15614,15619,15623,15627,15631,15634,15638,15643,15647,15652,4282,15657,15662,15668,15674,15679,15685,15691,15696,15702,15707,4269,15712,15717,15722,4265,15727,15732,15738,15743,15748,15753,15757,15762,15767,10019,15772,15777,15783,15789,15794,15800,15806,15811,15816,10268,15821,15826,10211,9881,15831,15837,15842,15847,15852,15856,15861,15867,15872,15877,2968,15882,15887,15893,15899,15904,15910,9886,15916,15922,15927,2986,15932,15937,15942,2964,15947,15952,15956,2891,15960,15964,15969,9900,15974,15979,15984,15988,15992,2905,15996,16000,16004,16008,16011,16015,16020,16024,16028,347,16032,16036,16041,16046,16050,16055,16060,16065,16069,16074,16078,1318,16082,16086,16
090,670,16094,16098,16103,16107,16111,1124,16115,16119,16124,0,0,0,15590,2547,13443,15882,14291,6435,13615,13418,2599,13771,13332,11894,12267,1318,11747,13844,12590,915,0,0,12768,11659,13594,15932,2840,11563,15717,14120,11763,12149,16024,12459,15340,12438,1044,3222,0,15753,13398,15674,3114,3183,15325,13234,5159,14237,14193,13869,15242,0,15284,3338,14319,0,3009,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2995,16128,0,0,16132,0,0,0,16136,0,0,0,0,0,0,0,16140,16144,0,16148,0,16152,0,0,0,0,0,0,16157,0,0,0,16162,0,0,0,16168,0,0,0,0,0,0,0,0,0,0,0,0,16174,0,0,0,0,0,0,3030,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,16181,0,0,0,0,0,0,3016,16187,0,0,16192,0,0,0,16197,0,0,0,0,0,0,0,16202,16207,0,16212,0,16217,0,0,0,0,0,0,6584,0,0,0,16223,0,0,0,16228,0,0,0,0,0,0,0,16233,0,0,0,0,0,0,0,0,0,0,0,16238,16242,0,0,16247,0,0,0,16252,0,0,0,0,0,0,0,16257,16262,0,0,0,16267,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2998,16273,0,0,16277,0,0,0,16281,0,0,0,0,0,0,0,16285,16289,0,16293,0,16297,0,0,0,0,0,0,806,16302,0,0,1655,0,0,0,16306,16310,0,0,0,0,0,0,16315,16319,0,16323,16327,16332,0,0,0,0,0,0,16337,16341,0,0,16346,0,0,0,16351,0,0,0,0,0,0,0,16356,16361,0,16366,16371,16377,0,0,0,0,0,0,4073,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,16383,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,16389,16393,0,0,16398,0,0,0,16403,0,16408,0,0,0,0,0,16414,16419,0,16424,16429,16435,0,0,0,0,0,0,2851,16441,0,0,16445,0,0,0,16449,0,0,0,0,0,0,0,16453,16457,0,16461,0,16465,0,0,0,0,0,0,16470,0,0,0,16475,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,16481,0,0,0,0,0,0,0,4061,0,0,0,16488,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2854,16493,0,0,16497,0,0,0,16501,0,0,0,0,0,0,0,16505,16509,0,16513,0,16517,0,0,0,0,4993,0,2857,0,0,0,16522,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,16527,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,16532,0,0,0,16536,0,0,0,0,0,0,0,0,0,0,0,0,0,0,16541,0,16546,0,0,0,0,0,0,4069,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2840,16552,0,0,165
56,0,0,0,16560,0,0,0,0,0,0,0,10627,16564,0,16568,0,16572,0,0,0,0,0,0,16577,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,16582,0,0,0,0,0,0,0,3613,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3617,16589,0,0,16594,0,0,0,16599,0,0,0,0,0,0,0,16604,16609,0,0,0,16614,0,0,0,0,0,0,16620,0,0,0,16624,0,0,0,16629,0,0,0,0,0,0,0,16634,0,0,0,0,16639,0,0,0,0,0,0,16645,16649,0,0,16654,0,0,16659,16664,0,16669,0,0,0,0,0,16675,16680,0,16685,0,0,0,0,0,0,0,0,4065,0,0,0,16690,0,0,0,16695,0,0,0,0,0,0,0,16700,16705,0,0,0,0,0,0,0,0,0,0,324,16710,0,0,16714,0,0,0,16718,0,0,0,0,0,0,0,16722,16726,0,16730,0,16734,0,0,0,0,0,0,803,16739,16743,0,16748,0,0,0,16752,0,16756,0,0,0,0,0,16761,16765,0,16769,16773,16778,0,0,0,11713,0,0,16783,16787,0,0,16792,0,0,0,16797,0,0,0,0,0,0,0,16802,16807,0,16812,16817,16823,0,0,0,0,0,0,16829,16833,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,16838,16842,0,0,16847,0,0,0,16852,0,0,0,0,0,0,0,16857,16862,0,16867,16872,16878,0,0,0,0,0,0,3468,16884,0,0,16888,0,0,0,16892,0,0,0,0,0,0,0,16896,16900,0,16904,0,16908,0,0,0,0,0,0,16913,0,0,0,16918,0,0,0,16924,0,0,0,0,0,0,0,16930,16936,0,0,16942,16949,0,0,0,0,0,0,16956,0,0,0,0,0,0,0,16960,0,0,0,0,0,0,0,0,16965,0,16970,0,0,0,0,0,0,0,0,3471,16975,0,0,16979,0,0,0,16983,0,0,0,0,0,0,0,16987,16991,0,16995,0,16999,0,0,0,0,0,0,2779,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,17004,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,17010,0,0,0,17014,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,17019,0,0,0,17023,0,0,0,17028,0,0,0,0,0,0,0,0,17033,0,17038,0,0,0,0,0,0,0,0,3454,17043,0,0,17047,0,0,17051,17055,0,17059,0,0,0,0,0,17064,17068,0,17072,0,17076,0,0,0,0,0,0,17081,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,17086,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3577,0,0,0,17093,0,0,0,17098,0,0,0,0,0,0,0,17103,0,0,17108,0,0,0,0,0,0,0,0,17113,0,0,0,17117,0,0,0,17122,0,0,0,0,0,0,0,17127,0,0,17132,0,17137,0,0,0,0,0,0,17143,0,0,0,17147,0,0,0,17152,0
,0,0,0,0,0,0,17157,17162,0,17167,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3457,17172,0,0,17176,0,0,0,17180,0,0,0,0,0,0,0,17184,17188,0,17192,0,17196,0,0,0,0,0,0,429,17201,0,0,10033,0,0,0,17205,0,0,0,0,17209,0,0,17214,17218,0,17222,0,17226,0,0,0,0,0,0,1616,17231,0,0,17236,0,0,0,17241,0,0,0,0,0,0,0,17246,17251,0,17256,17261,17267,0,0,0,0,0,0,17273,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,17277,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,17283,17287,0,0,17292,0,0,0,17297,0,17302,0,0,0,0,0,17308,17313,0,17318,0,17323,0,0,0,0,0,0,1620,17329,0,0,10282,0,0,0,17333,0,0,0,0,0,0,0,17337,17341,0,17345,0,10541,0,0,0,0,0,0,17349,17354,0,0,17360,0,0,0,17366,0,0,0,0,0,0,0,17372,17378,0,17384,17390,17397,0,0,0,0,0,0,17404,0,0,0,17408,0,0,0,17413,0,0,0,0,0,0,0,0,17418,0,0,0,0,0,0,0,0,0,0,2537,17423,0,0,9220,0,0,0,17427,0,0,0,0,17431,0,0,17436,17440,0,17444,0,17448,0,0,0,14006,0,0,4211,17453,0,0,17458,0,0,0,17463,0,0,0,0,0,0,0,0,0,0,17468,0,17473,0,0,0,0,0,0,17479,17484,0,0,17490,0,0,0,0,0,0,0,0,0,0,0,0,0,0,17496,0,17502,0,0,0,0,0,0,1631,17509,0,0,17514,0,0,0,17519,0,0,0,0,0,0,0,0,17524,0,17529,0,17534,0,0,0,0,0,0,17540,0,0,0,17544,0,0,0,17549,0,0,0,0,0,0,0,0,17554,0,17559,0,0,0,0,0,0,0,0,2523,17564,0,0,10169,0,0,0,17568,0,0,0,0,17572,0,0,17577,0,0,17581,0,17585,0,0,0,0,0,0,17590,0,0,0,17595,0,0,0,17601,0,0,0,0,0,0,0,17607,0,0,0,0,17613,0,0,0,0,0,0,4198,17620,0,0,915,17625,17629,17634,1639,17639,17644,17649,17653,17657,17662,17667,17672,17677,17682,17687,17692,17696,17700,17705,17709,17714,17719,17723,17727,13367,13375,17731,17735,17739,17744,17750,17756,17761,17767,17773,17778,17783,17789,17795,17801,17807,17813,17819,17825,17830,17835,17841,17846,17852,17858,17863,17868,17873,17878,17883,6640,17888,17893,17899,17905,17910,17916,17922,17927,17932,17938,17944,17950,17956,17962,17968,17974,17979,17984,17990,17995,18001,18007,18012,18017,18022,18027,18032,18037,18042,18048,18055,18062,18068,18075,18082,18088,18094,18101,18108,18115,
18122,18129,18136,18143,18149,18155,18162,18168,18175,18182,18188,18194,18200,18206,18212,1140,18218,18223,18229,18235,18240,18246,18252,18257,18262,18268,18274,18280,18286,18292,18298,18304,18309,18314,18320,18325,18331,18337,18342,18347,18352,18357,18362,3239,18367,18371,18376,10345,18381,18386,18391,18395,18399,18404,18409,18414,18419,18424,18429,18434,18438,18442,18447,18451,18456,18461,18465,18469,13410,13418,18473,18477,18482,18488,18495,18502,18508,18515,18522,18528,18534,18541,18548,18555,18562,18569,18576,18583,18589,18595,18602,18608,18615,18622,18628,18634,18640,18646,18652,6662,18658,18663,18669,18675,18680,18686,18692,18697,18702,18708,18714,18720,18726,18732,18738,18744,18749,18754,18760,18765,18771,18777,18782,18787,18792,18797,18802,3242,18807,18811,18816,18821,18825,18830,18835,18839,18843,18848,18853,18858,18863,18868,18873,18878,18882,18886,18891,18895,18900,18905,18909,18913,13398,13406,18917,3245,18921,18926,18932,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,6,24,54,24,54,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,18938,18943,18949,18955,18960,18965,18971,18977,18983,18989,18995,19001,19007,19012,19017,19023,19028,19034,19040,19045,19050,19055,19060,19065,19070,1
9075,19081,19088,19095,19101,19108,19115,19121,19127,19134,19141,19148,19155,19162,19169,19176,19182,19188,19195,19201,19208,19215,19221,19227,19233,19239,19245,19251,19255,19260,19266,19272,19277,19283,19289,19294,19299,19305,19311,19317,19323,19329,19335,19341,19346,19351,19357,19362,19368,19374,19379,19384,19389,19394,19399,6666,19404,19409,19415,10825,19421,19427,19433,19438,19443,19449,19455,19461,19467,19473,19479,19485,19490,19495,19501,19506,19512,19518,19523,19528,19533,19538,19543,3225,19548,19552,19557,19562,19566,19571,19576,19580,19584,19589,19594,19599,19604,19609,19614,19619,19623,19627,19632,19636,19641,19646,19650,19654,13422,13430,19658,19662,19667,19673,19680,19687,19693,19700,19707,19713,19719,19726,19733,19740,19747,19754,19761,19768,19774,19780,19787,19793,19800,19807,19813,19819,19825,19831,19837,3263,19843,19848,19854,19860,19865,19871,19877,19882,19887,19893,19899,19905,19911,19917,19923,19929,19934,19939,19945,19950,19956,19962,19967,19972,19977,19982,19987,3249,19992,19997,20003,20009,20014,20020,20026,20031,20036,20042,20048,20054,20060,20066,20072,20078,20083,20088,20094,20099,20105,20111,20116,20121,20126,20131,20136,6644,20141,20146,20152,20158,20163,20169,20175,20180,20185,20191,20197,20203,20209,20215,20221,20227,20232,20237,20243,20248,20254,20260,20265,20270,20275,20280,20285,20290,20294,20299,20305,20311,20316,20322,20328,20333,20338,20344,20350,20356,20362,20368,20374,20380,20385,20390,20396,7478,8154,20401,20406,20411,20416,20421,20426,6648,20431,20436,20442,20448,20453,20459,20465,20470,20475,20481,20487,20493,20499,20505,20511,20517,20522,20527,20533,20538,20544,20550,20555,20560,20565,20570,20575,3228,20580,20584,20589,20594,20598,20603,20608,20612,20616,20621,20626,20631,20636,20641,20646,20651,20655,20659,20664,20668,20673,20678,20682,20686,13336,13344,20690,1060,20694,20699,20705,20711,20716,20722,20728,20733,20738,20744,20750,20756,20762,20768,20774,20780,20785,20790,20796,20801,20807,20813,20818,20823,13567,13577,20828,2
0833,20838,20844,20851,20858,20864,20871,20878,20884,20890,20897,20904,20911,20918,20925,20932,20939,20945,20951,20958,20964,20971,20978,20984,20990,20996,21002,21008,21014,21019,21025,21032,21039,21045,21052,21059,21065,21071,21078,21085,21092,21099,21106,21113,21120,21126,21132,21139,21145,21152,21159,21165,21171,21177,21183,21189,21195,21201,21208,21216,21224,21231,21239,21247,21254,21261,21269,21277,21285,21293,21301,21309,21317,21324,21331,21339,21346,21354,21362,21369,21376,21383,21390,21397,21404,21409,21415,21422,21429,21435,21442,21449,21455,21461,21468,21475,21482,21489,21496,21503,21510,21516,21522,21529,21535,21542,21549,21555,21561,21567,21573,21579,3285,21585,21590,21596,21602,21607,21613,21619,21624,21629,21635,21641,21647,21653,21659,21665,21671,21676,21681,21687,21692,21698,21704,21709,21714,13620,13630,21719,21724,21730,21737,21745,21753,21760,21768,21776,21783,21790,21798,21806,21814,21822,21830,21838,21846,21853,21860,21868,21875,21883,21891,21898,21905,21912,21919,21926,21933,21938,21944,21951,21958,21964,21971,21978,21984,21990,21997,22004,22011,22018,22025,22032,22039,22045,22051,22058,22064,22071,22078,22084,22090,22096,22102,22108,3289,22114,22119,22125,22131,22136,22142,22148,22153,22158,22164,22170,22176,22182,22188,22194,22200,22205,22210,22216,22221,22227,22233,22238,22243,13605,13615,22248,6410,22253,22259,22266,22273,22279,22286,22293,22299,22305,22312,22319,22326,22333,22340,22347,22354,22360,22366,22373,22379,22386,22393,22399,22405,22411,22417,22423,22429,22435,22442,22450,22458,22465,22473,22481,22488,22495,22503,22511,22519,22527,22535,22543,22551,22558,22565,22573,22580,22588,22596,22603,22610,22617,22624,22631,22638,22643,22649,22656,22663,22669,22676,22683,22689,22695,22702,22709,22716,22723,22730,22737,22744,22750,22756,22763,22769,22776,22783,22789,22795,22801,22807,22813,22819,22824,22830,22837,22844,22850,22857,22864,22870,22876,22883,22890,22897,22904,22911,22918,22925,22931,22937,22944,22950,22957,22964,22970,22976,22982,
22988,22994,3267,23000,23005,23011,23017,23022,23028,23034,23039,23044,23050,23056,23062,23068,23074,23080,23086,23091,23096,23102,23107,23113,23119,23124,23129,13635,13645,23134,23139,23145,23152,23160,23168,23175,23183,23191,23198,23205,23213,23221,23229,23237,23245,23253,23261,23268,23275,23283,23290,23298,23306,23313,23320,23327,23334,23341,6426,23348,23354,23361,23368,23374,23381,23388,23394,23400,23407,23414,23421,23428,23435,23442,23449,23455,23461,23468,23474,23481,23488,23494,23500,23506,23512,23518,6415,23524,23530,23537,23544,23550,23557,23564,23570,23576,23583,23590,23597,23604,23611,23618,23625,23631,23637,23644,23650,23657,23664,23670,23676,23682,23688,23694,23700,23705,23711,23718,23725,23731,23738,23745,23751,23757,23764,23771,23778,23785,23792,23799,23806,23812,23818,23825,23831,23838,23845,23851,23857,23863,23869,23875,23881,23886,23892,23899,23906,23912,23919,23926,23932,23938,23945,23952,23959,23966,23973,23980,23987,23993,23999,24006,24012,24019,24026,24032,24038,24044,24050,24056,24062,24067,24073,24080,24087,24093,24100,24107,24113,24119,24126,24133,24140,24147,24154,24161,24168,24174,24180,24187,24193,24200,24207,24213,24219,24225,24231,24237,3271,24243,24248,24254,24260,24265,24271,24277,24282,24287,24293,24299,24305,24311,24317,24323,24329,24334,24339,24345,24350,24356,24362,24367,24372,13540,24377,24382,842,24387,24391,24396,24401,24405,24410,24415,24419,24423,24428,24433,24438,24443,24448,24453,24458,24462,24466,24471,24475,24480,24485,24489,24493,24497,12997,3488,24501,24505,24510,24516,24522,24527,24533,24539,24544,24549,24555,24561,24567,24573,24579,24585,24591,24596,24601,24607,24612,24618,24624,24629,24634,24639,24644,24649,848,24654,24659,24665,10893,24671,24677,24683,24688,24693,24699,24705,24711,24717,24723,24729,24735,24740,24745,24751,24756,24762,24768,24773,24778,24783,24788,24793,24798,24803,24809,24816,24823,24829,24836,24843,24849,24855,24862,24869,24876,24883,24890,24897,24904,24910,24916,24923,24929,24936,24943,24949,24955
,24961,24967,24973,24979,24983,24988,24994,25000,25005,25011,25017,25022,25027,25033,25039,25045,25051,25057,25063,25069,25074,25079,25085,25090,25096,25102,25107,25112,25117,25122,25127,2957,25132,25136,25141,25146,25150,25155,25160,25164,25168,25173,25178,25183,25188,25193,25198,25203,25207,25211,25216,25220,25225,25230,25234,25238,25242,13031,25246,25250,25255,25261,25268,25275,25281,25288,25295,25301,25307,25314,25321,25328,25335,25342,25349,25356,25362,25368,25375,25381,25388,25395,25401,25407,25413,25419,25425,2982,25431,25436,25442,25448,25453,25459,25465,25470,25475,25481,25487,25493,25499,25505,25511,25517,25522,25527,25533,25538,25544,25550,25555,25560,25565,25570,25575,1400,25580,25584,25589,25594,25598,25603,25608,25612,25616,25621,25626,25631,25636,25641,25646,25651,25655,25659,25664,25668,25673,25678,25682,25686,13015,13023,25690,2960,25694,25699,25705,25711,25716,25722,25728,25733,25738,25744,25750,25756,25762,25768,25774,25780,25785,25790,25796,25801,25807,25813,25818,25823,25828,25833,25838,25843,25848,25854,25861,25868,25874,25881,25888,25894,25900,25907,25914,25921,25928,25935,25942,25949,25955,25961,25968,25974,25981,25988,25994,26000,26006,26012,26018,26024,26028,26033,26039,26045,26050,26056,26062,26067,26072,26078,26084,26090,26096,26102,26108,26114,26119,26124,26130,26135,26141,26147,26152,26157,26162,26167,26172,2986,26177,26182,26188,26194,26199,26205,26211,26216,26221,26227,26233,26239,26245,26251,26257,26263,26268,26273,26279,26284,26290,26296,26301,26306,15922,15932,26311,2947,26316,26320,26325,26330,26334,26339,26344,26348,26352,26357,26362,26367,26372,26377,26382,10623,26387,26391,26396,26400,26405,26410,26414,26418,13035,13043,26422,26426,26431,26437,26444,26451,26457,26464,26471,26477,26483,26490,26497,26504,26511,26518,26525,26532,26538,26544,26551,26557,26564,26571,26577,26583,26589,26595,26601,3791,26607,26612,26618,26624,26629,26635,26641,26646,26651,26657,26663,26669,26675,26681,26687,26693,26698,26703,26709,26714,26720,26726,26
731,26736,26741,26746,26751,26756,26760,26765,26771,26777,26782,26788,26794,26799,26804,26810,26816,26822,26828,26834,26840,26846,26851,26856,26862,26867,26873,26879,26884,26889,26894,26899,26904,2964,26909,26914,26920,26926,26931,26937,26943,26948,26953,26959,26965,26971,26977,26983,26989,26995,27000,27005,27011,27016,27022,27028,27033,27038,15937,15947,27043,27048,27052,27057,27063,27069,27074,27080,27086,27091,27096,27102,27108,27114,27120,27126,27132,27138,27143,27148,27154,27159,27165,27171,27176,27181,27186,27191,27196,2968,27201,27206,27212,27218,27223,27229,27235,27240,27245,27251,27257,27263,27269,27275,27281,27287,27292,27297,27303,27308,27314,27320,27325,27330,15872,15882,27335,2950,27340,27344,27349,27354,27358,27363,27368,27372,27376,27381,27386,27391,27396,27401,27406,27411,27415,27419,27424,27428,27433,27438,27442,27446,12967,12975,27450,816,27454,27458,27463,27468,27472,27477,27482,681,27486,27491,27496,27501,27506,27511,27516,27521,10919,27525,27530,27534,27539,27544,27548,27552,12510,12518,27556,27560,27564,27569,27575,27581,27586,27592,27598,27603,27608,27614,27620,27626,27632,27638,27644,27650,27655,27660,27666,27671,27677,27683,27688,27693,27698,27703,27708,27713,27717,27722,27728,27734,27739,27745,27751,27756,27761,27767,27773,27779,27785,27791,27797,27803,27808,27813,27819,27824,27830,27836,27841,27846,27851,27856,27861,27866,27871,27877,27884,27891,27897,27904,27911,27917,27923,27930,27937,27944,27951,27958,27965,27972,27978,27984,27991,27997,28004,28011,28017,28023,28029,28035,28041,28047,28051,28056,28062,28068,28073,28079,28085,28090,28095,28101,28107,28113,28119,28125,28131,28137,28142,28147,28153,28158,28164,28170,28175,28180,28185,28190,28195,432,82,28200,28205,28210,28214,28219,28224,28228,28232,28237,28242,28247,28252,28257,28262,28267,28271,28275,28280,28284,28289,28294,28298,28302,28306,12543,28310,28314,28319,28325,28332,28339,28345,28352,28359,28365,28371,28378,28385,28392,28399,28406,28413,28420,28426,28432,28439,28445,28452,2845
9,28465,28471,28477,28483,28489,28495,28499,28504,28510,28516,28521,28527,28533,28538,28543,28549,28555,28561,28567,28573,28579,28585,28590,28595,28601,28606,28612,28618,28623,28628,28633,28638,28643,3194,28648,28652,28657,1643,28662,28667,28672,28676,28680,28685,28690,28695,28700,28705,28710,28715,28719,28723,28728,28732,28737,28742,28746,28750,618,12535,28754,3197,28758,28763,28769,28775,28780,28786,28792,28797,28802,28808,28814,28820,28826,28832,28838,28844,28849,28854,28860,28865,28871,28877,28882,28887,28892,28897,28902,28907,28912,28918,28925}
--- /dev/null
+#!/usr/bin/env python3
+# SPDX-License-Identifier: GPL-3.0-or-later
+#
+# This file is part of Nominatim. (https://nominatim.org)
+#
+# Copyright (C) 2024 by the Nominatim developer community.
+# For a full list of authors see the git log.
+"""
+Helper script for development to run nominatim from the source directory.
+"""
+from pathlib import Path
+import sys
+
+sys.path.insert(1, str((Path(__file__) / '..' / 'src').resolve()))
+
+from nominatim_db import cli
+
+exit(cli.nominatim(module_dir=None, osm2pgsql_path=None))
+++ /dev/null
-# SPDX-License-Identifier: GPL-2.0-only
-#
-# This file is part of Nominatim. (https://nominatim.org)
-#
-# Copyright (C) 2022 by the Nominatim developer community.
-# For a full list of authors see the git log.
-"""
-Subcommand definitions for the command-line tool.
-"""
-# mypy and pylint disagree about the style of explicit exports,
-# see https://github.com/PyCQA/pylint/issues/6006.
-# pylint: disable=useless-import-alias
-
-from nominatim.clicmd.setup import SetupAll as SetupAll
-from nominatim.clicmd.replication import UpdateReplication as UpdateReplication
-from nominatim.clicmd.api import (APISearch as APISearch,
- APIReverse as APIReverse,
- APILookup as APILookup,
- APIDetails as APIDetails,
- APIStatus as APIStatus)
-from nominatim.clicmd.index import UpdateIndex as UpdateIndex
-from nominatim.clicmd.refresh import UpdateRefresh as UpdateRefresh
-from nominatim.clicmd.add_data import UpdateAddData as UpdateAddData
-from nominatim.clicmd.admin import AdminFuncs as AdminFuncs
-from nominatim.clicmd.freeze import SetupFreeze as SetupFreeze
-from nominatim.clicmd.special_phrases import ImportSpecialPhrases as ImportSpecialPhrases
+++ /dev/null
-# SPDX-License-Identifier: GPL-2.0-only
-#
-# This file is part of Nominatim. (https://nominatim.org)
-#
-# Copyright (C) 2022 by the Nominatim developer community.
-# For a full list of authors see the git log.
-"""
-Subcommand definitions for API calls from the command line.
-"""
-from typing import Mapping, Dict
-import argparse
-import logging
-
-from nominatim.tools.exec_utils import run_api_script
-from nominatim.errors import UsageError
-from nominatim.clicmd.args import NominatimArgs
-
-# Do not repeat documentation of subcommand classes.
-# pylint: disable=C0111
-
-LOG = logging.getLogger()
-
-STRUCTURED_QUERY = (
- ('street', 'housenumber and street'),
- ('city', 'city, town or village'),
- ('county', 'county'),
- ('state', 'state'),
- ('country', 'country'),
- ('postalcode', 'postcode')
-)
-
-EXTRADATA_PARAMS = (
- ('addressdetails', 'Include a breakdown of the address into elements'),
- ('extratags', ("Include additional information if available "
- "(e.g. wikipedia link, opening hours)")),
- ('namedetails', 'Include a list of alternative names')
-)
-
-DETAILS_SWITCHES = (
- ('addressdetails', 'Include a breakdown of the address into elements'),
- ('keywords', 'Include a list of name keywords and address keywords'),
- ('linkedplaces', 'Include a details of places that are linked with this one'),
- ('hierarchy', 'Include details of places lower in the address hierarchy'),
- ('group_hierarchy', 'Group the places by type'),
- ('polygon_geojson', 'Include geometry of result')
-)
-
-def _add_api_output_arguments(parser: argparse.ArgumentParser) -> None:
- group = parser.add_argument_group('Output arguments')
- group.add_argument('--format', default='jsonv2',
- choices=['xml', 'json', 'jsonv2', 'geojson', 'geocodejson'],
- help='Format of result')
- for name, desc in EXTRADATA_PARAMS:
- group.add_argument('--' + name, action='store_true', help=desc)
-
- group.add_argument('--lang', '--accept-language', metavar='LANGS',
- help='Preferred language order for presenting search results')
- group.add_argument('--polygon-output',
- choices=['geojson', 'kml', 'svg', 'text'],
- help='Output geometry of results as a GeoJSON, KML, SVG or WKT')
- group.add_argument('--polygon-threshold', type=float, metavar='TOLERANCE',
- help=("Simplify output geometry."
- "Parameter is difference tolerance in degrees."))
-
-
-def _run_api(endpoint: str, args: NominatimArgs, params: Mapping[str, object]) -> int:
- script_file = args.project_dir / 'website' / (endpoint + '.php')
-
- if not script_file.exists():
- LOG.error("Cannot find API script file.\n\n"
- "Make sure to run 'nominatim' from the project directory \n"
- "or use the option --project-dir.")
- raise UsageError("API script not found.")
-
- return run_api_script(endpoint, args.project_dir,
- phpcgi_bin=args.phpcgi_path, params=params)
-
-class APISearch:
- """\
- Execute a search query.
-
- This command works exactly the same as if calling the /search endpoint on
- the web API. See the online documentation for more details on the
- various parameters:
- https://nominatim.org/release-docs/latest/api/Search/
- """
-
- def add_args(self, parser: argparse.ArgumentParser) -> None:
- group = parser.add_argument_group('Query arguments')
- group.add_argument('--query',
- help='Free-form query string')
- for name, desc in STRUCTURED_QUERY:
- group.add_argument('--' + name, help='Structured query: ' + desc)
-
- _add_api_output_arguments(parser)
-
- group = parser.add_argument_group('Result limitation')
- group.add_argument('--countrycodes', metavar='CC,..',
- help='Limit search results to one or more countries')
- group.add_argument('--exclude_place_ids', metavar='ID,..',
- help='List of search object to be excluded')
- group.add_argument('--limit', type=int,
- help='Limit the number of returned results')
- group.add_argument('--viewbox', metavar='X1,Y1,X2,Y2',
- help='Preferred area to find search results')
- group.add_argument('--bounded', action='store_true',
- help='Strictly restrict results to viewbox area')
-
- group = parser.add_argument_group('Other arguments')
- group.add_argument('--no-dedupe', action='store_false', dest='dedupe',
- help='Do not remove duplicates from the result list')
-
-
- def run(self, args: NominatimArgs) -> int:
- params: Dict[str, object]
- if args.query:
- params = dict(q=args.query)
- else:
- params = {k: getattr(args, k) for k, _ in STRUCTURED_QUERY if getattr(args, k)}
-
- for param, _ in EXTRADATA_PARAMS:
- if getattr(args, param):
- params[param] = '1'
- for param in ('format', 'countrycodes', 'exclude_place_ids', 'limit', 'viewbox'):
- if getattr(args, param):
- params[param] = getattr(args, param)
- if args.lang:
- params['accept-language'] = args.lang
- if args.polygon_output:
- params['polygon_' + args.polygon_output] = '1'
- if args.polygon_threshold:
- params['polygon_threshold'] = args.polygon_threshold
- if args.bounded:
- params['bounded'] = '1'
- if not args.dedupe:
- params['dedupe'] = '0'
-
- return _run_api('search', args, params)
-
-class APIReverse:
- """\
- Execute API reverse query.
-
- This command works exactly the same as if calling the /reverse endpoint on
- the web API. See the online documentation for more details on the
- various parameters:
- https://nominatim.org/release-docs/latest/api/Reverse/
- """
-
- def add_args(self, parser: argparse.ArgumentParser) -> None:
- group = parser.add_argument_group('Query arguments')
- group.add_argument('--lat', type=float, required=True,
- help='Latitude of coordinate to look up (in WGS84)')
- group.add_argument('--lon', type=float, required=True,
- help='Longitude of coordinate to look up (in WGS84)')
- group.add_argument('--zoom', type=int,
- help='Level of detail required for the address')
-
- _add_api_output_arguments(parser)
-
-
- def run(self, args: NominatimArgs) -> int:
- params = dict(lat=args.lat, lon=args.lon, format=args.format)
- if args.zoom is not None:
- params['zoom'] = args.zoom
-
- for param, _ in EXTRADATA_PARAMS:
- if getattr(args, param):
- params[param] = '1'
- if args.lang:
- params['accept-language'] = args.lang
- if args.polygon_output:
- params['polygon_' + args.polygon_output] = '1'
- if args.polygon_threshold:
- params['polygon_threshold'] = args.polygon_threshold
-
- return _run_api('reverse', args, params)
-
-
-class APILookup:
- """\
- Execute API lookup query.
-
- This command works exactly the same as if calling the /lookup endpoint on
- the web API. See the online documentation for more details on the
- various parameters:
- https://nominatim.org/release-docs/latest/api/Lookup/
- """
-
- def add_args(self, parser: argparse.ArgumentParser) -> None:
- group = parser.add_argument_group('Query arguments')
- group.add_argument('--id', metavar='OSMID',
- action='append', required=True, dest='ids',
- help='OSM id to lookup in format <NRW><id> (may be repeated)')
-
- _add_api_output_arguments(parser)
-
-
- def run(self, args: NominatimArgs) -> int:
- params: Dict[str, object] = dict(osm_ids=','.join(args.ids), format=args.format)
-
- for param, _ in EXTRADATA_PARAMS:
- if getattr(args, param):
- params[param] = '1'
- if args.lang:
- params['accept-language'] = args.lang
- if args.polygon_output:
- params['polygon_' + args.polygon_output] = '1'
- if args.polygon_threshold:
- params['polygon_threshold'] = args.polygon_threshold
-
- return _run_api('lookup', args, params)
-
-
-class APIDetails:
- """\
- Execute API details query.
-
- This command works exactly the same as if calling the /details endpoint on
- the web API. See the online documentation for more details on the
- various parameters:
- https://nominatim.org/release-docs/latest/api/Details/
- """
-
- def add_args(self, parser: argparse.ArgumentParser) -> None:
- group = parser.add_argument_group('Query arguments')
- objs = group.add_mutually_exclusive_group(required=True)
- objs.add_argument('--node', '-n', type=int,
- help="Look up the OSM node with the given ID.")
- objs.add_argument('--way', '-w', type=int,
- help="Look up the OSM way with the given ID.")
- objs.add_argument('--relation', '-r', type=int,
- help="Look up the OSM relation with the given ID.")
- objs.add_argument('--place_id', '-p', type=int,
- help='Database internal identifier of the OSM object to look up')
- group.add_argument('--class', dest='object_class',
- help=("Class type to disambiguated multiple entries "
- "of the same object."))
-
- group = parser.add_argument_group('Output arguments')
- for name, desc in DETAILS_SWITCHES:
- group.add_argument('--' + name, action='store_true', help=desc)
- group.add_argument('--lang', '--accept-language', metavar='LANGS',
- help='Preferred language order for presenting search results')
-
-
- def run(self, args: NominatimArgs) -> int:
- if args.node:
- params = dict(osmtype='N', osmid=args.node)
- elif args.way:
- params = dict(osmtype='W', osmid=args.node)
- elif args.relation:
- params = dict(osmtype='R', osmid=args.node)
- else:
- params = dict(place_id=args.place_id)
- if args.object_class:
- params['class'] = args.object_class
- for name, _ in DETAILS_SWITCHES:
- params[name] = '1' if getattr(args, name) else '0'
- if args.lang:
- params['accept-language'] = args.lang
-
- return _run_api('details', args, params)
-
-
-class APIStatus:
- """\
- Execute API status query.
-
- This command works exactly the same as if calling the /status endpoint on
- the web API. See the online documentation for more details on the
- various parameters:
- https://nominatim.org/release-docs/latest/api/Status/
- """
-
- def add_args(self, parser: argparse.ArgumentParser) -> None:
- group = parser.add_argument_group('API parameters')
- group.add_argument('--format', default='text', choices=['text', 'json'],
- help='Format of result')
-
-
- def run(self, args: NominatimArgs) -> int:
- return _run_api('status', args, dict(format=args.format))
+++ /dev/null
-# SPDX-License-Identifier: GPL-2.0-only
-#
-# This file is part of Nominatim. (https://nominatim.org)
-#
-# Copyright (C) 2022 by the Nominatim developer community.
-# For a full list of authors see the git log.
-""" Non-blocking database connections.
-"""
-from typing import Callable, Any, Optional, Iterator, Sequence
-import logging
-import select
-import time
-
-import psycopg2
-from psycopg2.extras import wait_select
-
-# psycopg2 emits different exceptions pre and post 2.8. Detect if the new error
-# module is available and adapt the error handling accordingly.
-try:
- import psycopg2.errors # pylint: disable=no-name-in-module,import-error
- __has_psycopg2_errors__ = True
-except ImportError:
- __has_psycopg2_errors__ = False
-
-from nominatim.typing import T_cursor, Query
-
-LOG = logging.getLogger()
-
-class DeadlockHandler:
- """ Context manager that catches deadlock exceptions and calls
- the given handler function. All other exceptions are passed on
- normally.
- """
-
- def __init__(self, handler: Callable[[], None], ignore_sql_errors: bool = False) -> None:
- self.handler = handler
- self.ignore_sql_errors = ignore_sql_errors
-
- def __enter__(self) -> 'DeadlockHandler':
- return self
-
- def __exit__(self, exc_type: Any, exc_value: Any, traceback: Any) -> bool:
- if __has_psycopg2_errors__:
- if exc_type == psycopg2.errors.DeadlockDetected: # pylint: disable=E1101
- self.handler()
- return True
- elif exc_type == psycopg2.extensions.TransactionRollbackError \
- and exc_value.pgcode == '40P01':
- self.handler()
- return True
-
- if self.ignore_sql_errors and isinstance(exc_value, psycopg2.Error):
- LOG.info("SQL error ignored: %s", exc_value)
- return True
-
- return False
-
-
-class DBConnection:
- """ A single non-blocking database connection.
- """
-
- def __init__(self, dsn: str,
- cursor_factory: Optional[Callable[..., T_cursor]] = None,
- ignore_sql_errors: bool = False) -> None:
- self.dsn = dsn
-
- self.current_query: Optional[Query] = None
- self.current_params: Optional[Sequence[Any]] = None
- self.ignore_sql_errors = ignore_sql_errors
-
- self.conn: Optional['psycopg2.connection'] = None
- self.cursor: Optional['psycopg2.cursor'] = None
- self.connect(cursor_factory=cursor_factory)
-
- def close(self) -> None:
- """ Close all open connections. Does not wait for pending requests.
- """
- if self.conn is not None:
- if self.cursor is not None:
- self.cursor.close() # type: ignore[no-untyped-call]
- self.cursor = None
- self.conn.close()
-
- self.conn = None
-
- def connect(self, cursor_factory: Optional[Callable[..., T_cursor]] = None) -> None:
- """ (Re)connect to the database. Creates an asynchronous connection
- with JIT and parallel processing disabled. If a connection was
- already open, it is closed and a new connection established.
- The caller must ensure that no query is pending before reconnecting.
- """
- self.close()
-
- # Use a dict to hand in the parameters because async is a reserved
- # word in Python3.
- self.conn = psycopg2.connect(**{'dsn': self.dsn, 'async': True}) # type: ignore
- assert self.conn
- self.wait()
-
- if cursor_factory is not None:
- self.cursor = self.conn.cursor(cursor_factory=cursor_factory)
- else:
- self.cursor = self.conn.cursor()
- # Disable JIT and parallel workers as they are known to cause problems.
- # Update pg_settings instead of using SET because it does not yield
- # errors on older versions of Postgres where the settings are not
- # implemented.
- self.perform(
- """ UPDATE pg_settings SET setting = -1 WHERE name = 'jit_above_cost';
- UPDATE pg_settings SET setting = 0
- WHERE name = 'max_parallel_workers_per_gather';""")
- self.wait()
-
- def _deadlock_handler(self) -> None:
- LOG.info("Deadlock detected (params = %s), retry.", str(self.current_params))
- assert self.cursor is not None
- assert self.current_query is not None
- assert self.current_params is not None
-
- self.cursor.execute(self.current_query, self.current_params)
-
- def wait(self) -> None:
- """ Block until any pending operation is done.
- """
- while True:
- with DeadlockHandler(self._deadlock_handler, self.ignore_sql_errors):
- wait_select(self.conn)
- self.current_query = None
- return
-
- def perform(self, sql: Query, args: Optional[Sequence[Any]] = None) -> None:
- """ Send SQL query to the server. Returns immediately without
- blocking.
- """
- assert self.cursor is not None
- self.current_query = sql
- self.current_params = args
- self.cursor.execute(sql, args)
-
- def fileno(self) -> int:
- """ File descriptor to wait for. (Makes this class select()able.)
- """
- assert self.conn is not None
- return self.conn.fileno()
-
- def is_done(self) -> bool:
- """ Check if the connection is available for a new query.
-
- Also checks if the previous query has run into a deadlock.
- If so, then the previous query is repeated.
- """
- assert self.conn is not None
-
- if self.current_query is None:
- return True
-
- with DeadlockHandler(self._deadlock_handler, self.ignore_sql_errors):
- if self.conn.poll() == psycopg2.extensions.POLL_OK:
- self.current_query = None
- return True
-
- return False
-
-
-class WorkerPool:
- """ A pool of asynchronous database connections.
-
- The pool may be used as a context manager.
- """
- REOPEN_CONNECTIONS_AFTER = 100000
-
- def __init__(self, dsn: str, pool_size: int, ignore_sql_errors: bool = False) -> None:
- self.threads = [DBConnection(dsn, ignore_sql_errors=ignore_sql_errors)
- for _ in range(pool_size)]
- self.free_workers = self._yield_free_worker()
- self.wait_time = 0.0
-
-
- def finish_all(self) -> None:
- """ Wait for all connection to finish.
- """
- for thread in self.threads:
- while not thread.is_done():
- thread.wait()
-
- self.free_workers = self._yield_free_worker()
-
- def close(self) -> None:
- """ Close all connections and clear the pool.
- """
- for thread in self.threads:
- thread.close()
- self.threads = []
- self.free_workers = iter([])
-
-
- def next_free_worker(self) -> DBConnection:
- """ Get the next free connection.
- """
- return next(self.free_workers)
-
-
- def _yield_free_worker(self) -> Iterator[DBConnection]:
- ready = self.threads
- command_stat = 0
- while True:
- for thread in ready:
- if thread.is_done():
- command_stat += 1
- yield thread
-
- if command_stat > self.REOPEN_CONNECTIONS_AFTER:
- self._reconnect_threads()
- ready = self.threads
- command_stat = 0
- else:
- tstart = time.time()
- _, ready, _ = select.select([], self.threads, [])
- self.wait_time += time.time() - tstart
-
-
- def _reconnect_threads(self) -> None:
- for thread in self.threads:
- while not thread.is_done():
- thread.wait()
- thread.connect()
-
-
- def __enter__(self) -> 'WorkerPool':
- return self
-
-
- def __exit__(self, exc_type: Any, exc_value: Any, traceback: Any) -> None:
- self.finish_all()
- self.close()
+++ /dev/null
-# SPDX-License-Identifier: GPL-2.0-only
-#
-# This file is part of Nominatim. (https://nominatim.org)
-#
-# Copyright (C) 2022 by the Nominatim developer community.
-# For a full list of authors see the git log.
-"""
-Specialised connection and cursor functions.
-"""
-from typing import Optional, Any, Callable, ContextManager, Dict, cast, overload, Tuple, Iterable
-import contextlib
-import logging
-import os
-
-import psycopg2
-import psycopg2.extensions
-import psycopg2.extras
-from psycopg2 import sql as pysql
-
-from nominatim.typing import SysEnv, Query, T_cursor
-from nominatim.errors import UsageError
-
-LOG = logging.getLogger()
-
-class Cursor(psycopg2.extras.DictCursor):
- """ A cursor returning dict-like objects and providing specialised
- execution functions.
- """
- # pylint: disable=arguments-renamed,arguments-differ
- def execute(self, query: Query, args: Any = None) -> None:
- """ Query execution that logs the SQL query when debugging is enabled.
- """
- if LOG.isEnabledFor(logging.DEBUG):
- LOG.debug(self.mogrify(query, args).decode('utf-8')) # type: ignore[no-untyped-call]
-
- super().execute(query, args)
-
-
- def execute_values(self, sql: Query, argslist: Iterable[Tuple[Any, ...]],
- template: Optional[Query] = None) -> None:
- """ Wrapper for the psycopg2 convenience function to execute
- SQL for a list of values.
- """
- LOG.debug("SQL execute_values(%s, %s)", sql, argslist)
-
- psycopg2.extras.execute_values(self, sql, argslist, template=template)
-
-
- def scalar(self, sql: Query, args: Any = None) -> Any:
- """ Execute query that returns a single value. The value is returned.
- If the query yields more than one row, a ValueError is raised.
- """
- self.execute(sql, args)
-
- if self.rowcount != 1:
- raise RuntimeError("Query did not return a single row.")
-
- result = self.fetchone()
- assert result is not None
-
- return result[0]
-
-
- def drop_table(self, name: str, if_exists: bool = True, cascade: bool = False) -> None:
- """ Drop the table with the given name.
- Set `if_exists` to False if a non-existent table should raise
- an exception instead of just being ignored. If 'cascade' is set
- to True then all dependent tables are deleted as well.
- """
- sql = 'DROP TABLE '
- if if_exists:
- sql += 'IF EXISTS '
- sql += '{}'
- if cascade:
- sql += ' CASCADE'
-
- self.execute(pysql.SQL(sql).format(pysql.Identifier(name)))
-
-
-class Connection(psycopg2.extensions.connection):
- """ A connection that provides the specialised cursor by default and
- adds convenience functions for administrating the database.
- """
- @overload # type: ignore[override]
- def cursor(self) -> Cursor:
- ...
-
- @overload
- def cursor(self, name: str) -> Cursor:
- ...
-
- @overload
- def cursor(self, cursor_factory: Callable[..., T_cursor]) -> T_cursor:
- ...
-
- def cursor(self, cursor_factory = Cursor, **kwargs): # type: ignore
- """ Return a new cursor. By default the specialised cursor is returned.
- """
- return super().cursor(cursor_factory=cursor_factory, **kwargs)
-
-
- def table_exists(self, table: str) -> bool:
- """ Check that a table with the given name exists in the database.
- """
- with self.cursor() as cur:
- num = cur.scalar("""SELECT count(*) FROM pg_tables
- WHERE tablename = %s and schemaname = 'public'""", (table, ))
- return num == 1 if isinstance(num, int) else False
-
-
- def table_has_column(self, table: str, column: str) -> bool:
- """ Check if the table 'table' exists and has a column with name 'column'.
- """
- with self.cursor() as cur:
- has_column = cur.scalar("""SELECT count(*) FROM information_schema.columns
- WHERE table_name = %s
- and column_name = %s""",
- (table, column))
- return has_column > 0 if isinstance(has_column, int) else False
-
-
- def index_exists(self, index: str, table: Optional[str] = None) -> bool:
- """ Check that an index with the given name exists in the database.
- If table is not None then the index must relate to the given
- table.
- """
- with self.cursor() as cur:
- cur.execute("""SELECT tablename FROM pg_indexes
- WHERE indexname = %s and schemaname = 'public'""", (index, ))
- if cur.rowcount == 0:
- return False
-
- if table is not None:
- row = cur.fetchone()
- if row is None or not isinstance(row[0], str):
- return False
- return row[0] == table
-
- return True
-
-
- def drop_table(self, name: str, if_exists: bool = True, cascade: bool = False) -> None:
- """ Drop the table with the given name.
- Set `if_exists` to False if a non-existent table should raise
- an exception instead of just being ignored.
- """
- with self.cursor() as cur:
- cur.drop_table(name, if_exists, cascade)
- self.commit()
-
-
- def server_version_tuple(self) -> Tuple[int, int]:
- """ Return the server version as a tuple of (major, minor).
- Converts correctly for pre-10 and post-10 PostgreSQL versions.
- """
- version = self.server_version
- if version < 100000:
- return (int(version / 10000), int((version % 10000) / 100))
-
- return (int(version / 10000), version % 10000)
-
-
- def postgis_version_tuple(self) -> Tuple[int, int]:
- """ Return the postgis version installed in the database as a
- tuple of (major, minor). Assumes that the PostGIS extension
- has been installed already.
- """
- with self.cursor() as cur:
- version = cur.scalar('SELECT postgis_lib_version()')
-
- version_parts = version.split('.')
- if len(version_parts) < 2:
- raise UsageError(f"Error fetching Postgis version. Bad format: {version}")
-
- return (int(version_parts[0]), int(version_parts[1]))
-
-class ConnectionContext(ContextManager[Connection]):
- """ Context manager of the connection that also provides direct access
- to the underlying connection.
- """
- connection: Connection
-
-def connect(dsn: str) -> ConnectionContext:
- """ Open a connection to the database using the specialised connection
- factory. The returned object may be used in conjunction with 'with'.
- When used outside a context manager, use the `connection` attribute
- to get the connection.
- """
- try:
- conn = psycopg2.connect(dsn, connection_factory=Connection)
- ctxmgr = cast(ConnectionContext, contextlib.closing(conn))
- ctxmgr.connection = conn
- return ctxmgr
- except psycopg2.OperationalError as err:
- raise UsageError(f"Cannot connect to database: {err}") from err
-
-
-# Translation from PG connection string parameters to PG environment variables.
-# Derived from https://www.postgresql.org/docs/current/libpq-envars.html.
-_PG_CONNECTION_STRINGS = {
- 'host': 'PGHOST',
- 'hostaddr': 'PGHOSTADDR',
- 'port': 'PGPORT',
- 'dbname': 'PGDATABASE',
- 'user': 'PGUSER',
- 'password': 'PGPASSWORD',
- 'passfile': 'PGPASSFILE',
- 'channel_binding': 'PGCHANNELBINDING',
- 'service': 'PGSERVICE',
- 'options': 'PGOPTIONS',
- 'application_name': 'PGAPPNAME',
- 'sslmode': 'PGSSLMODE',
- 'requiressl': 'PGREQUIRESSL',
- 'sslcompression': 'PGSSLCOMPRESSION',
- 'sslcert': 'PGSSLCERT',
- 'sslkey': 'PGSSLKEY',
- 'sslrootcert': 'PGSSLROOTCERT',
- 'sslcrl': 'PGSSLCRL',
- 'requirepeer': 'PGREQUIREPEER',
- 'ssl_min_protocol_version': 'PGSSLMINPROTOCOLVERSION',
- 'ssl_max_protocol_version': 'PGSSLMAXPROTOCOLVERSION',
- 'gssencmode': 'PGGSSENCMODE',
- 'krbsrvname': 'PGKRBSRVNAME',
- 'gsslib': 'PGGSSLIB',
- 'connect_timeout': 'PGCONNECT_TIMEOUT',
- 'target_session_attrs': 'PGTARGETSESSIONATTRS',
-}
-
-
-def get_pg_env(dsn: str,
- base_env: Optional[SysEnv] = None) -> Dict[str, str]:
- """ Return a copy of `base_env` with the environment variables for
- PostgresSQL set up from the given database connection string.
- If `base_env` is None, then the OS environment is used as a base
- environment.
- """
- env = dict(base_env if base_env is not None else os.environ)
-
- for param, value in psycopg2.extensions.parse_dsn(dsn).items():
- if param in _PG_CONNECTION_STRINGS:
- env[_PG_CONNECTION_STRINGS[param]] = value
- else:
- LOG.error("Unknown connection parameter '%s' ignored.", param)
-
- return env
+++ /dev/null
-# SPDX-License-Identifier: GPL-2.0-only
-#
-# This file is part of Nominatim. (https://nominatim.org)
-#
-# Copyright (C) 2022 by the Nominatim developer community.
-# For a full list of authors see the git log.
-"""
-Main work horse for indexing (computing addresses) the database.
-"""
-from typing import Optional, Any, cast
-import logging
-import time
-
-import psycopg2.extras
-
-from nominatim.tokenizer.base import AbstractTokenizer
-from nominatim.indexer.progress import ProgressLogger
-from nominatim.indexer import runners
-from nominatim.db.async_connection import DBConnection, WorkerPool
-from nominatim.db.connection import connect, Connection, Cursor
-from nominatim.typing import DictCursorResults
-
-LOG = logging.getLogger()
-
-
-class PlaceFetcher:
- """ Asynchronous connection that fetches place details for processing.
- """
- def __init__(self, dsn: str, setup_conn: Connection) -> None:
- self.wait_time = 0.0
- self.current_ids: Optional[DictCursorResults] = None
- self.conn: Optional[DBConnection] = DBConnection(dsn,
- cursor_factory=psycopg2.extras.DictCursor)
-
- with setup_conn.cursor() as cur:
- # need to fetch those manually because register_hstore cannot
- # fetch them on an asynchronous connection below.
- hstore_oid = cur.scalar("SELECT 'hstore'::regtype::oid")
- hstore_array_oid = cur.scalar("SELECT 'hstore[]'::regtype::oid")
-
- psycopg2.extras.register_hstore(self.conn.conn, oid=hstore_oid,
- array_oid=hstore_array_oid)
-
- def close(self) -> None:
- """ Close the underlying asynchronous connection.
- """
- if self.conn:
- self.conn.close()
- self.conn = None
-
-
- def fetch_next_batch(self, cur: Cursor, runner: runners.Runner) -> bool:
- """ Send a request for the next batch of places.
- If details for the places are required, they will be fetched
- asynchronously.
-
- Returns true if there is still data available.
- """
- ids = cast(Optional[DictCursorResults], cur.fetchmany(100))
-
- if not ids:
- self.current_ids = None
- return False
-
- assert self.conn is not None
- self.current_ids = runner.get_place_details(self.conn, ids)
-
- return True
-
- def get_batch(self) -> DictCursorResults:
- """ Get the next batch of data, previously requested with
- `fetch_next_batch`.
- """
- assert self.conn is not None
- assert self.conn.cursor is not None
-
- if self.current_ids is not None and not self.current_ids:
- tstart = time.time()
- self.conn.wait()
- self.wait_time += time.time() - tstart
- self.current_ids = cast(Optional[DictCursorResults],
- self.conn.cursor.fetchall())
-
- return self.current_ids if self.current_ids is not None else []
-
- def __enter__(self) -> 'PlaceFetcher':
- return self
-
-
- def __exit__(self, exc_type: Any, exc_value: Any, traceback: Any) -> None:
- assert self.conn is not None
- self.conn.wait()
- self.close()
-
-
-class Indexer:
- """ Main indexing routine.
- """
-
- def __init__(self, dsn: str, tokenizer: AbstractTokenizer, num_threads: int):
- self.dsn = dsn
- self.tokenizer = tokenizer
- self.num_threads = num_threads
-
-
- def has_pending(self) -> bool:
- """ Check if any data still needs indexing.
- This function must only be used after the import has finished.
- Otherwise it will be very expensive.
- """
- with connect(self.dsn) as conn:
- with conn.cursor() as cur:
- cur.execute("SELECT 'a' FROM placex WHERE indexed_status > 0 LIMIT 1")
- return cur.rowcount > 0
-
-
- def index_full(self, analyse: bool = True) -> None:
- """ Index the complete database. This will first index boundaries
- followed by all other objects. When `analyse` is True, then the
- database will be analysed at the appropriate places to
- ensure that database statistics are updated.
- """
- with connect(self.dsn) as conn:
- conn.autocommit = True
-
- def _analyze() -> None:
- if analyse:
- with conn.cursor() as cur:
- cur.execute('ANALYZE')
-
- if self.index_by_rank(0, 4) > 0:
- _analyze()
-
- if self.index_boundaries(0, 30) > 100:
- _analyze()
-
- if self.index_by_rank(5, 25) > 100:
- _analyze()
-
- if self.index_by_rank(26, 30) > 1000:
- _analyze()
-
- if self.index_postcodes() > 100:
- _analyze()
-
-
- def index_boundaries(self, minrank: int, maxrank: int) -> int:
- """ Index only administrative boundaries within the given rank range.
- """
- total = 0
- LOG.warning("Starting indexing boundaries using %s threads",
- self.num_threads)
-
- with self.tokenizer.name_analyzer() as analyzer:
- for rank in range(max(minrank, 4), min(maxrank, 26)):
- total += self._index(runners.BoundaryRunner(rank, analyzer))
-
- return total
-
- def index_by_rank(self, minrank: int, maxrank: int) -> int:
- """ Index all entries of placex in the given rank range (inclusive)
- in order of their address rank.
-
- When rank 30 is requested then also interpolations and
- places with address rank 0 will be indexed.
- """
- total = 0
- maxrank = min(maxrank, 30)
- LOG.warning("Starting indexing rank (%i to %i) using %i threads",
- minrank, maxrank, self.num_threads)
-
- with self.tokenizer.name_analyzer() as analyzer:
- for rank in range(max(1, minrank), maxrank + 1):
- total += self._index(runners.RankRunner(rank, analyzer), 20 if rank == 30 else 1)
-
- if maxrank == 30:
- total += self._index(runners.RankRunner(0, analyzer))
- total += self._index(runners.InterpolationRunner(analyzer), 20)
-
- return total
-
-
- def index_postcodes(self) -> int:
- """Index the entries of the location_postcode table.
- """
- LOG.warning("Starting indexing postcodes using %s threads", self.num_threads)
-
- return self._index(runners.PostcodeRunner(), 20)
-
-
- def update_status_table(self) -> None:
- """ Update the status in the status table to 'indexed'.
- """
- with connect(self.dsn) as conn:
- with conn.cursor() as cur:
- cur.execute('UPDATE import_status SET indexed = true')
-
- conn.commit()
-
- def _index(self, runner: runners.Runner, batch: int = 1) -> int:
- """ Index a single rank or table. `runner` describes the SQL to use
- for indexing. `batch` describes the number of objects that
- should be processed with a single SQL statement
- """
- LOG.warning("Starting %s (using batch size %s)", runner.name(), batch)
-
- with connect(self.dsn) as conn:
- psycopg2.extras.register_hstore(conn)
- with conn.cursor() as cur:
- total_tuples = cur.scalar(runner.sql_count_objects())
- LOG.debug("Total number of rows: %i", total_tuples)
-
- conn.commit()
-
- progress = ProgressLogger(runner.name(), total_tuples)
-
- if total_tuples > 0:
- with conn.cursor(name='places') as cur:
- cur.execute(runner.sql_get_objects())
-
- with PlaceFetcher(self.dsn, conn) as fetcher:
- with WorkerPool(self.dsn, self.num_threads) as pool:
- has_more = fetcher.fetch_next_batch(cur, runner)
- while has_more:
- places = fetcher.get_batch()
-
- # asynchronously get the next batch
- has_more = fetcher.fetch_next_batch(cur, runner)
-
- # And insert the current batch
- for idx in range(0, len(places), batch):
- part = places[idx:idx + batch]
- LOG.debug("Processing places: %s", str(part))
- runner.index_places(pool.next_free_worker(), part)
- progress.add(len(part))
-
- LOG.info("Wait time: fetcher: %.2fs, pool: %.2fs",
- fetcher.wait_time, pool.wait_time)
-
- conn.commit()
-
- return progress.done()
+++ /dev/null
-# SPDX-License-Identifier: GPL-2.0-only
-#
-# This file is part of Nominatim. (https://nominatim.org)
-#
-# Copyright (C) 2022 by the Nominatim developer community.
-# For a full list of authors see the git log.
-"""
-Mix-ins that provide the actual commands for the indexer for various indexing
-tasks.
-"""
-from typing import Any, List
-import functools
-
-from psycopg2 import sql as pysql
-import psycopg2.extras
-
-from nominatim.data.place_info import PlaceInfo
-from nominatim.tokenizer.base import AbstractAnalyzer
-from nominatim.db.async_connection import DBConnection
-from nominatim.typing import Query, DictCursorResult, DictCursorResults, Protocol
-
-# pylint: disable=C0111
-
-def _mk_valuelist(template: str, num: int) -> pysql.Composed:
- return pysql.SQL(',').join([pysql.SQL(template)] * num)
-
-def _analyze_place(place: DictCursorResult, analyzer: AbstractAnalyzer) -> psycopg2.extras.Json:
- return psycopg2.extras.Json(analyzer.process_place(PlaceInfo(place)))
-
-
-class Runner(Protocol):
- def name(self) -> str: ...
- def sql_count_objects(self) -> Query: ...
- def sql_get_objects(self) -> Query: ...
- def get_place_details(self, worker: DBConnection,
- ids: DictCursorResults) -> DictCursorResults: ...
- def index_places(self, worker: DBConnection, places: DictCursorResults) -> None: ...
-
-
-class AbstractPlacexRunner:
- """ Returns SQL commands for indexing of the placex table.
- """
- SELECT_SQL = pysql.SQL('SELECT place_id FROM placex ')
- UPDATE_LINE = "(%s, %s::hstore, %s::hstore, %s::int, %s::jsonb)"
-
- def __init__(self, rank: int, analyzer: AbstractAnalyzer) -> None:
- self.rank = rank
- self.analyzer = analyzer
-
-
- @functools.lru_cache(maxsize=1)
- def _index_sql(self, num_places: int) -> pysql.Composed:
- return pysql.SQL(
- """ UPDATE placex
- SET indexed_status = 0, address = v.addr, token_info = v.ti,
- name = v.name, linked_place_id = v.linked_place_id
- FROM (VALUES {}) as v(id, name, addr, linked_place_id, ti)
- WHERE place_id = v.id
- """).format(_mk_valuelist(AbstractPlacexRunner.UPDATE_LINE, num_places))
-
-
- def get_place_details(self, worker: DBConnection, ids: DictCursorResults) -> DictCursorResults:
- worker.perform("""SELECT place_id, extra.*
- FROM placex, LATERAL placex_indexing_prepare(placex) as extra
- WHERE place_id IN %s""",
- (tuple((p[0] for p in ids)), ))
-
- return []
-
-
- def index_places(self, worker: DBConnection, places: DictCursorResults) -> None:
- values: List[Any] = []
- for place in places:
- for field in ('place_id', 'name', 'address', 'linked_place_id'):
- values.append(place[field])
- values.append(_analyze_place(place, self.analyzer))
-
- worker.perform(self._index_sql(len(places)), values)
-
-
-class RankRunner(AbstractPlacexRunner):
- """ Returns SQL commands for indexing one rank within the placex table.
- """
-
- def name(self) -> str:
- return f"rank {self.rank}"
-
- def sql_count_objects(self) -> pysql.Composed:
- return pysql.SQL("""SELECT count(*) FROM placex
- WHERE rank_address = {} and indexed_status > 0
- """).format(pysql.Literal(self.rank))
-
- def sql_get_objects(self) -> pysql.Composed:
- return self.SELECT_SQL + pysql.SQL(
- """WHERE indexed_status > 0 and rank_address = {}
- ORDER BY geometry_sector
- """).format(pysql.Literal(self.rank))
-
-
-class BoundaryRunner(AbstractPlacexRunner):
- """ Returns SQL commands for indexing the administrative boundaries
- of a certain rank.
- """
-
- def name(self) -> str:
- return f"boundaries rank {self.rank}"
-
- def sql_count_objects(self) -> pysql.Composed:
- return pysql.SQL("""SELECT count(*) FROM placex
- WHERE indexed_status > 0
- AND rank_search = {}
- AND class = 'boundary' and type = 'administrative'
- """).format(pysql.Literal(self.rank))
-
- def sql_get_objects(self) -> pysql.Composed:
- return self.SELECT_SQL + pysql.SQL(
- """WHERE indexed_status > 0 and rank_search = {}
- and class = 'boundary' and type = 'administrative'
- ORDER BY partition, admin_level
- """).format(pysql.Literal(self.rank))
-
-
-class InterpolationRunner:
- """ Returns SQL commands for indexing the address interpolation table
- location_property_osmline.
- """
-
- def __init__(self, analyzer: AbstractAnalyzer) -> None:
- self.analyzer = analyzer
-
-
- def name(self) -> str:
- return "interpolation lines (location_property_osmline)"
-
- def sql_count_objects(self) -> str:
- return """SELECT count(*) FROM location_property_osmline
- WHERE indexed_status > 0"""
-
- def sql_get_objects(self) -> str:
- return """SELECT place_id
- FROM location_property_osmline
- WHERE indexed_status > 0
- ORDER BY geometry_sector"""
-
-
- def get_place_details(self, worker: DBConnection, ids: DictCursorResults) -> DictCursorResults:
- worker.perform("""SELECT place_id, get_interpolation_address(address, osm_id) as address
- FROM location_property_osmline WHERE place_id IN %s""",
- (tuple((p[0] for p in ids)), ))
- return []
-
-
- @functools.lru_cache(maxsize=1)
- def _index_sql(self, num_places: int) -> pysql.Composed:
- return pysql.SQL("""UPDATE location_property_osmline
- SET indexed_status = 0, address = v.addr, token_info = v.ti
- FROM (VALUES {}) as v(id, addr, ti)
- WHERE place_id = v.id
- """).format(_mk_valuelist("(%s, %s::hstore, %s::jsonb)", num_places))
-
-
- def index_places(self, worker: DBConnection, places: DictCursorResults) -> None:
- values: List[Any] = []
- for place in places:
- values.extend((place[x] for x in ('place_id', 'address')))
- values.append(_analyze_place(place, self.analyzer))
-
- worker.perform(self._index_sql(len(places)), values)
-
-
-
-class PostcodeRunner(Runner):
- """ Provides the SQL commands for indexing the location_postcode table.
- """
-
- def name(self) -> str:
- return "postcodes (location_postcode)"
-
-
- def sql_count_objects(self) -> str:
- return 'SELECT count(*) FROM location_postcode WHERE indexed_status > 0'
-
-
- def sql_get_objects(self) -> str:
- return """SELECT place_id FROM location_postcode
- WHERE indexed_status > 0
- ORDER BY country_code, postcode"""
-
-
- def get_place_details(self, worker: DBConnection, ids: DictCursorResults) -> DictCursorResults:
- return ids
-
- def index_places(self, worker: DBConnection, places: DictCursorResults) -> None:
- worker.perform(pysql.SQL("""UPDATE location_postcode SET indexed_status = 0
- WHERE place_id IN ({})""")
- .format(pysql.SQL(',').join((pysql.Literal(i[0]) for i in places))))
+++ /dev/null
-# SPDX-License-Identifier: GPL-2.0-only
-#
-# This file is part of Nominatim. (https://nominatim.org)
-#
-# Copyright (C) 2022 by the Nominatim developer community.
-# For a full list of authors see the git log.
-"""
-Tokenizer implementing normalisation as used before Nominatim 4.
-"""
-from typing import Optional, Sequence, List, Tuple, Mapping, Any, Callable, \
- cast, Dict, Set, Iterable
-from collections import OrderedDict
-import logging
-from pathlib import Path
-import re
-import shutil
-from textwrap import dedent
-
-from icu import Transliterator
-import psycopg2
-import psycopg2.extras
-
-from nominatim.db.connection import connect, Connection
-from nominatim.config import Configuration
-from nominatim.db import properties
-from nominatim.db import utils as db_utils
-from nominatim.db.sql_preprocessor import SQLPreprocessor
-from nominatim.data.place_info import PlaceInfo
-from nominatim.errors import UsageError
-from nominatim.tokenizer.base import AbstractAnalyzer, AbstractTokenizer
-
-DBCFG_NORMALIZATION = "tokenizer_normalization"
-DBCFG_MAXWORDFREQ = "tokenizer_maxwordfreq"
-
-LOG = logging.getLogger()
-
-def create(dsn: str, data_dir: Path) -> 'LegacyTokenizer':
- """ Create a new instance of the tokenizer provided by this module.
- """
- return LegacyTokenizer(dsn, data_dir)
-
-
-def _install_module(config_module_path: str, src_dir: Path, module_dir: Path) -> str:
- """ Copies the PostgreSQL normalisation module into the project
- directory if necessary. For historical reasons the module is
- saved in the '/module' subdirectory and not with the other tokenizer
- data.
-
- The function detects when the installation is run from the
- build directory. It doesn't touch the module in that case.
- """
- # Custom module locations are simply used as is.
- if config_module_path:
- LOG.info("Using custom path for database module at '%s'", config_module_path)
- return config_module_path
-
- # Compatibility mode for builddir installations.
- if module_dir.exists() and src_dir.samefile(module_dir):
- LOG.info('Running from build directory. Leaving database module as is.')
- return str(module_dir)
-
- # In any other case install the module in the project directory.
- if not module_dir.exists():
- module_dir.mkdir()
-
- destfile = module_dir / 'nominatim.so'
- shutil.copy(str(src_dir / 'nominatim.so'), str(destfile))
- destfile.chmod(0o755)
-
- LOG.info('Database module installed at %s', str(destfile))
-
- return str(module_dir)
-
-
-def _check_module(module_dir: str, conn: Connection) -> None:
- """ Try to use the PostgreSQL module to confirm that it is correctly
- installed and accessible from PostgreSQL.
- """
- with conn.cursor() as cur:
- try:
- cur.execute("""CREATE FUNCTION nominatim_test_import_func(text)
- RETURNS text AS %s, 'transliteration'
- LANGUAGE c IMMUTABLE STRICT;
- DROP FUNCTION nominatim_test_import_func(text)
- """, (f'{module_dir}/nominatim.so', ))
- except psycopg2.DatabaseError as err:
- LOG.fatal("Error accessing database module: %s", err)
- raise UsageError("Database module cannot be accessed.") from err
-
-
-class LegacyTokenizer(AbstractTokenizer):
- """ The legacy tokenizer uses a special PostgreSQL module to normalize
- names and queries. The tokenizer thus implements normalization through
- calls to the database.
- """
-
- def __init__(self, dsn: str, data_dir: Path) -> None:
- self.dsn = dsn
- self.data_dir = data_dir
- self.normalization: Optional[str] = None
-
-
- def init_new_db(self, config: Configuration, init_db: bool = True) -> None:
- """ Set up a new tokenizer for the database.
-
- This copies all necessary data in the project directory to make
- sure the tokenizer remains stable even over updates.
- """
- module_dir = _install_module(config.DATABASE_MODULE_PATH,
- config.lib_dir.module,
- config.project_dir / 'module')
-
- self.normalization = config.TERM_NORMALIZATION
-
- self._install_php(config, overwrite=True)
-
- with connect(self.dsn) as conn:
- _check_module(module_dir, conn)
- self._save_config(conn, config)
- conn.commit()
-
- if init_db:
- self.update_sql_functions(config)
- self._init_db_tables(config)
-
-
- def init_from_project(self, config: Configuration) -> None:
- """ Initialise the tokenizer from the project directory.
- """
- with connect(self.dsn) as conn:
- self.normalization = properties.get_property(conn, DBCFG_NORMALIZATION)
-
- if not (config.project_dir / 'module' / 'nominatim.so').exists():
- _install_module(config.DATABASE_MODULE_PATH,
- config.lib_dir.module,
- config.project_dir / 'module')
-
- self._install_php(config, overwrite=False)
-
- def finalize_import(self, config: Configuration) -> None:
- """ Do any required postprocessing to make the tokenizer data ready
- for use.
- """
- with connect(self.dsn) as conn:
- sqlp = SQLPreprocessor(conn, config)
- sqlp.run_sql_file(conn, 'tokenizer/legacy_tokenizer_indices.sql')
-
-
- def update_sql_functions(self, config: Configuration) -> None:
- """ Reimport the SQL functions for this tokenizer.
- """
- with connect(self.dsn) as conn:
- max_word_freq = properties.get_property(conn, DBCFG_MAXWORDFREQ)
- modulepath = config.DATABASE_MODULE_PATH or \
- str((config.project_dir / 'module').resolve())
- sqlp = SQLPreprocessor(conn, config)
- sqlp.run_sql_file(conn, 'tokenizer/legacy_tokenizer.sql',
- max_word_freq=max_word_freq,
- modulepath=modulepath)
-
-
- def check_database(self, _: Configuration) -> Optional[str]:
- """ Check that the tokenizer is set up correctly.
- """
- hint = """\
- The Postgresql extension nominatim.so was not correctly loaded.
-
- Error: {error}
-
- Hints:
- * Check the output of the CMmake/make installation step
- * Does nominatim.so exist?
- * Does nominatim.so exist on the database server?
- * Can nominatim.so be accessed by the database user?
- """
- with connect(self.dsn) as conn:
- with conn.cursor() as cur:
- try:
- out = cur.scalar("SELECT make_standard_name('a')")
- except psycopg2.Error as err:
- return hint.format(error=str(err))
-
- if out != 'a':
- return hint.format(error='Unexpected result for make_standard_name()')
-
- return None
-
-
- def migrate_database(self, config: Configuration) -> None:
- """ Initialise the project directory of an existing database for
- use with this tokenizer.
-
- This is a special migration function for updating existing databases
- to new software versions.
- """
- self.normalization = config.TERM_NORMALIZATION
- module_dir = _install_module(config.DATABASE_MODULE_PATH,
- config.lib_dir.module,
- config.project_dir / 'module')
-
- with connect(self.dsn) as conn:
- _check_module(module_dir, conn)
- self._save_config(conn, config)
-
-
- def update_statistics(self) -> None:
- """ Recompute the frequency of full words.
- """
- with connect(self.dsn) as conn:
- if conn.table_exists('search_name'):
- with conn.cursor() as cur:
- cur.drop_table("word_frequencies")
- LOG.info("Computing word frequencies")
- cur.execute("""CREATE TEMP TABLE word_frequencies AS
- SELECT unnest(name_vector) as id, count(*)
- FROM search_name GROUP BY id""")
- cur.execute("CREATE INDEX ON word_frequencies(id)")
- LOG.info("Update word table with recomputed frequencies")
- cur.execute("""UPDATE word SET search_name_count = count
- FROM word_frequencies
- WHERE word_token like ' %' and word_id = id""")
- cur.drop_table("word_frequencies")
- conn.commit()
-
-
- def update_word_tokens(self) -> None:
- """ No house-keeping implemented for the legacy tokenizer.
- """
- LOG.info("No tokenizer clean-up available.")
-
-
- def name_analyzer(self) -> 'LegacyNameAnalyzer':
- """ Create a new analyzer for tokenizing names and queries
- using this tokinzer. Analyzers are context managers and should
- be used accordingly:
-
- ```
- with tokenizer.name_analyzer() as analyzer:
- analyser.tokenize()
- ```
-
- When used outside the with construct, the caller must ensure to
- call the close() function before destructing the analyzer.
-
- Analyzers are not thread-safe. You need to instantiate one per thread.
- """
- normalizer = Transliterator.createFromRules("phrase normalizer",
- self.normalization)
- return LegacyNameAnalyzer(self.dsn, normalizer)
-
-
- def _install_php(self, config: Configuration, overwrite: bool = True) -> None:
- """ Install the php script for the tokenizer.
- """
- php_file = self.data_dir / "tokenizer.php"
-
- if not php_file.exists() or overwrite:
- php_file.write_text(dedent(f"""\
- <?php
- @define('CONST_Max_Word_Frequency', {config.MAX_WORD_FREQUENCY});
- @define('CONST_Term_Normalization_Rules', "{config.TERM_NORMALIZATION}");
- require_once('{config.lib_dir.php}/tokenizer/legacy_tokenizer.php');
- """), encoding='utf-8')
-
-
- def _init_db_tables(self, config: Configuration) -> None:
- """ Set up the word table and fill it with pre-computed word
- frequencies.
- """
- with connect(self.dsn) as conn:
- sqlp = SQLPreprocessor(conn, config)
- sqlp.run_sql_file(conn, 'tokenizer/legacy_tokenizer_tables.sql')
- conn.commit()
-
- LOG.warning("Precomputing word tokens")
- db_utils.execute_file(self.dsn, config.lib_dir.data / 'words.sql')
-
-
- def _save_config(self, conn: Connection, config: Configuration) -> None:
- """ Save the configuration that needs to remain stable for the given
- database as database properties.
- """
- assert self.normalization is not None
-
- properties.set_property(conn, DBCFG_NORMALIZATION, self.normalization)
- properties.set_property(conn, DBCFG_MAXWORDFREQ, config.MAX_WORD_FREQUENCY)
-
-
-class LegacyNameAnalyzer(AbstractAnalyzer):
- """ The legacy analyzer uses the special Postgresql module for
- splitting names.
-
- Each instance opens a connection to the database to request the
- normalization.
- """
-
- def __init__(self, dsn: str, normalizer: Any):
- self.conn: Optional[Connection] = connect(dsn).connection
- self.conn.autocommit = True
- self.normalizer = normalizer
- psycopg2.extras.register_hstore(self.conn)
-
- self._cache = _TokenCache(self.conn)
-
-
- def close(self) -> None:
- """ Free all resources used by the analyzer.
- """
- if self.conn:
- self.conn.close()
- self.conn = None
-
-
- def get_word_token_info(self, words: Sequence[str]) -> List[Tuple[str, str, int]]:
- """ Return token information for the given list of words.
- If a word starts with # it is assumed to be a full name
- otherwise is a partial name.
-
- The function returns a list of tuples with
- (original word, word token, word id).
-
- The function is used for testing and debugging only
- and not necessarily efficient.
- """
- assert self.conn is not None
- with self.conn.cursor() as cur:
- cur.execute("""SELECT t.term, word_token, word_id
- FROM word, (SELECT unnest(%s::TEXT[]) as term) t
- WHERE word_token = (CASE
- WHEN left(t.term, 1) = '#' THEN
- ' ' || make_standard_name(substring(t.term from 2))
- ELSE
- make_standard_name(t.term)
- END)
- and class is null and country_code is null""",
- (words, ))
-
- return [(r[0], r[1], r[2]) for r in cur]
-
-
- def normalize(self, phrase: str) -> str:
- """ Normalize the given phrase, i.e. remove all properties that
- are irrelevant for search.
- """
- return cast(str, self.normalizer.transliterate(phrase))
-
-
- def normalize_postcode(self, postcode: str) -> str:
- """ Convert the postcode to a standardized form.
-
- This function must yield exactly the same result as the SQL function
- 'token_normalized_postcode()'.
- """
- return postcode.strip().upper()
-
-
- def update_postcodes_from_db(self) -> None:
- """ Update postcode tokens in the word table from the location_postcode
- table.
- """
- assert self.conn is not None
-
- with self.conn.cursor() as cur:
- # This finds us the rows in location_postcode and word that are
- # missing in the other table.
- cur.execute("""SELECT * FROM
- (SELECT pc, word FROM
- (SELECT distinct(postcode) as pc FROM location_postcode) p
- FULL JOIN
- (SELECT word FROM word
- WHERE class ='place' and type = 'postcode') w
- ON pc = word) x
- WHERE pc is null or word is null""")
-
- to_delete = []
- to_add = []
-
- for postcode, word in cur:
- if postcode is None:
- to_delete.append(word)
- else:
- to_add.append(postcode)
-
- if to_delete:
- cur.execute("""DELETE FROM WORD
- WHERE class ='place' and type = 'postcode'
- and word = any(%s)
- """, (to_delete, ))
- if to_add:
- cur.execute("""SELECT count(create_postcode_id(pc))
- FROM unnest(%s) as pc
- """, (to_add, ))
-
-
-
- def update_special_phrases(self, phrases: Iterable[Tuple[str, str, str, str]],
- should_replace: bool) -> None:
- """ Replace the search index for special phrases with the new phrases.
- """
- assert self.conn is not None
-
- norm_phrases = set(((self.normalize(p[0]), p[1], p[2], p[3])
- for p in phrases))
-
- with self.conn.cursor() as cur:
- # Get the old phrases.
- existing_phrases = set()
- cur.execute("""SELECT word, class, type, operator FROM word
- WHERE class != 'place'
- OR (type != 'house' AND type != 'postcode')""")
- for label, cls, typ, oper in cur:
- existing_phrases.add((label, cls, typ, oper or '-'))
-
- to_add = norm_phrases - existing_phrases
- to_delete = existing_phrases - norm_phrases
-
- if to_add:
- cur.execute_values(
- """ INSERT INTO word (word_id, word_token, word, class, type,
- search_name_count, operator)
- (SELECT nextval('seq_word'), ' ' || make_standard_name(name), name,
- class, type, 0,
- CASE WHEN op in ('in', 'near') THEN op ELSE null END
- FROM (VALUES %s) as v(name, class, type, op))""",
- to_add)
-
- if to_delete and should_replace:
- cur.execute_values(
- """ DELETE FROM word USING (VALUES %s) as v(name, in_class, in_type, op)
- WHERE word = name and class = in_class and type = in_type
- and ((op = '-' and operator is null) or op = operator)""",
- to_delete)
-
- LOG.info("Total phrases: %s. Added: %s. Deleted: %s",
- len(norm_phrases), len(to_add), len(to_delete))
-
-
- def add_country_names(self, country_code: str, names: Mapping[str, str]) -> None:
- """ Add names for the given country to the search index.
- """
- assert self.conn is not None
-
- with self.conn.cursor() as cur:
- cur.execute(
- """INSERT INTO word (word_id, word_token, country_code)
- (SELECT nextval('seq_word'), lookup_token, %s
- FROM (SELECT DISTINCT ' ' || make_standard_name(n) as lookup_token
- FROM unnest(%s)n) y
- WHERE NOT EXISTS(SELECT * FROM word
- WHERE word_token = lookup_token and country_code = %s))
- """, (country_code, list(names.values()), country_code))
-
-
- def process_place(self, place: PlaceInfo) -> Mapping[str, Any]:
- """ Determine tokenizer information about the given place.
-
- Returns a JSON-serialisable structure that will be handed into
- the database via the token_info field.
- """
- assert self.conn is not None
-
- token_info = _TokenInfo(self._cache)
-
- names = place.name
-
- if names:
- token_info.add_names(self.conn, names)
-
- if place.is_country():
- assert place.country_code is not None
- self.add_country_names(place.country_code, names)
-
- address = place.address
- if address:
- self._process_place_address(token_info, address)
-
- return token_info.data
-
-
- def _process_place_address(self, token_info: '_TokenInfo', address: Mapping[str, str]) -> None:
- assert self.conn is not None
- hnrs = []
- addr_terms = []
-
- for key, value in address.items():
- if key == 'postcode':
- # Make sure the normalized postcode is present in the word table.
- if re.search(r'[:,;]', value) is None:
- norm_pc = self.normalize_postcode(value)
- token_info.set_postcode(norm_pc)
- self._cache.add_postcode(self.conn, norm_pc)
- elif key in ('housenumber', 'streetnumber', 'conscriptionnumber'):
- hnrs.append(value)
- elif key == 'street':
- token_info.add_street(self.conn, value)
- elif key == 'place':
- token_info.add_place(self.conn, value)
- elif not key.startswith('_') \
- and key not in ('country', 'full', 'inclusion'):
- addr_terms.append((key, value))
-
- if hnrs:
- token_info.add_housenumbers(self.conn, hnrs)
-
- if addr_terms:
- token_info.add_address_terms(self.conn, addr_terms)
-
-
-
-class _TokenInfo:
- """ Collect token information to be sent back to the database.
- """
- def __init__(self, cache: '_TokenCache') -> None:
- self.cache = cache
- self.data: Dict[str, Any] = {}
-
-
- def add_names(self, conn: Connection, names: Mapping[str, str]) -> None:
- """ Add token information for the names of the place.
- """
- with conn.cursor() as cur:
- # Create the token IDs for all names.
- self.data['names'] = cur.scalar("SELECT make_keywords(%s)::text",
- (names, ))
-
-
- def add_housenumbers(self, conn: Connection, hnrs: Sequence[str]) -> None:
- """ Extract housenumber information from the address.
- """
- if len(hnrs) == 1:
- token = self.cache.get_housenumber(hnrs[0])
- if token is not None:
- self.data['hnr_tokens'] = token
- self.data['hnr'] = hnrs[0]
- return
-
- # split numbers if necessary
- simple_list: List[str] = []
- for hnr in hnrs:
- simple_list.extend((x.strip() for x in re.split(r'[;,]', hnr)))
-
- if len(simple_list) > 1:
- simple_list = list(set(simple_list))
-
- with conn.cursor() as cur:
- cur.execute("SELECT * FROM create_housenumbers(%s)", (simple_list, ))
- result = cur.fetchone()
- assert result is not None
- self.data['hnr_tokens'], self.data['hnr'] = result
-
-
- def set_postcode(self, postcode: str) -> None:
- """ Set or replace the postcode token with the given value.
- """
- self.data['postcode'] = postcode
-
- def add_street(self, conn: Connection, street: str) -> None:
- """ Add addr:street match terms.
- """
- def _get_street(name: str) -> List[int]:
- with conn.cursor() as cur:
- return cast(List[int],
- cur.scalar("SELECT word_ids_from_name(%s)::text", (name, )))
-
- tokens = self.cache.streets.get(street, _get_street)
- if tokens:
- self.data['street'] = tokens
-
-
- def add_place(self, conn: Connection, place: str) -> None:
- """ Add addr:place search and match terms.
- """
- def _get_place(name: str) -> Tuple[List[int], List[int]]:
- with conn.cursor() as cur:
- cur.execute("""SELECT make_keywords(hstore('name' , %s))::text,
- word_ids_from_name(%s)::text""",
- (name, name))
- return cast(Tuple[List[int], List[int]], cur.fetchone())
-
- self.data['place_search'], self.data['place_match'] = \
- self.cache.places.get(place, _get_place)
-
-
- def add_address_terms(self, conn: Connection, terms: Sequence[Tuple[str, str]]) -> None:
- """ Add additional address terms.
- """
- def _get_address_term(name: str) -> Tuple[List[int], List[int]]:
- with conn.cursor() as cur:
- cur.execute("""SELECT addr_ids_from_name(%s)::text,
- word_ids_from_name(%s)::text""",
- (name, name))
- return cast(Tuple[List[int], List[int]], cur.fetchone())
-
- tokens = {}
- for key, value in terms:
- items = self.cache.address_terms.get(value, _get_address_term)
- if items[0] or items[1]:
- tokens[key] = items
-
- if tokens:
- self.data['addr'] = tokens
-
-
-class _LRU:
- """ Least recently used cache that accepts a generator function to
- produce the item when there is a cache miss.
- """
-
- def __init__(self, maxsize: int = 128):
- self.data: 'OrderedDict[str, Any]' = OrderedDict()
- self.maxsize = maxsize
-
-
- def get(self, key: str, generator: Callable[[str], Any]) -> Any:
- """ Get the item with the given key from the cache. If nothing
- is found in the cache, generate the value through the
- generator function and store it in the cache.
- """
- value = self.data.get(key)
- if value is not None:
- self.data.move_to_end(key)
- else:
- value = generator(key)
- if len(self.data) >= self.maxsize:
- self.data.popitem(last=False)
- self.data[key] = value
-
- return value
-
-
-class _TokenCache:
- """ Cache for token information to avoid repeated database queries.
-
- This cache is not thread-safe and needs to be instantiated per
- analyzer.
- """
- def __init__(self, conn: Connection):
- # various LRU caches
- self.streets = _LRU(maxsize=256)
- self.places = _LRU(maxsize=128)
- self.address_terms = _LRU(maxsize=1024)
-
- # Lookup houseunumbers up to 100 and cache them
- with conn.cursor() as cur:
- cur.execute("""SELECT i, ARRAY[getorcreate_housenumber_id(i::text)]::text
- FROM generate_series(1, 100) as i""")
- self._cached_housenumbers: Dict[str, str] = {str(r[0]): r[1] for r in cur}
-
- # For postcodes remember the ones that have already been added
- self.postcodes: Set[str] = set()
-
- def get_housenumber(self, number: str) -> Optional[str]:
- """ Get a housenumber token from the cache.
- """
- return self._cached_housenumbers.get(number)
-
-
- def add_postcode(self, conn: Connection, postcode: str) -> None:
- """ Make sure the given postcode is in the database.
- """
- if postcode not in self.postcodes:
- with conn.cursor() as cur:
- cur.execute('SELECT create_postcode_id(%s)', (postcode, ))
- self.postcodes.add(postcode)
+++ /dev/null
-# SPDX-License-Identifier: GPL-2.0-only
-#
-# This file is part of Nominatim. (https://nominatim.org)
-#
-# Copyright (C) 2022 by the Nominatim developer community.
-# For a full list of authors see the git log.
-"""
-Configuration for Sanitizers.
-"""
-from typing import Sequence, Optional, Pattern, Callable, Any, TYPE_CHECKING
-from collections import UserDict
-import re
-
-from nominatim.errors import UsageError
-
-# working around missing generics in Python < 3.8
-# See https://github.com/python/typing/issues/60#issuecomment-869757075
-if TYPE_CHECKING:
- _BaseUserDict = UserDict[str, Any]
-else:
- _BaseUserDict = UserDict
-
-class SanitizerConfig(_BaseUserDict):
- """ The `SanitizerConfig` class is a read-only dictionary
- with configuration options for the sanitizer.
- In addition to the usual dictionary functions, the class provides
- accessors to standard sanitizer options that are used by many of the
- sanitizers.
- """
-
- def get_string_list(self, param: str, default: Sequence[str] = tuple()) -> Sequence[str]:
- """ Extract a configuration parameter as a string list.
-
- Arguments:
- param: Name of the configuration parameter.
- default: Value to return, when the parameter is missing.
-
- Returns:
- If the parameter value is a simple string, it is returned as a
- one-item list. If the parameter value does not exist, the given
- default is returned. If the parameter value is a list, it is
- checked to contain only strings before being returned.
- """
- values = self.data.get(param, None)
-
- if values is None:
- return None if default is None else list(default)
-
- if isinstance(values, str):
- return [values] if values else []
-
- if not isinstance(values, (list, tuple)):
- raise UsageError(f"Parameter '{param}' must be string or list of strings.")
-
- if any(not isinstance(value, str) for value in values):
- raise UsageError(f"Parameter '{param}' must be string or list of strings.")
-
- return values
-
-
- def get_bool(self, param: str, default: Optional[bool] = None) -> bool:
- """ Extract a configuration parameter as a boolean.
-
- Arguments:
- param: Name of the configuration parameter. The parameter must
- contain one of the yaml boolean values or an
- UsageError will be raised.
- default: Value to return, when the parameter is missing.
- When set to `None`, the parameter must be defined.
-
- Returns:
- Boolean value of the given parameter.
- """
- value = self.data.get(param, default)
-
- if not isinstance(value, bool):
- raise UsageError(f"Parameter '{param}' must be a boolean value ('yes' or 'no'.")
-
- return value
-
-
- def get_delimiter(self, default: str = ',;') -> Pattern[str]:
- """ Return the 'delimiters' parameter in the configuration as a
- compiled regular expression that can be used to split strings on
- these delimiters.
-
- Arguments:
- default: Delimiters to be used when 'delimiters' parameter
- is not explicitly configured.
-
- Returns:
- A regular expression pattern which can be used to
- split a string. The regular expression makes sure that the
- resulting names are stripped and that repeated delimiters
- are ignored. It may still create empty fields on occasion. The
- code needs to filter those.
- """
- delimiter_set = set(self.data.get('delimiters', default))
- if not delimiter_set:
- raise UsageError("Empty 'delimiter' parameter not allowed for sanitizer.")
-
- return re.compile('\\s*[{}]+\\s*'.format(''.join('\\' + d for d in delimiter_set)))
-
-
- def get_filter_kind(self, *default: str) -> Callable[[str], bool]:
- """ Return a filter function for the name kind from the 'filter-kind'
- config parameter.
-
- If the 'filter-kind' parameter is empty, the filter lets all items
- pass. If the parameter is a string, it is interpreted as a single
- regular expression that must match the full kind string.
- If the parameter is a list then
- any of the regular expressions in the list must match to pass.
-
- Arguments:
- default: Filters to be used, when the 'filter-kind' parameter
- is not specified. If omitted then the default is to
- let all names pass.
-
- Returns:
- A filter function which takes a name string and returns
- True when the item passes the filter.
- """
- filters = self.get_string_list('filter-kind', default)
-
- if not filters:
- return lambda _: True
-
- regexes = [re.compile(regex) for regex in filters]
-
- return lambda name: any(regex.fullmatch(name) for regex in regexes)
+++ /dev/null
-# SPDX-License-Identifier: GPL-2.0-only
-#
-# This file is part of Nominatim. (https://nominatim.org)
-#
-# Copyright (C) 2022 by the Nominatim developer community.
-# For a full list of authors see the git log.
-"""
-Helper functions for executing external programs.
-"""
-from typing import Any, Union, Optional, Mapping, IO
-from pathlib import Path
-import logging
-import subprocess
-import urllib.request as urlrequest
-from urllib.parse import urlencode
-
-from nominatim.typing import StrPath
-from nominatim.version import version_str
-from nominatim.db.connection import get_pg_env
-
-LOG = logging.getLogger()
-
-def run_legacy_script(script: StrPath, *args: Union[int, str],
- nominatim_env: Any,
- throw_on_fail: bool = False) -> int:
- """ Run a Nominatim PHP script with the given arguments.
-
- Returns the exit code of the script. If `throw_on_fail` is True
- then throw a `CalledProcessError` on a non-zero exit.
- """
- cmd = ['/usr/bin/env', 'php', '-Cq',
- str(nominatim_env.phplib_dir / 'admin' / script)]
- cmd.extend([str(a) for a in args])
-
- env = nominatim_env.config.get_os_env()
- env['NOMINATIM_DATADIR'] = str(nominatim_env.data_dir)
- env['NOMINATIM_SQLDIR'] = str(nominatim_env.sqllib_dir)
- env['NOMINATIM_CONFIGDIR'] = str(nominatim_env.config_dir)
- env['NOMINATIM_DATABASE_MODULE_SRC_PATH'] = str(nominatim_env.module_dir)
- if not env['NOMINATIM_OSM2PGSQL_BINARY']:
- env['NOMINATIM_OSM2PGSQL_BINARY'] = str(nominatim_env.osm2pgsql_path)
-
- proc = subprocess.run(cmd, cwd=str(nominatim_env.project_dir), env=env,
- check=throw_on_fail)
-
- return proc.returncode
-
-def run_api_script(endpoint: str, project_dir: Path,
- extra_env: Optional[Mapping[str, str]] = None,
- phpcgi_bin: Optional[Path] = None,
- params: Optional[Mapping[str, Any]] = None) -> int:
- """ Execute a Nominatim API function.
-
- The function needs a project directory that contains the website
- directory with the scripts to be executed. The scripts will be run
- using php_cgi. Query parameters can be added as named arguments.
-
- Returns the exit code of the script.
- """
- log = logging.getLogger()
- webdir = str(project_dir / 'website')
- query_string = urlencode(params or {})
-
- env = dict(QUERY_STRING=query_string,
- SCRIPT_NAME=f'/{endpoint}.php',
- REQUEST_URI=f'/{endpoint}.php?{query_string}',
- CONTEXT_DOCUMENT_ROOT=webdir,
- SCRIPT_FILENAME=f'{webdir}/{endpoint}.php',
- HTTP_HOST='localhost',
- HTTP_USER_AGENT='nominatim-tool',
- REMOTE_ADDR='0.0.0.0',
- DOCUMENT_ROOT=webdir,
- REQUEST_METHOD='GET',
- SERVER_PROTOCOL='HTTP/1.1',
- GATEWAY_INTERFACE='CGI/1.1',
- REDIRECT_STATUS='CGI')
-
- if extra_env:
- env.update(extra_env)
-
- if phpcgi_bin is None:
- cmd = ['/usr/bin/env', 'php-cgi']
- else:
- cmd = [str(phpcgi_bin)]
-
- proc = subprocess.run(cmd, cwd=str(project_dir), env=env,
- stdout=subprocess.PIPE,
- stderr=subprocess.PIPE,
- check=False)
-
- if proc.returncode != 0 or proc.stderr:
- if proc.stderr:
- log.error(proc.stderr.decode('utf-8').replace('\\n', '\n'))
- else:
- log.error(proc.stdout.decode('utf-8').replace('\\n', '\n'))
- return proc.returncode or 1
-
- result = proc.stdout.decode('utf-8')
- content_start = result.find('\r\n\r\n')
-
- print(result[content_start + 4:].replace('\\n', '\n'))
-
- return 0
-
-
-def run_php_server(server_address: str, base_dir: StrPath) -> None:
- """ Run the built-in server from the given directory.
- """
- subprocess.run(['/usr/bin/env', 'php', '-S', server_address],
- cwd=str(base_dir), check=True)
-
-
-def run_osm2pgsql(options: Mapping[str, Any]) -> None:
- """ Run osm2pgsql with the given options.
- """
- env = get_pg_env(options['dsn'])
- cmd = [str(options['osm2pgsql']),
- '--hstore', '--latlon', '--slim',
- '--with-forward-dependencies', 'false',
- '--log-progress', 'true',
- '--number-processes', str(options['threads']),
- '--cache', str(options['osm2pgsql_cache']),
- '--output', 'gazetteer',
- '--style', str(options['osm2pgsql_style'])
- ]
- if options['append']:
- cmd.append('--append')
- else:
- cmd.append('--create')
-
- if options['flatnode_file']:
- cmd.extend(('--flat-nodes', options['flatnode_file']))
-
- for key, param in (('slim_data', '--tablespace-slim-data'),
- ('slim_index', '--tablespace-slim-index'),
- ('main_data', '--tablespace-main-data'),
- ('main_index', '--tablespace-main-index')):
- if options['tablespaces'][key]:
- cmd.extend((param, options['tablespaces'][key]))
-
- if options.get('disable_jit', False):
- env['PGOPTIONS'] = '-c jit=off -c max_parallel_workers_per_gather=0'
-
- if 'import_data' in options:
- cmd.extend(('-r', 'xml', '-'))
- elif isinstance(options['import_file'], list):
- for fname in options['import_file']:
- cmd.append(str(fname))
- else:
- cmd.append(str(options['import_file']))
-
- subprocess.run(cmd, cwd=options.get('cwd', '.'),
- input=options.get('import_data'),
- env=env, check=True)
-
-
-def get_url(url: str) -> str:
- """ Get the contents from the given URL and return it as a UTF-8 string.
- """
- headers = {"User-Agent": f"Nominatim/{version_str()}"}
-
- try:
- request = urlrequest.Request(url, headers=headers)
- with urlrequest.urlopen(request) as response: # type: IO[bytes]
- return response.read().decode('utf-8')
- except Exception:
- LOG.fatal('Failed to load URL: %s', url)
- raise
+++ /dev/null
-# SPDX-License-Identifier: GPL-2.0-only
-#
-# This file is part of Nominatim. (https://nominatim.org)
-#
-# Copyright (C) 2022 by the Nominatim developer community.
-# For a full list of authors see the git log.
-"""
-Functions for database migration to newer software versions.
-"""
-from typing import List, Tuple, Callable, Any
-import logging
-
-from psycopg2 import sql as pysql
-
-from nominatim.config import Configuration
-from nominatim.db import properties
-from nominatim.db.connection import connect, Connection
-from nominatim.version import NOMINATIM_VERSION, version_str
-from nominatim.tools import refresh
-from nominatim.tokenizer import factory as tokenizer_factory
-from nominatim.errors import UsageError
-
-LOG = logging.getLogger()
-
-VersionTuple = Tuple[int, int, int, int]
-
-_MIGRATION_FUNCTIONS : List[Tuple[VersionTuple, Callable[..., None]]] = []
-
-def migrate(config: Configuration, paths: Any) -> int:
- """ Check for the current database version and execute migrations,
- if necesssary.
- """
- with connect(config.get_libpq_dsn()) as conn:
- if conn.table_exists('nominatim_properties'):
- db_version_str = properties.get_property(conn, 'database_version')
- else:
- db_version_str = None
-
- if db_version_str is not None:
- parts = db_version_str.split('.')
- db_version = tuple(int(x) for x in parts[:2] + parts[2].split('-'))
-
- if db_version == NOMINATIM_VERSION:
- LOG.warning("Database already at latest version (%s)", db_version_str)
- return 0
-
- LOG.info("Detected database version: %s", db_version_str)
- else:
- db_version = _guess_version(conn)
-
-
- has_run_migration = False
- for version, func in _MIGRATION_FUNCTIONS:
- if db_version <= version:
- title = func.__doc__ or ''
- LOG.warning("Running: %s (%s)", title.split('\n', 1)[0],
- version_str(version))
- kwargs = dict(conn=conn, config=config, paths=paths)
- func(**kwargs)
- conn.commit()
- has_run_migration = True
-
- if has_run_migration:
- LOG.warning('Updating SQL functions.')
- refresh.create_functions(conn, config)
- tokenizer = tokenizer_factory.get_tokenizer_for_db(config)
- tokenizer.update_sql_functions(config)
-
- properties.set_property(conn, 'database_version', version_str())
-
- conn.commit()
-
- return 0
-
-
-def _guess_version(conn: Connection) -> VersionTuple:
- """ Guess a database version when there is no property table yet.
- Only migrations for 3.6 and later are supported, so bail out
- when the version seems older.
- """
- with conn.cursor() as cur:
- # In version 3.6, the country_name table was updated. Check for that.
- cnt = cur.scalar("""SELECT count(*) FROM
- (SELECT svals(name) FROM country_name
- WHERE country_code = 'gb')x;
- """)
- if cnt < 100:
- LOG.fatal('It looks like your database was imported with a version '
- 'prior to 3.6.0. Automatic migration not possible.')
- raise UsageError('Migration not possible.')
-
- return (3, 5, 0, 99)
-
-
-
-def _migration(major: int, minor: int, patch: int = 0,
- dbpatch: int = 0) -> Callable[[Callable[..., None]], Callable[..., None]]:
- """ Decorator for a single migration step. The parameters describe the
- version after which the migration is applicable, i.e before changing
- from the given version to the next, the migration is required.
-
- All migrations are run in the order in which they are defined in this
- file. Do not run global SQL scripts for migrations as you cannot be sure
- that these scripts do the same in later versions.
-
- Functions will always be reimported in full at the end of the migration
- process, so the migration functions may leave a temporary state behind
- there.
- """
- def decorator(func: Callable[..., None]) -> Callable[..., None]:
- _MIGRATION_FUNCTIONS.append(((major, minor, patch, dbpatch), func))
- return func
-
- return decorator
-
-
-@_migration(3, 5, 0, 99)
-def import_status_timestamp_change(conn: Connection, **_: Any) -> None:
- """ Add timezone to timestamp in status table.
-
- The import_status table has been changed to include timezone information
- with the time stamp.
- """
- with conn.cursor() as cur:
- cur.execute("""ALTER TABLE import_status ALTER COLUMN lastimportdate
- TYPE timestamp with time zone;""")
-
-
-@_migration(3, 5, 0, 99)
-def add_nominatim_property_table(conn: Connection, config: Configuration, **_: Any) -> None:
- """ Add nominatim_property table.
- """
- if not conn.table_exists('nominatim_properties'):
- with conn.cursor() as cur:
- cur.execute(pysql.SQL("""CREATE TABLE nominatim_properties (
- property TEXT,
- value TEXT);
- GRANT SELECT ON TABLE nominatim_properties TO {};
- """).format(pysql.Identifier(config.DATABASE_WEBUSER)))
-
-@_migration(3, 6, 0, 0)
-def change_housenumber_transliteration(conn: Connection, **_: Any) -> None:
- """ Transliterate housenumbers.
-
- The database schema switched from saving raw housenumbers in
- placex.housenumber to saving transliterated ones.
-
- Note: the function create_housenumber_id() has been dropped in later
- versions.
- """
- with conn.cursor() as cur:
- cur.execute("""CREATE OR REPLACE FUNCTION create_housenumber_id(housenumber TEXT)
- RETURNS TEXT AS $$
- DECLARE
- normtext TEXT;
- BEGIN
- SELECT array_to_string(array_agg(trans), ';')
- INTO normtext
- FROM (SELECT lookup_word as trans,
- getorcreate_housenumber_id(lookup_word)
- FROM (SELECT make_standard_name(h) as lookup_word
- FROM regexp_split_to_table(housenumber, '[,;]') h) x) y;
- return normtext;
- END;
- $$ LANGUAGE plpgsql STABLE STRICT;""")
- cur.execute("DELETE FROM word WHERE class = 'place' and type = 'house'")
- cur.execute("""UPDATE placex
- SET housenumber = create_housenumber_id(housenumber)
- WHERE housenumber is not null""")
-
-
-@_migration(3, 7, 0, 0)
-def switch_placenode_geometry_index(conn: Connection, **_: Any) -> None:
- """ Replace idx_placex_geometry_reverse_placeNode index.
-
- Make the index slightly more permissive, so that it can also be used
- when matching up boundaries and place nodes. It makes the index
- idx_placex_adminname index unnecessary.
- """
- with conn.cursor() as cur:
- cur.execute(""" CREATE INDEX IF NOT EXISTS idx_placex_geometry_placenode ON placex
- USING GIST (geometry)
- WHERE osm_type = 'N' and rank_search < 26
- and class = 'place' and type != 'postcode'
- and linked_place_id is null""")
- cur.execute(""" DROP INDEX IF EXISTS idx_placex_adminname """)
-
-
-@_migration(3, 7, 0, 1)
-def install_legacy_tokenizer(conn: Connection, config: Configuration, **_: Any) -> None:
- """ Setup legacy tokenizer.
-
- If no other tokenizer has been configured yet, then create the
- configuration for the backwards-compatible legacy tokenizer
- """
- if properties.get_property(conn, 'tokenizer') is None:
- with conn.cursor() as cur:
- for table in ('placex', 'location_property_osmline'):
- has_column = cur.scalar("""SELECT count(*) FROM information_schema.columns
- WHERE table_name = %s
- and column_name = 'token_info'""",
- (table, ))
- if has_column == 0:
- cur.execute(pysql.SQL('ALTER TABLE {} ADD COLUMN token_info JSONB')
- .format(pysql.Identifier(table)))
- tokenizer = tokenizer_factory.create_tokenizer(config, init_db=False,
- module_name='legacy')
-
- tokenizer.migrate_database(config) # type: ignore[attr-defined]
-
-
-@_migration(4, 0, 99, 0)
-def create_tiger_housenumber_index(conn: Connection, **_: Any) -> None:
- """ Create idx_location_property_tiger_parent_place_id with included
- house number.
-
- The inclusion is needed for efficient lookup of housenumbers in
- full address searches.
- """
- if conn.server_version_tuple() >= (11, 0, 0):
- with conn.cursor() as cur:
- cur.execute(""" CREATE INDEX IF NOT EXISTS
- idx_location_property_tiger_housenumber_migrated
- ON location_property_tiger
- USING btree(parent_place_id)
- INCLUDE (startnumber, endnumber) """)
-
-
-@_migration(4, 0, 99, 1)
-def create_interpolation_index_on_place(conn: Connection, **_: Any) -> None:
- """ Create idx_place_interpolations for lookup of interpolation lines
- on updates.
- """
- with conn.cursor() as cur:
- cur.execute("""CREATE INDEX IF NOT EXISTS idx_place_interpolations
- ON place USING gist(geometry)
- WHERE osm_type = 'W' and address ? 'interpolation'""")
-
-
-@_migration(4, 0, 99, 2)
-def add_step_column_for_interpolation(conn: Connection, **_: Any) -> None:
- """ Add a new column 'step' to the interpolations table.
-
- Also converts the data into the stricter format which requires that
- startnumbers comply with the odd/even requirements.
- """
- if conn.table_has_column('location_property_osmline', 'step'):
- return
-
- with conn.cursor() as cur:
- # Mark invalid all interpolations with no intermediate numbers.
- cur.execute("""UPDATE location_property_osmline SET startnumber = null
- WHERE endnumber - startnumber <= 1 """)
- # Align the start numbers where odd/even does not match.
- cur.execute("""UPDATE location_property_osmline
- SET startnumber = startnumber + 1,
- linegeo = ST_LineSubString(linegeo,
- 1.0 / (endnumber - startnumber)::float,
- 1)
- WHERE (interpolationtype = 'odd' and startnumber % 2 = 0)
- or (interpolationtype = 'even' and startnumber % 2 = 1)
- """)
- # Mark invalid odd/even interpolations with no intermediate numbers.
- cur.execute("""UPDATE location_property_osmline SET startnumber = null
- WHERE interpolationtype in ('odd', 'even')
- and endnumber - startnumber = 2""")
- # Finally add the new column and populate it.
- cur.execute("ALTER TABLE location_property_osmline ADD COLUMN step SMALLINT")
- cur.execute("""UPDATE location_property_osmline
- SET step = CASE WHEN interpolationtype = 'all'
- THEN 1 ELSE 2 END
- """)
-
-
-@_migration(4, 0, 99, 3)
-def add_step_column_for_tiger(conn: Connection, **_: Any) -> None:
- """ Add a new column 'step' to the tiger data table.
- """
- if conn.table_has_column('location_property_tiger', 'step'):
- return
-
- with conn.cursor() as cur:
- cur.execute("ALTER TABLE location_property_tiger ADD COLUMN step SMALLINT")
- cur.execute("""UPDATE location_property_tiger
- SET step = CASE WHEN interpolationtype = 'all'
- THEN 1 ELSE 2 END
- """)
-
-
-@_migration(4, 0, 99, 4)
-def add_derived_name_column_for_country_names(conn: Connection, **_: Any) -> None:
- """ Add a new column 'derived_name' which in the future takes the
- country names as imported from OSM data.
- """
- if not conn.table_has_column('country_name', 'derived_name'):
- with conn.cursor() as cur:
- cur.execute("ALTER TABLE country_name ADD COLUMN derived_name public.HSTORE")
-
-
-@_migration(4, 0, 99, 5)
-def mark_internal_country_names(conn: Connection, config: Configuration, **_: Any) -> None:
- """ Names from the country table should be marked as internal to prevent
- them from being deleted. Only necessary for ICU tokenizer.
- """
- import psycopg2.extras # pylint: disable=import-outside-toplevel
-
- tokenizer = tokenizer_factory.get_tokenizer_for_db(config)
- with tokenizer.name_analyzer() as analyzer:
- with conn.cursor() as cur:
- psycopg2.extras.register_hstore(cur)
- cur.execute("SELECT country_code, name FROM country_name")
-
- for country_code, names in cur:
- if not names:
- names = {}
- names['countrycode'] = country_code
- analyzer.add_country_names(country_code, names)
+++ /dev/null
-# SPDX-License-Identifier: GPL-2.0-only
-#
-# This file is part of Nominatim. (https://nominatim.org)
-#
-# Copyright (C) 2022 by the Nominatim developer community.
-# For a full list of authors see the git log.
-"""
-Version information for Nominatim.
-"""
-from typing import Optional, Tuple
-
-# Version information: major, minor, patch level, database patch level
-#
-# The first three numbers refer to the last released version.
-#
-# The database patch level tracks important changes between releases
-# and must always be increased when there is a change to the database or code
-# that requires a migration.
-#
-# When adding a migration on the development branch, raise the patch level
-# to 99 to make sure that the migration is applied when updating from a
-# patch release to the next minor version. Patch releases usually shouldn't
-# have migrations in them. When they are needed, then make sure that the
-# migration can be reapplied and set the migration version to the appropriate
-# patch level when cherry-picking the commit with the migration.
-#
-# Released versions always have a database patch level of 0.
-NOMINATIM_VERSION = (4, 1, 0, 0)
-
-POSTGRESQL_REQUIRED_VERSION = (9, 6)
-POSTGIS_REQUIRED_VERSION = (2, 2)
-
-# Cmake sets a variable @GIT_HASH@ by executing 'git --log'. It is not run
-# on every execution of 'make'.
-# cmake/tool-installed.tmpl is used to build the binary 'nominatim'. Inside
-# there is a call to set the variable value below.
-GIT_COMMIT_HASH : Optional[str] = None
-
-
-# pylint: disable=consider-using-f-string
-def version_str(version:Tuple[int, int, int, int] = NOMINATIM_VERSION) -> str:
- """
- Return a human-readable string of the version.
- """
- return '{}.{}.{}-{}'.format(*version)
+++ /dev/null
-Subproject commit 6a5d2500e9689f55485d186306aadc55560085fd
--- /dev/null
+../../COPYING
\ No newline at end of file
--- /dev/null
+# Nominatim - Frontend Library
+
+Nominatim is a tool to search OpenStreetMap data
+by name and address (geocoding) and to generate synthetic addresses of
+OSM points (reverse geocoding).
+
+This module implements the library for searching a Nominatim database
+imported with the [`nominatim-db`](https://pypi.org/project/nominatim-db/) package.
+
+## Installation
+
+To install the Nominatim API from pypi, run:
+
+ pip install nominatim-api
+
+## Running a Nominatim server
+
+You need Falcon or Starlette to run Nominatim as a service, as well as
+an ASGI-capable server like uvicorn. To install them from pypi run:
+
+ pip install falcon uvicorn
+
+You need to have a Nominatim database imported with the 'nominatim-db'
+package. Go to the project directory, then run uvicorn as:
+
+    uvicorn --factory nominatim_api.server.falcon.server:run_wsgi
+
+## Documentation
+
+The full documentation for the Nominatim library can be found at:
+https://nominatim.org/release-docs/latest/library/Getting-Started/
+
+The v1 API of the server is documented at:
+https://nominatim.org/release-docs/latest/api/Overview/
+
+## License
+
+The source code is available under a GPLv3 license.
--- /dev/null
+# SPDX-License-Identifier: GPL-3.0-or-later
+#
+# This file is part of Nominatim. (https://nominatim.org)
+#
+# Copyright (C) 2024 by the Nominatim developer community.
+# For a full list of authors see the git log.
+"""
+Path settings for extra data used by Nominatim.
+"""
+from pathlib import Path
+
+DATA_DIR = None
+SQLLIB_DIR = None
+LUALIB_DIR = None
+CONFIG_DIR = (Path(__file__) / '..' / 'resources' / 'settings').resolve()
--- /dev/null
+[project]
+name = "nominatim-api"
+description = "A tool for building a database of OpenStreetMap for geocoding and for searching the database. Search library."
+readme = "README.md"
+requires-python = ">=3.7"
+license = 'GPL-3.0-or-later'
+maintainers = [
+ { name = "Sarah Hoffmann", email = "lonvia@denofr.de" },
+ { name = "Marc Tobias", email = "mtmail-cpan@gmx.net" }
+]
+keywords = [ "geocoding", "OpenStreetMap", "search" ]
+classifiers = [
+ "Programming Language :: Python :: 3",
+ "License :: OSI Approved :: GNU General Public License (GPL)",
+ "Operating System :: OS Independent",
+]
+dependencies = [
+ "python-dotenv",
+ "pyYAML>=5.1",
+ "SQLAlchemy>=1.4.31",
+ "psycopg",
+ "PyICU"
+]
+dynamic = ["version"]
+
+[project.urls]
+Homepage = "https://nominatim.org"
+Documentation = "https://nominatim.org/release-docs/latest/"
+Issues = "https://github.com/osm-search/Nominatim/issues"
+Repository = "https://github.com/osm-search/Nominatim"
+
+[build-system]
+requires = ["hatchling"]
+build-backend = "hatchling.build"
+
+[tool.hatch.version]
+path = "src/nominatim_api/version.py"
+pattern = "NOMINATIM_API_VERSION = '(?P<version>[^']+)'"
+
+[tool.hatch.build.targets.sdist]
+include = [
+ "src/nominatim_api",
+ "src/nominatim_db/config.py",
+ "settings",
+ "extra_src/paths.py"
+]
+
+exclude = [
+ "src/nominatim_api/config.py"
+]
+
+[tool.hatch.build.targets.wheel]
+packages = ["src/nominatim_api"]
+
+[tool.hatch.build.targets.wheel.force-include]
+"src/nominatim_db/config.py" = "nominatim_api/config.py"
+"extra_src/paths.py" = "nominatim_api/paths.py"
+"settings" = "nominatim_api/resources/settings"
--- /dev/null
+../../settings/
\ No newline at end of file
--- /dev/null
+../../src/
\ No newline at end of file
--- /dev/null
+../../COPYING
\ No newline at end of file
--- /dev/null
+# Nominatim - DB Backend
+
+Nominatim is a tool to search OpenStreetMap data
+by name and address (geocoding) and to generate synthetic addresses of
+OSM points (reverse geocoding).
+
+This module implements the database backend for Nominatim and the
+command-line tool for importing and maintaining the database.
+
+## Installation
+
+### Prerequisites
+
+Nominatim requires [osm2pgsql](https://osm2pgsql.org/) (>=1.8) for reading
+OSM data and [PostgreSQL](https://www.postgresql.org/) (>=9.6) to store the data.
+
+On Ubuntu (>=23.04) and Debian (when using backports), you can install them with:
+
+ sudo apt-get install osm2pgsql postgresql-postgis
+
+### Installation from pypi
+
+To install Nominatim from pypi, run:
+
+ pip install nominatim-db
+
+
+## Quick start
+
+First create a project directory for your new Nominatim database, which
+is the space for additional configuration and customization:
+
+ mkdir nominatim-project
+
+Make sure you run all nominatim commands from within the project directory:
+
+ cd nominatim-project
+
+Download an appropriate data extract, for example from
+[Geofabrik](https://download.geofabrik.de/) and import the file:
+
+    nominatim import --osm-file <downloaded-osm-data.pbf>
+
+You will need to install the [`nominatim-api`](https://pypi.org/project/nominatim-api/)
+package to query the database.
+
+## Documentation
+
+An HTML version of the documentation can be found at
+https://nominatim.org/release-docs/latest/ .
+
+## License
+
+The source code is available under a GPLv3 license.
--- /dev/null
+../../data/
\ No newline at end of file
--- /dev/null
+# SPDX-License-Identifier: GPL-3.0-or-later
+#
+# This file is part of Nominatim. (https://nominatim.org)
+#
+# Copyright (C) 2024 by the Nominatim developer community.
+# For a full list of authors see the git log.
+"""
+Path settings for extra data used by Nominatim.
+"""
+from pathlib import Path
+
+DATA_DIR = (Path(__file__) / '..' / 'resources').resolve()
+SQLLIB_DIR = (DATA_DIR / 'lib-sql')
+LUALIB_DIR = (DATA_DIR / 'lib-lua')
+CONFIG_DIR = (DATA_DIR / 'settings')
--- /dev/null
+../../lib-lua
\ No newline at end of file
--- /dev/null
+../../lib-sql/
\ No newline at end of file
--- /dev/null
+[project]
+name = "nominatim-db"
+description = "A tool for building a database of OpenStreetMap for geocoding and for searching the database. Database backend."
+readme = "README.md"
+requires-python = ">=3.7"
+license = 'GPL-3.0-or-later'
+maintainers = [
+ { name = "Sarah Hoffmann", email = "lonvia@denofr.de" },
+ { name = "Marc Tobias", email = "mtmail-cpan@gmx.net" }
+]
+keywords = [ "geocoding", "OpenStreetMap", "search" ]
+classifiers = [
+ "Programming Language :: Python :: 3",
+ "License :: OSI Approved :: GNU General Public License (GPL)",
+ "Operating System :: OS Independent",
+]
+dependencies = [
+ "psycopg",
+ "python-dotenv",
+ "jinja2",
+ "pyYAML>=5.1",
+ "datrie",
+ "psutil",
+ "PyICU"
+]
+dynamic = ["version"]
+
+[project.urls]
+Homepage = "https://nominatim.org"
+Documentation = "https://nominatim.org/release-docs/latest/"
+Issues = "https://github.com/osm-search/Nominatim/issues"
+Repository = "https://github.com/osm-search/Nominatim"
+
+[build-system]
+requires = ["hatchling"]
+build-backend = "hatchling.build"
+
+[tool.hatch.version]
+path = "src/nominatim_db/version.py"
+pattern = "NOMINATIM_VERSION = parse_version.'(?P<version>[^-]+)"
+
+[tool.hatch.build.targets.sdist]
+include = [
+ "src/nominatim_db",
+ "scripts",
+ "lib-sql/**/*.sql",
+ "lib-lua/**/*.lua",
+ "settings",
+ "data/words.sql",
+ "extra_src/nominatim_db/paths.py"
+]
+
+artifacts = [
+ "data/country_osm_grid.sql.gz"
+]
+
+exclude = [
+ "src/nominatim_db/paths.py"
+]
+
+[tool.hatch.build.targets.wheel]
+packages = ["src/nominatim_db"]
+
+[tool.hatch.build.targets.wheel.shared-scripts]
+"scripts" = "/"
+
+[tool.hatch.build.targets.wheel.force-include]
+"lib-sql" = "nominatim_db/resources/lib-sql"
+"lib-lua" = "nominatim_db/resources/lib-lua"
+"settings" = "nominatim_db/resources/settings"
+"data/country_osm_grid.sql.gz" = "nominatim_db/resources/country_osm_grid.sql.gz"
+"data/words.sql" = "nominatim_db/resources/words.sql"
+"extra_src/nominatim_db/paths.py" = "nominatim_db/paths.py"
--- /dev/null
+#!python3
+
+from nominatim_db import cli
+
+exit(cli.nominatim(osm2pgsql_path=None))
--- /dev/null
+../../settings/
\ No newline at end of file
--- /dev/null
+../../src
\ No newline at end of file
+++ /dev/null
-<?xml version="1.0"?>
-<ruleset name="Nominatim Standard">
-
- <description>Nominatim coding standard</description>
-
- <!-- based on another standard, you can find it here -->
- <!-- /usr/share/php/PHP/CodeSniffer/Standards/PSR2/ruleset.xml -->
- <!-- https://github.com/squizlabs/PHP_CodeSniffer/blob/master/CodeSniffer/Standards/PSR2/ruleset.xml -->
- <rule ref="PSR2"/>
-
- <exclude-pattern>./lib/template/*html*</exclude-pattern>
- <exclude-pattern>./lib/template/includes/</exclude-pattern>
- <exclude-pattern>./module/</exclude-pattern>
- <exclude-pattern>./website/css</exclude-pattern>
- <exclude-pattern>./website/js</exclude-pattern>
-
- <rule ref="Generic.Files.LineLength">
- <properties>
- <property name="lineLimit" value="194"/>
- <property name="absoluteLineLimit" value="194"/>
- </properties>
- </rule>
-
-
- <!-- "A file should declare new symbols (classes, functions, constants, etc.) and cause no
- other side effects, or it should execute logic with side effects, but should not do both."
- ... we have too many script and includes to be able to enforce that.
- -->
- <rule ref="PSR1.Files.SideEffects.FoundWithSymbols">
- <severity>0</severity>
- </rule>
-
-
-
- <!-- eval, system, etc -->
- <rule ref="Generic.PHP.ForbiddenFunctions">
- <properties>
- <property name="forbiddenFunctions" type="array" value="sizeof=>count,delete=>unset,print=>echo,create_function=>null,eval=>null"/>
- </properties>
- </rule>
-
- <!-- **************************************************************
- DOCUMENTATION
- ************************************************************** -->
-
- <rule ref="PEAR.Commenting.FunctionComment.Missing">
- <severity>0</severity>
- </rule>
-
-
-
-
- <!-- **************************************************************
- COMMENTS
- ************************************************************** -->
-
- <!-- any comments in the lines before function() are better than forcing
- a PHPdoc style right now -->
- <rule ref="PEAR.Commenting.FunctionComment.WrongStyle">
- <severity>0</severity>
- </rule>
-
- <!-- We allow comments after statements -->
- <rule ref="Squiz.Commenting.PostStatementComment.Found">
- <severity>0</severity>
- </rule>
- <!-- ... even without space e.g. //some words -->
- <rule ref="Squiz.Commenting.InlineComment.NoSpaceBefore">
- <severity>0</severity>
- </rule>
- <!-- blank lines after inline comments are fine -->
- <rule ref="Squiz.Commenting.InlineComment.SpacingAfter">
- <severity>0</severity>
- </rule>
-
- <!-- Comments don't have to start uppercase -->
- <rule ref="Squiz.Commenting.InlineComment.NotCapital">
- <severity>0</severity>
- </rule>
- <!-- Comments don't have to end with one of .!? -->
- <rule ref="Squiz.Commenting.InlineComment.InvalidEndChar">
- <severity>0</severity>
- </rule>
- <!-- Empty comments are fine -->
- <rule ref="Squiz.Commenting.InlineComment.Empty">
- <severity>0</severity>
- </rule>
-
-
-
-
- <!-- **************************************************************
- INDENTATION, SPACING
- ************************************************************** -->
-
- <rule ref="Squiz.Arrays.ArrayDeclaration.KeyNotAligned" />
-
- <!-- Aligned looks nicer, but causes too many warnings currently -->
- <rule ref="Squiz.Arrays.ArrayDeclaration.DoubleArrowNotAligned">
- <severity>0</severity>
- </rule>
-
-
-
-
-
-
- <!-- **************************************************************
- VARIABLES
- ************************************************************** -->
-
- <!-- CONST_this_var is fine, we don't need ConstThisVar -->
- <rule ref="Generic.NamingConventions.UpperCaseConstantName.ConstantNotUpperCase">
- <severity>0</severity>
- </rule>
-
- <!-- simply disagree with "Each line in an array declaration must end in a comma" -->
- <rule ref="Squiz.Arrays.ArrayDeclaration.NoCommaAfterLast">
- <severity>0</severity>
- </rule>
- <rule ref="Squiz.Arrays.ArrayDeclaration.NoComma">
- <severity>0</severity>
- </rule>
-
- <!-- We allow "$abc = array($aPoint[1], $aPoint[2])" -->
- <rule ref="Squiz.Arrays.ArrayDeclaration.SingleLineNotAllowed">
- <severity>0</severity>
- </rule>
-
- <!-- array() instead of [] for initialisation -->
- <rule ref="Generic.Arrays.DisallowShortArraySyntax.Found" />
-
-
-
-
- <!-- **************************************************************
- STRING QUOTING
- ************************************************************** -->
-
- <!-- Prefer single quoted strings -->
- <rule ref="Squiz.Strings.DoubleQuoteUsage" />
-
- <!-- We allow variabled inside double-quoted strings "abc $somevar" -->
- <rule ref="Squiz.Strings.DoubleQuoteUsage.ContainsVar">
- <severity>0</severity>
- </rule>
-
-
-
-
-
- <!-- **************************************************************
- CONTROL STRUCTURES
- ************************************************************** -->
-
- <!-- we allow "if (a) echo 'b'" without brackets -->
- <rule ref="Generic.ControlStructures.InlineControlStructure.NotAllowed">
- <severity>0</severity>
- </rule>
-
- <!-- We allow "if (a)". No need for "if (a === TRUE)" -->
- <rule ref="Squiz.Operators.ComparisonOperatorUsage.ImplicitTrue">
- <severity>0</severity>
- </rule>
- <!-- ... same for "if (!a)" -->
- <rule ref="Squiz.Operators.ComparisonOperatorUsage.NotAllowed">
- <severity>0</severity>
- </rule>
-
-
-
-</ruleset>
"allotments" : 22,
"neighbourhood" : [20, 22],
"quarter" : [20, 22],
- "isolated_dwelling" : [22, 20],
- "farm" : [22, 20],
+ "isolated_dwelling" : [22, 25],
+ "farm" : [22, 25],
"city_block" : 25,
"mountain_pass" : 25,
"square" : 25,
"stone" : 30,
"" : [22, 0]
},
+ "water" : {
+ "lake" : [20, 0],
+ "reservoir" : [20, 0],
+ "wastewater" : [24, 0],
+ "pond" : [24, 0],
+ "fountain" : [24, 0],
+ "" : [22, 0]
+ },
"waterway" : {
"river" : [19, 0],
"stream" : [22, 0],
+++ /dev/null
-name:
- default: De Nederlandse Antillen
- af: Nederlandse Antille
- an: Antillas Neerlandesas
- ar: جزر الأنتيل
- be: Нідэрландскія Антылы
- bg: Холандски Антили
- br: Antilhez Nederlandat
- bs: Holandski Antili
- ca: Antilles Neerlandeses
- cs: Nizozemské Antily
- cy: Antilles yr Iseldiroedd
- da: Nederlandske Antiller
- de: Niederländische Antillen
- dv: ނެދަލޭންޑު އެންޓިލޭ
- el: Ολλανδικές Αντίλλες
- en: Netherlands Antilles
- eo: Nederlandaj Antiloj
- es: Antillas Neerlandesas;Antillas Holandesas;Indias Occidentales Holandesas
- et: Hollandi Antillid
- eu: Holandarren Antillak
- fa: آنتیل هلند
- fi: Alankomaiden Antillit
- fo: Niðurlendsku Antillurnar
- fr: Antilles néerlandaises
- fy: Nederlânske Antillen
- ga: Aintillí na hÍsiltíre
- gl: Antillas Neerlandesas
- he: האנטילים ההולנדיים
- hi: नीदरलैंड एंटीलीज़
- hr: Nizozemski Antili
- hu: Holland Antillák
- ia: Antillas Nederlandese
- id: Antillen Belanda
- io: Nederlandana Antili
- is: Hollensku Antillaeyjar
- it: Antille Olandesi
- ja: オランダ領アンティル
- jv: Antillen Walanda
- ka: ნიდერლანდის ანტილები
- kk: Антийлер
- ko: 네덜란드령 안틸레스
- kw: Antillys Iseldiryek
- la: Antillae Nederlandiae
- lb: Hollännesch Antillen
- li: Nederlandse Antille
- ln: Antiya ya Holanda
- lt: Nyderlandų Antilai
- lv: Antiļas
- mn: Нидерландын Антиллийн Арлууд
- mr: नेदरलँड्स अँटिल्स
- ms: Antillen Belanda
- nn: Dei nederlandske Antillane
- "no": De nederlandske Antillene
- pl: Antyle Holenderskie
- pt: Antilhas Holandesas
- ro: Antilele Olandeze
- ru: Нидерландские Антилы
- sh: Nizozemski Antili
- sk: Holandské Antily
- sl: Nizozemski Antili
- sr: Холандски Антили
- sv: Nederländska Antillerna
- sw: Antili za Kiholanzi
- ta: நெதர்லாந்து அண்டிலிசு
- tg: Антил Ҳоланд
- th: เนเธอร์แลนด์แอนทิลลิส
- tr: Hollanda Antilleri
- uk: Нідерландські Антильські острови
- vi: Antille thuộc Hà Lan
- zh: 荷属安的列斯
+++ /dev/null
-name:
- default: Antarctica
+++ /dev/null
-name:
- default: American Samoa
+++ /dev/null
-name:
- default: Aruba
+++ /dev/null
-name:
- default: Aland Islands
+++ /dev/null
-name:
- default: Saint Barthélemy
+++ /dev/null
-name:
- default: "\N"
+++ /dev/null
-name:
- default: Bouvet Island
+++ /dev/null
-name:
- default: Cocos (Keeling) Islands
- af: Cocos (Keeling) Eilande
- ar: جزر كوكوس (كيلينغ)
- be: Какосавыя (Кілінг) астравы
- br: Inizi Kokoz
- ca: Illes Cocos
- da: Cocosøerne
- de: Kokosinseln
- el: Νησιά Κόκος
- en: Cocos (Keeling) Islands
- eo: Kokosinsuloj
- es: Islas Cocos (Keeling)
- et: Kookossaared
- eu: Cocos (Keeling) uharteak
- fa: جزایر کوکوس
- fi: Kookossaaret
- fr: Îles Cocos
- fy: de Kokoseilannen
- he: איי קוקוס (קילינג)
- hr: Kokosovi otoci
- hu: Kókusz (Keeling)-szigetek
- id: Kepulauan Cocos (Keeling)
- is: Kókoseyjar
- it: Isole Cocos e Keeling
- lt: Kokoso (Keelingo) salos
- lv: Kokosu (Kīlinga) salas
- mn: Кокосын (Кийлингийн) Арлууд
- nl: Cocoseilanden
- pl: Wyspy Kokosowe
- ru: Кокосовые острова
- sl: Kokosovi otoki
- sv: Kokosöarna
- tr: Cocos (Keeling) Adaları
- uk: Кокосові острови
- vi: Quần đảo Cocos (Keeling)
- zh: 科科斯(基林)群島
+++ /dev/null
-name:
- default: Curaçao
- en: Curaçao
- es: Curazao
- fr: Curaçao
- ru: Кюрасао
- sv: Curaçao
+++ /dev/null
-name:
- default: Christmas Island
- af: Christmas-eiland
- ar: جزيرة الميلاد
- bg: Рождество
- br: Enez Nedeleg
- bs: Božićno ostrvo
- ca: Illa Christmas
- cs: Vánoční ostrov
- cy: Ynys y Nadolig
- da: Juleøen
- de: Weihnachtsinsel
- el: Νήσος των Χριστουγέννων
- eo: Kristnaskinsulo
- es: Isla de Navidad
- et: Jõulusaar
- eu: Christmas uhartea
- fa: جزیره کریسمس
- fi: Joulusaari
- fr: Île Christmas
- fy: Krysteilân
- ga: Oileán na Nollag
- gl: Illa de Nadal
- he: טריטוריית האי חג המולד
- hi: क्रिसमस आईलैंड
- hr: Božićni otok
- hu: Karácsony-sziget
- id: Pulau Natal
- is: Jólaeyja
- it: Isola del Natale
- ja: クリスマス島
- ka: შობის კუნძული
- kk: Кристмас аралы
- ko: 크리스마스 섬
- kw: Ynys Nadelik
- lb: Chrëschtdagsinsel
- lt: Kalėdų sala
- lv: Ziemsvētku sala
- mn: Зул Сарын Арал
- mr: क्रिसमस द्वीप
- ms: Pulau Krismas
- nl: Christmaseiland
- nn: Christmasøya
- "no": Christmasøya
- pl: Wyspa Bożego Narodzenia
- pt: Ilha Christmas
- ro: Insula Crăciunului
- ru: Остров Рождества
- sh: Božićni otok
- sk: Vianočný ostrov
- sl: Božični otoki
- sr: Божићно Острво
- sv: Julön
- sw: Kisiwa cha Krismasi
- ta: கிறிஸ்துமசு தீவு
- th: เกาะคริสต์มาส
- tr: Christmas Adası
- uk: Острів Різдва
- vi: Đảo Christmas
- wo: Dunu Christmas
- zh: 圣诞岛
+++ /dev/null
-name:
- default: Guyane Française
- af: Frans-Guyana
- ar: غيانا
- br: Gwiana c’hall
- ca: Guaiana Francesa
- cy: Guyane
- da: Fransk Guyana
- de: Französisch-Guayana
- el: Γαλλική Γουιάνα
- en: French Guiana
- eo: Gujano
- es: Guayana Francesa
- et: Prantsuse Guajaana
- fa: گویان فرانسه
- fi: Ranskan Guayana
- fr: Guyane française
- fy: Frânsk Guyana
- ga: Guáin na Fraince
- gd: Guiana Fhrangach
- he: גיאנה הצרפתית
- hr: Francuska Gvajana
- hu: Francia Guyana
- id: Guyana Perancis
- is: Franska Gvæjana
- it: Guyana francese
- la: Guiana Francica
- li: Frans Guyana
- lt: Prancūzijos Gviana
- lv: Franču Gviāna
- mn: Франц Гвиана
- nl: Frans-Guyana
- pl: Gujana Francuska
- ru: Французская Гвиана
- sl: Francoska Gvajana
- sv: Franska Guyana
- th: เฟรนช์เกียนา
- tr: Fransız Guyanası
- uk: Французька Гвіана
- vi: Guyane thuộc Pháp
- zh: 法属圭亚那
+++ /dev/null
-name:
- default: Guadeloupe
- ar: غوادلوب
- be: Гвадэлупа
- br: Gwadeloup
- ca: Illa de Guadalupe
- da: Guadeloupe
- el: Γουαδελούπη
- en: Guadeloupe
- eo: Gvadelupo
- es: Guadalupe
- fa: گوادلوپ
- fi: Guadeloupe
- fr: Guadeloupe
- fy: Guadelûp
- ga: Guadalúip
- he: גוואדלופ
- hr: Gvadalupa
- hu: Guadeloupe
- is: Gvadelúpeyjar
- it: Guadalupa
- la: Guadalupa
- lt: Gvadelupa
- lv: Gvadelupa
- mn: Гуаделупе
- pl: Gwadelupa
- ru: Гваделупа
- sv: Guadeloupe
- th: กวาเดอลูป
- uk: Гваделупа
- zh: 瓜德罗普
+++ /dev/null
-name:
- default: Guam
+++ /dev/null
-name:
- default: Hong Kong
+++ /dev/null
-name:
- default: Heard Island and MaxDonald Islands
+++ /dev/null
-name:
- default: Saint Martin
+++ /dev/null
-name:
- default: Macao
+++ /dev/null
-name:
- default: Northern Mariana Islands
+++ /dev/null
-name:
- default: Martinique
- ar: مارتينيك
- be: Марцініка
- br: Martinik
- ca: Martinica
- da: Martinique
- el: Μαρτινίκα
- en: Martinique
- eo: Martiniko
- es: Martinica
- fa: مارتینیک
- fi: Martinique
- fr: Martinique
- fy: Martinyk
- he: מרטיניק
- hr: Martinik
- hu: Martinique
- id: Martinik
- is: Martinique
- it: Martinica
- la: Martinica
- lt: Martinika
- lv: Martinika
- mn: Мартиник
- pl: Martynika
- ru: Мартиника
- sv: Martinique
- uk: Мартиніка
- zh: 馬提尼克
+++ /dev/null
-name:
- default: Nouvelle-Calédonie
- af: Nieu-Caledonia
- ar: كاليدونيا الجديدة
- be: Новая Каледонія
- br: Kaledonia Nevez
- ca: Nova Caledònia
- cy: Caledonia Newydd
- da: Ny Kaledonien
- de: Neukaledonien
- el: Νέα Καληδονία
- en: New Caledonia
- eo: Nov-Kaledonio
- es: Nueva Caledonia
- fa: کالدونیای جدید
- fi: Uusi-Kaledonia
- fr: Nouvelle-Calédonie
- ga: An Nua-Chaladóin
- he: קלדוניה החדשה
- hr: Nova Kaledonija
- hu: Új-Kaledónia
- id: Kaledonia Baru
- is: Nýja-Kaledónía
- it: Nuova Caledonia
- la: Nova Caledonia
- lt: Naujoji Kaledonija
- lv: Jaunkaledonija
- mn: Шинэ Каледони
- nl: Nieuw-Caledonië
- pl: Nowa Kaledonia
- ru: Новая Каледония
- sl: Nova Kaledonija
- sv: Nya Kaledonien
- th: นิวแคลิโดเนีย
- tr: Yeni Kaledonya
- uk: Нова Каледонія
- zh: 新喀里多尼亚
+++ /dev/null
-name:
- default: Norfolk Island
- af: Norfolkeiland
- ar: جزيرة نورفولك
- be: Норфалк
- br: Enez Norfolk
- ca: Illa Norfolk
- cy: Ynys Norfolk
- da: Norfolk-øen
- de: Norfolkinsel
- en: Norfolk Island
- eo: Norfolkinsulo
- es: Isla Norfolk
- et: Norfolki saar
- fi: Norfolkinsaari
- fr: Île Norfolk
- fy: Norfolk
- ga: Oileán Norfolk
- he: האי נורפוק
- hr: Otok Norfolk
- hu: Norfolk-sziget
- id: Pulau Norfolk
- is: Norfolkeyja
- it: Isola Norfolk
- la: Insula Norfolcia
- lt: Norfolko sala
- lv: Norfolkas sala
- mn: Норфолк Арал
- nl: Norfolk
- pl: Wyspa Norfolk
- ru: Остров Норфолк
- sv: Norfolkön
- tr: Norfolk Adası
- uk: Острів Норфолк
- vi: Đảo Norfolk
- zh: 诺福克岛
+++ /dev/null
-name:
- default: Polynésie française
- af: Franse Polynesië
- an: Polinesia Franzesa
- ar: بولونيزيا الفرنسية
- az: Fransa Polineziyası
- be: Французская Палінезія
- bg: Френска Полинезия
- br: Polinezia Frañs
- bs: Francuska Polinezija
- ca: Polinèsia Francesa
- cs: Francouzská Polynésie
- cy: Polynesia Ffrengig
- da: Fransk Polynesien
- de: Französisch-Polynesien
- dv: ފަރަންސޭސި ޕޮލިނޭޝިއާ
- el: Γαλλική Πολυνησία
- en: French Polynesia
- eo: Franca Polinezio
- es: Polinesia Francesa
- et: Prantsuse Polüneesia
- eu: Frantziar Polinesia
- fa: پلینزی فرانسه
- fi: Ranskan Polynesia
- fr: Polynésie française
- fy: Frânsk Polyneezje
- ga: Polainéis na Fraince
- gd: French Polynesia
- gl: Polinesia francesa
- he: פולינזיה הצרפתית
- hi: फ्रेंच पोलीनेशिया
- hr: Francuska Polinezija
- hu: Francia Polinézia
- id: Polinesia Perancis
- io: Franca Polinezia
- is: Franska Pólýnesía
- it: Polinesia francese
- ja: フランス領ポリネシア
- jv: Polinesia Perancis
- kk: Франция Полинезиясы
- ko: 프랑스령 폴리네시아
- kw: Polynesi Frynkek
- la: Polynesia Francica
- lb: Franséisch-Polynesien
- lt: Prancūzijos Polinezija
- lv: Franču Polinēzija
- mi: Porinīhia Wīwī
- mk: Француска Полинезија
- mn: Францын Полинез
- mr: फ्रेंच पॉलिनेशिया
- ms: Polinesia Perancis
- nl: Frans-Polynesië
- nn: Fransk Polynesia
- "no": Fransk Polynesia
- oc: Polinesia Francesa
- os: Францы Полинези
- pl: Polinezja Francuska
- pt: Polinésia Francesa
- qu: Phransis Pulinisya
- ro: Polinezia Franceză
- ru: Французская Полинезия
- se: Frankriikka Polynesia
- sh: Francuska Polinezija
- sk: Francúzska Polynézia
- sl: Francoska Polinezija
- sr: Француска Полинезија
- sv: Franska Polynesien
- sw: Polynesia ya Kifaransa
- ta: பிரெஞ்சு பொலினீசியா
- th: เฟรนช์โปลินีเซีย
- tr: Fransız Polinezyası
- ty: Pōrīnetia Farāni
- ug: Fransiyige Qarashliq Polinéziye
- uk: Французька Полінезія
- vi: Polynésie thuộc Pháp
- wo: Polineesi gu Faraas
- zh: 法属波利尼西亚
+++ /dev/null
-name:
- default: Saint-Pierre-et-Miquelon
- af: Saint-Pierre et Miquelon
- be: Святы П’ер і Міквелон
- da: Saint Pierre og Miquelon
- de: Saint-Pierre und Miquelon
- en: Saint Pierre and Miquelon
- eo: Sankta-Piero kaj Mikelono
- es: San Pedro y Miguelón
- fi: Saint-Pierre ja Miquelon
- fr: Saint-Pierre-et-Miquelon
- hr: Sveti Petar i Mikelon
- hu: Saint-Pierre és Miquelon
- lt: Sen Pjeras ir Mikelonas
- lv: Senpjēra un Mikelona
- mn: Сент Пьер ба Микелон
- sv: Saint-Pierre och Miquelon
- tr: Saint-Pierre ve Miquelon
- uk: Сен-П'єр і Мікелон
+++ /dev/null
-name:
- default: Puerto Rico
+++ /dev/null
-name:
- default: Réunion
- af: Réunion
- ar: ريونيون
- be: Руньён
- br: Ar Reunion
- ca: Illa de la Reunió
- da: Reunion
- el: Ρεϊνιόν
- eo: Reunio
- es: La Reunión
- fa: رئونیون
- fi: Réunion
- fr: La Réunion
- he: ראוניון
- hu: Réunion
- is: Réunion
- it: Riunione
- la: Reunio
- lt: Reunionas
- lv: Reinjona
- mn: Реюньон
- pl: Reunion
- ru: Реюньон
- sl: Reunion
- sv: Réunion
- th: เรอูนียง
- uk: Реюньйон
- zh: 留尼汪
+++ /dev/null
-name:
- default: Svalbard and Jan Mayen
+++ /dev/null
-name:
- default: Sint Maarten
+++ /dev/null
-name:
- default: Terres australes et antarctiques françaises
- af: Franse Suidelike en Antarktiese Gebiede
- an: Territorios Australs Franzeses
- ar: الأراضي الجنوبية الفرنسية
- be: Французскія Паўднёвыя тэрыторыі
- bg: Френски южни и антарктически територии
- br: Douaroù Aostral hag Antarktikel Frañs
- ca: Terres Australs i Antàrtiques Franceses
- cs: Francouzská jižní a antarktická území
- da: Franske sydlige og Antarktiske territorier
- de: Französische Süd- und Antarktisgebiete
- el: Γαλλικά νότια και ανταρκτικά εδάφη
- en: French Southern Lands
- eo: Francaj Sudaj Teritorioj
- es: Tierras Australes y Antárticas Francesas
- eu: Frantziaren lurralde austral eta antartikoak
- fi: Ranskan eteläiset ja antarktiset alueet
- fr: Terres australes et antarctiques françaises
- fy: Frânske Súdlike en Antarktyske Lannen
- gl: Terras Austrais e Antárticas Francesas
- hr: Francuski južni i antarktički teritoriji
- hu: Francia déli és antarktiszi területek
- id: Daratan Selatan dan Antarktika Perancis
- is: Frönsku suðlægu landsvæðin
- it: Terre Australi e Antartiche Francesi
- ja: フランス領南方・南極地域
- ko: 프랑스령 남부와 남극 지역
- kw: Tiryow Deghow hag Antarktik Frynkek
- lt: Prancūzijos Pietų Sritys
- lv: Francijas Dienvidjūru un Antarktikas Zemes
- nl: Franse Zuidelijke en Antarctische Gebieden
- "no": De franske sørterritorier
- oc: Tèrras Australas e Antarticas Francesas
- pl: Francuskie Terytoria Południowe i Antarktyczne
- pt: Terras Austrais e Antárticas Francesas
- ro: Teritoriile australe şi antarctice franceze
- ru: Французские Южные и Антарктические территории
- sh: Francuske Južne Teritorije
- sk: Francúzske južné a antarktické územia
- sl: Francoske južne in antarktične dežele
- sr: Француске јужне и антарктичке земље
- sv: Franska sydterritorierna
- ta: பிரெஞ்சு தென்னக நிலங்களும் அண்டாடிக் நிலமும்
- tr: Fransız Güney ve Antarktika Toprakları
- uk: Французькі Південні та Антарктичні території
- vi: Vùng đất phía Nam và châu Nam Cực thuộc Pháp
- zh: 法属南部领地
+++ /dev/null
-name:
- default: United States Minor Outlying Islands
+++ /dev/null
-name:
- default: United States Virgin Islands
+++ /dev/null
-name:
- default: Wallis-et-Futuna
- af: Wallis-en-Futuna
- an: Wallis e Futuna
- ar: جزر واليس وفوتونا
- be: Уоліс і Футуна
- bg: Уолис и Футуна
- br: Wallis ha Futuna
- ca: Wallis i Futuna
- cs: Wallis a Futuna
- cy: Wallis a Futuna
- da: Wallis og Futuna
- de: Wallis und Futuna
- dv: ވާލީ އަދި ފުތޫނާ
- el: Ουώλλις και Φουτούνα
- en: Wallis and Futuna Islands
- eo: Valiso kaj Futuno
- es: Wallis y Futuna
- et: Wallis ja Futuna
- eu: Wallis eta Futuna
- fa: والیس و فوتونا
- fi: Wallis- ja Futunasaaret
- fr: Wallis-et-Futuna
- fy: Wallis en Fûtûna
- ga: Vailís agus Futúna
- gl: Wallis e Futuna
- he: ואליס ופוטונה
- hr: Wallis i Futuna
- hu: Wallis és Futuna
- id: Wallis dan Futuna
- io: Wallis e Futuna Insuli
- is: Wallis- og Fútúnaeyjar
- it: Wallis e Futuna
- ja: ウォリス・フツナ
- jv: Wallis lan Futuna
- ko: 왈리스 퓌튀나
- kw: Wallis ha Futuna
- la: Vallis et Futuna
- lb: Wallis a Futuna
- lt: Walliso ir Futuna salos
- lv: Volisa un Futuna
- mn: Уоллис ба Футуна
- mr: वालिस व फुतुना
- ms: Wallis dan Futuna
- nl: Wallis en Futuna
- nn: Wallis- og Futunaøyane
- "no": Wallis- og Futunaøyene
- oc: Wallis e Futuna
- pl: Wallis i Futuna
- pt: Wallis e Futuna
- ro: Wallis şi Futuna
- ru: Уоллис и Футуна
- se: Wallis ja Futuna
- sh: Wallis i Futuna
- sk: Wallis a Futuna
- sl: Wallis in Futuna
- sm: Wallis and Futuna
- sr: Валис и Футуна
- sv: Wallis- och Futunaöarna
- sw: Wallis na Futuna
- ta: வலிசும் புட்டூனாவும்
- th: หมู่เกาะวาลลิสและหมู่เกาะฟุตูนา
- tr: Wallis ve Futuna Adaları
- ug: Wallis we Futuna Taqim Aralliri
- uk: Волліс і Футуна
- vi: Wallis và Futuna
- wo: Wallis ak Futuna
- zh: 瓦利斯和富图纳群岛
+++ /dev/null
-name:
- default: Mayotte
pattern: "dddd"
-# Netherlands Antilles (De Nederlandse Antillen)
-an:
- partition: 58
- languages: nl, en, pap
- names: !include country-names/an.yaml
-
-
# Angola (Angola)
ao:
partition: 85
postcode: no
-# (Antarctica)
-aq:
- partition: 181
- languages: en, es, fr, ru
- names: !include country-names/aq.yaml
- postcode: no
-
-
# Argentina (Argentina)
ar:
partition: 39
pattern: "l?dddd(?:lll)?"
-# (American Samoa)
-as:
- partition: 182
- languages: en, sm
- names: !include country-names/as.yaml
-
-
# Austria (Österreich)
at:
partition: 245
pattern: "dddd"
-# (Aruba)
-aw:
- partition: 183
- languages: nl, pap
- names: !include country-names/aw.yaml
- postcode: no
-
-
-# (Aland Islands)
-ax:
- partition: 184
- languages: sv
- names: !include country-names/ax.yaml
-
-
# Azerbaijan (Azərbaycan)
az:
partition: 119
postcode: no
-# (Saint Barthélemy)
-bl:
- partition: 204
- languages: fr
- names: !include country-names/bl.yaml
-
-
# Bermuda (Bermuda)
bm:
partition: 176
postcode: no
-# Caribbean Netherlands (Caribisch Nederland)
-bq:
- partition: 250
- languages: nl
- names: !include country-names/bq.yaml
-
-
# Brazil (Brasil)
br:
partition: 121
pattern: "ddddd"
-# (Bouvet Island)
-bv:
- partition: 185
- languages: "no"
- names: !include country-names/bv.yaml
-
-
# Botswana (Botswana)
bw:
partition: 122
output: \1 \2
-# Cocos (Keeling) Islands (Cocos (Keeling) Islands)
-cc:
- partition: 118
- languages: en
- names: !include country-names/cc.yaml
-
-
# Democratic Republic of the Congo (République démocratique du Congo)
cd:
partition: 229
pattern: "dddd"
-# Curaçao (Curaçao)
-cw:
- partition: 248
- languages: nl, en
- names: !include country-names/cw.yaml
-
-
-# Christmas Island (Christmas Island)
-cx:
- partition: 177
- languages: en
- names: !include country-names/cx.yaml
-
-
# Cyprus (Κύπρος - Kıbrıs)
cy:
partition: 114
pattern: "dddd"
-# French Guiana (Guyane Française)
-gf:
- partition: 231
- languages: fr
- names: !include country-names/gf.yaml
-
-
# Guernsey (Guernsey)
gg:
partition: 77
pattern: "ddd"
-# Guadeloupe (Guadeloupe)
-gp:
- partition: 232
- languages: fr
- names: !include country-names/gp.yaml
-
-
# Equatorial Guinea (Guinea Ecuatorial)
gq:
partition: 12
pattern: "ddddd"
-# Guam (Guam)
-gu:
- partition: 187
- languages: en, ch
- names: !include country-names/gu.yaml
-
-
# Guinea-Bissau (Guiné-Bissau)
gw:
partition: 8
postcode: no
-# (Hong Kong)
-hk:
- partition: 188
- languages: zh-hant, en
- names: !include country-names/hk.yaml
-
-
-# (Heard Island and MaxDonald Islands)
-hm:
- partition: 189
- languages: en
- names: !include country-names/hm.yaml
-
-
# Honduras (Honduras)
hn:
partition: 56
pattern: "ddddd"
-# Saint Martin (Saint Martin)
-mf:
- partition: 203
- languages: fr
- names: !include country-names/mf.yaml
-
-
# Madagascar (Madagasikara)
mg:
partition: 164
pattern: "ddddd"
-# Macao (Macao)
-mo:
- partition: 191
- languages: zh-hant, pt
- names: !include country-names/mo.yaml
- postcode: no
-
-
-# Northern Mariana Islands (Northern Mariana Islands)
-mp:
- partition: 192
- languages: ch, en
- names: !include country-names/mp.yaml
-
-
-# Martinique (Martinique)
-mq:
- partition: 233
- languages: fr
- names: !include country-names/mq.yaml
-
-
# Mauritania (موريتانيا)
mr:
partition: 149
pattern: "ddddd"
-# New Caledonia (Nouvelle-Calédonie)
-nc:
- partition: 234
- languages: fr
- names: !include country-names/nc.yaml
-
-
# Niger (Niger)
ne:
partition: 226
pattern: "dddd"
-# Norfolk Island (Norfolk Island)
-nf:
- partition: 100
- languages: en, pih
- names: !include country-names/nf.yaml
-
-
# Nigeria (Nigeria)
ng:
partition: 218
pattern: "ddddd"
-# French Polynesia (Polynésie française)
-pf:
- partition: 202
- languages: fr
- names: !include country-names/pf.yaml
-
-
# Papua New Guinea (Papua Niugini)
pg:
partition: 71
output: \1-\2
-# Saint Pierre and Miquelon (Saint-Pierre-et-Miquelon)
-pm:
- partition: 236
- languages: fr
- names: !include country-names/pm.yaml
-
-
# Pitcairn Islands (Pitcairn Islands)
pn:
partition: 113
output: \1 \2
-# Puerto Rico (Puerto Rico)
-pr:
- partition: 193
- languages: es, en
- names: !include country-names/pr.yaml
-
-
# Palestinian Territory (Palestinian Territory)
ps:
partition: 194
postcode: no
-# (Réunion)
-re:
- partition: 235
- languages: fr
- names: !include country-names/re.yaml
-
-
# Romania (România)
ro:
partition: 170
pattern: "dddd"
-# (Svalbard and Jan Mayen)
-sj:
- partition: 197
- languages: "no"
- names: !include country-names/sj.yaml
-
-
# Slovakia (Slovensko)
sk:
partition: 172
pattern: "dddd"
-# (Sint Maarten)
-sx:
- partition: 249
- languages: nl, en
- names: !include country-names/sx.yaml
-
-
# Syria (سوريا)
sy:
partition: 104
postcode: no
-# French Southern Lands (Terres australes et antarctiques françaises)
-tf:
- partition: 132
- languages: fr
- names: !include country-names/tf.yaml
-
-
# Togo (Togo)
tg:
partition: 243
postcode: no
-# (United States Minor Outlying Islands)
-um:
- partition: 198
- languages: en
- names: !include country-names/um.yaml
- postcode:
- pattern: "96898"
-
-
# United States (United States)
us:
partition: 2
output: VG\1
-# (United States Virgin Islands)
-vi:
- partition: 199
- languages: en
- names: !include country-names/vi.yaml
-
-
# Vietnam (Việt Nam)
vn:
partition: 75
postcode: no
-# Wallis and Futuna Islands (Wallis-et-Futuna)
-wf:
- partition: 238
- languages: fr
- names: !include country-names/wf.yaml
-
-
# Samoa (Sāmoa)
ws:
partition: 131
postcode: no
-# Mayotte (Mayotte)
-yt:
- partition: 200
- languages: fr
- names: !include country-names/yt.yaml
-
-
# South Africa (South Africa)
za:
partition: 76
# Nominatim sets up read-only access for this user during installation.
NOMINATIM_DATABASE_WEBUSER="www-data"
-# Directory where to find the PostgreSQL server module.
-# When empty the module is expected to be located in the 'module' subdirectory
-# in the project directory.
-# Changing this value requires to run 'nominatim refresh --functions'.
-NOMINATIM_DATABASE_MODULE_PATH=
-
# Tokenizer used for normalizing and parsing queries and names.
# The tokenizer is set up during import and cannot be changed afterwards
# without a reimport.
# Currently available tokenizers: icu, legacy
NOMINATIM_TOKENIZER="icu"
-# Number of occurrences of a word before it is considered frequent.
-# Similar to the concept of stop words. Frequent partial words get ignored
-# or handled differently during search.
-# Changing this value requires a reimport.
-NOMINATIM_MAX_WORD_FREQUENCY=50000
-
# If true, admin level changes on places with many contained children are blocked.
NOMINATIM_LIMIT_REINDEXING=yes
# Currently only affects the initial import of country names and special phrases.
NOMINATIM_LANGUAGES=
-# Rules for normalizing terms for comparisons.
-# The default is to remove accents and punctuation and to lower-case the
-# term. Spaces are kept but collapsed to one standard space.
-# Changing this value requires a reimport.
-NOMINATIM_TERM_NORMALIZATION=":: NFD (); [[:Nonspacing Mark:] [:Cf:]] >; :: lower (); [[:Punctuation:][:Space:]]+ > ' '; :: NFC ();"
-
# Configuration file for the tokenizer.
# The content depends on the tokenizer used. If left empty the default settings
# for the chosen tokenizer will be used. The configuration can only be set
# EXPERT ONLY. You should usually use the supplied osm2pgsql.
NOMINATIM_OSM2PGSQL_BINARY=
-# Directory where to find US Tiger data files to import.
-# OBSOLETE: use `nominatim add-data --tiger-data <dir>` to explicitly state
-# the directory on import
-NOMINATIM_TIGER_DATA_PATH=
-
# Directory where to find pre-computed Wikipedia importance files.
# When unset, the data is expected to be located in the project directory.
NOMINATIM_WIKIPEDIA_DATA_PATH=
-# Configuration file for special phrase import.
-# OBSOLETE: use `nominatim special-phrases --config <file>` or simply put
-# a custom phrase-settings.json into your project directory.
-NOMINATIM_PHRASE_CONFIG=
-
# Configuration file for rank assignments.
NOMINATIM_ADDRESS_LEVEL_CONFIG=address-levels.json
# Tablespace for indexes used during address computation. Used for import and update only.
NOMINATIM_TABLESPACE_ADDRESS_INDEX=
-# Tablespace for tables for auxilary data, e.g. TIGER data, postcodes.
+# Tablespace for tables for auxiliary data, e.g. TIGER data, postcodes.
NOMINATIM_TABLESPACE_AUX_DATA=
-# Tablespace for indexes for auxilary data, e.g. TIGER data, postcodes.
+# Tablespace for indexes for auxiliary data, e.g. TIGER data, postcodes.
NOMINATIM_TABLESPACE_AUX_INDEX=
# When unset, the local language (i.e. the name tag without suffix) will be used.
NOMINATIM_DEFAULT_LANGUAGE=
-# Enable a special batch query mode.
-# This feature is currently undocumented and potentially broken.
-NOMINATIM_SEARCH_BATCH_MODE=no
-
-# Threshold for searches by name only.
-# Threshold where the lookup strategy in the database is switched. If there
-# are less occurences of a tem than given, the search does the lookup only
-# against the name, otherwise it uses indexes for name and address.
-NOMINATIM_SEARCH_NAME_ONLY_THRESHOLD=500
-
# Maximum number of OSM ids accepted by /lookup.
NOMINATIM_LOOKUP_MAX_COUNT=50
# Set to zero to disable polygon output.
NOMINATIM_POLYGON_OUTPUT_MAX_TYPES=1
+# Offer backwards compatible PHP URLs.
+# When running one of the Python engines, they will add endpoint aliases
+# under <endpoint>.php
+NOMINATIM_SERVE_LEGACY_URLS=yes
+
+# Maximum number of connections a single API object can use. (Python API only)
+# When running Nominatim as a server, then this is the maximum number
+# of connections _per worker_.
+NOMINATIM_API_POOL_SIZE=10
+
+# Timeout in seconds after which a single query to the database is cancelled.
+# The user receives a 503 response when a query times out.
+# When empty, then timeouts are disabled.
+NOMINATIM_QUERY_TIMEOUT=10
+
+# Maximum time a single request is allowed to take. When the timeout is
+# exceeded, the available results are returned.
+# When empty, then timeouts are disabled.
+NOMINATIM_REQUEST_TIMEOUT=60
+
+# Search elements just within countries
+# If, despite not finding a point within the static grid of countries, it
+# finds a geometry of a region, do not return the geometry. Return "Unable
+# to geocode" instead.
+NOMINATIM_SEARCH_WITHIN_COUNTRIES=False
+
### Log settings
#
# The following options allow to enable logging of API requests.
# Enable logging of requests into a file.
# To enable logging set this setting to the file to log to.
NOMINATIM_LOG_FILE=
+
+# Echo raw SQL from SQLAlchemy statements.
+# EXPERT: Works only in command line/library use.
+NOMINATIM_DEBUG_SQL=no
-- "[𞥐𐒠߀𖭐꤀𖩠𑓐𑑐𑋰𑄶꩐꘠᱀᭐᮰᠐០᥆༠໐꧰႐᪐᪀᧐𑵐꯰᱐𑱐𑜰𑛀𑙐𑇐꧐꣐෦𑁦0𝟶𝟘𝟬𝟎𝟢₀⓿⓪⁰] > 0"
-- "[𞥑𐒡߁𖭑꤁𖩡𑓑𑑑𑋱𑄷꩑꘡᱁᭑᮱᠑១᥇༡໑꧱႑᪑᪁᧑𑵑꯱᱑𑱑𑜱𑛁𑙑𑇑꧑꣑෧𑁧1𝟷𝟙𝟭𝟏𝟣₁¹①⑴⒈❶➀➊⓵] > 1"
-- "[𞥒𐒢߂𖭒꤂𖩢𑓒𑑒𑋲𑄸꩒꘢᱂᭒᮲᠒២᥈༢໒꧲႒᪒᪂᧒𑵒꯲᱒𑱒𑜲𑛂𑙒𑇒꧒꣒෨𑁨2𝟸𝟚𝟮𝟐𝟤₂²②⑵⒉❷➁➋⓶] > 2"
-- "[𞥓𐒣߃𖭓꤃𖩣𑓓𑑓𑋳𑄹꩓꘣᱃᭓᮳᠓៣᥉༣໓꧳႓᪓᪃᧓𑵓꯳᱓𑱓𑜳𑛃𑙓𑇓꧓꣓෩𑁩3𝟹𝟛𝟯𝟑𝟥₃³③⑶⒊❸➂➌⓷] > 3"
-- "[𞥔𐒤߄𖭔꤄𖩤𑓔𑑔𑋴𑄺꩔꘤᱄᭔᮴᠔៤᥊༤໔꧴႔᪔᪄᧔𑵔꯴᱔𑱔𑜴𑛄𑙔𑇔꧔꣔෪𑁪4𝟺𝟜𝟰𝟒𝟦₄⁴④⑷⒋❹➃➍⓸] > 4"
-- "[𞥕𐒥߅𖭕꤅𖩥𑓕𑑕𑋵𑄻꩕꘥᱅᭕᮵᠕៥᥋༥໕꧵႕᪕᪅᧕𑵕꯵᱕𑱕𑜵𑛅𑙕𑇕꧕꣕෫𑁫5𝟻𝟝𝟱𝟓𝟧₅⁵⑤⑸⒌❺➄➎⓹] > 5"
-- "[𞥖𐒦߆𖭖꤆𖩦𑓖𑑖𑋶𑄼꩖꘦᱆᭖᮶᠖៦᥌༦໖꧶႖᪖᪆᧖𑵖꯶᱖𑱖𑜶𑛆𑙖𑇖꧖꣖෬𑁬6𝟼𝟞𝟲𝟔𝟨₆⁶⑥⑹⒍❻➅➏⓺] > 6"
-- "[𞥗𐒧߇𖭗꤇𖩧𑓗𑑗𑋷𑄽꩗꘧᱇᭗᮷᠗៧᥍༧໗꧷႗᪗᪇᧗𑵗꯷᱗𑱗𑜷𑛇𑙗𑇗꧗꣗෭𑁭7𝟽𝟟𝟳𝟕𝟩₇⁷⑦⑺⒎❼➆➐⓻] > 7"
-- "[𞥘𐒨߈𖭘꤈𖩨𑓘𑑘𑋸𑄾꩘꘨᱈᭘᮸᠘៨᥎༨໘꧸႘᪘᪈᧘𑵘꯸᱘𑱘𑜸𑛈𑙘𑇘꧘꣘෮𑁮8𝟾𝟠𝟴𝟖𝟪₈⁸⑧⑻⒏❽➇➑⓼] > 8"
-- "[𞥙𐒩߉𖭙꤉𖩩𑓙𑑙𑋹𑄿꩙꘩᱉᭙᮹᠙៩᥏༩໙꧹႙᪙᪉᧙𑵙꯹᱙𑱙𑜹𑛉𑙙𑇙꧙꣙෯𑁯9𝟿𝟡𝟵𝟗𝟫₉⁹⑨⑼⒐❾➈➒⓽] > 9"
-- "[𑜺⑩⑽⒑❿➉➓⓾] > '10'"
+- "[𞥐𐒠߀𖭐꤀𖩠𑓐𑑐𑋰𑄶꩐꘠᱀᭐᮰᠐០᥆༠໐꧰႐᪐᪀᧐𑵐꯰᱐𑱐𑜰𑛀𑙐𑇐꧐꣐෦𑁦0𝟶𝟘𝟬𝟎𝟢₀⓿⓪⁰零] > 0"
+- "[𞥑𐒡߁𖭑꤁𖩡𑓑𑑑𑋱𑄷꩑꘡᱁᭑᮱᠑១᥇༡໑꧱႑᪑᪁᧑𑵑꯱᱑𑱑𑜱𑛁𑙑𑇑꧑꣑෧𑁧1𝟷𝟙𝟭𝟏𝟣₁¹①⑴⒈❶➀➊⓵一] > 1"
+- "[𞥒𐒢߂𖭒꤂𖩢𑓒𑑒𑋲𑄸꩒꘢᱂᭒᮲᠒២᥈༢໒꧲႒᪒᪂᧒𑵒꯲᱒𑱒𑜲𑛂𑙒𑇒꧒꣒෨𑁨2𝟸𝟚𝟮𝟐𝟤₂²②⑵⒉❷➁➋⓶二] > 2"
+- "[𞥓𐒣߃𖭓꤃𖩣𑓓𑑓𑋳𑄹꩓꘣᱃᭓᮳᠓៣᥉༣໓꧳႓᪓᪃᧓𑵓꯳᱓𑱓𑜳𑛃𑙓𑇓꧓꣓෩𑁩3𝟹𝟛𝟯𝟑𝟥₃³③⑶⒊❸➂➌⓷三] > 3"
+- "[𞥔𐒤߄𖭔꤄𖩤𑓔𑑔𑋴𑄺꩔꘤᱄᭔᮴᠔៤᥊༤໔꧴႔᪔᪄᧔𑵔꯴᱔𑱔𑜴𑛄𑙔𑇔꧔꣔෪𑁪4𝟺𝟜𝟰𝟒𝟦₄⁴④⑷⒋❹➃➍⓸四] > 4"
+- "[𞥕𐒥߅𖭕꤅𖩥𑓕𑑕𑋵𑄻꩕꘥᱅᭕᮵᠕៥᥋༥໕꧵႕᪕᪅᧕𑵕꯵᱕𑱕𑜵𑛅𑙕𑇕꧕꣕෫𑁫5𝟻𝟝𝟱𝟓𝟧₅⁵⑤⑸⒌❺➄➎⓹五] > 5"
+- "[𞥖𐒦߆𖭖꤆𖩦𑓖𑑖𑋶𑄼꩖꘦᱆᭖᮶᠖៦᥌༦໖꧶႖᪖᪆᧖𑵖꯶᱖𑱖𑜶𑛆𑙖𑇖꧖꣖෬𑁬6𝟼𝟞𝟲𝟔𝟨₆⁶⑥⑹⒍❻➅➏⓺六] > 6"
+- "[𞥗𐒧߇𖭗꤇𖩧𑓗𑑗𑋷𑄽꩗꘧᱇᭗᮷᠗៧᥍༧໗꧷႗᪗᪇᧗𑵗꯷᱗𑱗𑜷𑛇𑙗𑇗꧗꣗෭𑁭7𝟽𝟟𝟳𝟕𝟩₇⁷⑦⑺⒎❼➆➐⓻七] > 7"
+- "[𞥘𐒨߈𖭘꤈𖩨𑓘𑑘𑋸𑄾꩘꘨᱈᭘᮸᠘៨᥎༨໘꧸႘᪘᪈᧘𑵘꯸᱘𑱘𑜸𑛈𑙘𑇘꧘꣘෮𑁮8𝟾𝟠𝟴𝟖𝟪₈⁸⑧⑻⒏❽➇➑⓼八] > 8"
+- "[𞥙𐒩߉𖭙꤉𖩩𑓙𑑙𑋹𑄿꩙꘩᱉᭙᮹᠙៩᥏༩໙꧹႙᪙᪉᧙𑵙꯹᱙𑱙𑜹𑛉𑙙𑇙꧙꣙෯𑁯9𝟿𝟡𝟵𝟗𝟫₉⁹⑨⑼⒐❾➈➒⓽九] > 9"
+- "[𑜺⑩⑽⒑❿➉➓⓾十] > '10'"
- "[⑪⑾⒒⓫] > '11'"
- "[⑫⑿⒓⓬] > '12'"
- "[⑬⒀⒔⓭] > '13'"
+query-preprocessing:
+ - step: split_japanese_phrases
+ - step: normalize
normalization:
- ":: lower ()"
- ":: Hans-Hant"
- "'nº' > 'no'"
- "ª > a"
- "º > o"
- - "[[:Punctuation:][:Symbol:]\u02bc] > ' '"
+ - "[[:Punctuation:][:Symbol:][\u02bc] - [-:]]+ > '-'"
- "ß > 'ss'" # German szet is unambiguously equal to double ss
- - "[^[:alnum:] [:Canonical_Combining_Class=Virama:] [:Space:]] >"
+ - "[^[:alnum:] [:Canonical_Combining_Class=Virama:] [:Space:] [-:]] >"
- "[:Lm:] >"
- ":: [[:Number:]] Latin ()"
- ":: [[:Number:]] Ascii ();"
- ":: [[:Number:]] NFD ();"
- "[[:Nonspacing Mark:] [:Cf:]] >;"
- - "[:Space:]+ > ' '"
+ - "[-:]?[:Space:]+[-:]? > ' '"
transliteration:
+ - "[-:] > ' '"
- ":: Latin ()"
- !include icu-rules/extended-unicode-to-asccii.yaml
- ":: Ascii ()"
- ":: lower ()"
- "[^a-z0-9[:Space:]] >"
- ":: NFC ()"
+ - "[:Space:]+ > ' '"
sanitizers:
- step: clean-housenumbers
filter-kind:
- step: clean-postcodes
convert-to-address: yes
default-pattern: "[A-Z0-9- ]{3,12}"
+ - step: clean-tiger-tags
- step: split-name-list
+ delimiters: ;
- step: strip-brace-terms
- step: tag-analyzer-by-language
filter-kind: [".*name.*"]
whitelist: [bg,ca,cs,da,de,el,en,es,et,eu,fi,fr,gl,hu,it,ja,mg,ms,nl,no,pl,pt,ro,ru,sk,sl,sv,tr,uk,vi]
use-defaults: all
mode: append
+ - step: tag-japanese
token-analysis:
- analyzer: generic
- id: "@housenumber"
+++ /dev/null
-[
-{
- "keys" : [ "" ],
- "values" : {
- "no" : "skip"
- }
-},
-{ "keys" : ["wikipedia", "wikipedia:*", "wikidata", "area"],
- "values" : {
- "" : "extra"
- }
-},
-{
- "keys" : ["*:prefix", "*:suffix", "name:prefix:*", "name:suffix:*",
- "name:etymology", "name:signed", "name:botanical", "*:wikidata",
- "addr:street:name", "addr:street:type"],
- "values" : {
- "" : "skip"
- }
-},
-{
- "keys" : ["ref", "int_ref", "nat_ref", "reg_ref", "loc_ref", "old_ref",
- "iata", "icao", "pcode", "ISO3166-2"],
- "values" : {
- "" : "ref"
- }
-},
-{
- "keys" : ["name", "name:*", "int_name", "int_name:*", "nat_name", "nat_name:*",
- "reg_name", "reg_name:*", "loc_name", "loc_name:*",
- "old_name", "old_name:*", "alt_name", "alt_name:*", "alt_name_*",
- "official_name", "official_name:*", "place_name", "place_name:*",
- "short_name", "short_name:*", "brand"],
- "values" : {
- "" : "name"
- }
-},
-{
- "keys" : ["landuse"],
- "values" : {
- "cemetry" : "skip",
- "" : "fallback,with_name"
- }
-},
-{
- "keys" : ["boundary"],
- "values" : {
- "administrative" : "main",
- "postal_code" : "main"
- }
-},
-{
- "keys" : ["place"],
- "values" : {
- "" : "main"
- }
-},
-{
- "keys" : ["addr:housename"],
- "values" : {
- "" : "name,house"
- }
-},
-{
- "keys" : ["addr:housenumber", "addr:conscriptionnumber", "addr:streetnumber"],
- "values" : {
- "" : "address,house"
- }
-},
-{
- "keys" : ["addr:interpolation"],
- "values" : {
- "" : "interpolation,address"
- }
-},
-{
- "keys" : ["postal_code", "postcode", "addr:postcode",
- "tiger:zip_left", "tiger:zip_right"],
- "values" : {
- "" : "postcode,fallback"
- }
-},
-{
- "keys" : ["country_code", "ISO3166-1", "is_in:country_code", "is_in:country",
- "addr:country", "addr:country_code"],
- "values" : {
- "" : "country"
- }
-},
-{
- "keys" : ["addr:*", "is_in:*", "tiger:county"],
- "values" : {
- "" : "address"
- }
-},
-{
- "keys" : ["highway"],
- "values" : {
- "motorway" : "main",
- "trunk" : "main",
- "primary" : "main",
- "secondary" : "main",
- "tertiary" : "main",
- "unclassified" : "main",
- "residential" : "main",
- "living_street" : "main",
- "pedestrian" : "main",
- "road" : "main",
- "service" : "main,with_name",
- "cycleway" : "main,with_name",
- "path" : "main,with_name",
- "footway" : "main,with_name",
- "steps" : "main,with_name",
- "bridleway" : "main,with_name",
- "track" : "main,with_name",
- "byway": "main,with_name",
- "motorway_link" : "main,with_name",
- "trunk_link" : "main,with_name",
- "primary_link" : "main,with_name",
- "secondary_link" : "main,with_name",
- "tertiary_link" : "main,with_name"
- }
-}
-]
+++ /dev/null
-[
-{ "keys" : ["wikipedia", "wikipedia:*", "wikidata"],
- "values" : {
- "" : "extra"
- }
-},
-{
- "keys" : ["*:prefix", "*:suffix", "name:prefix:*", "name:suffix:*",
- "name:etymology", "name:signed", "name:botanical", "*:wikidata",
- "addr:street:name", "addr:street:type"],
- "values" : {
- "" : "skip"
- }
-},
-{
- "keys" : ["ref", "int_ref", "nat_ref", "reg_ref", "loc_ref", "old_ref",
- "iata", "icao", "pcode", "ISO3166-2"],
- "values" : {
- "" : "ref"
- }
-},
-{
- "keys" : ["name", "name:*", "int_name", "int_name:*", "nat_name", "nat_name:*",
- "reg_name", "reg_name:*", "loc_name", "loc_name:*",
- "old_name", "old_name:*", "alt_name", "alt_name:*", "alt_name_*",
- "official_name", "official_name:*", "place_name", "place_name:*",
- "short_name", "short_name:*", "brand"],
- "values" : {
- "" : "name"
- }
-},
-{
- "keys" : ["landuse"],
- "values" : {
- "cemetry" : "skip",
- "" : "fallback,with_name"
- }
-},
-{
- "keys" : ["boundary"],
- "values" : {
- "administrative" : "main"
- }
-},
-{
- "keys" : ["place"],
- "values" : {
- "" : "main"
- }
-},
-{
- "keys" : ["country_code", "ISO3166-1", "is_in:country_code", "is_in:country",
- "addr:country", "addr:country_code"],
- "values" : {
- "" : "country"
- }
-},
-{
- "keys" : ["addr:*", "is_in:*", "tiger:county"],
- "values" : {
- "" : "address"
- }
-},
-{
- "keys" : ["postal_code", "postcode", "addr:postcode",
- "tiger:zip_left", "tiger:zip_right"],
- "values" : {
- "" : "postcode"
- }
-},
-{
- "keys" : ["capital"],
- "values" : {
- "" : "extra"
- }
-}
-]
+++ /dev/null
-[
-{
- "keys" : ["*source"],
- "values" : {
- "" : "skip"
- }
-},
-{
- "keys" : ["*:prefix", "*:suffix", "name:prefix:*", "name:suffix:*",
- "name:etymology", "name:signed", "name:botanical", "wikidata", "*:wikidata",
- "addr:street:name", "addr:street:type"],
- "values" : {
- "" : "extra"
- }
-},
-{
- "keys" : ["ref", "int_ref", "nat_ref", "reg_ref", "loc_ref", "old_ref",
- "iata", "icao", "pcode", "pcode:*", "ISO3166-2"],
- "values" : {
- "" : "ref"
- }
-},
-{
- "keys" : ["name", "name:*", "int_name", "int_name:*", "nat_name", "nat_name:*",
- "reg_name", "reg_name:*", "loc_name", "loc_name:*",
- "old_name", "old_name:*", "alt_name", "alt_name:*", "alt_name_*",
- "official_name", "official_name:*", "place_name", "place_name:*",
- "short_name", "short_name:*", "brand"],
- "values" : {
- "" : "name"
- }
-},
-{
- "keys" : ["addr:housename"],
- "values" : {
- "" : "name,house"
- }
-},
-{
- "keys" : ["emergency"],
- "values" : {
- "fire_hydrant" : "skip",
- "yes" : "skip",
- "no" : "skip",
- "" : "main"
- }
-},
-{
- "keys" : ["historic", "military"],
- "values" : {
- "no" : "skip",
- "yes" : "skip",
- "" : "main"
- }
-},
-{
- "keys" : ["natural"],
- "values" : {
- "yes" : "skip",
- "no" : "skip",
- "coastline" : "skip",
- "" : "main,with_name"
- }
-},
-{
- "keys" : ["landuse"],
- "values" : {
- "cemetry" : "main,with_name",
- "" : "main,fallback,with_name"
- }
-},
-{
- "keys" : ["highway"],
- "values" : {
- "no" : "skip",
- "turning_circle" : "skip",
- "mini_roundabout" : "skip",
- "noexit" : "skip",
- "crossing" : "skip",
- "give_way" : "skip",
- "stop" : "skip",
- "street_lamp" : "main,with_name",
- "traffic_signals" : "main,with_name",
- "service" : "main,with_name",
- "cycleway" : "main,with_name",
- "path" : "main,with_name",
- "footway" : "main,with_name",
- "steps" : "main,with_name",
- "bridleway" : "main,with_name",
- "track" : "main,with_name",
- "byway": "main,with_name",
- "motorway_link" : "main,with_name",
- "trunk_link" : "main,with_name",
- "primary_link" : "main,with_name",
- "secondary_link" : "main,with_name",
- "tertiary_link" : "main,with_name",
- "" : "main"
- }
-},
-{
- "keys" : ["railway"],
- "values" : {
- "level_crossing" : "skip",
- "no" : "skip",
- "rail" : "extra",
- "" : "main,with_name"
- }
-},
-{
- "keys" : ["man_made"],
- "values" : {
- "survey_point" : "skip",
- "cutline" : "skip",
- "" : "main"
- }
-},
-{
- "keys" : ["aerialway"],
- "values" : {
- "pylon" : "skip",
- "no" : "skip",
- "" : "main"
- }
-},
-{
- "keys" : ["boundary"],
- "values" : {
- "place" : "skip",
- "postal_code" : "main",
- "" : "main,with_name"
- }
-},
-{
- "keys" : ["aeroway", "amenity", "club", "craft", "leisure",
- "office", "mountain_pass"],
- "values" : {
- "no" : "skip",
- "" : "main"
- }
-},
-{
- "keys" : ["shop"],
- "values" : {
- "no" : "skip",
- "" : "main"
- }
-},
-{
- "keys" : ["tourism"],
- "values" : {
- "yes" : "skip",
- "no" : "skip",
- "" : "main"
- }
-},
-{
- "keys" : ["bridge", "tunnel"],
- "values" : {
- "" : "main,with_name_key"
- }
-},
-{
- "keys" : ["waterway"],
- "values" : {
- "riverbank" : "skip",
- "" : "main,with_name"
- }
-},
-{
- "keys" : ["place"],
- "values" : {
- "" : "main"
- }
-},
-{
- "keys" : ["junction", "healthcare"],
- "values" : {
- "" : "main,fallback,with_name"
- }
-},
-{
- "keys" : ["postal_code", "postcode", "addr:postcode",
- "tiger:zip_left", "tiger:zip_right"],
- "values" : {
- "" : "postcode,fallback"
- }
-},
-{
- "keys" : ["country_code", "ISO3166-1", "is_in:country_code", "is_in:country",
- "addr:country", "addr:country_code"],
- "values" : {
- "" : "country"
- }
-},
-{
- "keys" : ["addr:housenumber", "addr:conscriptionnumber", "addr:streetnumber"],
- "values" : {
- "" : "address,house"
- }
-},
-{
- "keys" : ["addr:interpolation"],
- "values" : {
- "" : "interpolation,address"
- }
-},
-{
- "keys" : ["addr:*", "is_in:*", "tiger:county"],
- "values" : {
- "" : "address"
- }
-},
-{
- "keys" : ["building"],
- "values" : {
- "no" : "skip",
- "" : "main,fallback,with_name"
- }
-},
-{
- "keys" : ["note", "note:*", "source", "source*", "attribution",
- "comment", "fixme", "FIXME", "created_by", "tiger:*", "NHD:*",
- "nhd:*", "gnis:*", "geobase:*", "KSJ2:*", "yh:*",
- "osak:*", "naptan:*", "CLC:*", "import", "it:fvg:*",
- "type", "lacounty:*", "ref:ruian:*", "building:ruian:type",
- "ref:linz:*"],
- "values" : {
- "" : "skip"
- }
-},
-{
- "keys" : [""],
- "values" : {
- "" : "extra"
- }
-}
-]
+++ /dev/null
-[
-{
- "keys" : ["*source"],
- "values" : {
- "" : "skip"
- }
-},
-{
- "keys" : ["*:prefix", "*:suffix", "name:prefix:*", "name:suffix:*",
- "name:etymology", "name:signed", "name:botanical", "wikidata", "*:wikidata",
- "addr:street:name", "addr:street:type"],
- "values" : {
- "" : "extra"
- }
-},
-{
- "keys" : ["ref", "int_ref", "nat_ref", "reg_ref", "loc_ref", "old_ref",
- "iata", "icao", "pcode", "pcode:*", "ISO3166-2"],
- "values" : {
- "" : "ref"
- }
-},
-{
- "keys" : ["name", "name:*", "int_name", "int_name:*", "nat_name", "nat_name:*",
- "reg_name", "reg_name:*", "loc_name", "loc_name:*",
- "old_name", "old_name:*", "alt_name", "alt_name:*", "alt_name_*",
- "official_name", "official_name:*", "place_name", "place_name:*",
- "short_name", "short_name:*", "brand"],
- "values" : {
- "" : "name"
- }
-},
-{
- "keys" : ["addr:housename"],
- "values" : {
- "" : "name,house"
- }
-},
-{
- "keys" : ["emergency"],
- "values" : {
- "fire_hydrant" : "skip",
- "yes" : "skip",
- "no" : "skip",
- "" : "main"
- }
-},
-{
- "keys" : ["historic", "military"],
- "values" : {
- "no" : "skip",
- "yes" : "skip",
- "" : "main"
- }
-},
-{
- "keys" : ["natural"],
- "values" : {
- "yes" : "skip",
- "no" : "skip",
- "coastline" : "skip",
- "" : "main,with_name"
- }
-},
-{
- "keys" : ["landuse"],
- "values" : {
- "cemetry" : "main,with_name",
- "" : "main,fallback,with_name"
- }
-},
-{
- "keys" : ["highway"],
- "values" : {
- "no" : "skip",
- "turning_circle" : "skip",
- "mini_roundabout" : "skip",
- "noexit" : "skip",
- "crossing" : "skip",
- "give_way" : "skip",
- "stop" : "skip",
- "street_lamp" : "main,with_name",
- "traffic_signals" : "main,with_name",
- "service" : "main,with_name",
- "cycleway" : "main,with_name",
- "path" : "main,with_name",
- "footway" : "main,with_name",
- "steps" : "main,with_name",
- "bridleway" : "main,with_name",
- "track" : "main,with_name",
- "byway": "main,with_name",
- "motorway_link" : "main,with_name",
- "trunk_link" : "main,with_name",
- "primary_link" : "main,with_name",
- "secondary_link" : "main,with_name",
- "tertiary_link" : "main,with_name",
- "" : "main"
- }
-},
-{
- "keys" : ["railway"],
- "values" : {
- "level_crossing" : "skip",
- "no" : "skip",
- "rail" : "skip",
- "" : "main,with_name"
- }
-},
-{
- "keys" : ["man_made"],
- "values" : {
- "survey_point" : "skip",
- "cutline" : "skip",
- "" : "main"
- }
-},
-{
- "keys" : ["aerialway"],
- "values" : {
- "pylon" : "skip",
- "no" : "skip",
- "" : "main"
- }
-},
-{
- "keys" : ["boundary"],
- "values" : {
- "place" : "skip",
- "postal_code" : "main",
- "" : "main,with_name"
- }
-},
-{
- "keys" : ["aeroway", "amenity", "club", "craft", "leisure",
- "office", "mountain_pass"],
- "values" : {
- "no" : "skip",
- "" : "main"
- }
-},
-{
- "keys" : ["shop"],
- "values" : {
- "no" : "skip",
- "" : "main"
- }
-},
-{
- "keys" : ["tourism"],
- "values" : {
- "yes" : "skip",
- "no" : "skip",
- "" : "main"
- }
-},
-{
- "keys" : ["bridge", "tunnel"],
- "values" : {
- "" : "main,with_name_key"
- }
-},
-{
- "keys" : ["waterway"],
- "values" : {
- "riverbank" : "skip",
- "" : "main,with_name"
- }
-},
-{
- "keys" : ["place"],
- "values" : {
- "" : "main"
- }
-},
-{
- "keys" : ["junction", "healthcare"],
- "values" : {
- "" : "main,fallback,with_name"
- }
-},
-{
- "keys" : ["postal_code", "postcode", "addr:postcode",
- "tiger:zip_left", "tiger:zip_right"],
- "values" : {
- "" : "postcode,fallback"
- }
-},
-{
- "keys" : ["country_code", "ISO3166-1", "is_in:country_code", "is_in:country",
- "addr:country", "addr:country_code"],
- "values" : {
- "" : "country"
- }
-},
-{
- "keys" : ["addr:housenumber", "addr:conscriptionnumber", "addr:streetnumber"],
- "values" : {
- "" : "address,house"
- }
-},
-{
- "keys" : ["addr:interpolation"],
- "values" : {
- "" : "interpolation,address"
- }
-},
-{
- "keys" : ["addr:*", "is_in:*", "tiger:county"],
- "values" : {
- "" : "address"
- }
-},
-{
- "keys" : ["building"],
- "values" : {
- "no" : "skip",
- "" : "main,fallback,with_name"
- }
-},
-{
- "keys" : ["tracktype", "traffic_calming", "service", "cuisine", "capital",
- "dispensing", "religion", "denomination", "sport",
- "internet_access", "lanes", "surface", "smoothness", "width",
- "est_width", "incline", "opening_hours", "collection_times",
- "service_times", "disused", "wheelchair", "sac_scale",
- "trail_visibility", "mtb:scale", "mtb:description", "wood",
- "drive_through", "drive_in", "access", "vehicle", "bicyle",
- "foot", "goods", "hgv", "motor_vehicle", "motor_car", "oneway",
- "date_on", "date_off", "day_on", "day_off", "hour_on", "hour_off",
- "maxweight", "maxheight", "maxspeed", "fee", "toll", "charge",
- "population", "description", "image", "attribution", "fax",
- "email", "url", "website", "phone", "real_ale", "smoking",
- "food", "camera", "brewery", "locality", "wikipedia",
- "wikipedia:*", "access:*", "contact:*", "drink:*", "toll:*",
- "area"],
- "values" : {
- "" : "extra"
- }
-}
-]
+++ /dev/null
-[
-{ "keys" : ["wikipedia", "wikipedia:*", "wikidata", "area"],
- "values" : {
- "" : "extra"
- }
-},
-{
- "keys" : ["*:prefix", "*:suffix", "name:prefix:*", "name:suffix:*",
- "name:etymology", "name:signed", "name:botanical", "*:wikidata",
- "addr:street:name", "addr:street:type"],
- "values" : {
- "" : "skip"
- }
-},
-{
- "keys" : ["ref", "int_ref", "nat_ref", "reg_ref", "loc_ref", "old_ref",
- "iata", "icao", "pcode", "ISO3166-2"],
- "values" : {
- "" : "ref"
- }
-},
-{
- "keys" : ["name", "name:*", "int_name", "int_name:*", "nat_name", "nat_name:*",
- "reg_name", "reg_name:*", "loc_name", "loc_name:*",
- "old_name", "old_name:*", "alt_name", "alt_name:*", "alt_name_*",
- "official_name", "official_name:*", "place_name", "place_name:*",
- "short_name", "short_name:*", "brand"],
- "values" : {
- "" : "name"
- }
-},
-{
- "keys" : ["landuse"],
- "values" : {
- "cemetry" : "skip",
- "" : "fallback,with_name"
- }
-},
-{
- "keys" : ["boundary"],
- "values" : {
- "administrative" : "main"
- }
-},
-{
- "keys" : ["place"],
- "values" : {
- "" : "main"
- }
-},
-{
- "keys" : ["country_code", "ISO3166-1", "is_in:country_code", "is_in:country",
- "addr:country", "addr:country_code"],
- "values" : {
- "" : "country"
- }
-},
-{
- "keys" : ["addr:*", "is_in:*", "tiger:county"],
- "values" : {
- "" : "address"
- }
-},
-{
- "keys" : ["highway"],
- "values" : {
- "motorway" : "main",
- "trunk" : "main",
- "primary" : "main",
- "secondary" : "main",
- "tertiary" : "main",
- "unclassified" : "main",
- "residential" : "main",
- "living_street" : "main",
- "pedestrian" : "main",
- "road" : "main",
- "service" : "main,with_name",
- "cycleway" : "main,with_name",
- "path" : "main,with_name",
- "footway" : "main,with_name",
- "steps" : "main,with_name",
- "bridleway" : "main,with_name",
- "track" : "main,with_name",
- "byway": "main,with_name",
- "motorway_link" : "main,with_name",
- "trunk_link" : "main,with_name",
- "primary_link" : "main,with_name",
- "secondary_link" : "main,with_name",
- "tertiary_link" : "main,with_name"
- }
-}
-]
--- /dev/null
+# SPDX-License-Identifier: GPL-3.0-or-later
+#
+# This file is part of Nominatim. (https://nominatim.org)
+#
+# Copyright (C) 2024 by the Nominatim developer community.
+# For a full list of authors see the git log.
+"""
+The public interface of the Nominatim library.
+
+Classes and functions defined in this file are considered stable. Always
+import from this file, not from the source files directly.
+"""
+
+from .errors import (UsageError as UsageError)
+from .config import (Configuration as Configuration)
+
+from .core import (NominatimAPI as NominatimAPI,
+ NominatimAPIAsync as NominatimAPIAsync)
+from .connection import (SearchConnection as SearchConnection)
+from .status import (StatusResult as StatusResult)
+from .types import (PlaceID as PlaceID,
+ OsmID as OsmID,
+ PlaceRef as PlaceRef,
+ Point as Point,
+ Bbox as Bbox,
+ GeometryFormat as GeometryFormat,
+ DataLayer as DataLayer)
+from .results import (SourceTable as SourceTable,
+ AddressLine as AddressLine,
+ AddressLines as AddressLines,
+ WordInfo as WordInfo,
+ WordInfos as WordInfos,
+ DetailedResult as DetailedResult,
+ ReverseResult as ReverseResult,
+ ReverseResults as ReverseResults,
+ SearchResult as SearchResult,
+ SearchResults as SearchResults)
+from .localization import (Locales as Locales)
+from .result_formatting import (FormatDispatcher as FormatDispatcher,
+ load_format_dispatcher as load_format_dispatcher)
+
+from .version import NOMINATIM_API_VERSION as __version__
--- /dev/null
+# SPDX-License-Identifier: GPL-3.0-or-later
+#
+# This file is part of Nominatim. (https://nominatim.org)
+#
+# Copyright (C) 2024 by the Nominatim developer community.
+# For a full list of authors see the git log.
+
+# This file is just a placeholder to make the config module available
+# during development. It will be replaced by nominatim_db/config.py on
+# installation.
+# flake8: noqa
+from nominatim_db.config import *
--- /dev/null
+# SPDX-License-Identifier: GPL-3.0-or-later
+#
+# This file is part of Nominatim. (https://nominatim.org)
+#
+# Copyright (C) 2024 by the Nominatim developer community.
+# For a full list of authors see the git log.
+"""
+Extended SQLAlchemy connection class that also includes access to the schema.
+"""
+from typing import cast, Any, Mapping, Sequence, Union, Dict, Optional, Set, \
+ Awaitable, Callable, TypeVar
+import asyncio
+
+import sqlalchemy as sa
+from sqlalchemy.ext.asyncio import AsyncConnection
+
+from .typing import SaFromClause
+from .sql.sqlalchemy_schema import SearchTables
+from .sql.sqlalchemy_types import Geometry
+from .logging import log
+from .config import Configuration
+
+T = TypeVar('T')
+
+
+class SearchConnection:
+ """ An extended SQLAlchemy connection class, that also contains
+ the table definitions. The underlying asynchronous SQLAlchemy
+ connection can be accessed with the 'connection' property.
+ The 't' property is the collection of Nominatim tables.
+ """
+
+ def __init__(self, conn: AsyncConnection,
+ tables: SearchTables,
+ properties: Dict[str, Any],
+ config: Configuration) -> None:
+ self.connection = conn
+ self.t = tables
+ self.config = config
+ self._property_cache = properties
+ self._classtables: Optional[Set[str]] = None
+ self.query_timeout: Optional[int] = None
+
+ def set_query_timeout(self, timeout: Optional[int]) -> None:
+ """ Set the timeout after which a query over this connection
+ is cancelled.
+ """
+ self.query_timeout = timeout
+
+ async def scalar(self, sql: sa.sql.base.Executable,
+ params: Union[Mapping[str, Any], None] = None) -> Any:
+ """ Execute a 'scalar()' query on the connection.
+ """
+ log().sql(self.connection, sql, params)
+ return await asyncio.wait_for(self.connection.scalar(sql, params), self.query_timeout)
+
+ async def execute(self, sql: 'sa.Executable',
+ params: Union[Mapping[str, Any], Sequence[Mapping[str, Any]], None] = None
+ ) -> 'sa.Result[Any]':
+ """ Execute a 'execute()' query on the connection.
+ """
+ log().sql(self.connection, sql, params)
+ return await asyncio.wait_for(self.connection.execute(sql, params), self.query_timeout)
+
+ async def get_property(self, name: str, cached: bool = True) -> str:
+ """ Get a property from Nominatim's property table.
+
+ Property values are normally cached so that they are only
+ retrieved from the database when they are queried for the
+ first time with this function. Set 'cached' to False to force
+ reading the property from the database.
+
+ Raises a ValueError if the property does not exist.
+ """
+ lookup_name = f'DBPROP:{name}'
+
+ if cached and lookup_name in self._property_cache:
+ return cast(str, self._property_cache[lookup_name])
+
+ sql = sa.select(self.t.properties.c.value)\
+ .where(self.t.properties.c.property == name)
+ value = await self.connection.scalar(sql)
+
+ if value is None:
+ raise ValueError(f"Property '{name}' not found in database.")
+
+ self._property_cache[lookup_name] = cast(str, value)
+
+ return cast(str, value)
+
+ async def get_db_property(self, name: str) -> Any:
+ """ Get a setting from the database. At the moment, only
+ 'server_version', the version of the database software, can
+ be retrieved with this function.
+
+ Raises a ValueError if the property does not exist.
+ """
+ if name != 'server_version':
+ raise ValueError(f"DB setting '{name}' not found in database.")
+
+ return self._property_cache['DB:server_version']
+
+ async def get_cached_value(self, group: str, name: str,
+ factory: Callable[[], Awaitable[T]]) -> T:
+ """ Access the cache for this Nominatim instance.
+ Each cache value needs to belong to a group and have a name.
+ This function is for internal API use only.
+
+ `factory` is an async callback function that produces
+ the value if it is not already cached.
+
+ Returns the cached value or the result of factory (also caching
+ the result).
+ """
+ full_name = f'{group}:{name}'
+
+ if full_name in self._property_cache:
+ return cast(T, self._property_cache[full_name])
+
+ value = await factory()
+ self._property_cache[full_name] = value
+
+ return value
+
+ async def get_class_table(self, cls: str, typ: str) -> Optional[SaFromClause]:
+ """ Look up if there is a classtype table for the given category
+ and return a SQLAlchemy table for it, if it exists.
+ """
+ if self._classtables is None:
+ res = await self.execute(sa.text("""SELECT tablename FROM pg_tables
+ WHERE tablename LIKE 'place_classtype_%'
+ """))
+ self._classtables = {r[0] for r in res}
+
+ tablename = f"place_classtype_{cls}_{typ}"
+
+ if tablename not in self._classtables:
+ return None
+
+ if tablename in self.t.meta.tables:
+ return self.t.meta.tables[tablename]
+
+ return sa.Table(tablename, self.t.meta,
+ sa.Column('place_id', sa.BigInteger),
+ sa.Column('centroid', Geometry))
--- /dev/null
+# SPDX-License-Identifier: GPL-3.0-or-later
+#
+# This file is part of Nominatim. (https://nominatim.org)
+#
+# Copyright (C) 2024 by the Nominatim developer community.
+# For a full list of authors see the git log.
+"""
+Implementation of classes for API access via libraries.
+"""
+from typing import Mapping, Optional, Any, AsyncIterator, Dict, Sequence, List, \
+ Union, Tuple, cast
+import asyncio
+import sys
+import contextlib
+from pathlib import Path
+
+import sqlalchemy as sa
+import sqlalchemy.ext.asyncio as sa_asyncio
+
+from .errors import UsageError
+from .sql.sqlalchemy_schema import SearchTables
+from .sql.async_core_library import PGCORE_LIB, PGCORE_ERROR
+from .config import Configuration
+from .sql import sqlite_functions, sqlalchemy_functions # noqa
+from .connection import SearchConnection
+from .status import get_status, StatusResult
+from .lookup import get_places, get_detailed_place
+from .reverse import ReverseGeocoder
+from .search import ForwardGeocoder, Phrase, PhraseType, make_query_analyzer
+from . import types as ntyp
+from .results import DetailedResult, ReverseResult, SearchResults
+
+
+class NominatimAPIAsync:
+    """ The main frontend to the Nominatim database implements the
+        functions for lookup, forward and reverse geocoding using
+        asynchronous functions.
+
+        This class shares most of the functions with its synchronous
+        version. There are some additional functions or parameters,
+        which are documented below.
+
+        This class should usually be used as a context manager in
+        'async with' context.
+    """
+    def __init__(self, project_dir: Optional[Union[str, Path]] = None,
+                 environ: Optional[Mapping[str, str]] = None,
+                 loop: Optional[asyncio.AbstractEventLoop] = None) -> None:
+        """ Initiate a new frontend object with asynchronous API functions.
+
+            Parameters:
+              project_dir: Path to the
+                  [project directory](../admin/Import.md#creating-the-project-directory)
+                  of the local Nominatim installation.
+              environ: Mapping of [configuration parameters](../customize/Settings.md).
+                  When set, replaces any configuration via environment variables.
+                  Settings in this mapping also have precedence over any
+                  parameters found in the `.env` file of the project directory.
+              loop: The asyncio event loop that will be used when calling
+                  functions. Only needed, when a custom event loop is used
+                  and the Python version is 3.9 or earlier.
+        """
+        self.config = Configuration(project_dir, environ)
+        # Timeout applied to database queries; None when not configured.
+        self.query_timeout = self.config.get_int('QUERY_TIMEOUT') \
+            if self.config.QUERY_TIMEOUT else None
+        self.reverse_restrict_to_country_area = self.config.get_bool('SEARCH_WITHIN_COUNTRIES')
+        # NOTE(review): stays at 0 here; the detected version is stored in
+        # _property_cache['DB:server_version'] by setup_database().
+        self.server_version = 0
+
+        # asyncio.Lock() lost its 'loop' parameter in Python 3.10; on older
+        # versions a custom event loop must be handed in explicitly.
+        if sys.version_info >= (3, 10):
+            self._engine_lock = asyncio.Lock()
+        else:
+            self._engine_lock = asyncio.Lock(loop=loop)
+        self._engine: Optional[sa_asyncio.AsyncEngine] = None
+        self._tables: Optional[SearchTables] = None
+        self._property_cache: Dict[str, Any] = {'DB:server_version': 0}
+
+    async def setup_database(self) -> None:
+        """ Set up the SQL engine and connections.
+
+            This function will be implicitly called when the database is
+            accessed for the first time. You may also call it explicitly to
+            avoid that the first call is delayed by the setup.
+        """
+        # The lock ensures the engine is created only once, even when
+        # several tasks trigger the setup concurrently.
+        async with self._engine_lock:
+            if self._engine:
+                return
+
+            extra_args: Dict[str, Any] = {'future': True,
+                                          'echo': self.config.get_bool('DEBUG_SQL')}
+
+            # An API_POOL_SIZE of 0 disables connection pooling.
+            if self.config.get_int('API_POOL_SIZE') == 0:
+                extra_args['poolclass'] = sa.pool.NullPool
+            else:
+                extra_args['poolclass'] = sa.pool.AsyncAdaptedQueuePool
+                extra_args['max_overflow'] = 0
+                extra_args['pool_size'] = self.config.get_int('API_POOL_SIZE')
+
+            is_sqlite = self.config.DATABASE_DSN.startswith('sqlite:')
+
+            if is_sqlite:
+                # The DSN has the form 'sqlite:key=value;...'; only the
+                # 'dbname' parameter (the database file) is used here.
+                params = dict((p.split('=', 1)
+                               for p in self.config.DATABASE_DSN[7:].split(';')))
+                dburl = sa.engine.URL.create('sqlite+aiosqlite',
+                                             database=params.get('dbname'))
+
+                if not ('NOMINATIM_DATABASE_RW' in self.config.environ
+                        and self.config.get_bool('DATABASE_RW')) \
+                   and not Path(params.get('dbname', '')).is_file():
+                    raise UsageError(f"SQlite database '{params.get('dbname')}' does not exist.")
+            else:
+                dsn = self.config.get_database_params()
+                # Everything that is not a standard connection parameter is
+                # passed on verbatim in the URL query part.
+                query = {k: str(v) for k, v in dsn.items()
+                         if k not in ('user', 'password', 'dbname', 'host', 'port')}
+
+                dburl = sa.engine.URL.create(
+                    f'postgresql+{PGCORE_LIB}',
+                    database=cast(str, dsn.get('dbname')),
+                    username=cast(str, dsn.get('user')),
+                    password=cast(str, dsn.get('password')),
+                    host=cast(str, dsn.get('host')),
+                    port=int(cast(str, dsn['port'])) if 'port' in dsn else None,
+                    query=query)
+
+            engine = sa_asyncio.create_async_engine(dburl, **extra_args)
+
+            if is_sqlite:
+                server_version = 0
+
+                # Load spatialite and install the custom SQL functions on
+                # every new SQLite connection.
+                @sa.event.listens_for(engine.sync_engine, "connect")
+                def _on_sqlite_connect(dbapi_con: Any, _: Any) -> None:
+                    dbapi_con.run_async(lambda conn: conn.enable_load_extension(True))
+                    sqlite_functions.install_custom_functions(dbapi_con)
+                    cursor = dbapi_con.cursor()
+                    cursor.execute("SELECT load_extension('mod_spatialite')")
+                    cursor.execute('SELECT SetDecimalPrecision(7)')
+                    dbapi_con.run_async(lambda conn: conn.enable_load_extension(False))
+            else:
+                # Probe the server version once; a connection failure is not
+                # fatal at this point and simply leaves the version at 0.
+                try:
+                    async with engine.begin() as conn:
+                        result = await conn.scalar(sa.text('SHOW server_version_num'))
+                        server_version = int(result)
+                        await conn.execute(sa.text("SET jit_above_cost TO '-1'"))
+                        await conn.execute(sa.text(
+                            "SET max_parallel_workers_per_gather TO '0'"))
+                except (PGCORE_ERROR, sa.exc.OperationalError):
+                    server_version = 0
+
+                # Re-apply the session settings on every new connection that
+                # the pool hands out.
+                @sa.event.listens_for(engine.sync_engine, "connect")
+                def _on_connect(dbapi_con: Any, _: Any) -> None:
+                    cursor = dbapi_con.cursor()
+                    cursor.execute("SET jit_above_cost TO '-1'")
+                    cursor.execute("SET max_parallel_workers_per_gather TO '0'")
+
+            self._property_cache['DB:server_version'] = server_version
+
+            self._tables = SearchTables(sa.MetaData())
+            self._engine = engine
+
+    async def close(self) -> None:
+        """ Close all active connections to the database. The NominatimAPIAsync
+            object remains usable after closing. If a new API function is
+            called, new connections are created.
+        """
+        if self._engine is not None:
+            await self._engine.dispose()
+
+    async def __aenter__(self) -> 'NominatimAPIAsync':
+        return self
+
+    async def __aexit__(self, *_: Any) -> None:
+        await self.close()
+
+    @contextlib.asynccontextmanager
+    async def begin(self) -> AsyncIterator[SearchConnection]:
+        """ Create a new connection with automatic transaction handling.
+
+            This function may be used to get low-level access to the database.
+            Refer to the documentation of SQLAlchemy for details how to use
+            the connection object.
+        """
+        # Lazily initialise the engine on first use.
+        if self._engine is None:
+            await self.setup_database()
+
+        assert self._engine is not None
+        assert self._tables is not None
+
+        async with self._engine.begin() as conn:
+            yield SearchConnection(conn, self._tables, self._property_cache, self.config)
+
+    async def status(self) -> StatusResult:
+        """ Return the status of the database.
+        """
+        try:
+            async with self.begin() as conn:
+                conn.set_query_timeout(self.query_timeout)
+                status = await get_status(conn)
+        except (PGCORE_ERROR, sa.exc.OperationalError):
+            # A failed connection is reported as status code 700.
+            return StatusResult(700, 'Database connection failed')
+
+        return status
+
+    async def details(self, place: ntyp.PlaceRef, **params: Any) -> Optional[DetailedResult]:
+        """ Get detailed information about a place in the database.
+
+            Returns None if there is no entry under the given ID.
+        """
+        details = ntyp.LookupDetails.from_kwargs(params)
+        async with self.begin() as conn:
+            conn.set_query_timeout(self.query_timeout)
+            if details.keywords:
+                await make_query_analyzer(conn)
+            return await get_detailed_place(conn, place, details)
+
+    async def lookup(self, places: Sequence[ntyp.PlaceRef], **params: Any) -> SearchResults:
+        """ Get simple information about a list of places.
+
+            Returns a list of place information for all IDs that were found.
+        """
+        details = ntyp.LookupDetails.from_kwargs(params)
+        async with self.begin() as conn:
+            conn.set_query_timeout(self.query_timeout)
+            if details.keywords:
+                await make_query_analyzer(conn)
+            return await get_places(conn, places, details)
+
+    async def reverse(self, coord: ntyp.AnyPoint, **params: Any) -> Optional[ReverseResult]:
+        """ Find a place by its coordinates. Also known as reverse geocoding.
+
+            Returns the closest result that can be found or None if
+            no place matches the given criteria.
+        """
+        # The following negation handles NaN correctly. Don't change.
+        if not abs(coord[0]) <= 180 or not abs(coord[1]) <= 90:
+            # There are no results to be expected outside valid coordinates.
+            return None
+
+        details = ntyp.ReverseDetails.from_kwargs(params)
+        async with self.begin() as conn:
+            conn.set_query_timeout(self.query_timeout)
+            if details.keywords:
+                await make_query_analyzer(conn)
+            geocoder = ReverseGeocoder(conn, details,
+                                       self.reverse_restrict_to_country_area)
+            return await geocoder.lookup(coord)
+
+    async def search(self, query: str, **params: Any) -> SearchResults:
+        """ Find a place by free-text search. Also known as forward geocoding.
+        """
+        query = query.strip()
+        if not query:
+            raise UsageError('Nothing to search for.')
+
+        async with self.begin() as conn:
+            conn.set_query_timeout(self.query_timeout)
+            geocoder = ForwardGeocoder(conn, ntyp.SearchDetails.from_kwargs(params),
+                                       self.config.get_int('REQUEST_TIMEOUT')
+                                       if self.config.REQUEST_TIMEOUT else None)
+            # The query is split into phrases at the commas.
+            phrases = [Phrase(PhraseType.NONE, p.strip()) for p in query.split(',')]
+            return await geocoder.lookup(phrases)
+
+    async def search_address(self, amenity: Optional[str] = None,
+                             street: Optional[str] = None,
+                             city: Optional[str] = None,
+                             county: Optional[str] = None,
+                             state: Optional[str] = None,
+                             country: Optional[str] = None,
+                             postalcode: Optional[str] = None,
+                             **params: Any) -> SearchResults:
+        """ Find an address using structured search.
+        """
+        async with self.begin() as conn:
+            conn.set_query_timeout(self.query_timeout)
+            details = ntyp.SearchDetails.from_kwargs(params)
+
+            phrases: List[Phrase] = []
+
+            if amenity:
+                phrases.append(Phrase(PhraseType.AMENITY, amenity))
+            if street:
+                phrases.append(Phrase(PhraseType.STREET, street))
+            if city:
+                phrases.append(Phrase(PhraseType.CITY, city))
+            if county:
+                phrases.append(Phrase(PhraseType.COUNTY, county))
+            if state:
+                phrases.append(Phrase(PhraseType.STATE, state))
+            if postalcode:
+                phrases.append(Phrase(PhraseType.POSTCODE, postalcode))
+            if country:
+                phrases.append(Phrase(PhraseType.COUNTRY, country))
+
+            if not phrases:
+                raise UsageError('Nothing to search for.')
+
+            # Restrict the result ranks according to the most specific
+            # address part that was given.
+            if amenity or street:
+                details.restrict_min_max_rank(26, 30)
+            elif city:
+                details.restrict_min_max_rank(13, 25)
+            elif county:
+                details.restrict_min_max_rank(10, 12)
+            elif state:
+                details.restrict_min_max_rank(5, 9)
+            elif postalcode:
+                details.restrict_min_max_rank(5, 11)
+            else:
+                details.restrict_min_max_rank(4, 4)
+
+            if 'layers' not in params:
+                details.layers = ntyp.DataLayer.ADDRESS
+                if amenity:
+                    details.layers |= ntyp.DataLayer.POI
+
+            geocoder = ForwardGeocoder(conn, details,
+                                       self.config.get_int('REQUEST_TIMEOUT')
+                                       if self.config.REQUEST_TIMEOUT else None)
+            return await geocoder.lookup(phrases)
+
+    async def search_category(self, categories: List[Tuple[str, str]],
+                              near_query: Optional[str] = None,
+                              **params: Any) -> SearchResults:
+        """ Find an object of a certain category near another place.
+            The near place may either be given as an unstructured search
+            query in itself or as coordinates.
+        """
+        if not categories:
+            return SearchResults()
+
+        details = ntyp.SearchDetails.from_kwargs(params)
+        async with self.begin() as conn:
+            conn.set_query_timeout(self.query_timeout)
+            if near_query:
+                phrases = [Phrase(PhraseType.NONE, p) for p in near_query.split(',')]
+            else:
+                phrases = []
+                if details.keywords:
+                    await make_query_analyzer(conn)
+
+            geocoder = ForwardGeocoder(conn, details,
+                                       self.config.get_int('REQUEST_TIMEOUT')
+                                       if self.config.REQUEST_TIMEOUT else None)
+            return await geocoder.lookup_pois(categories, phrases)
+
+
+class NominatimAPI:
+ """ This class provides a thin synchronous wrapper around the asynchronous
+ Nominatim functions. It creates its own event loop and runs each
+ synchronous function call to completion using that loop.
+
+ This class should usually be used as a context manager in 'with' context.
+ """
+
+    def __init__(self, project_dir: Optional[Union[str, Path]] = None,
+                 environ: Optional[Mapping[str, str]] = None) -> None:
+        """ Initiate a new frontend object with synchronous API functions.
+
+            Parameters:
+              project_dir: Path to the
+                  [project directory](../admin/Import.md#creating-the-project-directory)
+                  of the local Nominatim installation.
+              environ: Mapping of [configuration parameters](../customize/Settings.md).
+                  When set, replaces any configuration via environment variables.
+                  Settings in this mapping also have precedence over any
+                  parameters found in the `.env` file of the project directory.
+        """
+        # A private event loop drives the asynchronous implementation.
+        self._loop = asyncio.new_event_loop()
+        self._async_api = NominatimAPIAsync(project_dir, environ, loop=self._loop)
+
+    def close(self) -> None:
+        """ Close all active connections to the database.
+
+            This function also closes the asynchronous worker loop making
+            the NominatimAPI object unusable.
+        """
+        # The guard makes close() safe to call more than once.
+        if not self._loop.is_closed():
+            self._loop.run_until_complete(self._async_api.close())
+            self._loop.close()
+
+    def __enter__(self) -> 'NominatimAPI':
+        # Support use as a context manager ('with' statement).
+        return self
+
+    def __exit__(self, *_: Any) -> None:
+        # Shut down connections and the worker loop when leaving the context.
+        self.close()
+
+    @property
+    def config(self) -> Configuration:
+        """ Provide read-only access to the [configuration](Configuration.md)
+            used by the API.
+        """
+        # Delegate to the asynchronous API object.
+        return self._async_api.config
+
+    def status(self) -> StatusResult:
+        """ Return the status of the database as a dataclass object
+            with the fields described below.
+
+            Returns:
+              status(int): A status code as described on the status page.
+              message(str): Either 'OK' or a human-readable message of the
+                  problem encountered.
+              software_version(tuple): A tuple with the version of the
+                  Nominatim library consisting of (major, minor, patch, db-patch)
+                  version.
+              database_version(tuple): A tuple with the version of the library
+                  which was used for the import or last migration.
+                  Also consists of (major, minor, patch, db-patch).
+              data_updated(datetime): Timestamp with the age of the data.
+        """
+        # Run the async implementation to completion on the private loop.
+        return self._loop.run_until_complete(self._async_api.status())
+
+    def details(self, place: ntyp.PlaceRef, **params: Any) -> Optional[DetailedResult]:
+        """ Get detailed information about a place in the database.
+
+            The result is a dataclass object with the fields described below
+            or `None` if the place could not be found in the database.
+
+            Parameters:
+              place: Description of the place to look up. See
+                  [Place identification](Input-Parameter-Types.md#place-identification)
+                  for the various ways to reference a place.
+
+            Other parameters:
+              geometry_output (enum): Add the full geometry of the place to the result.
+                  Multiple formats may be selected. Note that geometries can become
+                  quite large. (Default: none)
+              geometry_simplification (float): Simplification factor to use on
+                  the geometries before returning them. The factor expresses
+                  the tolerance in degrees from which the geometry may differ.
+                  Topology is preserved. (Default: 0.0)
+              address_details (bool): Add detailed information about the places
+                  that make up the address of the requested object. (Default: False)
+              linked_places (bool): Add detailed information about the places
+                  that link to the result. (Default: False)
+              parented_places (bool): Add detailed information about all places
+                  for which the requested object is a parent, i.e. all places for
+                  which the object provides the address details.
+                  Only POI places can have parents. (Default: False)
+              keywords (bool): Add detailed information about the search terms
+                  used for this place.
+
+            Returns:
+              source_table (enum): Data source of the place. See below for possible values.
+              category (tuple): A tuple of two strings with the primary OSM tag
+                  and value.
+              centroid (Point): Point position of the place.
+              place_id (Optional[int]): Internal ID of the place. This ID may differ
+                  for the same place between different installations.
+              parent_place_id (Optional[int]): Internal ID of the parent of this
+                  place. Only meaningful for POI-like objects (places with a
+                  rank_address of 30).
+              linked_place_id (Optional[int]): Internal ID of the place this object
+                  links to. When this ID is set then there is no guarantee that
+                  the rest of the result information is complete.
+              admin_level (int): Value of the `admin_level` OSM tag. Only meaningful
+                  for administrative boundary objects.
+              indexed_date (datetime): Timestamp when the place was last updated.
+              osm_object (Optional[tuple]): OSM type and ID of the place, if available.
+              names (Optional[dict]): Dictionary of names of the place. Keys are
+                  usually the corresponding OSM tag keys.
+              address (Optional[dict]): Dictionary of address parts directly
+                  attributed to the place. Keys are usually the corresponding
+                  OSM tag keys with the `addr:` prefix removed.
+              extratags (Optional[dict]): Dictionary of additional attributes for
+                  the place. Usually OSM tag keys and values.
+              housenumber (Optional[str]): House number of the place, normalised
+                  for lookup. To get the house number in its original spelling,
+                  use `address['housenumber']`.
+              postcode (Optional[str]): Computed postcode for the place. To get
+                  directly attributed postcodes, use `address['postcode']` instead.
+              wikipedia (Optional[str]): Reference to a wikipedia site for the place.
+                  The string has the format <language code>:<wikipedia title>.
+              rank_address (int): [Address rank](../customize/Ranking.md#address-rank).
+              rank_search (int): [Search rank](../customize/Ranking.md#search-rank).
+              importance (Optional[float]): Relative importance of the place. This is a measure
+                  how likely the place will be searched for.
+              country_code (Optional[str]): Country the feature is in as
+                  ISO 3166-1 alpha-2 country code.
+              address_rows (Optional[AddressLines]): List of places that make up the
+                  computed address. `None` when `address_details` parameter was False.
+              linked_rows (Optional[AddressLines]): List of places that link to the object.
+                  `None` when `linked_places` parameter was False.
+              parented_rows (Optional[AddressLines]): List of direct children of the place.
+                  `None` when `parented_places` parameter was False.
+              name_keywords (Optional[WordInfos]): List of search words for the name of
+                  the place. `None` when `keywords` parameter is set to False.
+              address_keywords (Optional[WordInfos]): List of search words for the address of
+                  the place. `None` when `keywords` parameter is set to False.
+              geometry (dict): Dictionary containing the full geometry of the place
+                  in the formats requested in the `geometry_output` parameter.
+        """
+        # Delegate to the asynchronous implementation on the private loop.
+        return self._loop.run_until_complete(self._async_api.details(place, **params))
+
+    def lookup(self, places: Sequence[ntyp.PlaceRef], **params: Any) -> SearchResults:
+        """ Get simple information about a list of places.
+
+            Returns a list of place information for all IDs that were found.
+            Each result is a dataclass with the fields detailed below.
+
+            Parameters:
+              places: List of descriptions of the place to look up. See
+                  [Place identification](Input-Parameter-Types.md#place-identification)
+                  for the various ways to reference a place.
+
+            Other parameters:
+              geometry_output (enum): Add the full geometry of the place to the result.
+                  Multiple formats may be selected. Note that geometries can become
+                  quite large. (Default: none)
+              geometry_simplification (float): Simplification factor to use on
+                  the geometries before returning them. The factor expresses
+                  the tolerance in degrees from which the geometry may differ.
+                  Topology is preserved. (Default: 0.0)
+              address_details (bool): Add detailed information about the places
+                  that make up the address of the requested object. (Default: False)
+              linked_places (bool): Add detailed information about the places
+                  that link to the result. (Default: False)
+              parented_places (bool): Add detailed information about all places
+                  for which the requested object is a parent, i.e. all places for
+                  which the object provides the address details.
+                  Only POI places can have parents. (Default: False)
+              keywords (bool): Add detailed information about the search terms
+                  used for this place.
+
+            Returns:
+              source_table (enum): Data source of the place. See below for possible values.
+              category (tuple): A tuple of two strings with the primary OSM tag
+                  and value.
+              centroid (Point): Point position of the place.
+              place_id (Optional[int]): Internal ID of the place. This ID may differ
+                  for the same place between different installations.
+              osm_object (Optional[tuple]): OSM type and ID of the place, if available.
+              names (Optional[dict]): Dictionary of names of the place. Keys are
+                  usually the corresponding OSM tag keys.
+              address (Optional[dict]): Dictionary of address parts directly
+                  attributed to the place. Keys are usually the corresponding
+                  OSM tag keys with the `addr:` prefix removed.
+              extratags (Optional[dict]): Dictionary of additional attributes for
+                  the place. Usually OSM tag keys and values.
+              housenumber (Optional[str]): House number of the place, normalised
+                  for lookup. To get the house number in its original spelling,
+                  use `address['housenumber']`.
+              postcode (Optional[str]): Computed postcode for the place. To get
+                  directly attributed postcodes, use `address['postcode']` instead.
+              wikipedia (Optional[str]): Reference to a wikipedia site for the place.
+                  The string has the format <language code>:<wikipedia title>.
+              rank_address (int): [Address rank](../customize/Ranking.md#address-rank).
+              rank_search (int): [Search rank](../customize/Ranking.md#search-rank).
+              importance (Optional[float]): Relative importance of the place. This is a measure
+                  how likely the place will be searched for.
+              country_code (Optional[str]): Country the feature is in as
+                  ISO 3166-1 alpha-2 country code.
+              address_rows (Optional[AddressLines]): List of places that make up the
+                  computed address. `None` when `address_details` parameter was False.
+              linked_rows (Optional[AddressLines]): List of places that link to the object.
+                  `None` when `linked_places` parameter was False.
+              parented_rows (Optional[AddressLines]): List of direct children of the place.
+                  `None` when `parented_places` parameter was False.
+              name_keywords (Optional[WordInfos]): List of search words for the name of
+                  the place. `None` when `keywords` parameter is set to False.
+              address_keywords (Optional[WordInfos]): List of search words for the address of
+                  the place. `None` when `keywords` parameter is set to False.
+              bbox (Bbox): Bounding box of the full geometry of the place.
+                  If the place is a single point, then the size of the bounding
+                  box is guessed according to the type of place.
+              geometry (dict): Dictionary containing the full geometry of the place
+                  in the formats requested in the `geometry_output` parameter.
+        """
+        # Delegate to the asynchronous implementation on the private loop.
+        return self._loop.run_until_complete(self._async_api.lookup(places, **params))
+
+    def reverse(self, coord: ntyp.AnyPoint, **params: Any) -> Optional[ReverseResult]:
+        """ Find a place by its coordinates. Also known as reverse geocoding.
+
+            Returns the closest result that can be found or `None` if
+            no place matches the given criteria. The result is a dataclass
+            with the fields as detailed below.
+
+            Parameters:
+              coord: Coordinate to lookup the place for as a Point
+                  or a tuple (x, y). Must be in WGS84 projection.
+
+            Other parameters:
+              max_rank (int): Highest address rank to return. Can be used to
+                  restrict search to streets or settlements.
+              layers (enum): Defines the kind of data to take into account.
+                  See description of layers below. (Default: addresses and POIs)
+              geometry_output (enum): Add the full geometry of the place to the result.
+                  Multiple formats may be selected. Note that geometries can become
+                  quite large. (Default: none)
+              geometry_simplification (float): Simplification factor to use on
+                  the geometries before returning them. The factor expresses
+                  the tolerance in degrees from which the geometry may differ.
+                  Topology is preserved. (Default: 0.0)
+              address_details (bool): Add detailed information about the places
+                  that make up the address of the requested object. (Default: False)
+              linked_places (bool): Add detailed information about the places
+                  that link to the result. (Default: False)
+              parented_places (bool): Add detailed information about all places
+                  for which the requested object is a parent, i.e. all places for
+                  which the object provides the address details.
+                  Only POI places can have parents. (Default: False)
+              keywords (bool): Add detailed information about the search terms
+                  used for this place.
+
+            Returns:
+              source_table (enum): Data source of the place. See below for possible values.
+              category (tuple): A tuple of two strings with the primary OSM tag
+                  and value.
+              centroid (Point): Point position of the place.
+              place_id (Optional[int]): Internal ID of the place. This ID may differ
+                  for the same place between different installations.
+              osm_object (Optional[tuple]): OSM type and ID of the place, if available.
+              names (Optional[dict]): Dictionary of names of the place. Keys are
+                  usually the corresponding OSM tag keys.
+              address (Optional[dict]): Dictionary of address parts directly
+                  attributed to the place. Keys are usually the corresponding
+                  OSM tag keys with the `addr:` prefix removed.
+              extratags (Optional[dict]): Dictionary of additional attributes for
+                  the place. Usually OSM tag keys and values.
+              housenumber (Optional[str]): House number of the place, normalised
+                  for lookup. To get the house number in its original spelling,
+                  use `address['housenumber']`.
+              postcode (Optional[str]): Computed postcode for the place. To get
+                  directly attributed postcodes, use `address['postcode']` instead.
+              wikipedia (Optional[str]): Reference to a wikipedia site for the place.
+                  The string has the format <language code>:<wikipedia title>.
+              rank_address (int): [Address rank](../customize/Ranking.md#address-rank).
+              rank_search (int): [Search rank](../customize/Ranking.md#search-rank).
+              importance (Optional[float]): Relative importance of the place. This is a measure
+                  how likely the place will be searched for.
+              country_code (Optional[str]): Country the feature is in as
+                  ISO 3166-1 alpha-2 country code.
+              address_rows (Optional[AddressLines]): List of places that make up the
+                  computed address. `None` when `address_details` parameter was False.
+              linked_rows (Optional[AddressLines]): List of places that link to the object.
+                  `None` when `linked_places` parameter was False.
+              parented_rows (Optional[AddressLines]): List of direct children of the place.
+                  `None` when `parented_places` parameter was False.
+              name_keywords (Optional[WordInfos]): List of search words for the name of
+                  the place. `None` when `keywords` parameter is set to False.
+              address_keywords (Optional[WordInfos]): List of search words for the address of
+                  the place. `None` when `keywords` parameter is set to False.
+              bbox (Bbox): Bounding box of the full geometry of the place.
+                  If the place is a single point, then the size of the bounding
+                  box is guessed according to the type of place.
+              geometry (dict): Dictionary containing the full geometry of the place
+                  in the formats requested in the `geometry_output` parameter.
+              distance (Optional[float]): Distance in degree from the input point.
+        """
+        # Delegate to the asynchronous implementation on the private loop.
+        return self._loop.run_until_complete(self._async_api.reverse(coord, **params))
+
+ def search(self, query: str, **params: Any) -> SearchResults:
+ """ Find a place by free-text search. Also known as forward geocoding.
+
+ Parameters:
+ query: Free-form text query searching for a place.
+
+ Other parameters:
+ max_results (int): Maximum number of results to return. The
+ actual number of results may be less. (Default: 10)
+ min_rank (int): Lowest permissible rank for the result.
+ For addressable places this is the minimum
+ [address rank](../customize/Ranking.md#address-rank). For all
+ other places the [search rank](../customize/Ranking.md#search-rank)
+ is used.
+ max_rank (int): Highest permissible rank for the result. See min_rank above.
+ layers (enum): Defines the kind of data to take into account.
+ See [layers section](Input-Parameter-Types.md#layers) for details.
+ (Default: addresses and POIs)
+ countries (list[str]): Restrict search to countries with the given
+ ISO 3166-1 alpha-2 country code. An empty list (the default)
+ disables this filter.
+ excluded (list[int]): A list of internal IDs of places to exclude
+ from the search.
+ viewbox (Optional[Bbox]): Bounding box of an area to focus search on.
+ bounded_viewbox (bool): Consider the bounding box given in `viewbox`
+ as a filter and return only results within the bounding box.
+ near (Optional[Point]): Focus search around the given point and
+ return results ordered by distance to the given point.
+ near_radius (Optional[float]): Restrict results to results within
+ the given distance in degrees of `near` point. Ignored, when
+ `near` is not set.
+ categories (list[tuple]): Restrict search to places of the given
+ categories. The category is the main OSM tag assigned to each
+ place. An empty list (the default) disables this filter.
+ geometry_output (enum): Add the full geometry of the place to the result.
+ Multiple formats may be selected. Note that geometries can become
+ quite large. (Default: none)
+ geometry_simplification (float): Simplification factor to use on
+ the geometries before returning them. The factor expresses
+ the tolerance in degrees from which the geometry may differ.
+ Topology is preserved. (Default: 0.0)
+ address_details (bool): Add detailed information about the places
+ that make up the address of the requested object. (Default: False)
+ linked_places (bool): Add detailed information about the places
+ that link to the result. (Default: False)
+ parented_places (bool): Add detailed information about all places
+ for which the requested object is a parent, i.e. all places for
+ which the object provides the address details.
+ Only POI places can have parents. (Default: False)
+ keywords (bool): Add detailed information about the search terms
+ used for this place.
+
+ Returns:
+ source_table (enum): Data source of the place. See below for possible values.
+ category (tuple): A tuple of two strings with the primary OSM tag
+ and value.
+ centroid (Point): Point position of the place.
+ place_id (Optional[int]): Internal ID of the place. This ID may differ
+ for the same place between different installations.
+ osm_object (Optional[tuple]): OSM type and ID of the place, if available.
+ names (Optional[dict]): Dictionary of names of the place. Keys are
+ usually the corresponding OSM tag keys.
+ address (Optional[dict]): Dictionary of address parts directly
+ attributed to the place. Keys are usually the corresponding
+ OSM tag keys with the `addr:` prefix removed.
+ extratags (Optional[dict]): Dictionary of additional attributes for
+ the place. Usually OSM tag keys and values.
+ housenumber (Optional[str]): House number of the place, normalised
+ for lookup. To get the house number in its original spelling,
+ use `address['housenumber']`.
+ postcode (Optional[str]): Computed postcode for the place. To get
+ directly attributed postcodes, use `address['postcode']` instead.
+ wikipedia (Optional[str]): Reference to a wikipedia site for the place.
+ The string has the format <language code>:<wikipedia title>.
+ rank_address (int): [Address rank](../customize/Ranking.md#address-rank).
+ rank_search (int): [Search rank](../customize/Ranking.md#search-rank).
+ importance (Optional[float]): Relative importance of the place. This is a measure
+ how likely the place will be searched for.
+ country_code (Optional[str]): Country the feature is in as
+ ISO 3166-1 alpha-2 country code.
+ address_rows (Optional[AddressLines]): List of places that make up the
+ computed address. `None` when `address_details` parameter was False.
+ linked_rows (Optional[AddressLines]): List of places that link to the object.
+ `None` when `linked_places` parameter was False.
+ parented_rows (Optional[AddressLines]): List of direct children of the place.
+ `None` when `parented_places` parameter was False.
+ name_keywords (Optional[WordInfos]): List of search words for the name of
+ the place. `None` when `keywords` parameter is set to False.
+            address_keywords (Optional[WordInfos]): List of search words for the address of
+ the place. `None` when `keywords` parameter is set to False.
+ bbox (Bbox): Bounding box of the full geometry of the place.
+ If the place is a single point, then the size of the bounding
+ box is guessed according to the type of place.
+ geometry (dict): Dictionary containing the full geometry of the place
+ in the formats requested in the `geometry_output` parameter.
+ """
+ return self._loop.run_until_complete(
+ self._async_api.search(query, **params))
+
+ def search_address(self, amenity: Optional[str] = None,
+ street: Optional[str] = None,
+ city: Optional[str] = None,
+ county: Optional[str] = None,
+ state: Optional[str] = None,
+ country: Optional[str] = None,
+ postalcode: Optional[str] = None,
+ **params: Any) -> SearchResults:
+ """ Find an address using structured search.
+
+ Parameters:
+ amenity: Name of a POI.
+ street: Street and optionally housenumber of the address. If the address
+                does not have a street, then the place the housenumber refers to.
+ city: Postal city of the address.
+ county: County equivalent of the address. Does not exist in all
+ jurisdictions.
+ state: State or province of the address.
+ country: Country with its full name or its ISO 3166-1 alpha-2 country code.
+ Do not use together with the country_code filter.
+ postalcode: Post code or ZIP for the place.
+
+ Other parameters:
+ max_results (int): Maximum number of results to return. The
+ actual number of results may be less. (Default: 10)
+ min_rank (int): Lowest permissible rank for the result.
+ For addressable places this is the minimum
+ [address rank](../customize/Ranking.md#address-rank). For all
+ other places the [search rank](../customize/Ranking.md#search-rank)
+ is used.
+ max_rank (int): Highest permissible rank for the result. See min_rank above.
+ layers (enum): Defines the kind of data to take into account.
+ See [layers section](Input-Parameter-Types.md#layers) for details.
+ (Default: addresses and POIs)
+ countries (list[str]): Restrict search to countries with the given
+ ISO 3166-1 alpha-2 country code. An empty list (the default)
+                disables this filter. Do not use when the country parameter
+ is used.
+ excluded (list[int]): A list of internal IDs of places to exclude
+ from the search.
+ viewbox (Optional[Bbox]): Bounding box of an area to focus search on.
+ bounded_viewbox (bool): Consider the bounding box given in `viewbox`
+ as a filter and return only results within the bounding box.
+ near (Optional[Point]): Focus search around the given point and
+ return results ordered by distance to the given point.
+ near_radius (Optional[float]): Restrict results to results within
+ the given distance in degrees of `near` point. Ignored, when
+ `near` is not set.
+ categories (list[tuple]): Restrict search to places of the given
+ categories. The category is the main OSM tag assigned to each
+ place. An empty list (the default) disables this filter.
+ geometry_output (enum): Add the full geometry of the place to the result.
+ Multiple formats may be selected. Note that geometries can become
+ quite large. (Default: none)
+ geometry_simplification (float): Simplification factor to use on
+ the geometries before returning them. The factor expresses
+ the tolerance in degrees from which the geometry may differ.
+ Topology is preserved. (Default: 0.0)
+ address_details (bool): Add detailed information about the places
+ that make up the address of the requested object. (Default: False)
+ linked_places (bool): Add detailed information about the places
+ that link to the result. (Default: False)
+ parented_places (bool): Add detailed information about all places
+ for which the requested object is a parent, i.e. all places for
+ which the object provides the address details.
+ Only POI places can have parents. (Default: False)
+ keywords (bool): Add detailed information about the search terms
+ used for this place.
+
+ Returns:
+ source_table (enum): Data source of the place. See below for possible values.
+ category (tuple): A tuple of two strings with the primary OSM tag
+ and value.
+ centroid (Point): Point position of the place.
+ place_id (Optional[int]): Internal ID of the place. This ID may differ
+ for the same place between different installations.
+ osm_object (Optional[tuple]): OSM type and ID of the place, if available.
+ names (Optional[dict]): Dictionary of names of the place. Keys are
+ usually the corresponding OSM tag keys.
+ address (Optional[dict]): Dictionary of address parts directly
+ attributed to the place. Keys are usually the corresponding
+ OSM tag keys with the `addr:` prefix removed.
+ extratags (Optional[dict]): Dictionary of additional attributes for
+ the place. Usually OSM tag keys and values.
+ housenumber (Optional[str]): House number of the place, normalised
+ for lookup. To get the house number in its original spelling,
+ use `address['housenumber']`.
+ postcode (Optional[str]): Computed postcode for the place. To get
+ directly attributed postcodes, use `address['postcode']` instead.
+ wikipedia (Optional[str]): Reference to a wikipedia site for the place.
+ The string has the format <language code>:<wikipedia title>.
+ rank_address (int): [Address rank](../customize/Ranking.md#address-rank).
+ rank_search (int): [Search rank](../customize/Ranking.md#search-rank).
+            importance (Optional[float]): Relative importance of the place. This is a measure of
+ how likely the place will be searched for.
+ country_code (Optional[str]): Country the feature is in as
+ ISO 3166-1 alpha-2 country code.
+ address_rows (Optional[AddressLines]): List of places that make up the
+ computed address. `None` when `address_details` parameter was False.
+ linked_rows (Optional[AddressLines]): List of places that link to the object.
+ `None` when `linked_places` parameter was False.
+ parented_rows (Optional[AddressLines]): List of direct children of the place.
+ `None` when `parented_places` parameter was False.
+ name_keywords (Optional[WordInfos]): List of search words for the name of
+ the place. `None` when `keywords` parameter is set to False.
+            address_keywords (Optional[WordInfos]): List of search words for the address of
+ the place. `None` when `keywords` parameter is set to False.
+ bbox (Bbox): Bounding box of the full geometry of the place.
+ If the place is a single point, then the size of the bounding
+ box is guessed according to the type of place.
+ geometry (dict): Dictionary containing the full geometry of the place
+ in the formats requested in the `geometry_output` parameter.
+ """
+ return self._loop.run_until_complete(
+ self._async_api.search_address(amenity, street, city, county,
+ state, country, postalcode, **params))
+
+ def search_category(self, categories: List[Tuple[str, str]],
+ near_query: Optional[str] = None,
+ **params: Any) -> SearchResults:
+ """ Find an object of a certain category near another place.
+
+ The near place may either be given as an unstructured search
+ query in itself or as a geographic area through the
+ viewbox or near parameters.
+
+ Parameters:
+ categories: Restrict search to places of the given
+ categories. The category is the main OSM tag assigned to each
+ place.
+            near_query: Optional free-text query to define the area to
+ restrict search to.
+
+ Other parameters:
+ max_results (int): Maximum number of results to return. The
+ actual number of results may be less. (Default: 10)
+ min_rank (int): Lowest permissible rank for the result.
+ For addressable places this is the minimum
+ [address rank](../customize/Ranking.md#address-rank). For all
+ other places the [search rank](../customize/Ranking.md#search-rank)
+ is used.
+ max_rank (int): Highest permissible rank for the result. See min_rank above.
+ layers (enum): Defines the kind of data to take into account.
+ See [layers section](Input-Parameter-Types.md#layers) for details.
+ (Default: addresses and POIs)
+ countries (list[str]): Restrict search to countries with the given
+ ISO 3166-1 alpha-2 country code. An empty list (the default)
+ disables this filter.
+ excluded (list[int]): A list of internal IDs of places to exclude
+ from the search.
+ viewbox (Optional[Bbox]): Bounding box of an area to focus search on.
+ bounded_viewbox (bool): Consider the bounding box given in `viewbox`
+ as a filter and return only results within the bounding box.
+ near (Optional[Point]): Focus search around the given point and
+ return results ordered by distance to the given point.
+ near_radius (Optional[float]): Restrict results to results within
+ the given distance in degrees of `near` point. Ignored, when
+ `near` is not set.
+ geometry_output (enum): Add the full geometry of the place to the result.
+ Multiple formats may be selected. Note that geometries can become
+ quite large. (Default: none)
+ geometry_simplification (float): Simplification factor to use on
+ the geometries before returning them. The factor expresses
+ the tolerance in degrees from which the geometry may differ.
+ Topology is preserved. (Default: 0.0)
+ address_details (bool): Add detailed information about the places
+ that make up the address of the requested object. (Default: False)
+ linked_places (bool): Add detailed information about the places
+ that link to the result. (Default: False)
+ parented_places (bool): Add detailed information about all places
+ for which the requested object is a parent, i.e. all places for
+ which the object provides the address details.
+ Only POI places can have parents. (Default: False)
+ keywords (bool): Add detailed information about the search terms
+ used for this place.
+
+ Returns:
+ source_table (enum): Data source of the place. See below for possible values.
+ category (tuple): A tuple of two strings with the primary OSM tag
+ and value.
+ centroid (Point): Point position of the place.
+ place_id (Optional[int]): Internal ID of the place. This ID may differ
+ for the same place between different installations.
+ osm_object (Optional[tuple]): OSM type and ID of the place, if available.
+ names (Optional[dict]): Dictionary of names of the place. Keys are
+ usually the corresponding OSM tag keys.
+ address (Optional[dict]): Dictionary of address parts directly
+ attributed to the place. Keys are usually the corresponding
+ OSM tag keys with the `addr:` prefix removed.
+ extratags (Optional[dict]): Dictionary of additional attributes for
+ the place. Usually OSM tag keys and values.
+ housenumber (Optional[str]): House number of the place, normalised
+ for lookup. To get the house number in its original spelling,
+ use `address['housenumber']`.
+ postcode (Optional[str]): Computed postcode for the place. To get
+ directly attributed postcodes, use `address['postcode']` instead.
+ wikipedia (Optional[str]): Reference to a wikipedia site for the place.
+ The string has the format <language code>:<wikipedia title>.
+ rank_address (int): [Address rank](../customize/Ranking.md#address-rank).
+ rank_search (int): [Search rank](../customize/Ranking.md#search-rank).
+            importance (Optional[float]): Relative importance of the place. This is a measure of
+ how likely the place will be searched for.
+ country_code (Optional[str]): Country the feature is in as
+ ISO 3166-1 alpha-2 country code.
+ address_rows (Optional[AddressLines]): List of places that make up the
+ computed address. `None` when `address_details` parameter was False.
+ linked_rows (Optional[AddressLines]): List of places that link to the object.
+ `None` when `linked_places` parameter was False.
+ parented_rows (Optional[AddressLines]): List of direct children of the place.
+ `None` when `parented_places` parameter was False.
+ name_keywords (Optional[WordInfos]): List of search words for the name of
+ the place. `None` when `keywords` parameter is set to False.
+            address_keywords (Optional[WordInfos]): List of search words for the address of
+ the place. `None` when `keywords` parameter is set to False.
+ bbox (Bbox): Bounding box of the full geometry of the place.
+ If the place is a single point, then the size of the bounding
+ box is guessed according to the type of place.
+ geometry (dict): Dictionary containing the full geometry of the place
+ in the formats requested in the `geometry_output` parameter.
+ """
+ return self._loop.run_until_complete(
+ self._async_api.search_category(categories, near_query, **params))
-# SPDX-License-Identifier: GPL-2.0-only
+# SPDX-License-Identifier: GPL-3.0-or-later
#
# This file is part of Nominatim. (https://nominatim.org)
#
-# Copyright (C) 2022 by the Nominatim developer community.
+# Copyright (C) 2024 by the Nominatim developer community.
# For a full list of authors see the git log.
"""
Custom exception and error classes for Nominatim.
"""
+
class UsageError(Exception):
""" An error raised because of bad user input. This error will usually
not cause a stack trace to be printed unless debugging is enabled.
--- /dev/null
+# SPDX-License-Identifier: GPL-3.0-or-later
+#
+# This file is part of Nominatim. (https://nominatim.org)
+#
+# Copyright (C) 2024 by the Nominatim developer community.
+# For a full list of authors see the git log.
+"""
+Helper functions for localizing names of results.
+"""
+from typing import Mapping, List, Optional
+
+import re
+
+
+class Locales:
+ """ Helper class for localization of names.
+
+ It takes a list of language prefixes in their order of preferred
+ usage.
+ """
+
+ def __init__(self, langs: Optional[List[str]] = None):
+ self.languages = langs or []
+ self.name_tags: List[str] = []
+
+ # Build the list of supported tags. It is currently hard-coded.
+ self._add_lang_tags('name')
+ self._add_tags('name', 'brand')
+ self._add_lang_tags('official_name', 'short_name')
+ self._add_tags('official_name', 'short_name', 'ref')
+
+ def __bool__(self) -> bool:
+ return len(self.languages) > 0
+
+ def _add_tags(self, *tags: str) -> None:
+ for tag in tags:
+ self.name_tags.append(tag)
+ self.name_tags.append(f"_place_{tag}")
+
+ def _add_lang_tags(self, *tags: str) -> None:
+ for tag in tags:
+ for lang in self.languages:
+ self.name_tags.append(f"{tag}:{lang}")
+ self.name_tags.append(f"_place_{tag}:{lang}")
+
+ def display_name(self, names: Optional[Mapping[str, str]]) -> str:
+ """ Return the best matching name from a dictionary of names
+ containing different name variants.
+
+ If 'names' is null or empty, an empty string is returned. If no
+ appropriate localization is found, the first name is returned.
+ """
+ if not names:
+ return ''
+
+ if len(names) > 1:
+ for tag in self.name_tags:
+ if tag in names:
+ return names[tag]
+
+ # Nothing? Return any of the other names as a default.
+ return next(iter(names.values()))
+
+ @staticmethod
+ def from_accept_languages(langstr: str) -> 'Locales':
+ """ Create a localization object from a language list in the
+            format of the HTTP Accept-Language header.
+
+        The function tries to be forgiving of format errors by first splitting
+ the string into comma-separated parts and then parsing each
+ description separately. Badly formatted parts are then ignored.
+ """
+ # split string into languages
+ candidates = []
+ for desc in langstr.split(','):
+ m = re.fullmatch(r'\s*([a-z_-]+)(?:;\s*q\s*=\s*([01](?:\.\d+)?))?\s*',
+ desc, flags=re.I)
+ if m:
+ candidates.append((m[1], float(m[2] or 1.0)))
+
+ # sort the results by the weight of each language (preserving order).
+ candidates.sort(reverse=True, key=lambda e: e[1])
+
+ # If a language has a region variant, also add the language without
+ # variant but only if it isn't already in the list to not mess up the weight.
+ languages = []
+ for lid, _ in candidates:
+ languages.append(lid)
+ parts = lid.split('-', 1)
+ if len(parts) > 1 and all(c[0] != parts[0] for c in candidates):
+ languages.append(parts[0])
+
+ return Locales(languages)
--- /dev/null
+# SPDX-License-Identifier: GPL-3.0-or-later
+#
+# This file is part of Nominatim. (https://nominatim.org)
+#
+# Copyright (C) 2024 by the Nominatim developer community.
+# For a full list of authors see the git log.
+"""
+Functions for specialised logging with HTML output.
+"""
+from typing import Any, Iterator, Optional, List, Tuple, cast, Union, Mapping, Sequence
+from contextvars import ContextVar
+import datetime as dt
+import textwrap
+import io
+import re
+import html
+
+import sqlalchemy as sa
+from sqlalchemy.ext.asyncio import AsyncConnection
+
+try:
+ from pygments import highlight
+ from pygments.lexers import PythonLexer, PostgresLexer
+ from pygments.formatters import HtmlFormatter
+ CODE_HIGHLIGHT = True
+except ModuleNotFoundError:
+ CODE_HIGHLIGHT = False
+
+
+def _debug_name(res: Any) -> str:
+ if res.names:
+ return cast(str, res.names.get('name', next(iter(res.names.values()))))
+
+ return f"Hnr {res.housenumber}" if res.housenumber is not None else '[NONE]'
+
+
+class BaseLogger:
+ """ Interface for logging function.
+
+ The base implementation does nothing. Overwrite the functions
+ in derived classes which implement logging functionality.
+ """
+ def get_buffer(self) -> str:
+ """ Return the current content of the log buffer.
+ """
+ return ''
+
+ def function(self, func: str, **kwargs: Any) -> None:
+ """ Start a new debug chapter for the given function and its parameters.
+ """
+
+ def section(self, heading: str) -> None:
+ """ Start a new section with the given title.
+ """
+
+ def comment(self, text: str) -> None:
+ """ Add a simple comment to the debug output.
+ """
+
+ def var_dump(self, heading: str, var: Any) -> None:
+ """ Print the content of the variable to the debug output prefixed by
+ the given heading.
+ """
+
+ def table_dump(self, heading: str, rows: Iterator[Optional[List[Any]]]) -> None:
+ """ Print the table generated by the generator function.
+ """
+
+ def result_dump(self, heading: str, results: Iterator[Tuple[Any, Any]]) -> None:
+ """ Print a list of search results generated by the generator function.
+ """
+
+ def sql(self, conn: AsyncConnection, statement: 'sa.Executable',
+ params: Union[Mapping[str, Any], Sequence[Mapping[str, Any]], None]) -> None:
+ """ Print the SQL for the given statement.
+ """
+
+ def format_sql(self, conn: AsyncConnection, statement: 'sa.Executable',
+ extra_params: Union[Mapping[str, Any], Sequence[Mapping[str, Any]], None]
+ ) -> str:
+ """ Return the compiled version of the statement.
+ """
+ compiled = cast('sa.ClauseElement', statement).compile(conn.sync_engine)
+
+ params = dict(compiled.params)
+ if isinstance(extra_params, Mapping):
+ for k, v in extra_params.items():
+ if hasattr(v, 'to_wkt'):
+ params[k] = v.to_wkt()
+ elif isinstance(v, (int, float)):
+ params[k] = v
+ else:
+ params[k] = str(v)
+ elif isinstance(extra_params, Sequence) and extra_params:
+ for k in extra_params[0]:
+ params[k] = f':{k}'
+
+ sqlstr = str(compiled)
+
+ if conn.dialect.name == 'postgresql':
+ if sa.__version__.startswith('1'):
+ try:
+ sqlstr = re.sub(r'__\[POSTCOMPILE_[^]]*\]', '%s', sqlstr)
+ return sqlstr % tuple((repr(params.get(name, None))
+ for name in compiled.positiontup)) # type: ignore
+ except TypeError:
+ return sqlstr
+
+ # Fixes an odd issue with Python 3.7 where percentages are not
+ # quoted correctly.
+ sqlstr = re.sub(r'%(?!\()', '%%', sqlstr)
+ sqlstr = re.sub(r'__\[POSTCOMPILE_([^]]*)\]', r'%(\1)s', sqlstr)
+ return sqlstr % params
+
+ assert conn.dialect.name == 'sqlite'
+
+ # params in positional order
+ pparams = (repr(params.get(name, None)) for name in compiled.positiontup) # type: ignore
+
+ sqlstr = re.sub(r'__\[POSTCOMPILE_([^]]*)\]', '?', sqlstr)
+ sqlstr = re.sub(r"\?", lambda m: next(pparams), sqlstr)
+
+ return sqlstr
+
+
+class HTMLLogger(BaseLogger):
+ """ Logger that formats messages in HTML.
+ """
+ def __init__(self) -> None:
+ self.buffer = io.StringIO()
+
+ def _timestamp(self) -> None:
+ self._write(f'<p class="timestamp">[{dt.datetime.now()}]</p>')
+
+ def get_buffer(self) -> str:
+ return HTML_HEADER + self.buffer.getvalue() + HTML_FOOTER
+
+ def function(self, func: str, **kwargs: Any) -> None:
+ self._timestamp()
+ self._write(f"<h1>Debug output for {func}()</h1>\n<p>Parameters:<dl>")
+ for name, value in kwargs.items():
+ self._write(f'<dt>{name}</dt><dd>{self._python_var(value)}</dd>')
+ self._write('</dl></p>')
+
+ def section(self, heading: str) -> None:
+ self._timestamp()
+ self._write(f"<h2>{heading}</h2>")
+
+ def comment(self, text: str) -> None:
+ self._timestamp()
+ self._write(f"<p>{text}</p>")
+
+ def var_dump(self, heading: str, var: Any) -> None:
+ self._timestamp()
+ if callable(var):
+ var = var()
+
+ self._write(f'<h5>{heading}</h5>{self._python_var(var)}')
+
+ def table_dump(self, heading: str, rows: Iterator[Optional[List[Any]]]) -> None:
+ self._timestamp()
+ head = next(rows)
+ assert head
+ self._write(f'<table><thead><tr><th colspan="{len(head)}">{heading}</th></tr><tr>')
+ for cell in head:
+ self._write(f'<th>{cell}</th>')
+ self._write('</tr></thead><tbody>')
+ for row in rows:
+ if row is not None:
+ self._write('<tr>')
+ for cell in row:
+ self._write(f'<td>{cell}</td>')
+ self._write('</tr>')
+ self._write('</tbody></table>')
+
+ def result_dump(self, heading: str, results: Iterator[Tuple[Any, Any]]) -> None:
+ """ Print a list of search results generated by the generator function.
+ """
+ self._timestamp()
+
+ def format_osm(osm_object: Optional[Tuple[str, int]]) -> str:
+ if not osm_object:
+ return '-'
+
+ t, i = osm_object
+ if t == 'N':
+ fullt = 'node'
+ elif t == 'W':
+ fullt = 'way'
+ elif t == 'R':
+ fullt = 'relation'
+ else:
+ return f'{t}{i}'
+
+ return f'<a href="https://www.openstreetmap.org/{fullt}/{i}">{t}{i}</a>'
+
+ self._write(f'<h5>{heading}</h5><p><dl>')
+ total = 0
+ for rank, res in results:
+ self._write(f'<dt>[{rank:.3f}]</dt> <dd>{res.source_table.name}(')
+ self._write(f"{_debug_name(res)}, type=({','.join(res.category)}), ")
+ self._write(f"rank={res.rank_address}, ")
+ self._write(f"osm={format_osm(res.osm_object)}, ")
+ self._write(f'cc={res.country_code}, ')
+ self._write(f'importance={res.importance or float("nan"):.5f})</dd>')
+ total += 1
+ self._write(f'</dl><b>TOTAL:</b> {total}</p>')
+
+ def sql(self, conn: AsyncConnection, statement: 'sa.Executable',
+ params: Union[Mapping[str, Any], Sequence[Mapping[str, Any]], None]) -> None:
+ self._timestamp()
+ sqlstr = self.format_sql(conn, statement, params)
+ if CODE_HIGHLIGHT:
+ sqlstr = highlight(sqlstr, PostgresLexer(),
+ HtmlFormatter(nowrap=True, lineseparator='<br />'))
+ self._write(f'<div class="highlight"><code class="lang-sql">{sqlstr}</code></div>')
+ else:
+ self._write(f'<code class="lang-sql">{html.escape(sqlstr)}</code>')
+
+ def _python_var(self, var: Any) -> str:
+ if CODE_HIGHLIGHT:
+ fmt = highlight(str(var), PythonLexer(), HtmlFormatter(nowrap=True))
+ return f'<div class="highlight"><code class="lang-python">{fmt}</code></div>'
+
+ return f'<code class="lang-python">{html.escape(str(var))}</code>'
+
+ def _write(self, text: str) -> None:
+ """ Add the raw text to the debug output.
+ """
+ self.buffer.write(text)
+
+
+class TextLogger(BaseLogger):
+ """ Logger creating output suitable for the console.
+ """
+ def __init__(self) -> None:
+ self.buffer = io.StringIO()
+
+ def _timestamp(self) -> None:
+ self._write(f'[{dt.datetime.now()}]\n')
+
+ def get_buffer(self) -> str:
+ return self.buffer.getvalue()
+
+ def function(self, func: str, **kwargs: Any) -> None:
+ self._write(f"#### Debug output for {func}()\n\nParameters:\n")
+ for name, value in kwargs.items():
+ self._write(f' {name}: {self._python_var(value)}\n')
+ self._write('\n')
+
+ def section(self, heading: str) -> None:
+ self._timestamp()
+ self._write(f"\n# {heading}\n\n")
+
+ def comment(self, text: str) -> None:
+ self._write(f"{text}\n")
+
+ def var_dump(self, heading: str, var: Any) -> None:
+ if callable(var):
+ var = var()
+
+ self._write(f'{heading}:\n {self._python_var(var)}\n\n')
+
+ def table_dump(self, heading: str, rows: Iterator[Optional[List[Any]]]) -> None:
+ self._write(f'{heading}:\n')
+ data = [list(map(self._python_var, row)) if row else None for row in rows]
+ assert data[0] is not None
+ num_cols = len(data[0])
+
+ maxlens = [max(len(d[i]) for d in data if d) for i in range(num_cols)]
+ tablewidth = sum(maxlens) + 3 * num_cols + 1
+ row_format = '| ' + ' | '.join(f'{{:<{ln}}}' for ln in maxlens) + ' |\n'
+ self._write('-'*tablewidth + '\n')
+ self._write(row_format.format(*data[0]))
+ self._write('-'*tablewidth + '\n')
+ for row in data[1:]:
+ if row:
+ self._write(row_format.format(*row))
+ else:
+ self._write('-'*tablewidth + '\n')
+ if data[-1]:
+ self._write('-'*tablewidth + '\n')
+
+ def result_dump(self, heading: str, results: Iterator[Tuple[Any, Any]]) -> None:
+ self._timestamp()
+ self._write(f'{heading}:\n')
+ total = 0
+ for rank, res in results:
+ self._write(f'[{rank:.3f}] {res.source_table.name}(')
+ self._write(f"{_debug_name(res)}, type=({','.join(res.category)}), ")
+ self._write(f"rank={res.rank_address}, ")
+ self._write(f"osm={''.join(map(str, res.osm_object or []))}, ")
+ self._write(f'cc={res.country_code}, ')
+ self._write(f'importance={res.importance or -1:.5f})\n')
+ total += 1
+ self._write(f'TOTAL: {total}\n\n')
+
+ def sql(self, conn: AsyncConnection, statement: 'sa.Executable',
+ params: Union[Mapping[str, Any], Sequence[Mapping[str, Any]], None]) -> None:
+ self._timestamp()
+ sqlstr = '\n| '.join(textwrap.wrap(self.format_sql(conn, statement, params), width=78))
+ self._write(f"| {sqlstr}\n\n")
+
+ def _python_var(self, var: Any) -> str:
+ return str(var)
+
+ def _write(self, text: str) -> None:
+ self.buffer.write(text)
+
+
+logger: ContextVar[BaseLogger] = ContextVar('logger', default=BaseLogger())
+
+
+def set_log_output(fmt: str) -> None:
+ """ Enable collecting debug information.
+ """
+ if fmt == 'html':
+ logger.set(HTMLLogger())
+ elif fmt == 'text':
+ logger.set(TextLogger())
+ else:
+ logger.set(BaseLogger())
+
+
+def log() -> BaseLogger:
+ """ Return the logger for the current context.
+ """
+ return logger.get()
+
+
+def get_and_disable() -> str:
+ """ Return the current content of the debug buffer and disable logging.
+ """
+ buf = logger.get().get_buffer()
+ logger.set(BaseLogger())
+ return buf
+
+
+HTML_HEADER: str = """<!DOCTYPE html>
+<html>
+<head>
+ <title>Nominatim - Debug</title>
+ <style>
+""" + \
+ (HtmlFormatter(nobackground=True).get_style_defs('.highlight') if CODE_HIGHLIGHT else '') + \
+ """
+ h2 { font-size: x-large }
+
+ dl {
+ padding-left: 10pt;
+ font-family: monospace
+ }
+
+ dt {
+ float: left;
+ font-weight: bold;
+ margin-right: 0.5em
+ }
+
+ dt::after { content: ": "; }
+
+ dd::after {
+ clear: left;
+ display: block
+ }
+
+ .lang-sql {
+ color: #555;
+ font-size: small
+ }
+
+ h5 {
+ border: solid lightgrey 0.1pt;
+ margin-bottom: 0;
+ background-color: #f7f7f7
+ }
+
+ h5 + .highlight {
+ padding: 3pt;
+ border: solid lightgrey 0.1pt
+ }
+
+ table, th, tbody {
+ border: thin solid;
+ border-collapse: collapse;
+ }
+ td {
+ border-right: thin solid;
+ padding-left: 3pt;
+ padding-right: 3pt;
+ }
+
+ .timestamp {
+ font-size: 0.8em;
+ color: darkblue;
+ width: calc(100% - 5pt);
+ text-align: right;
+ position: absolute;
+ left: 0;
+ margin-top: -5px;
+ }
+ </style>
+</head>
+<body>
+"""
+
+HTML_FOOTER: str = "</body></html>"
--- /dev/null
+# SPDX-License-Identifier: GPL-3.0-or-later
+#
+# This file is part of Nominatim. (https://nominatim.org)
+#
+# Copyright (C) 2024 by the Nominatim developer community.
+# For a full list of authors see the git log.
+"""
+Implementation of place lookup by ID (doing many places at once).
+"""
+from typing import Optional, Callable, Type, Iterable, Tuple, Union
+from dataclasses import dataclass
+import datetime as dt
+
+import sqlalchemy as sa
+
+from .typing import SaColumn, SaRow, SaSelect
+from .connection import SearchConnection
+from .logging import log
+from . import types as ntyp
+from . import results as nres
+
+RowFunc = Callable[[Optional[SaRow], Type[nres.BaseResultT]], Optional[nres.BaseResultT]]
+
+GEOMETRY_TYPE_MAP = {
+ 'POINT': 'ST_Point',
+ 'MULTIPOINT': 'ST_MultiPoint',
+ 'LINESTRING': 'ST_LineString',
+ 'MULTILINESTRING': 'ST_MultiLineString',
+ 'POLYGON': 'ST_Polygon',
+ 'MULTIPOLYGON': 'ST_MultiPolygon',
+ 'GEOMETRYCOLLECTION': 'ST_GeometryCollection'
+}
+
+
+@dataclass
+class LookupTuple:
+ """ Data class saving the SQL result for a single lookup.
+ """
+ pid: ntyp.PlaceRef
+ result: Optional[nres.SearchResult] = None
+
+
+class LookupCollector:
+ """ Result collector for the simple lookup.
+
+ Allows for lookup of multiple places simultaneously.
+ """
+
+ def __init__(self, places: Iterable[ntyp.PlaceRef],
+ details: ntyp.LookupDetails) -> None:
+ self.details = details
+ self.lookups = [LookupTuple(p) for p in places]
+
+ def get_results(self) -> nres.SearchResults:
+ """ Return the list of results available.
+ """
+ return nres.SearchResults(p.result for p in self.lookups if p.result is not None)
+
+ async def add_rows_from_sql(self, conn: SearchConnection, sql: SaSelect,
+ col: SaColumn, row_func: RowFunc[nres.SearchResult]) -> bool:
+ if self.details.geometry_output:
+ if self.details.geometry_simplification > 0.0:
+ col = sa.func.ST_SimplifyPreserveTopology(
+ col, self.details.geometry_simplification)
+
+ if self.details.geometry_output & ntyp.GeometryFormat.GEOJSON:
+ sql = sql.add_columns(sa.func.ST_AsGeoJSON(col, 7).label('geometry_geojson'))
+ if self.details.geometry_output & ntyp.GeometryFormat.TEXT:
+ sql = sql.add_columns(sa.func.ST_AsText(col).label('geometry_text'))
+ if self.details.geometry_output & ntyp.GeometryFormat.KML:
+ sql = sql.add_columns(sa.func.ST_AsKML(col, 7).label('geometry_kml'))
+ if self.details.geometry_output & ntyp.GeometryFormat.SVG:
+ sql = sql.add_columns(sa.func.ST_AsSVG(col, 0, 7).label('geometry_svg'))
+
+ for row in await conn.execute(sql):
+ result = row_func(row, nres.SearchResult)
+ assert result is not None
+ if hasattr(row, 'bbox'):
+ result.bbox = ntyp.Bbox.from_wkb(row.bbox)
+
+ if self.lookups[row._idx].result is None:
+ self.lookups[row._idx].result = result
+
+ return all(p.result is not None for p in self.lookups)
+
+ def enumerate_free_place_ids(self) -> Iterable[Tuple[int, ntyp.PlaceID]]:
+ return ((i, p.pid) for i, p in enumerate(self.lookups)
+ if p.result is None and isinstance(p.pid, ntyp.PlaceID))
+
+ def enumerate_free_osm_ids(self) -> Iterable[Tuple[int, ntyp.OsmID]]:
+ return ((i, p.pid) for i, p in enumerate(self.lookups)
+ if p.result is None and isinstance(p.pid, ntyp.OsmID))
+
+
+class DetailedCollector:
+ """ Result collector for detailed lookup.
+
+        Only one place at a time may be looked up.
+ """
+
+ def __init__(self, place: ntyp.PlaceRef, with_geometry: bool) -> None:
+ self.with_geometry = with_geometry
+ self.place = place
+ self.result: Optional[nres.DetailedResult] = None
+
+ async def add_rows_from_sql(self, conn: SearchConnection, sql: SaSelect,
+ col: SaColumn, row_func: RowFunc[nres.DetailedResult]) -> bool:
+ if self.with_geometry:
+ sql = sql.add_columns(
+ sa.func.ST_AsGeoJSON(
+ sa.case((sa.func.ST_NPoints(col) > 5000,
+ sa.func.ST_SimplifyPreserveTopology(col, 0.0001)),
+ else_=col), 7).label('geometry_geojson'))
+ else:
+ sql = sql.add_columns(sa.func.ST_GeometryType(col).label('geometry_type'))
+
+ for row in await conn.execute(sql):
+ self.result = row_func(row, nres.DetailedResult)
+ assert self.result is not None
+ # add missing details
+ if 'type' in self.result.geometry:
+ self.result.geometry['type'] = \
+ GEOMETRY_TYPE_MAP.get(self.result.geometry['type'],
+ self.result.geometry['type'])
+ indexed_date = getattr(row, 'indexed_date', None)
+ if indexed_date is not None:
+ self.result.indexed_date = indexed_date.replace(tzinfo=dt.timezone.utc)
+
+ return True
+
+ # Nothing found.
+ return False
+
+ def enumerate_free_place_ids(self) -> Iterable[Tuple[int, ntyp.PlaceID]]:
+ if self.result is None and isinstance(self.place, ntyp.PlaceID):
+ return [(0, self.place)]
+ return []
+
+ def enumerate_free_osm_ids(self) -> Iterable[Tuple[int, ntyp.OsmID]]:
+ if self.result is None and isinstance(self.place, ntyp.OsmID):
+ return [(0, self.place)]
+ return []
+
+
+Collector = Union[LookupCollector, DetailedCollector]
+
+
async def get_detailed_place(conn: SearchConnection, place: ntyp.PlaceRef,
                             details: ntyp.LookupDetails) -> Optional[nres.DetailedResult]:
    """ Retrieve a place with additional details from the database.

        Returns None when the place could not be found in any of the
        source tables.

        Raises ValueError when a geometry output format other than
        GeoJSON is requested.
    """
    log().function('get_detailed_place', place=place, details=details)

    if details.geometry_output and details.geometry_output != ntyp.GeometryFormat.GEOJSON:
        # (fixed typo 'geojosn' in the user-facing error message)
        raise ValueError("lookup only supports geojson polygon output.")

    collector = DetailedCollector(place,
                                  bool(details.geometry_output & ntyp.GeometryFormat.GEOJSON))

    # Try each source table in turn until the place has been found.
    for func in (find_in_placex, find_in_osmline, find_in_postcode, find_in_tiger):
        if await func(conn, collector):
            break

    if collector.result is not None:
        await nres.add_result_details(conn, [collector.result], details)

    return collector.result
+
+
async def get_places(conn: SearchConnection, places: Iterable[ntyp.PlaceRef],
                     details: ntyp.LookupDetails) -> nres.SearchResults:
    """ Look up a list of places and return them as simple search
        results from the database.
    """
    log().function('get_places', places=places, details=details)

    collector = LookupCollector(places, details)

    for lookup_func in (find_in_placex, find_in_osmline,
                        find_in_postcode, find_in_tiger):
        # Each function returns True when all places have been resolved.
        if await lookup_func(conn, collector):
            break

    results = collector.get_results()
    await nres.add_result_details(conn, results, details)

    return results
+
+
async def find_in_placex(conn: SearchConnection, collector: Collector) -> bool:
    """ Search for the given places in the main placex table.

        Return true when all places have been resolved.
    """
    log().section("Find in placex table")
    t = conn.t.placex
    sql = sa.select(t.c.place_id, t.c.osm_type, t.c.osm_id, t.c.name,
                    t.c.class_, t.c.type, t.c.admin_level,
                    t.c.address, t.c.extratags,
                    t.c.housenumber, t.c.postcode, t.c.country_code,
                    t.c.importance, t.c.wikipedia, t.c.indexed_date,
                    t.c.parent_place_id, t.c.rank_address, t.c.rank_search,
                    t.c.linked_place_id,
                    t.c.geometry.ST_Expand(0).label('bbox'),
                    t.c.centroid)

    # First resolve lookups by OSM id, one OSM type at a time.
    for osm_type in ('N', 'W', 'R'):
        # Pack the pending ids into a JSON array; 'i' records the
        # position in the collector's lookup list.
        osm_ids = [{'i': i, 'oi': p.osm_id, 'oc': p.osm_class or ''}
                   for i, p in collector.enumerate_free_osm_ids()
                   if p.osm_type == osm_type]

        if osm_ids:
            # Expand the JSON array into a table to join against placex.
            oid_tab = sa.func.JsonArrayEach(sa.type_coerce(osm_ids, sa.JSON))\
                        .table_valued(sa.column('value', type_=sa.JSON))
            psql = sql.add_columns(oid_tab.c.value['i'].as_integer().label('_idx'))\
                      .where(t.c.osm_type == osm_type)\
                      .where(t.c.osm_id == oid_tab.c.value['oi'].as_string().cast(sa.BigInteger))\
                      .where(sa.or_(oid_tab.c.value['oc'].as_string() == '',
                                    oid_tab.c.value['oc'].as_string() == t.c.class_))\
                      .order_by(t.c.class_)

            if await collector.add_rows_from_sql(conn, psql, t.c.geometry,
                                                 nres.create_from_placex_row):
                return True

    # Then resolve the remaining lookups by internal place id.
    place_ids = [{'i': i, 'id': p.place_id}
                 for i, p in collector.enumerate_free_place_ids()]

    if place_ids:
        pid_tab = sa.func.JsonArrayEach(sa.type_coerce(place_ids, sa.JSON))\
                    .table_valued(sa.column('value', type_=sa.JSON))
        psql = sql.add_columns(pid_tab.c.value['i'].as_integer().label('_idx'))\
                  .where(t.c.place_id == pid_tab.c.value['id'].as_string().cast(sa.BigInteger))

        return await collector.add_rows_from_sql(conn, psql, t.c.geometry,
                                                 nres.create_from_placex_row)

    return False
+
+
async def find_in_osmline(conn: SearchConnection, collector: Collector) -> bool:
    """ Search for the given places in the table for address interpolations.

        Return true when all places have been resolved.
    """
    log().section("Find in interpolation table")
    t = conn.t.osmline
    sql = sa.select(t.c.place_id, t.c.osm_id, t.c.parent_place_id,
                    t.c.indexed_date, t.c.startnumber, t.c.endnumber,
                    t.c.step, t.c.address, t.c.postcode, t.c.country_code,
                    t.c.linegeo.ST_Centroid().label('centroid'))

    # Interpolations always belong to OSM ways. The class field of the
    # lookup carries the requested housenumber (see class_as_housenumber()).
    osm_ids = [{'i': i, 'oi': p.osm_id, 'oc': p.class_as_housenumber()}
               for i, p in collector.enumerate_free_osm_ids() if p.osm_type == 'W']

    if osm_ids:
        oid_tab = sa.func.JsonArrayEach(sa.type_coerce(osm_ids, sa.JSON))\
                    .table_valued(sa.column('value', type_=sa.JSON))
        # Prefer interpolation lines whose number range is closest to the
        # requested housenumber.
        psql = sql.add_columns(oid_tab.c.value['i'].as_integer().label('_idx'))\
                  .where(t.c.osm_id == oid_tab.c.value['oi'].as_string().cast(sa.BigInteger))\
                  .order_by(sa.func.greatest(0,
                                             oid_tab.c.value['oc'].as_integer() - t.c.endnumber,
                                             t.c.startnumber - oid_tab.c.value['oc'].as_integer()))

        if await collector.add_rows_from_sql(conn, psql, t.c.linegeo,
                                             nres.create_from_osmline_row):
            return True

    place_ids = [{'i': i, 'id': p.place_id}
                 for i, p in collector.enumerate_free_place_ids()]

    if place_ids:
        pid_tab = sa.func.JsonArrayEach(sa.type_coerce(place_ids, sa.JSON))\
                    .table_valued(sa.column('value', type_=sa.JSON))
        # '_idx' is used as a Python list index by the collector, so it
        # must be converted to an integer, consistent with all the other
        # lookup queries (was missing .as_integer() here).
        psql = sql.add_columns(pid_tab.c.value['i'].as_integer().label('_idx'))\
                  .where(t.c.place_id == pid_tab.c.value['id'].as_string().cast(sa.BigInteger))

        return await collector.add_rows_from_sql(conn, psql, t.c.linegeo,
                                                 nres.create_from_osmline_row)

    return False
+
+
async def find_in_postcode(conn: SearchConnection, collector: Collector) -> bool:
    """ Search for the given places in the postcode table.

        Return true when all places have been resolved.
    """
    log().section("Find in postcode table")

    # Postcode centroids can only be looked up by internal place id.
    place_ids = [{'i': i, 'id': p.place_id}
                 for i, p in collector.enumerate_free_place_ids()]

    if place_ids:
        # Expand the JSON array of requested ids into a joinable table;
        # 'i' records the position in the collector's lookup list.
        pid_tab = sa.func.JsonArrayEach(sa.type_coerce(place_ids, sa.JSON))\
                    .table_valued(sa.column('value', type_=sa.JSON))
        t = conn.t.postcode
        sql = sa.select(pid_tab.c.value['i'].as_integer().label('_idx'),
                        t.c.place_id, t.c.parent_place_id,
                        t.c.rank_search, t.c.rank_address,
                        t.c.indexed_date, t.c.postcode, t.c.country_code,
                        t.c.geometry.label('centroid'))\
                .where(t.c.place_id == pid_tab.c.value['id'].as_string().cast(sa.BigInteger))

        return await collector.add_rows_from_sql(conn, sql, t.c.geometry,
                                                 nres.create_from_postcode_row)

    return False
+
+
async def find_in_tiger(conn: SearchConnection, collector: Collector) -> bool:
    """ Search for the given places in the TIGER address table.

        Return true when all places have been resolved.
    """
    log().section("Find in tiger table")

    # TIGER rows can only be looked up by internal place id.
    place_ids = [{'i': i, 'id': p.place_id}
                 for i, p in collector.enumerate_free_place_ids()]

    if place_ids:
        pid_tab = sa.func.JsonArrayEach(sa.type_coerce(place_ids, sa.JSON))\
                    .table_valued(sa.column('value', type_=sa.JSON))
        t = conn.t.tiger
        parent = conn.t.placex
        # Outer join against placex to pick up the OSM id of the parent
        # street; the osm_type/osm_id columns are NULL when missing.
        sql = sa.select(pid_tab.c.value['i'].as_integer().label('_idx'),
                        t.c.place_id, t.c.parent_place_id,
                        parent.c.osm_type, parent.c.osm_id,
                        t.c.startnumber, t.c.endnumber, t.c.step,
                        t.c.postcode,
                        t.c.linegeo.ST_Centroid().label('centroid'))\
                .join(parent, t.c.parent_place_id == parent.c.place_id, isouter=True)\
                .where(t.c.place_id == pid_tab.c.value['id'].as_string().cast(sa.BigInteger))

        return await collector.add_rows_from_sql(conn, sql, t.c.linegeo,
                                                 nres.create_from_tiger_row)

    return False
--- /dev/null
+# SPDX-License-Identifier: GPL-3.0-or-later
+#
+# This file is part of Nominatim. (https://nominatim.org)
+#
+# Copyright (C) 2024 by the Nominatim developer community.
+# For a full list of authors see the git log.
+"""
+Common data types and protocols for preprocessing.
+"""
+from typing import List, Callable
+
+from ..typing import Protocol
+from ..search import query as qmod
+from .config import QueryConfig
+
# A query preprocessor takes the list of query phrases and returns a
# (possibly modified) list of phrases.
QueryProcessingFunc = Callable[[List[qmod.Phrase]], List[qmod.Phrase]]


class QueryHandler(Protocol):
    """ Protocol for query preprocessing modules.
    """
    def create(self, config: QueryConfig) -> QueryProcessingFunc:
        """
        Create a function for preprocessing the phrases of a query.

        Arguments:
            config: Configuration options for the preprocessor. It
                    may also carry the normalizer set up via
                    QueryConfig.set_normalizer().

        Return:
            A function that takes a list of phrases and returns the
            preprocessed list.
        """
        pass
--- /dev/null
+# SPDX-License-Identifier: GPL-3.0-or-later
+#
+# This file is part of Nominatim. (https://nominatim.org)
+#
+# Copyright (C) 2024 by the Nominatim developer community.
+# For a full list of authors see the git log.
+"""
+Configuration for Sanitizers.
+"""
+from typing import Any, TYPE_CHECKING
+from collections import UserDict
+
# UserDict can only be subscripted when type checking; at runtime the
# plain class must be used (missing generics in Python < 3.9).
# See https://github.com/python/typing/issues/60#issuecomment-869757075
if TYPE_CHECKING:
    _BaseUserDict = UserDict[str, Any]
else:
    _BaseUserDict = UserDict


class QueryConfig(_BaseUserDict):
    """ Read-only dictionary with the configuration options for a
        query preprocessor.

        On top of the plain dictionary interface, it offers accessor
        functions for standard options that many preprocessors share.
    """

    def set_normalizer(self, normalizer: Any) -> 'QueryConfig':
        """ Store the normalizer to be used by the preprocessors and
            return the config itself, so that calls can be chained.
        """
        self.data['_normalizer'] = normalizer
        return self
--- /dev/null
+# SPDX-License-Identifier: GPL-3.0-or-later
+#
+# This file is part of Nominatim. (https://nominatim.org)
+#
+# Copyright (C) 2024 by the Nominatim developer community.
+# For a full list of authors see the git log.
+"""
+Normalize query text using the same ICU normalization rules that are
+applied during import. If a phrase becomes empty because the normalization
+removes all terms, then the phrase is deleted.
+
+This preprocessor does not come with any extra information. Instead it will
+use the configuration from the `normalization` section.
+"""
+from typing import cast
+
+from .config import QueryConfig
+from .base import QueryProcessingFunc
+from ..search.query import Phrase
+
+
def create(config: QueryConfig) -> QueryProcessingFunc:
    """ Create a function that runs each phrase of a query through the
        configured normalizer. Phrases whose text becomes empty are
        removed from the list.
    """
    norm = config.get('_normalizer')

    if not norm:
        # No normalizer configured: hand the phrases through unchanged.
        return lambda p: p

    def _process(phrases):
        normalized = (Phrase(p.ptype, cast(str, norm.transliterate(p.text)))
                      for p in phrases)
        return [p for p in normalized if p.text]

    return _process
--- /dev/null
+# SPDX-License-Identifier: GPL-3.0-or-later
+#
+# This file is part of Nominatim. (https://nominatim.org)
+#
+# Copyright (C) 2025 by the Nominatim developer community.
+# For a full list of authors see the git log.
+"""
+This file divides Japanese addresses into three categories:
+prefecture, municipality, and other.
+The division is not strict but simple using these keywords.
+"""
+from typing import List
+import re
+
+from .config import QueryConfig
+from .base import QueryProcessingFunc
+from ..search.query import Phrase
+
MATCH_PATTERNS = [
    r'''
    (...??[都都道府県縣]) # [group1] prefecture
    (.+?[市区區町村]) # [group2] municipalities (city/wards/towns/villages)
    (.+) # [group3] other words
    ''',
    r'''
    (...??[都都道府県縣]) # [group1] prefecture
    (.+) # [group3] other words
    ''',
    r'''
    (.+?[市区區町村]) # [group2] municipalities (city/wards/towns/villages)
    (.+) # [group3] other words
    '''
]


class _JapanesePreprocessing:
    """ Preprocessor that splits a Japanese address phrase into the
        parts prefecture, municipality and remainder.
    """

    def __init__(self, config: QueryConfig) -> None:
        self.config = config

    def split_phrase(self, phrase: Phrase) -> Phrase:
        """ Try each of the address patterns on the phrase text. On the
            first match, return a new phrase with the matched groups
            joined by ':'. Without a match, the phrase is unchanged.
        """
        for regex in MATCH_PATTERNS:
            match = re.match(regex, phrase.text, re.VERBOSE)
            if match:
                return Phrase(phrase.ptype, ':'.join(match.groups()))

        return phrase

    def __call__(self, phrases: List[Phrase]) -> List[Phrase]:
        """ Apply the address splitting to every phrase of the query.
        """
        split = []
        for phrase in phrases:
            split.append(self.split_phrase(phrase))
        return split


def create(config: QueryConfig) -> QueryProcessingFunc:
    """ Create a function of japanese preprocessing.
    """
    return _JapanesePreprocessing(config)
--- /dev/null
+# SPDX-License-Identifier: GPL-3.0-or-later
+#
+# This file is part of Nominatim. (https://nominatim.org)
+#
+# Copyright (C) 2024 by the Nominatim developer community.
+# For a full list of authors see the git log.
+"""
+Helper classes and functions for formatting results into API responses.
+"""
+from typing import Type, TypeVar, Dict, List, Callable, Any, Mapping, Optional, cast
+from collections import defaultdict
+from pathlib import Path
+import importlib
+
+from .server.content_types import CONTENT_JSON
+
+T = TypeVar('T')
+FormatFunc = Callable[[T, Mapping[str, Any]], str]
+ErrorFormatFunc = Callable[[str, str, int], str]
+
+
+class FormatDispatcher:
+ """ Container for formatting functions for results.
+ Functions can conveniently be added by using decorated functions.
+ """
+
+ def __init__(self, content_types: Optional[Mapping[str, str]] = None) -> None:
+ self.error_handler: ErrorFormatFunc = lambda ct, msg, status: f"ERROR {status}: {msg}"
+ self.content_types: Dict[str, str] = {}
+ if content_types:
+ self.content_types.update(content_types)
+ self.format_functions: Dict[Type[Any], Dict[str, FormatFunc[Any]]] = defaultdict(dict)
+
+ def format_func(self, result_class: Type[T],
+ fmt: str) -> Callable[[FormatFunc[T]], FormatFunc[T]]:
+ """ Decorator for a function that formats a given type of result into the
+ selected format.
+ """
+ def decorator(func: FormatFunc[T]) -> FormatFunc[T]:
+ self.format_functions[result_class][fmt] = func
+ return func
+
+ return decorator
+
+ def error_format_func(self, func: ErrorFormatFunc) -> ErrorFormatFunc:
+ """ Decorator for a function that formats error messges.
+ There is only one error formatter per dispatcher. Using
+ the decorator repeatedly will overwrite previous functions.
+ """
+ self.error_handler = func
+ return func
+
+ def list_formats(self, result_type: Type[Any]) -> List[str]:
+ """ Return a list of formats supported by this formatter.
+ """
+ return list(self.format_functions[result_type].keys())
+
+ def supports_format(self, result_type: Type[Any], fmt: str) -> bool:
+ """ Check if the given format is supported by this formatter.
+ """
+ return fmt in self.format_functions[result_type]
+
+ def format_result(self, result: Any, fmt: str, options: Mapping[str, Any]) -> str:
+ """ Convert the given result into a string using the given format.
+
+ The format is expected to be in the list returned by
+ `list_formats()`.
+ """
+ return self.format_functions[type(result)][fmt](result, options)
+
+ def format_error(self, content_type: str, msg: str, status: int) -> str:
+ """ Convert the given error message into a response string
+ taking the requested content_type into account.
+
+ Change the format using the error_format_func decorator.
+ """
+ return self.error_handler(content_type, msg, status)
+
+ def set_content_type(self, fmt: str, content_type: str) -> None:
+ """ Set the content type for the given format. This is the string
+ that will be returned in the Content-Type header of the HTML
+ response, when the given format is choosen.
+ """
+ self.content_types[fmt] = content_type
+
+ def get_content_type(self, fmt: str) -> str:
+ """ Return the content type for the given format.
+
+ If no explicit content type has been defined, then
+ JSON format is assumed.
+ """
+ return self.content_types.get(fmt, CONTENT_JSON)
+
+
def load_format_dispatcher(api_name: str, project_dir: Optional[Path]) -> FormatDispatcher:
    """ Load the format dispatcher for the given API.

        The function first tries to find a module api/<api_name>/format.py
        in the project directory. This file must export a single variable
        `dispatch`.

        If the module does not exist, the default formatter is loaded.
    """
    # 'importlib.util' is a submodule and not guaranteed to be reachable
    # after a plain 'import importlib', so import it explicitly here.
    import importlib.util

    if project_dir is not None:
        priv_module = project_dir / 'api' / api_name / 'format.py'
        if priv_module.is_file():
            # Use a properly dotted module name (previously contained a
            # stray comma: 'api.<name>,format').
            spec = importlib.util.spec_from_file_location(f'api.{api_name}.format',
                                                          str(priv_module))
            if spec:
                module = importlib.util.module_from_spec(spec)
                # Do not add to global modules because there is no standard
                # module name that Python can resolve.
                assert spec.loader is not None
                spec.loader.exec_module(module)

                return cast(FormatDispatcher, module.dispatch)

    return cast(FormatDispatcher,
                importlib.import_module(f'nominatim_api.{api_name}.format').dispatch)
--- /dev/null
+# SPDX-License-Identifier: GPL-3.0-or-later
+#
+# This file is part of Nominatim. (https://nominatim.org)
+#
+# Copyright (C) 2024 by the Nominatim developer community.
+# For a full list of authors see the git log.
+"""
+Dataclasses for search results and helper functions to fill them.
+
+Data classes are part of the public API while the functions are for
+internal use only. That's why they are implemented as free-standing functions
+instead of member functions.
+"""
+from typing import Optional, Tuple, Dict, Sequence, TypeVar, Type, List, cast, Callable
+import enum
+import dataclasses
+import datetime as dt
+
+import sqlalchemy as sa
+
+from .typing import SaSelect, SaRow
+from .sql.sqlalchemy_types import Geometry
+from .types import Point, Bbox, LookupDetails
+from .connection import SearchConnection
+from .logging import log
+from .localization import Locales
+
+# This file defines complex result data classes.
+
+
+def _mingle_name_tags(names: Optional[Dict[str, str]]) -> Optional[Dict[str, str]]:
+ """ Mix-in names from linked places, so that they show up
+ as standard names where necessary.
+ """
+ if not names:
+ return None
+
+ out = {}
+ for k, v in names.items():
+ if k.startswith('_place_'):
+ outkey = k[7:]
+ out[k if outkey in names else outkey] = v
+ else:
+ out[k] = v
+
+ return out
+
+
class SourceTable(enum.Enum):
    """ The `SourceTable` type lists the possible sources a result can have.
    """
    PLACEX = 1
    """ The placex table is the main source for results, usually containing
        OSM data.
    """
    OSMLINE = 2
    """ The osmline table contains address interpolations from OSM data.
        Interpolation addresses are always approximate. The OSM id in the
        result refers to the OSM way with the interpolation line object.
    """
    TIGER = 3
    """ TIGER address data contains US addresses imported on the side,
        see [Installing TIGER data](../customize/Tiger.md).
        TIGER addresses are also interpolations. The addresses always refer
        to a street from OSM data. The OSM id in the result refers to
        that street.
    """
    POSTCODE = 4
    """ The postcode table contains artificial centroids for postcodes,
        computed from the postcodes available with address points. Results
        are always approximate.
    """
    COUNTRY = 5
    """ The country table provides a fallback, when country data is missing
        in the OSM data.
    """
+
+
@dataclasses.dataclass
class AddressLine:
    """ The `AddressLine` may contain the following fields about a related place
        and its function as an address object. Most fields are optional.
        Their presence depends on the kind and function of the address part.
    """
    category: Tuple[str, str]
    """ Main category of the place, described by a key-value pair.
    """
    names: Dict[str, str]
    """ All available names for the place including references, alternative
        names and translations.
    """
    fromarea: bool
    """ If true, then the exact area of the place is known. Without area
        information, Nominatim has to make an educated guess if an address
        belongs to one place or another.
    """
    isaddress: bool
    """ If true, this place should be considered for the final address display.
        Nominatim will sometimes include more than one candidate for
        the address in the list when it cannot reliably determine where the
        place belongs. It will consider names of all candidates when searching
        but when displaying the result, only the most likely candidate should
        be shown.
    """
    rank_address: int
    """ [Address rank](../customize/Ranking.md#address-rank) of the place.
    """
    distance: float
    """ Distance in degrees between the result place and this address part.
    """
    place_id: Optional[int] = None
    """ Internal ID of the place.
    """
    osm_object: Optional[Tuple[str, int]] = None
    """ OSM type and ID of the place, if such an object exists.
    """
    extratags: Optional[Dict[str, str]] = None
    """ Any extra information available about the place. This is a dictionary
        that usually contains OSM tag key-value pairs.
    """

    admin_level: Optional[int] = None
    """ The administrative level of a boundary as tagged in the input data.
        This field is only meaningful for places of the category
        (boundary, administrative).
    """

    local_name: Optional[str] = None
    """ Placeholder for localization of this address part. See
        [Localization](Result-Handling.md#localization) below.
    """
+
+
class AddressLines(List[AddressLine]):
    """ Sequence of address lines ordered by decreasing address rank.
    """

    def localize(self, locales: Locales) -> List[str]:
        """ Fill in the local name of every address part that is marked
            as isaddress and return the names as a list, with
            consecutive duplicates removed.
        """
        parts: List[str] = []

        for entry in self:
            if not entry.isaddress or not entry.names:
                continue
            entry.local_name = locales.display_name(entry.names)
            # Drop a name that simply repeats the previous one.
            if parts and parts[-1] == entry.local_name:
                continue
            parts.append(entry.local_name)

        return parts
+
+
@dataclasses.dataclass
class WordInfo:
    """ Each entry in the list of search terms contains the
        following detailed information.
    """
    word_id: int
    """ Internal identifier for the word.
    """
    word_token: str
    """ Normalised and transliterated form of the word.
        This form is used for searching.
    """
    word: Optional[str] = None
    """ Untransliterated form, if available.
    """


# Read-only sequence of search terms, used for the keyword fields of results.
WordInfos = Sequence[WordInfo]
+
+
@dataclasses.dataclass
class BaseResult:
    """ Data class collecting information common to all
        types of search results.
    """
    # Required fields: origin table, main category tag and center point.
    source_table: SourceTable
    category: Tuple[str, str]
    centroid: Point

    # Identifiers of the place and of related places.
    place_id: Optional[int] = None
    osm_object: Optional[Tuple[str, int]] = None
    parent_place_id: Optional[int] = None
    linked_place_id: Optional[int] = None
    admin_level: int = 15

    # Localized names; filled in by localize().
    locale_name: Optional[str] = None
    display_name: Optional[str] = None

    # Raw tag collections as stored in the database.
    names: Optional[Dict[str, str]] = None
    address: Optional[Dict[str, str]] = None
    extratags: Optional[Dict[str, str]] = None

    housenumber: Optional[str] = None
    postcode: Optional[str] = None
    wikipedia: Optional[str] = None

    # Ranks default to 30, the most detailed rank.
    rank_address: int = 30
    rank_search: int = 30
    importance: Optional[float] = None

    country_code: Optional[str] = None

    # Optional detail information, filled in by add_result_details().
    address_rows: Optional[AddressLines] = None
    linked_rows: Optional[AddressLines] = None
    parented_rows: Optional[AddressLines] = None
    name_keywords: Optional[WordInfos] = None
    address_keywords: Optional[WordInfos] = None

    # Geometry renderings keyed by format name (see _filter_geometries()).
    geometry: Dict[str, str] = dataclasses.field(default_factory=dict)

    @property
    def lat(self) -> float:
        """ Get the latitude (or y) of the center point of the place.
        """
        return self.centroid[1]

    @property
    def lon(self) -> float:
        """ Get the longitude (or x) of the center point of the place.
        """
        return self.centroid[0]

    def calculated_importance(self) -> float:
        """ Get a valid importance value. This is either the stored importance
            of the value or an artificial value computed from the place's
            search rank.
        """
        # Note: a stored importance of 0.0 also triggers the fallback.
        # The 0.40001 offset keeps the fallback positive even for rank 30.
        return self.importance or (0.40001 - (self.rank_search/75.0))

    def localize(self, locales: Locales) -> None:
        """ Fill the locale_name and the display_name field for the
            place and, if available, its address information.
        """
        self.locale_name = locales.display_name(self.names)
        if self.address_rows:
            # The display name is assembled from the localized address parts.
            self.display_name = ', '.join(self.address_rows.localize(locales))
        else:
            self.display_name = self.locale_name
+
+
# Type variable for functions that accept any subclass of BaseResult.
BaseResultT = TypeVar('BaseResultT', bound=BaseResult)


@dataclasses.dataclass
class DetailedResult(BaseResult):
    """ A search result with more internal information from the database
        added.
    """
    # Time of the last indexing of the place; made timezone-aware (UTC)
    # by the lookup code.
    indexed_date: Optional[dt.datetime] = None
+
+
@dataclasses.dataclass
class ReverseResult(BaseResult):
    """ A search result for reverse geocoding.
    """
    # Distance to the original search position, if known.
    distance: Optional[float] = None
    bbox: Optional[Bbox] = None


class ReverseResults(List[ReverseResult]):
    """ Sequence of reverse lookup results ordered by distance.
        May be empty when no result was found.
    """


@dataclasses.dataclass
class SearchResult(BaseResult):
    """ A search result for forward geocoding.
    """
    bbox: Optional[Bbox] = None
    accuracy: float = 0.0

    @property
    def ranking(self) -> float:
        """ Return the ranking, a combined measure of accuracy and importance.
        """
        # Defensive: accuracy defaults to 0.0 but may have been set
        # to None externally.
        return (self.accuracy if self.accuracy is not None else 1) \
            - self.calculated_importance()


class SearchResults(List[SearchResult]):
    """ Sequence of forward lookup results ordered by relevance.
        May be empty when no result was found.
    """
+
+
+def _filter_geometries(row: SaRow) -> Dict[str, str]:
+ return {k[9:]: v for k, v in row._mapping.items()
+ if k.startswith('geometry_')}
+
+
def create_from_placex_row(row: Optional[SaRow],
                           class_type: Type[BaseResultT]) -> Optional[BaseResultT]:
    """ Construct a new result and add the data from the result row
        from the placex table. 'class_type' defines the type of result
        to return. Returns None if the row is None.
    """
    if row is None:
        return None

    return class_type(source_table=SourceTable.PLACEX,
                      place_id=row.place_id,
                      osm_object=(row.osm_type, row.osm_id),
                      category=(row.class_, row.type),
                      parent_place_id=row.parent_place_id,
                      # Columns that are not part of every query get
                      # defaults via getattr().
                      linked_place_id=getattr(row, 'linked_place_id', None),
                      admin_level=getattr(row, 'admin_level', 15),
                      names=_mingle_name_tags(row.name),
                      address=row.address,
                      extratags=row.extratags,
                      housenumber=row.housenumber,
                      postcode=row.postcode,
                      wikipedia=row.wikipedia,
                      rank_address=row.rank_address,
                      rank_search=row.rank_search,
                      importance=row.importance,
                      country_code=row.country_code,
                      centroid=Point.from_wkb(row.centroid),
                      geometry=_filter_geometries(row))
+
+
def create_from_osmline_row(row: Optional[SaRow],
                            class_type: Type[BaseResultT]) -> Optional[BaseResultT]:
    """ Construct a new result and add the data from the result row
        from the address interpolation table osmline. 'class_type' defines
        the type of result to return. Returns None if the row is None.

        If the row contains a housenumber, then the housenumber is filled out.
        Otherwise the result contains the interpolation information in extratags.
    """
    if row is None:
        return None

    # Only set when the query resolved a concrete housenumber.
    hnr = getattr(row, 'housenumber', None)

    res = class_type(source_table=SourceTable.OSMLINE,
                     place_id=row.place_id,
                     parent_place_id=row.parent_place_id,
                     osm_object=('W', row.osm_id),
                     category=('place', 'houses' if hnr is None else 'house'),
                     address=row.address,
                     postcode=row.postcode,
                     country_code=row.country_code,
                     centroid=Point.from_wkb(row.centroid),
                     geometry=_filter_geometries(row))

    if hnr is None:
        # No specific housenumber: report the interpolation range instead.
        res.extratags = {'startnumber': str(row.startnumber),
                         'endnumber': str(row.endnumber),
                         'step': str(row.step)}
    else:
        res.housenumber = str(hnr)

    return res
+
+
def create_from_tiger_row(row: Optional[SaRow],
                          class_type: Type[BaseResultT],
                          osm_type: Optional[str] = None,
                          osm_id: Optional[int] = None) -> Optional[BaseResultT]:
    """ Construct a new result and add the data from the result row
        from the Tiger data interpolation table. 'class_type' defines
        the type of result to return. Returns None if the row is None.

        If the row contains a housenumber, then the housenumber is filled out.
        Otherwise the result contains the interpolation information in extratags.
    """
    if row is None:
        return None

    # Only set when the query resolved a concrete housenumber.
    hnr = getattr(row, 'housenumber', None)

    res = class_type(source_table=SourceTable.TIGER,
                     place_id=row.place_id,
                     parent_place_id=row.parent_place_id,
                     # The OSM object of the parent street may be passed in
                     # explicitly or come from columns of the row itself.
                     osm_object=(osm_type or row.osm_type, osm_id or row.osm_id),
                     category=('place', 'houses' if hnr is None else 'house'),
                     postcode=row.postcode,
                     # TIGER data exclusively covers the US.
                     country_code='us',
                     centroid=Point.from_wkb(row.centroid),
                     geometry=_filter_geometries(row))

    if hnr is None:
        # No specific housenumber: report the interpolation range instead.
        res.extratags = {'startnumber': str(row.startnumber),
                         'endnumber': str(row.endnumber),
                         'step': str(row.step)}
    else:
        res.housenumber = str(hnr)

    return res
+
+
def create_from_postcode_row(row: Optional[SaRow],
                             class_type: Type[BaseResultT]) -> Optional[BaseResultT]:
    """ Construct a new result and add the data from the result row
        from the postcode table. 'class_type' defines
        the type of result to return. Returns None if the row is None.
    """
    if row is None:
        return None

    return class_type(source_table=SourceTable.POSTCODE,
                      place_id=row.place_id,
                      parent_place_id=row.parent_place_id,
                      category=('place', 'postcode'),
                      # The postcode itself serves as the name of the place.
                      names={'ref': row.postcode},
                      rank_search=row.rank_search,
                      rank_address=row.rank_address,
                      country_code=row.country_code,
                      centroid=Point.from_wkb(row.centroid),
                      geometry=_filter_geometries(row))
+
+
def create_from_country_row(row: Optional[SaRow],
                            class_type: Type[BaseResultT]) -> Optional[BaseResultT]:
    """ Construct a new result and add the data from the result row
        from the fallback country tables. 'class_type' defines
        the type of result to return. Returns None if the row is None.
    """
    if row is None:
        return None

    return class_type(source_table=SourceTable.COUNTRY,
                      category=('place', 'country'),
                      centroid=Point.from_wkb(row.centroid),
                      names=row.name,
                      # Countries always have a fixed address/search rank.
                      rank_address=4, rank_search=4,
                      country_code=row.country_code,
                      geometry=_filter_geometries(row))
+
+
async def add_result_details(conn: SearchConnection, results: List[BaseResultT],
                             details: LookupDetails) -> None:
    """ Retrieve more details from the database according to the
        parameters specified in 'details'.

        The results are modified in place.
    """
    if results:
        log().section('Query details for result')
        if details.address_details:
            log().comment('Query address details')
            await complete_address_details(conn, results)
        if details.linked_places:
            log().comment('Query linked places')
            for result in results:
                await complete_linked_places(conn, result)
        if details.parented_places:
            log().comment('Query parent places')
            for result in results:
                await complete_parented_places(conn, result)
        if details.keywords:
            log().comment('Query keywords')
            for result in results:
                await complete_keywords(conn, result)
        # Localize last, so that the address details gathered above
        # can be taken into account.
        for result in results:
            result.localize(details.locales)
+
+
def _result_row_to_address_row(row: SaRow, isaddress: Optional[bool] = None) -> AddressLine:
    """ Create a new AddressLine from the results of a database query.

        When 'isaddress' is given, it overrides the isaddress column
        of the row.
    """
    extratags: Dict[str, str] = getattr(row, 'extratags', {}) or {}
    if 'linked_place' in extratags:
        # Copy the linked_place tag over to the standard 'place' key.
        extratags['place'] = extratags['linked_place']

    names = _mingle_name_tags(row.name) or {}
    if getattr(row, 'housenumber', None) is not None:
        names['housenumber'] = row.housenumber

    if isaddress is None:
        # Default to True when the row has no isaddress column.
        isaddress = getattr(row, 'isaddress', True)

    return AddressLine(place_id=row.place_id,
                       osm_object=None if row.osm_type is None else (row.osm_type, row.osm_id),
                       # 'class' is a reserved word in Python, so the row
                       # attribute is only accessible via getattr().
                       category=(getattr(row, 'class'), row.type),
                       names=names,
                       extratags=extratags,
                       admin_level=row.admin_level,
                       fromarea=row.fromarea,
                       isaddress=isaddress,
                       rank_address=row.rank_address,
                       distance=row.distance)
+
+
def _get_address_lookup_id(result: BaseResultT) -> int:
    """ Determine the place id from which to look up the address
        of the given result.
    """
    assert result.place_id
    # Results from auxiliary tables and places with a search rank above 27
    # take the address of their parent place, when available.
    if result.source_table != SourceTable.PLACEX or result.rank_search > 27:
        return result.parent_place_id or result.place_id

    # Otherwise prefer the linked place, when available.
    return result.linked_place_id or result.place_id
+
+
async def _finalize_entry(conn: SearchConnection, result: BaseResultT) -> None:
    """ Append synthetic postcode and country rows to the address lines
        of the result after the main address lookup has been done.
    """
    assert result.address_rows is not None
    # Add a separate postcode row unless the result itself already is a
    # postcode object (boundary/place of type postal_code/postcode).
    if result.category[0] not in ('boundary', 'place')\
       or result.category[1] not in ('postal_code', 'postcode'):
        postcode = result.postcode
        if not postcode and result.address:
            postcode = result.address.get('postcode')
        # Skip multi-value postcodes (separated by ',' or ';').
        if postcode and ',' not in postcode and ';' not in postcode:
            result.address_rows.append(AddressLine(
                category=('place', 'postcode'),
                names={'ref': postcode},
                fromarea=False, isaddress=True, rank_address=5,
                distance=0.0))
    if result.country_code:
        async def _get_country_names() -> Optional[Dict[str, str]]:
            # Merge the base names with the derived names of the first
            # matching row (returns inside the loop on the first row).
            t = conn.t.country_name
            sql = sa.select(t.c.name, t.c.derived_name)\
                .where(t.c.country_code == result.country_code)
            for cres in await conn.execute(sql):
                names = cast(Dict[str, str], cres[0])
                if cres[1]:
                    names.update(cast(Dict[str, str], cres[1]))
                return names
            return None

        # Country names are identical for all results in the same country,
        # so cache them on the connection.
        country_names = await conn.get_cached_value('COUNTRY_NAME',
                                                    result.country_code,
                                                    _get_country_names)
        if country_names:
            result.address_rows.append(AddressLine(
                category=('place', 'country'),
                names=country_names,
                fromarea=False, isaddress=True, rank_address=4,
                distance=0.0))
        # Always add the bare country code as an extra non-address row.
        result.address_rows.append(AddressLine(
            category=('place', 'country_code'),
            names={'ref': result.country_code}, extratags={},
            fromarea=True, isaddress=False, rank_address=4,
            distance=0.0))
+
+
def _setup_address_details(result: BaseResultT) -> None:
    """ Retrieve information about places that make up the address of the result.
    """
    result.address_rows = AddressLines()
    # The result itself heads the list of address rows when it has a name.
    if result.names:
        result.address_rows.append(AddressLine(
            place_id=result.place_id,
            osm_object=result.osm_object,
            category=result.category,
            names=result.names,
            extratags=result.extratags or {},
            admin_level=result.admin_level,
            fromarea=True, isaddress=True,
            rank_address=result.rank_address, distance=0.0))
    # Determine a house number: placex results may carry it in one of
    # several address keys, other source tables have a dedicated field.
    if result.source_table == SourceTable.PLACEX and result.address:
        housenumber = result.address.get('housenumber')\
            or result.address.get('streetnumber')\
            or result.address.get('conscriptionnumber')
    elif result.housenumber:
        housenumber = result.housenumber
    else:
        housenumber = None
    if housenumber:
        result.address_rows.append(AddressLine(
            category=('place', 'house_number'),
            names={'ref': housenumber},
            fromarea=True, isaddress=True, rank_address=28, distance=0))
    # NOTE(review): '_unlisted_place' presumably marks an addr:place that
    # could not be matched to a database place — confirm against importer.
    if result.address and '_unlisted_place' in result.address:
        result.address_rows.append(AddressLine(
            category=('place', 'locality'),
            names={'name': result.address['_unlisted_place']},
            fromarea=False, isaddress=True, rank_address=25, distance=0))
+
+
async def complete_address_details(conn: SearchConnection, results: List[BaseResultT]) -> None:
    """ Retrieve information about places that make up the address of the result.
    """
    for result in results:
        _setup_address_details(result)

    # Lookup entries from place_address line

    # One lookup record per result: its own ID ('pid'), the ID under which
    # its address is recorded ('lid'), its address names and its centroid
    # as EWKT (empty string when no centroid is available).
    lookup_ids = [{'pid': r.place_id,
                   'lid': _get_address_lookup_id(r),
                   'names': list(r.address.values()) if r.address else [],
                   'c': ('SRID=4326;' + r.centroid.to_wkt()) if r.centroid else ''}
                  for r in results if r.place_id]

    if not lookup_ids:
        return

    ltab = sa.func.JsonArrayEach(sa.type_coerce(lookup_ids, sa.JSON))\
        .table_valued(sa.column('value', type_=sa.JSON))

    t = conn.t.placex
    taddr = conn.t.addressline

    # Fetch address rows for the result itself and for its address lookup
    # place in one query, ordered per source result by descending rank.
    # The extra ORDER BY terms pick the best candidate per rank first.
    sql = sa.select(ltab.c.value['pid'].as_integer().label('src_place_id'),
                    t.c.place_id, t.c.osm_type, t.c.osm_id, t.c.name,
                    t.c.class_, t.c.type, t.c.extratags,
                    t.c.admin_level, taddr.c.fromarea,
                    sa.case((t.c.rank_address == 11, 5),
                            else_=t.c.rank_address).label('rank_address'),
                    taddr.c.distance, t.c.country_code, t.c.postcode)\
        .join(taddr, sa.or_(taddr.c.place_id == ltab.c.value['pid'].as_integer(),
                            taddr.c.place_id == ltab.c.value['lid'].as_integer()))\
        .join(t, taddr.c.address_place_id == t.c.place_id)\
        .order_by('src_place_id')\
        .order_by(sa.column('rank_address').desc())\
        .order_by((taddr.c.place_id == ltab.c.value['pid'].as_integer()).desc())\
        .order_by(sa.case((sa.func.CrosscheckNames(t.c.name, ltab.c.value['names']), 2),
                          (taddr.c.isaddress, 0),
                          (sa.and_(taddr.c.fromarea,
                                   t.c.geometry.ST_Contains(
                                       sa.func.ST_GeomFromEWKT(
                                           ltab.c.value['c'].as_string()))), 1),
                          else_=-1).desc())\
        .order_by(taddr.c.fromarea.desc())\
        .order_by(taddr.c.distance.desc())\
        .order_by(t.c.rank_search.desc())

    current_result = None
    current_rank_address = -1
    for row in await conn.execute(sql):
        # Rows are grouped by source result; switch results when the
        # src_place_id changes.
        if current_result is None or row.src_place_id != current_result.place_id:
            current_result = next((r for r in results if r.place_id == row.src_place_id), None)
            assert current_result is not None
            current_rank_address = -1

        # Only the first (best ordered) row per address rank is marked as
        # being part of the displayable address.
        location_isaddress = row.rank_address != current_rank_address

        if current_result.country_code is None and row.country_code:
            current_result.country_code = row.country_code

        # Prefer the result's own postcode over an area postcode row.
        if row.type in ('postcode', 'postal_code') and location_isaddress:
            if not row.fromarea or \
               (current_result.address and 'postcode' in current_result.address):
                location_isaddress = False
            else:
                current_result.postcode = None

        assert current_result.address_rows is not None
        current_result.address_rows.append(_result_row_to_address_row(row, location_isaddress))
        current_rank_address = row.rank_address

    for result in results:
        await _finalize_entry(conn, result)

    # Finally add the record for the parent entry where necessary.

    parent_lookup_ids = list(filter(lambda e: e['pid'] != e['lid'], lookup_ids))
    if parent_lookup_ids:
        ltab = sa.func.JsonArrayEach(sa.type_coerce(parent_lookup_ids, sa.JSON))\
            .table_valued(sa.column('value', type_=sa.JSON))
        sql = sa.select(ltab.c.value['pid'].as_integer().label('src_place_id'),
                        t.c.place_id, t.c.osm_type, t.c.osm_id, t.c.name,
                        t.c.class_, t.c.type, t.c.extratags,
                        t.c.admin_level,
                        t.c.rank_address)\
            .where(t.c.place_id == ltab.c.value['lid'].as_integer())

        for row in await conn.execute(sql):
            current_result = next((r for r in results if r.place_id == row.src_place_id), None)
            assert current_result is not None
            assert current_result.address_rows is not None

            current_result.address_rows.append(AddressLine(
                place_id=row.place_id,
                osm_object=(row.osm_type, row.osm_id),
                category=(row.class_, row.type),
                names=row.name, extratags=row.extratags or {},
                admin_level=row.admin_level,
                fromarea=True, isaddress=True,
                rank_address=row.rank_address, distance=0.0))

    # Now sort everything
    def mk_sort_key(place_id: Optional[int]) -> Callable[[AddressLine], Tuple[bool, int, bool]]:
        # The result's own row first, then by descending rank, with
        # non-address rows after address rows of the same rank.
        return lambda a: (a.place_id != place_id, -a.rank_address, a.isaddress)

    for result in results:
        assert result.address_rows is not None
        result.address_rows.sort(key=mk_sort_key(result.place_id))
+
+
def _placex_select_address_row(conn: SearchConnection,
                               centroid: Point) -> SaSelect:
    """ Create a select statement on placex with the columns needed for
        address lines, computing the distance from the given centroid.
    """
    t = conn.t.placex
    dist = t.c.geometry.distance_spheroid(
        sa.bindparam('centroid', value=centroid, type_=Geometry))

    return sa.select(t.c.place_id, t.c.osm_type, t.c.osm_id, t.c.name,
                     t.c.class_.label('class'), t.c.type,
                     t.c.admin_level, t.c.housenumber,
                     t.c.geometry.is_area().label('fromarea'),
                     t.c.rank_address,
                     dist.label('distance'))
+
+
async def complete_linked_places(conn: SearchConnection, result: BaseResult) -> None:
    """ Retrieve information about places that link to the result.
    """
    result.linked_rows = AddressLines()
    # Only placex objects can be the target of a link.
    if result.source_table != SourceTable.PLACEX:
        return

    t = conn.t.placex
    sql = _placex_select_address_row(conn, result.centroid)\
        .where(t.c.linked_place_id == result.place_id)

    for row in await conn.execute(sql):
        result.linked_rows.append(_result_row_to_address_row(row))
+
+
async def complete_keywords(conn: SearchConnection, result: BaseResult) -> None:
    """ Retrieve information about the search terms used for this place.

        Requires that the query analyzer was initialised to get access to
        the word table.
    """
    tsearch = conn.t.search_name
    sql = sa.select(tsearch.c.name_vector, tsearch.c.nameaddress_vector)\
        .where(tsearch.c.place_id == result.place_id)

    result.name_keywords = []
    result.address_keywords = []

    tword = conn.t.meta.tables['word']
    wordsel = sa.select(tword.c.word_id, tword.c.word_token, tword.c.word)

    for name_tokens, address_tokens in await conn.execute(sql):
        for wrow in await conn.execute(wordsel.where(tword.c.word_id.in_(name_tokens))):
            result.name_keywords.append(WordInfo(*wrow))

        for wrow in await conn.execute(wordsel.where(tword.c.word_id.in_(address_tokens))):
            result.address_keywords.append(WordInfo(*wrow))
+
+
async def complete_parented_places(conn: SearchConnection, result: BaseResult) -> None:
    """ Retrieve information about places that the result provides the
        address for.
    """
    result.parented_rows = AddressLines()
    # Only placex objects can be parents of other places.
    if result.source_table != SourceTable.PLACEX:
        return

    t = conn.t.placex
    sql = _placex_select_address_row(conn, result.centroid)\
        .where(t.c.parent_place_id == result.place_id)\
        .where(t.c.rank_search == 30)

    for row in await conn.execute(sql):
        result.parented_rows.append(_result_row_to_address_row(row))
--- /dev/null
+# SPDX-License-Identifier: GPL-3.0-or-later
+#
+# This file is part of Nominatim. (https://nominatim.org)
+#
+# Copyright (C) 2024 by the Nominatim developer community.
+# For a full list of authors see the git log.
+"""
+Implementation of reverse geocoding.
+"""
+from typing import Optional, List, Callable, Type, Tuple, Dict, Any, cast, Union
+import functools
+
+import sqlalchemy as sa
+
+from .typing import SaColumn, SaSelect, SaFromClause, SaLabel, SaRow, \
+ SaBind, SaLambdaSelect
+from .sql.sqlalchemy_types import Geometry
+from .connection import SearchConnection
+from . import results as nres
+from .logging import log
+from .types import AnyPoint, DataLayer, ReverseDetails, GeometryFormat, Bbox
+
+
# Signature of the functions that convert a raw database row into the
# final ReverseResult (None when the row cannot be converted).
RowFunc = Callable[[Optional[SaRow], Type[nres.ReverseResult]], Optional[nres.ReverseResult]]

# Bind parameters shared by all reverse-lookup statements: the query
# point as WKT and the maximum address rank requested.
WKT_PARAM: SaBind = sa.bindparam('wkt', type_=Geometry)
MAX_RANK_PARAM: SaBind = sa.bindparam('max_rank')
+
+
def no_index(expr: SaColumn) -> SaColumn:
    """ Wrap the given expression, so that the query planner will
        refrain from using the expression for index lookup.
    """
    # coalesce() with a NULL first argument leaves the value unchanged but
    # makes the expression opaque to the planner.
    return sa.func.coalesce(sa.null(), expr)
+
+
def _select_from_placex(t: SaFromClause, use_wkt: bool = True) -> SaSelect:
    """ Create a select statement with the columns relevant for reverse
        results.

        When 'use_wkt' is False, 't' must be a subquery that already
        provides 'distance' and 'centroid' columns; otherwise both are
        computed against the WKT bind parameter.
    """
    if not use_wkt:
        distance = t.c.distance
        centroid = t.c.centroid
    else:
        distance = t.c.geometry.ST_Distance(WKT_PARAM)
        # For line-like geometries use the point on the line closest to
        # the query point instead of the precomputed centroid.
        centroid = sa.case((t.c.geometry.is_line_like(), t.c.geometry.ST_ClosestPoint(WKT_PARAM)),
                           else_=t.c.centroid).label('centroid')

    return sa.select(t.c.place_id, t.c.osm_type, t.c.osm_id, t.c.name,
                     t.c.class_, t.c.type,
                     t.c.address, t.c.extratags,
                     t.c.housenumber, t.c.postcode, t.c.country_code,
                     t.c.importance, t.c.wikipedia,
                     t.c.parent_place_id, t.c.rank_address, t.c.rank_search,
                     centroid,
                     t.c.linked_place_id, t.c.admin_level,
                     distance.label('distance'),
                     t.c.geometry.ST_Expand(0).label('bbox'))
+
+
def _interpolated_housenumber(table: SaFromClause) -> SaLabel:
    """ Compute the house number for an interpolation at the fractional
        'position' along the line, rounded to the nearest 'step'.
    """
    return sa.cast(table.c.startnumber
                   + sa.func.round(((table.c.endnumber - table.c.startnumber) * table.c.position)
                                   / table.c.step) * table.c.step,
                   sa.Integer).label('housenumber')
+
+
def _interpolated_position(table: SaFromClause) -> SaLabel:
    """ Compute the point on the interpolation line that corresponds to
        the 'position' snapped to the nearest house-number step.
    """
    fac = sa.cast(table.c.step, sa.Float) / (table.c.endnumber - table.c.startnumber)
    rounded_pos = sa.func.round(table.c.position / fac) * fac
    return sa.case(
        # Degenerate single-number interpolation: use the line centroid.
        (table.c.endnumber == table.c.startnumber, table.c.linegeo.ST_Centroid()),
        else_=table.c.linegeo.ST_LineInterpolatePoint(rounded_pos)).label('centroid')
+
+
def _locate_interpolation(table: SaFromClause) -> SaLabel:
    """ Given a position, locate the closest point on the line.
    """
    # ST_LineLocatePoint is only defined for linestrings; fall back to the
    # start of the line (fraction 0) for any other geometry.
    return sa.case((table.c.linegeo.is_line_like(),
                    table.c.linegeo.ST_LineLocatePoint(WKT_PARAM)),
                   else_=0).label('position')
+
+
+def _get_closest(*rows: Optional[SaRow]) -> Optional[SaRow]:
+ return min(rows, key=lambda row: 1000 if row is None else row.distance)
+
+
class ReverseGeocoder:
    """ Class implementing the logic for looking up a place from a
        coordinate.
    """

    def __init__(self, conn: SearchConnection, params: ReverseDetails,
                 restrict_to_country_areas: bool = False) -> None:
        self.conn = conn
        self.params = params
        # When set, results outside any polygon of the country_grid
        # table are discarded.
        self.restrict_to_country_areas = restrict_to_country_areas

        # Bind parameters shared by all statements; 'wkt' is added in
        # lookup() once the search coordinate is known.
        self.bind_params: Dict[str, Any] = {'max_rank': params.max_rank}

    @property
    def max_rank(self) -> int:
        """ Return the maximum configured rank.
        """
        return self.params.max_rank

    def has_geometries(self) -> bool:
        """ Check if any geometries are requested.
        """
        return bool(self.params.geometry_output)

    def layer_enabled(self, *layer: DataLayer) -> bool:
        """ Return true when any of the given layer types are requested.
        """
        return any(self.params.layers & ly for ly in layer)

    def layer_disabled(self, *layer: DataLayer) -> bool:
        """ Return true when none of the given layer types is requested.
        """
        return not any(self.params.layers & ly for ly in layer)

    def has_feature_layers(self) -> bool:
        """ Return true if any layer other than ADDRESS or POI is requested.
        """
        return self.layer_enabled(DataLayer.RAILWAY, DataLayer.MANMADE, DataLayer.NATURAL)

    def _add_geometry_columns(self, sql: SaLambdaSelect, col: SaColumn) -> SaSelect:
        """ Add output columns for all requested geometry formats
            (GeoJSON, text, KML, SVG), based on the geometry column 'col'.
        """
        out = []

        if self.params.geometry_simplification > 0.0:
            col = sa.func.ST_SimplifyPreserveTopology(col, self.params.geometry_simplification)

        if self.params.geometry_output & GeometryFormat.GEOJSON:
            out.append(sa.func.ST_AsGeoJSON(col, 7).label('geometry_geojson'))
        if self.params.geometry_output & GeometryFormat.TEXT:
            out.append(sa.func.ST_AsText(col).label('geometry_text'))
        if self.params.geometry_output & GeometryFormat.KML:
            out.append(sa.func.ST_AsKML(col, 7).label('geometry_kml'))
        if self.params.geometry_output & GeometryFormat.SVG:
            out.append(sa.func.ST_AsSVG(col, 0, 7).label('geometry_svg'))

        return sql.add_columns(*out)

    def _filter_by_layer(self, table: SaFromClause) -> SaColumn:
        """ Create a filter condition on the 'class' column implementing
            the requested combination of RAILWAY/MANMADE/NATURAL layers.
        """
        # MANMADE is the catch-all: express the filter as an exclusion
        # list; otherwise build an inclusion list of the enabled layers.
        if self.layer_enabled(DataLayer.MANMADE):
            exclude = []
            if self.layer_disabled(DataLayer.RAILWAY):
                exclude.append('railway')
            if self.layer_disabled(DataLayer.NATURAL):
                exclude.extend(('natural', 'water', 'waterway'))
            return table.c.class_.not_in(tuple(exclude))

        include = []
        if self.layer_enabled(DataLayer.RAILWAY):
            include.append('railway')
        if self.layer_enabled(DataLayer.NATURAL):
            include.extend(('natural', 'water', 'waterway'))
        return table.c.class_.in_(tuple(include))

    async def _find_closest_street_or_poi(self, distance: float) -> Optional[SaRow]:
        """ Look up the closest rank 26+ place in the database, which
            is closer than the given distance.
        """
        t = self.conn.t.placex

        # PostgreSQL must not get the distance as a parameter because
        # there is a danger it won't be able to properly estimate index use
        # when used with prepared statements
        diststr = sa.text(f"{distance}")

        sql: SaLambdaSelect = sa.lambda_stmt(
            lambda: _select_from_placex(t)
            .where(t.c.geometry.within_distance(WKT_PARAM, diststr))
            .where(t.c.indexed_status == 0)
            .where(t.c.linked_place_id == None)
            .where(sa.or_(sa.not_(t.c.geometry.is_area()),
                          t.c.centroid.ST_Distance(WKT_PARAM) < diststr))
            .order_by('distance')
            .limit(2))

        if self.has_geometries():
            sql = self._add_geometry_columns(sql, t.c.geometry)

        # Collect the rank/class restrictions from the enabled layers;
        # the final filter is the OR of all of them.
        restrict: List[Union[SaColumn, Callable[[], SaColumn]]] = []

        if self.layer_enabled(DataLayer.ADDRESS):
            max_rank = min(29, self.max_rank)
            restrict.append(lambda: no_index(t.c.rank_address).between(26, max_rank))
            if self.max_rank == 30:
                restrict.append(lambda: sa.func.IsAddressPoint(t))
        if self.layer_enabled(DataLayer.POI) and self.max_rank == 30:
            restrict.append(lambda: sa.and_(no_index(t.c.rank_search) == 30,
                                            t.c.class_.not_in(('place', 'building')),
                                            sa.not_(t.c.geometry.is_line_like())))
        if self.has_feature_layers():
            restrict.append(sa.and_(no_index(t.c.rank_search).between(26, MAX_RANK_PARAM),
                                    no_index(t.c.rank_address) == 0,
                                    self._filter_by_layer(t)))

        if not restrict:
            return None

        sql = sql.where(sa.or_(*restrict))

        # If the closest object is inside an area, then check if there is a
        # POI node nearby and return that.
        prev_row = None
        for row in await self.conn.execute(sql, self.bind_params):
            if prev_row is None:
                if row.rank_search <= 27 or row.osm_type == 'N' or row.distance > 0:
                    return row
                prev_row = row
            else:
                if row.rank_search > 27 and row.osm_type == 'N'\
                   and row.distance < 0.0001:
                    return row

        return prev_row

    async def _find_housenumber_for_street(self, parent_place_id: int) -> Optional[SaRow]:
        """ Find the closest address point that belongs to the street
            with the given place ID.
        """
        t = self.conn.t.placex

        def _base_query() -> SaSelect:
            return _select_from_placex(t)\
                .where(t.c.geometry.within_distance(WKT_PARAM, 0.001))\
                .where(t.c.parent_place_id == parent_place_id)\
                .where(sa.func.IsAddressPoint(t))\
                .where(t.c.indexed_status == 0)\
                .where(t.c.linked_place_id == None)\
                .order_by('distance')\
                .limit(1)

        sql: SaLambdaSelect
        if self.has_geometries():
            sql = self._add_geometry_columns(_base_query(), t.c.geometry)
        else:
            sql = sa.lambda_stmt(_base_query)

        return (await self.conn.execute(sql, self.bind_params)).one_or_none()

    async def _find_interpolation_for_street(self, parent_place_id: Optional[int],
                                             distance: float) -> Optional[SaRow]:
        """ Find the closest address interpolation within 'distance',
            optionally restricted to the street with the given place ID.
        """
        t = self.conn.t.osmline

        sql = sa.select(t,
                        t.c.linegeo.ST_Distance(WKT_PARAM).label('distance'),
                        _locate_interpolation(t))\
            .where(t.c.linegeo.within_distance(WKT_PARAM, distance))\
            .where(t.c.startnumber != None)\
            .order_by('distance')\
            .limit(1)

        if parent_place_id is not None:
            sql = sql.where(t.c.parent_place_id == parent_place_id)

        inner = sql.subquery('ipol')

        # Outer query computes the interpolated house number and its
        # position from the raw interpolation row.
        sql = sa.select(inner.c.place_id, inner.c.osm_id,
                        inner.c.parent_place_id, inner.c.address,
                        _interpolated_housenumber(inner),
                        _interpolated_position(inner),
                        inner.c.postcode, inner.c.country_code,
                        inner.c.distance)

        if self.has_geometries():
            sub = sql.subquery('geom')
            sql = self._add_geometry_columns(sa.select(sub), sub.c.centroid)

        return (await self.conn.execute(sql, self.bind_params)).one_or_none()

    async def _find_tiger_number_for_street(self, parent_place_id: int) -> Optional[SaRow]:
        """ Find the closest TIGER-interpolated house number for the
            street with the given place ID.
        """
        t = self.conn.t.tiger

        def _base_query() -> SaSelect:
            inner = sa.select(t,
                              t.c.linegeo.ST_Distance(WKT_PARAM).label('distance'),
                              _locate_interpolation(t))\
                .where(t.c.linegeo.within_distance(WKT_PARAM, 0.001))\
                .where(t.c.parent_place_id == parent_place_id)\
                .order_by('distance')\
                .limit(1)\
                .subquery('tiger')

            return sa.select(inner.c.place_id,
                             inner.c.parent_place_id,
                             _interpolated_housenumber(inner),
                             _interpolated_position(inner),
                             inner.c.postcode,
                             inner.c.distance)

        sql: SaLambdaSelect
        if self.has_geometries():
            sub = _base_query().subquery('geom')
            sql = self._add_geometry_columns(sa.select(sub), sub.c.centroid)
        else:
            sql = sa.lambda_stmt(_base_query)

        return (await self.conn.execute(sql, self.bind_params)).one_or_none()

    async def lookup_street_poi(self) -> Tuple[Optional[SaRow], RowFunc]:
        """ Find a street or POI/address for the given WKT point.
        """
        log().section('Reverse lookup on street/address level')
        distance = 0.006
        parent_place_id = None

        row = await self._find_closest_street_or_poi(distance)
        row_func: RowFunc = nres.create_from_placex_row
        log().var_dump('Result (street/building)', row)

        # If the closest result was a street, but an address was requested,
        # check for a housenumber nearby which is part of the street.
        if row is not None:
            if self.max_rank > 27 \
               and self.layer_enabled(DataLayer.ADDRESS) \
               and row.rank_address <= 27:
                distance = 0.001
                parent_place_id = row.place_id
                log().comment('Find housenumber for street')
                addr_row = await self._find_housenumber_for_street(parent_place_id)
                log().var_dump('Result (street housenumber)', addr_row)

                if addr_row is not None:
                    row = addr_row
                    row_func = nres.create_from_placex_row
                    distance = addr_row.distance
                elif row.country_code == 'us' and parent_place_id is not None:
                    log().comment('Find TIGER housenumber for street')
                    addr_row = await self._find_tiger_number_for_street(parent_place_id)
                    log().var_dump('Result (street Tiger housenumber)', addr_row)

                    if addr_row is not None:
                        # TIGER rows lack OSM references; reuse the ones
                        # from the parent street.
                        row_func = cast(RowFunc,
                                        functools.partial(nres.create_from_tiger_row,
                                                          osm_type=row.osm_type,
                                                          osm_id=row.osm_id))
                        row = addr_row
            else:
                distance = row.distance

        # Check for an interpolation that is either closer than our result
        # or belongs to a close street found.
        if self.max_rank > 27 and self.layer_enabled(DataLayer.ADDRESS):
            log().comment('Find interpolation for street')
            addr_row = await self._find_interpolation_for_street(parent_place_id,
                                                                 distance)
            log().var_dump('Result (street interpolation)', addr_row)
            if addr_row is not None:
                row = addr_row
                row_func = nres.create_from_osmline_row

        return row, row_func

    async def _lookup_area_address(self) -> Optional[SaRow]:
        """ Lookup large addressable areas for the given WKT point.
        """
        log().comment('Reverse lookup by larger address area features')
        t = self.conn.t.placex

        def _base_query() -> SaSelect:
            # The inner SQL brings results in the right order, so that
            # later only a minimum of results needs to be checked with ST_Contains.
            inner = sa.select(t, sa.literal(0.0).label('distance'))\
                .where(t.c.rank_search.between(5, MAX_RANK_PARAM))\
                .where(t.c.rank_address != 5)\
                .where(t.c.rank_address != 11)\
                .where(t.c.geometry.intersects(WKT_PARAM))\
                .where(sa.func.PlacexGeometryReverseLookuppolygon())\
                .order_by(sa.desc(t.c.rank_search))\
                .limit(50)\
                .subquery('area')

            return _select_from_placex(inner, False)\
                .where(inner.c.geometry.ST_Contains(WKT_PARAM))\
                .order_by(sa.desc(inner.c.rank_search))\
                .limit(1)

        sql: SaLambdaSelect = sa.lambda_stmt(_base_query)
        if self.has_geometries():
            sql = self._add_geometry_columns(sql, sa.literal_column('area.geometry'))

        address_row = (await self.conn.execute(sql, self.bind_params)).one_or_none()
        log().var_dump('Result (area)', address_row)

        if address_row is not None and address_row.rank_search < self.max_rank:
            log().comment('Search for better matching place nodes inside the area')

            address_rank = address_row.rank_search
            address_id = address_row.place_id

            def _place_inside_area_query() -> SaSelect:
                inner = \
                    sa.select(t, t.c.geometry.ST_Distance(WKT_PARAM).label('distance'))\
                    .where(t.c.rank_search > address_rank)\
                    .where(t.c.rank_search <= MAX_RANK_PARAM)\
                    .where(t.c.indexed_status == 0)\
                    .where(sa.func.IntersectsReverseDistance(t, WKT_PARAM))\
                    .order_by(sa.desc(t.c.rank_search))\
                    .limit(50)\
                    .subquery('places')

                touter = t.alias('outer')
                return _select_from_placex(inner, False)\
                    .join(touter, touter.c.geometry.ST_Contains(inner.c.geometry))\
                    .where(touter.c.place_id == address_id)\
                    .where(sa.func.IsBelowReverseDistance(inner.c.distance, inner.c.rank_search))\
                    .order_by(sa.desc(inner.c.rank_search), inner.c.distance)\
                    .limit(1)

            if self.has_geometries():
                sql = self._add_geometry_columns(_place_inside_area_query(),
                                                 sa.literal_column('places.geometry'))
            else:
                sql = sa.lambda_stmt(_place_inside_area_query)

            place_address_row = (await self.conn.execute(sql, self.bind_params)).one_or_none()
            log().var_dump('Result (place node)', place_address_row)

            if place_address_row is not None:
                return place_address_row

        return address_row

    async def _lookup_area_others(self) -> Optional[SaRow]:
        """ Lookup non-address features (rank_address 0) near the given
            WKT point, filtered by the requested feature layers.
        """
        t = self.conn.t.placex

        inner = sa.select(t, t.c.geometry.ST_Distance(WKT_PARAM).label('distance'))\
            .where(t.c.rank_address == 0)\
            .where(t.c.rank_search.between(5, MAX_RANK_PARAM))\
            .where(t.c.name != None)\
            .where(t.c.indexed_status == 0)\
            .where(t.c.linked_place_id == None)\
            .where(self._filter_by_layer(t))\
            .where(t.c.geometry.intersects(sa.func.ST_Expand(WKT_PARAM, 0.007)))\
            .order_by(sa.desc(t.c.rank_search))\
            .order_by('distance')\
            .limit(50)\
            .subquery()

        # Areas only count when they actually contain the query point.
        sql = _select_from_placex(inner, False)\
            .where(sa.or_(sa.not_(inner.c.geometry.is_area()),
                          inner.c.geometry.ST_Contains(WKT_PARAM)))\
            .order_by(sa.desc(inner.c.rank_search), inner.c.distance)\
            .limit(1)

        if self.has_geometries():
            sql = self._add_geometry_columns(sql, inner.c.geometry)

        row = (await self.conn.execute(sql, self.bind_params)).one_or_none()
        log().var_dump('Result (non-address feature)', row)

        return row

    async def lookup_area(self) -> Optional[SaRow]:
        """ Lookup large areas for the current search.
        """
        log().section('Reverse lookup by larger area features')

        if self.layer_enabled(DataLayer.ADDRESS):
            address_row = await self._lookup_area_address()
        else:
            address_row = None

        if self.has_feature_layers():
            other_row = await self._lookup_area_others()
        else:
            other_row = None

        # Of the address and the feature match, return the closer one.
        return _get_closest(address_row, other_row)

    async def lookup_country_codes(self) -> List[str]:
        """ Lookup the country for the current search.
        """
        log().section('Reverse lookup by country code')
        t = self.conn.t.country_grid
        sql = sa.select(t.c.country_code).distinct()\
            .where(t.c.geometry.ST_Contains(WKT_PARAM))

        ccodes = [cast(str, r[0]) for r in await self.conn.execute(sql, self.bind_params)]
        log().var_dump('Country codes', ccodes)
        return ccodes

    async def lookup_country(self, ccodes: List[str]) -> Tuple[Optional[SaRow], RowFunc]:
        """ Lookup the country for the current search.
        """
        row_func = nres.create_from_placex_row
        if not ccodes:
            ccodes = await self.lookup_country_codes()

        if not ccodes:
            return None, row_func

        t = self.conn.t.placex
        if self.max_rank > 4:
            log().comment('Search for place nodes in country')

            def _base_query() -> SaSelect:
                inner = sa.select(t, t.c.geometry.ST_Distance(WKT_PARAM).label('distance'))\
                    .where(t.c.rank_search > 4)\
                    .where(t.c.rank_search <= MAX_RANK_PARAM)\
                    .where(t.c.indexed_status == 0)\
                    .where(t.c.country_code.in_(ccodes))\
                    .where(sa.func.IntersectsReverseDistance(t, WKT_PARAM))\
                    .order_by(sa.desc(t.c.rank_search))\
                    .limit(50)\
                    .subquery('area')

                return _select_from_placex(inner, False)\
                    .where(sa.func.IsBelowReverseDistance(inner.c.distance, inner.c.rank_search))\
                    .order_by(sa.desc(inner.c.rank_search), inner.c.distance)\
                    .limit(1)

            sql: SaLambdaSelect
            if self.has_geometries():
                sql = self._add_geometry_columns(_base_query(),
                                                 sa.literal_column('area.geometry'))
            else:
                sql = sa.lambda_stmt(_base_query)

            address_row = (await self.conn.execute(sql, self.bind_params)).one_or_none()
            log().var_dump('Result (addressable place node)', address_row)
        else:
            address_row = None

        if address_row is None:
            # Still nothing, then return a country with the appropriate country code.
            def _country_base_query() -> SaSelect:
                return _select_from_placex(t)\
                    .where(t.c.country_code.in_(ccodes))\
                    .where(t.c.rank_address == 4)\
                    .where(t.c.rank_search == 4)\
                    .where(t.c.linked_place_id == None)\
                    .order_by('distance')\
                    .limit(1)

            if self.has_geometries():
                sql = self._add_geometry_columns(_country_base_query(), t.c.geometry)
            else:
                sql = sa.lambda_stmt(_country_base_query)

            address_row = (await self.conn.execute(sql, self.bind_params)).one_or_none()

        if address_row is None:
            # finally fall back to country table
            t = self.conn.t.country_name
            tgrid = self.conn.t.country_grid

            # Synthesise centroid and bbox from the country grid cells.
            sql = sa.select(tgrid.c.country_code,
                            tgrid.c.geometry.ST_Centroid().ST_Collect().ST_Centroid()
                                 .label('centroid'),
                            tgrid.c.geometry.ST_Collect().ST_Expand(0).label('bbox'))\
                .where(tgrid.c.country_code.in_(ccodes))\
                .group_by(tgrid.c.country_code)

            sub = sql.subquery('grid')
            sql = sa.select(t.c.country_code,
                            t.c.name.merge(t.c.derived_name).label('name'),
                            sub.c.centroid, sub.c.bbox)\
                .join(sub, t.c.country_code == sub.c.country_code)\
                .order_by(t.c.country_code)\
                .limit(1)

            sql = self._add_geometry_columns(sql, sub.c.centroid)

            address_row = (await self.conn.execute(sql, self.bind_params)).one_or_none()
            row_func = nres.create_from_country_row

        return address_row, row_func

    async def lookup(self, coord: AnyPoint) -> Optional[nres.ReverseResult]:
        """ Look up a single coordinate. Returns the place information,
            if a place was found near the coordinates or None otherwise.
        """
        log().function('reverse_lookup', coord=coord, params=self.params)

        self.bind_params['wkt'] = f'POINT({coord[0]} {coord[1]})'

        row: Optional[SaRow] = None
        row_func: RowFunc = nres.create_from_placex_row

        # Try street/POI level first, then fall back to ever larger
        # areas and finally the country itself.
        if self.max_rank >= 26:
            row, tmp_row_func = await self.lookup_street_poi()
            if row is not None:
                row_func = tmp_row_func

        if row is None:
            if self.restrict_to_country_areas:
                ccodes = await self.lookup_country_codes()
                if not ccodes:
                    return None
            else:
                ccodes = []

            if self.max_rank > 4:
                row = await self.lookup_area()
            if row is None and self.layer_enabled(DataLayer.ADDRESS):
                row, row_func = await self.lookup_country(ccodes)

        result = row_func(row, nres.ReverseResult)
        if result is not None:
            assert row is not None
            result.distance = getattr(row, 'distance', 0)
            if hasattr(row, 'bbox'):
                result.bbox = Bbox.from_wkb(row.bbox)
            await nres.add_result_details(self.conn, [result], self.params)

        return result
--- /dev/null
+# SPDX-License-Identifier: GPL-3.0-or-later
+#
+# This file is part of Nominatim. (https://nominatim.org)
+#
+# Copyright (C) 2024 by the Nominatim developer community.
+# For a full list of authors see the git log.
+"""
+Module for forward search.
+"""
+from .geocoder import (ForwardGeocoder as ForwardGeocoder)
+from .query import (Phrase as Phrase,
+ PhraseType as PhraseType)
+from .query_analyzer_factory import (make_query_analyzer as make_query_analyzer)
--- /dev/null
+# SPDX-License-Identifier: GPL-3.0-or-later
+#
+# This file is part of Nominatim. (https://nominatim.org)
+#
+# Copyright (C) 2024 by the Nominatim developer community.
+# For a full list of authors see the git log.
+"""
+Conversion from token assignment to an abstract DB search.
+"""
+from typing import Optional, List, Tuple, Iterator, Dict
+import heapq
+
+from ..types import SearchDetails, DataLayer
+from .query import QueryStruct, Token, TokenType, TokenRange, BreakType
+from .token_assignment import TokenAssignment
+from . import db_search_fields as dbf
+from . import db_searches as dbs
+from . import db_search_lookups as lookups
+
+
def wrap_near_search(categories: List[Tuple[str, str]],
                     search: dbs.AbstractSearch) -> dbs.NearSearch:
    """ Create a new search that wraps the given search in a search
        for near places of the given category.
    """
    # All categories start out with a neutral penalty of 0.0.
    weighted = dbf.WeightedCategories(categories, [0.0] * len(categories))

    return dbs.NearSearch(penalty=search.penalty,
                          categories=weighted,
                          search=search)
+
+
def build_poi_search(category: List[Tuple[str, str]],
                     countries: Optional[List[str]]) -> dbs.PoiSearch:
    """ Create a new search for places by the given category, possibly
        constraint to the given countries.
    """
    ccs = dbf.WeightedStrings(countries, [0.0] * len(countries)) \
        if countries else dbf.WeightedStrings([], [])

    # Minimal SearchData carrier holding just the category qualifiers
    # and the optional country restriction.
    class _PoiData(dbf.SearchData):
        penalty = 0.0
        qualifiers = dbf.WeightedCategories(category, [0.0] * len(category))
        countries = ccs

    return dbs.PoiSearch(_PoiData())
+
+
+class SearchBuilder:
+ """ Build the abstract search queries from token assignments.
+ """
+
    def __init__(self, query: QueryStruct, details: SearchDetails) -> None:
        # The tokenized and analysed search query.
        self.query = query
        # User-supplied parameters restricting the results.
        self.details = details
+
+ @property
+ def configured_for_country(self) -> bool:
+ """ Return true if the search details are configured to
+ allow countries in the result.
+ """
+ return self.details.min_rank <= 4 and self.details.max_rank >= 4 \
+ and self.details.layer_enabled(DataLayer.ADDRESS)
+
+ @property
+ def configured_for_postcode(self) -> bool:
+ """ Return true if the search details are configured to
+ allow postcodes in the result.
+ """
+ return self.details.min_rank <= 5 and self.details.max_rank >= 11\
+ and self.details.layer_enabled(DataLayer.ADDRESS)
+
+ @property
+ def configured_for_housenumbers(self) -> bool:
+ """ Return true if the search details are configured to
+ allow addresses in the result.
+ """
+ return self.details.max_rank >= 30 \
+ and self.details.layer_enabled(DataLayer.ADDRESS)
+
+ def build(self, assignment: TokenAssignment) -> Iterator[dbs.AbstractSearch]:
+ """ Yield all possible abstract searches for the given token assignment.
+ """
+ sdata = self.get_search_data(assignment)
+ if sdata is None:
+ return
+
+ near_items = self.get_near_items(assignment)
+ if near_items is not None and not near_items:
+ return # impossible combination of near items and category parameter
+
+ if assignment.name is None:
+ if near_items and not sdata.postcodes:
+ sdata.qualifiers = near_items
+ near_items = None
+ builder = self.build_poi_search(sdata)
+ elif assignment.housenumber:
+ hnr_tokens = self.query.get_tokens(assignment.housenumber,
+ TokenType.HOUSENUMBER)
+ builder = self.build_housenumber_search(sdata, hnr_tokens, assignment.address)
+ else:
+ builder = self.build_special_search(sdata, assignment.address,
+ bool(near_items))
+ else:
+ builder = self.build_name_search(sdata, assignment.name, assignment.address,
+ bool(near_items))
+
+ if near_items:
+ penalty = min(near_items.penalties)
+ near_items.penalties = [p - penalty for p in near_items.penalties]
+ for search in builder:
+ search_penalty = search.penalty
+ search.penalty = 0.0
+ yield dbs.NearSearch(penalty + assignment.penalty + search_penalty,
+ near_items, search)
+ else:
+ for search in builder:
+ search.penalty += assignment.penalty
+ yield search
+
+ def build_poi_search(self, sdata: dbf.SearchData) -> Iterator[dbs.AbstractSearch]:
+ """ Build abstract search query for a simple category search.
+ This kind of search requires an additional geographic constraint.
+ """
+ if not sdata.housenumbers \
+ and ((self.details.viewbox and self.details.bounded_viewbox) or self.details.near):
+ yield dbs.PoiSearch(sdata)
+
+ def build_special_search(self, sdata: dbf.SearchData,
+ address: List[TokenRange],
+ is_category: bool) -> Iterator[dbs.AbstractSearch]:
+ """ Build abstract search queries for searches that do not involve
+ a named place.
+ """
+ if sdata.qualifiers:
+ # No special searches over qualifiers supported.
+ return
+
+ if sdata.countries and not address and not sdata.postcodes \
+ and self.configured_for_country:
+ yield dbs.CountrySearch(sdata)
+
+ if sdata.postcodes and (is_category or self.configured_for_postcode):
+ penalty = 0.0 if sdata.countries else 0.1
+ if address:
+ sdata.lookups = [dbf.FieldLookup('nameaddress_vector',
+ [t.token for r in address
+ for t in self.query.get_partials_list(r)],
+ lookups.Restrict)]
+ penalty += 0.2
+ yield dbs.PostcodeSearch(penalty, sdata)
+
+ def build_housenumber_search(self, sdata: dbf.SearchData, hnrs: List[Token],
+ address: List[TokenRange]) -> Iterator[dbs.AbstractSearch]:
+ """ Build a simple address search for special entries where the
+ housenumber is the main name token.
+ """
+ sdata.lookups = [dbf.FieldLookup('name_vector', [t.token for t in hnrs], lookups.LookupAny)]
+ expected_count = sum(t.count for t in hnrs)
+
+ partials = {t.token: t.addr_count for trange in address
+ for t in self.query.get_partials_list(trange)}
+
+ if not partials:
+ # can happen when none of the partials is indexed
+ return
+
+ if expected_count < 8000:
+ sdata.lookups.append(dbf.FieldLookup('nameaddress_vector',
+ list(partials), lookups.Restrict))
+ elif len(partials) != 1 or list(partials.values())[0] < 10000:
+ sdata.lookups.append(dbf.FieldLookup('nameaddress_vector',
+ list(partials), lookups.LookupAll))
+ else:
+ addr_fulls = [t.token for t
+ in self.query.get_tokens(address[0], TokenType.WORD)]
+ if len(addr_fulls) > 5:
+ return
+ sdata.lookups.append(
+ dbf.FieldLookup('nameaddress_vector', addr_fulls, lookups.LookupAny))
+
+ sdata.housenumbers = dbf.WeightedStrings([], [])
+ yield dbs.PlaceSearch(0.05, sdata, expected_count)
+
+ def build_name_search(self, sdata: dbf.SearchData,
+ name: TokenRange, address: List[TokenRange],
+ is_category: bool) -> Iterator[dbs.AbstractSearch]:
+ """ Build abstract search queries for simple name or address searches.
+ """
+ if is_category or not sdata.housenumbers or self.configured_for_housenumbers:
+ ranking = self.get_name_ranking(name)
+ name_penalty = ranking.normalize_penalty()
+ if ranking.rankings:
+ sdata.rankings.append(ranking)
+ for penalty, count, lookup in self.yield_lookups(name, address):
+ sdata.lookups = lookup
+ yield dbs.PlaceSearch(penalty + name_penalty, sdata, count)
+
+ def yield_lookups(self, name: TokenRange, address: List[TokenRange]
+ ) -> Iterator[Tuple[float, int, List[dbf.FieldLookup]]]:
+ """ Yield all variants how the given name and address should best
+ be searched for. This takes into account how frequent the terms
+ are and tries to find a lookup that optimizes index use.
+ """
+ penalty = 0.0 # extra penalty
+ name_partials = {t.token: t for t in self.query.get_partials_list(name)}
+
+ addr_partials = [t for r in address for t in self.query.get_partials_list(r)]
+ addr_tokens = list({t.token for t in addr_partials})
+
+ exp_count = min(t.count for t in name_partials.values()) / (2**(len(name_partials) - 1))
+
+ if (len(name_partials) > 3 or exp_count < 8000):
+ yield penalty, exp_count, dbf.lookup_by_names(list(name_partials.keys()), addr_tokens)
+ return
+
+ addr_count = min(t.addr_count for t in addr_partials) if addr_partials else 30000
+ # Partial term to frequent. Try looking up by rare full names first.
+ name_fulls = self.query.get_tokens(name, TokenType.WORD)
+ if name_fulls:
+ fulls_count = sum(t.count for t in name_fulls)
+
+ if fulls_count < 50000 or addr_count < 30000:
+ yield penalty, fulls_count / (2**len(addr_tokens)), \
+ self.get_full_name_ranking(name_fulls, addr_partials,
+ fulls_count > 30000 / max(1, len(addr_tokens)))
+
+ # To catch remaining results, lookup by name and address
+ # We only do this if there is a reasonable number of results expected.
+ exp_count = exp_count / (2**len(addr_tokens)) if addr_tokens else exp_count
+ if exp_count < 10000 and addr_count < 20000:
+ penalty += 0.35 * max(1 if name_fulls else 0.1,
+ 5 - len(name_partials) - len(addr_tokens))
+ yield penalty, exp_count, \
+ self.get_name_address_ranking(list(name_partials.keys()), addr_partials)
+
+ def get_name_address_ranking(self, name_tokens: List[int],
+ addr_partials: List[Token]) -> List[dbf.FieldLookup]:
+ """ Create a ranking expression looking up by name and address.
+ """
+ lookup = [dbf.FieldLookup('name_vector', name_tokens, lookups.LookupAll)]
+
+ addr_restrict_tokens = []
+ addr_lookup_tokens = []
+ for t in addr_partials:
+ if t.addr_count > 20000:
+ addr_restrict_tokens.append(t.token)
+ else:
+ addr_lookup_tokens.append(t.token)
+
+ if addr_restrict_tokens:
+ lookup.append(dbf.FieldLookup('nameaddress_vector',
+ addr_restrict_tokens, lookups.Restrict))
+ if addr_lookup_tokens:
+ lookup.append(dbf.FieldLookup('nameaddress_vector',
+ addr_lookup_tokens, lookups.LookupAll))
+
+ return lookup
+
+ def get_full_name_ranking(self, name_fulls: List[Token], addr_partials: List[Token],
+ use_lookup: bool) -> List[dbf.FieldLookup]:
+ """ Create a ranking expression with full name terms and
+ additional address lookup. When 'use_lookup' is true, then
+ address lookups will use the index, when the occurrences are not
+ too many.
+ """
+ # At this point drop unindexed partials from the address.
+ # This might yield wrong results, nothing we can do about that.
+ if use_lookup:
+ addr_restrict_tokens = []
+ addr_lookup_tokens = []
+ for t in addr_partials:
+ if t.addr_count > 20000:
+ addr_restrict_tokens.append(t.token)
+ else:
+ addr_lookup_tokens.append(t.token)
+ else:
+ addr_restrict_tokens = [t.token for t in addr_partials]
+ addr_lookup_tokens = []
+
+ return dbf.lookup_by_any_name([t.token for t in name_fulls],
+ addr_restrict_tokens, addr_lookup_tokens)
+
+ def get_name_ranking(self, trange: TokenRange,
+ db_field: str = 'name_vector') -> dbf.FieldRanking:
+ """ Create a ranking expression for a name term in the given range.
+ """
+ name_fulls = self.query.get_tokens(trange, TokenType.WORD)
+ ranks = [dbf.RankedTokens(t.penalty, [t.token]) for t in name_fulls]
+ ranks.sort(key=lambda r: r.penalty)
+ # Fallback, sum of penalty for partials
+ name_partials = self.query.get_partials_list(trange)
+ default = sum(t.penalty for t in name_partials) + 0.2
+ return dbf.FieldRanking(db_field, default, ranks)
+
+ def get_addr_ranking(self, trange: TokenRange) -> dbf.FieldRanking:
+ """ Create a list of ranking expressions for an address term
+ for the given ranges.
+ """
+ todo: List[Tuple[int, int, dbf.RankedTokens]] = []
+ heapq.heappush(todo, (0, trange.start, dbf.RankedTokens(0.0, [])))
+ ranks: List[dbf.RankedTokens] = []
+
+ while todo:
+ neglen, pos, rank = heapq.heappop(todo)
+ for tlist in self.query.nodes[pos].starting:
+ if tlist.ttype in (TokenType.PARTIAL, TokenType.WORD):
+ if tlist.end < trange.end:
+ chgpenalty = PENALTY_WORDCHANGE[self.query.nodes[tlist.end].btype]
+ if tlist.ttype == TokenType.PARTIAL:
+ penalty = rank.penalty + chgpenalty \
+ + max(t.penalty for t in tlist.tokens)
+ heapq.heappush(todo, (neglen - 1, tlist.end,
+ dbf.RankedTokens(penalty, rank.tokens)))
+ else:
+ for t in tlist.tokens:
+ heapq.heappush(todo, (neglen - 1, tlist.end,
+ rank.with_token(t, chgpenalty)))
+ elif tlist.end == trange.end:
+ if tlist.ttype == TokenType.PARTIAL:
+ ranks.append(dbf.RankedTokens(rank.penalty
+ + max(t.penalty for t in tlist.tokens),
+ rank.tokens))
+ else:
+ ranks.extend(rank.with_token(t, 0.0) for t in tlist.tokens)
+ if len(ranks) >= 10:
+ # Too many variants, bail out and only add
+ # Worst-case Fallback: sum of penalty of partials
+ name_partials = self.query.get_partials_list(trange)
+ default = sum(t.penalty for t in name_partials) + 0.2
+ ranks.append(dbf.RankedTokens(rank.penalty + default, []))
+ # Bail out of outer loop
+ todo.clear()
+ break
+
+ ranks.sort(key=lambda r: len(r.tokens))
+ default = ranks[0].penalty + 0.3
+ del ranks[0]
+ ranks.sort(key=lambda r: r.penalty)
+
+ return dbf.FieldRanking('nameaddress_vector', default, ranks)
+
+ def get_search_data(self, assignment: TokenAssignment) -> Optional[dbf.SearchData]:
+ """ Collect the tokens for the non-name search fields in the
+ assignment.
+ """
+ sdata = dbf.SearchData()
+ sdata.penalty = assignment.penalty
+ if assignment.country:
+ tokens = self.get_country_tokens(assignment.country)
+ if not tokens:
+ return None
+ sdata.set_strings('countries', tokens)
+ elif self.details.countries:
+ sdata.countries = dbf.WeightedStrings(self.details.countries,
+ [0.0] * len(self.details.countries))
+ if assignment.housenumber:
+ sdata.set_strings('housenumbers',
+ self.query.get_tokens(assignment.housenumber,
+ TokenType.HOUSENUMBER))
+ if assignment.postcode:
+ sdata.set_strings('postcodes',
+ self.query.get_tokens(assignment.postcode,
+ TokenType.POSTCODE))
+ if assignment.qualifier:
+ tokens = self.get_qualifier_tokens(assignment.qualifier)
+ if not tokens:
+ return None
+ sdata.set_qualifiers(tokens)
+ elif self.details.categories:
+ sdata.qualifiers = dbf.WeightedCategories(self.details.categories,
+ [0.0] * len(self.details.categories))
+
+ if assignment.address:
+ if not assignment.name and assignment.housenumber:
+ # housenumber search: the first item needs to be handled like
+ # a name in ranking or penalties are not comparable with
+ # normal searches.
+ sdata.set_ranking([self.get_name_ranking(assignment.address[0],
+ db_field='nameaddress_vector')]
+ + [self.get_addr_ranking(r) for r in assignment.address[1:]])
+ else:
+ sdata.set_ranking([self.get_addr_ranking(r) for r in assignment.address])
+ else:
+ sdata.rankings = []
+
+ return sdata
+
+ def get_country_tokens(self, trange: TokenRange) -> List[Token]:
+ """ Return the list of country tokens for the given range,
+ optionally filtered by the country list from the details
+ parameters.
+ """
+ tokens = self.query.get_tokens(trange, TokenType.COUNTRY)
+ if self.details.countries:
+ tokens = [t for t in tokens if t.lookup_word in self.details.countries]
+
+ return tokens
+
+ def get_qualifier_tokens(self, trange: TokenRange) -> List[Token]:
+ """ Return the list of qualifier tokens for the given range,
+ optionally filtered by the qualifier list from the details
+ parameters.
+ """
+ tokens = self.query.get_tokens(trange, TokenType.QUALIFIER)
+ if self.details.categories:
+ tokens = [t for t in tokens if t.get_category() in self.details.categories]
+
+ return tokens
+
+ def get_near_items(self, assignment: TokenAssignment) -> Optional[dbf.WeightedCategories]:
+ """ Collect tokens for near items search or use the categories
+ requested per parameter.
+ Returns None if no category search is requested.
+ """
+ if assignment.near_item:
+ tokens: Dict[Tuple[str, str], float] = {}
+ for t in self.query.get_tokens(assignment.near_item, TokenType.NEAR_ITEM):
+ cat = t.get_category()
+ # The category of a near search will be that of near_item.
+ # Thus, if search is restricted to a category parameter,
+ # the two sets must intersect.
+ if (not self.details.categories or cat in self.details.categories)\
+ and t.penalty < tokens.get(cat, 1000.0):
+ tokens[cat] = t.penalty
+ return dbf.WeightedCategories(list(tokens.keys()), list(tokens.values()))
+
+ return None
+
+
+# Penalty for splitting a multi-token term at a break of the given type,
+# used by SearchBuilder.get_addr_ranking(). Soft breaks (phrase and word
+# boundaries) are free; breaks inside a word are penalised progressively.
+PENALTY_WORDCHANGE = {
+    BreakType.START: 0.0,
+    BreakType.END: 0.0,
+    BreakType.PHRASE: 0.0,
+    BreakType.SOFT_PHRASE: 0.0,
+    BreakType.WORD: 0.1,
+    BreakType.PART: 0.2,
+    BreakType.TOKEN: 0.4
+}
--- /dev/null
+# SPDX-License-Identifier: GPL-3.0-or-later
+#
+# This file is part of Nominatim. (https://nominatim.org)
+#
+# Copyright (C) 2024 by the Nominatim developer community.
+# For a full list of authors see the git log.
+"""
+Data structures for more complex fields in abstract search descriptions.
+"""
+from typing import List, Tuple, Iterator, Dict, Type
+import dataclasses
+
+import sqlalchemy as sa
+
+from ..typing import SaFromClause, SaColumn, SaExpression
+from ..utils.json_writer import JsonWriter
+from .query import Token
+from . import db_search_lookups as lookups
+
+
+@dataclasses.dataclass
+class WeightedStrings:
+ """ A list of strings together with a penalty.
+ """
+ values: List[str]
+ penalties: List[float]
+
+ def __bool__(self) -> bool:
+ return bool(self.values)
+
+ def __iter__(self) -> Iterator[Tuple[str, float]]:
+ return iter(zip(self.values, self.penalties))
+
+ def get_penalty(self, value: str, default: float = 1000.0) -> float:
+ """ Get the penalty for the given value. Returns the given default
+ if the value does not exist.
+ """
+ try:
+ return self.penalties[self.values.index(value)]
+ except ValueError:
+ pass
+ return default
+
+
+@dataclasses.dataclass
+class WeightedCategories:
+    """ A list of class/type tuples together with a penalty.
+    """
+    values: List[Tuple[str, str]]
+    penalties: List[float]
+
+    def __bool__(self) -> bool:
+        # Truthy when at least one category is present.
+        return bool(self.values)
+
+    def __iter__(self) -> Iterator[Tuple[Tuple[str, str], float]]:
+        # Iterate over ((class, type), penalty) pairs.
+        return iter(zip(self.values, self.penalties))
+
+    def get_penalty(self, value: Tuple[str, str], default: float = 1000.0) -> float:
+        """ Get the penalty for the given value. Returns the given default
+            if the value does not exist.
+        """
+        try:
+            return self.penalties[self.values.index(value)]
+        except ValueError:
+            # Value not in the list: fall through to the default.
+            pass
+        return default
+
+    def sql_restrict(self, table: SaFromClause) -> SaExpression:
+        """ Return an SQLAlchemy expression that restricts the
+            class and type columns of the given table to the values
+            in the list.
+            Must not be used with an empty list.
+        """
+        assert self.values
+        # Single category: avoid the enclosing OR.
+        if len(self.values) == 1:
+            return sa.and_(table.c.class_ == self.values[0][0],
+                           table.c.type == self.values[0][1])
+
+        return sa.or_(*(sa.and_(table.c.class_ == c, table.c.type == t)
+                        for c, t in self.values))
+
+
+@dataclasses.dataclass(order=True)
+class RankedTokens:
+ """ List of tokens together with the penalty of using it.
+ """
+ penalty: float
+ tokens: List[int]
+
+ def with_token(self, t: Token, transition_penalty: float) -> 'RankedTokens':
+ """ Create a new RankedTokens list with the given token appended.
+ The tokens penalty as well as the given transition penalty
+ are added to the overall penalty.
+ """
+ return RankedTokens(self.penalty + t.penalty + transition_penalty,
+ self.tokens + [t.token])
+
+
+@dataclasses.dataclass
+class FieldRanking:
+    """ A list of rankings to be applied sequentially until one matches.
+        The matched ranking determines the penalty. If none matches a
+        default penalty is applied.
+    """
+    column: str
+    default: float
+    rankings: List[RankedTokens]
+
+    def normalize_penalty(self) -> float:
+        """ Reduce the default and ranking penalties, such that the minimum
+            penalty is 0. Return the penalty that was subtracted.
+        """
+        if self.rankings:
+            min_penalty = min(self.default, min(r.penalty for r in self.rankings))
+        else:
+            min_penalty = self.default
+        if min_penalty > 0.0:
+            self.default -= min_penalty
+            for ranking in self.rankings:
+                ranking.penalty -= min_penalty
+        return min_penalty
+
+    def sql_penalty(self, table: SaFromClause) -> SaColumn:
+        """ Create an SQL expression for the rankings.
+        """
+        assert self.rankings
+
+        # Serialise the rankings as a JSON array of [penalty, [token...]]
+        # pairs which is handed to the weigh_search() SQL function.
+        rout = JsonWriter().start_array()
+        for rank in self.rankings:
+            rout.start_array().value(rank.penalty).next()
+            rout.start_array()
+            for token in rank.tokens:
+                rout.value(token).next()
+            rout.end_array()
+            rout.end_array().next()
+        rout.end_array()
+
+        return sa.func.weigh_search(table.c[self.column], rout(), self.default)
+
+
+@dataclasses.dataclass
+class FieldLookup:
+    """ A list of tokens to be searched for. The column names the database
+        column to search in and the lookup_type the operator that is applied.
+        'lookup_all' requires all tokens to match. 'lookup_any' requires
+        one of the tokens to match. 'restrict' requires to match all tokens
+        but avoids the use of indexes.
+    """
+    column: str
+    tokens: List[int]
+    lookup_type: Type[lookups.LookupType]
+
+    def sql_condition(self, table: SaFromClause) -> SaColumn:
+        """ Create an SQL expression for the given match condition.
+        """
+        # The lookup type is a FunctionElement class; instantiating it
+        # yields the SQLAlchemy expression for the condition.
+        return self.lookup_type(table, self.column, self.tokens)
+
+
+class SearchData:
+    """ Search fields derived from query and token assignment
+        to be used with the SQL queries.
+    """
+    # NOTE(review): the defaults below are class-level attributes shared
+    # between all instances. Callers must assign fresh objects per instance
+    # (as the setters below and db_search_builder do); mutating e.g.
+    # 'lookups' in place would leak into every other SearchData.
+    penalty: float
+
+    lookups: List[FieldLookup] = []
+    rankings: List[FieldRanking]
+
+    housenumbers: WeightedStrings = WeightedStrings([], [])
+    postcodes: WeightedStrings = WeightedStrings([], [])
+    countries: WeightedStrings = WeightedStrings([], [])
+
+    qualifiers: WeightedCategories = WeightedCategories([], [])
+
+    def set_strings(self, field: str, tokens: List[Token]) -> None:
+        """ Set one of the WeightedStrings properties from the given
+            token list. Adapt the global penalty, so that the
+            minimum penalty is 0.
+        """
+        if tokens:
+            min_penalty = min(t.penalty for t in tokens)
+            self.penalty += min_penalty
+            wstrs = WeightedStrings([t.lookup_word for t in tokens],
+                                    [t.penalty - min_penalty for t in tokens])
+
+            setattr(self, field, wstrs)
+
+    def set_qualifiers(self, tokens: List[Token]) -> None:
+        """ Set the qualifier field from the given tokens.
+        """
+        if tokens:
+            categories: Dict[Tuple[str, str], float] = {}
+            min_penalty = 1000.0
+            for t in tokens:
+                min_penalty = min(min_penalty, t.penalty)
+                cat = t.get_category()
+                # Keep only the cheapest token per category.
+                if t.penalty < categories.get(cat, 1000.0):
+                    categories[cat] = t.penalty
+            self.penalty += min_penalty
+            self.qualifiers = WeightedCategories(list(categories.keys()),
+                                                 list(categories.values()))
+
+    def set_ranking(self, rankings: List[FieldRanking]) -> None:
+        """ Set the list of rankings and normalize the ranking.
+        """
+        self.rankings = []
+        for ranking in rankings:
+            if ranking.rankings:
+                self.penalty += ranking.normalize_penalty()
+                self.rankings.append(ranking)
+            else:
+                # Nothing to rank against: charge the default penalty upfront.
+                self.penalty += ranking.default
+
+
+def lookup_by_names(name_tokens: List[int], addr_tokens: List[int]) -> List[FieldLookup]:
+ """ Create a lookup list where name tokens are looked up via index
+ and potential address tokens are used to restrict the search further.
+ """
+ lookup = [FieldLookup('name_vector', name_tokens, lookups.LookupAll)]
+ if addr_tokens:
+ lookup.append(FieldLookup('nameaddress_vector', addr_tokens, lookups.Restrict))
+
+ return lookup
+
+
+def lookup_by_any_name(name_tokens: List[int], addr_restrict_tokens: List[int],
+ addr_lookup_tokens: List[int]) -> List[FieldLookup]:
+ """ Create a lookup list where name tokens are looked up via index
+ and only one of the name tokens must be present.
+ Potential address tokens are used to restrict the search further.
+ """
+ lookup = [FieldLookup('name_vector', name_tokens, lookups.LookupAny)]
+ if addr_restrict_tokens:
+ lookup.append(FieldLookup('nameaddress_vector', addr_restrict_tokens, lookups.Restrict))
+ if addr_lookup_tokens:
+ lookup.append(FieldLookup('nameaddress_vector', addr_lookup_tokens, lookups.LookupAll))
+
+ return lookup
+
+
+def lookup_by_addr(name_tokens: List[int], addr_tokens: List[int]) -> List[FieldLookup]:
+ """ Create a lookup list where address tokens are looked up via index
+ and the name tokens are only used to restrict the search further.
+ """
+ return [FieldLookup('name_vector', name_tokens, lookups.Restrict),
+ FieldLookup('nameaddress_vector', addr_tokens, lookups.LookupAll)]
--- /dev/null
+# SPDX-License-Identifier: GPL-3.0-or-later
+#
+# This file is part of Nominatim. (https://nominatim.org)
+#
+# Copyright (C) 2024 by the Nominatim developer community.
+# For a full list of authors see the git log.
+"""
+Implementation of lookup functions for the search_name table.
+"""
+from typing import List, Any
+
+import sqlalchemy as sa
+from sqlalchemy.ext.compiler import compiles
+
+from ..typing import SaFromClause
+from ..sql.sqlalchemy_types import IntArray
+
+
+# Base type for the lookup expressions below: a SQLAlchemy function element
+# that gets compiled into dialect-specific SQL via @compiles handlers.
+LookupType = sa.sql.expression.FunctionElement[Any]
+
+
+class LookupAll(LookupType):
+    """ Find all entries in search_name table that contain all of
+        a given list of tokens using an index for the search.
+    """
+    inherit_cache = True
+
+    def __init__(self, table: SaFromClause, column: str, tokens: List[int]) -> None:
+        # Clause order matters for the compilers below:
+        # (place_id column, token column, column name, token array).
+        super().__init__(table.c.place_id, getattr(table.c, column), column,
+                         sa.type_coerce(tokens, IntArray))
+
+
+@compiles(LookupAll)
+def _default_lookup_all(element: LookupAll,
+ compiler: 'sa.Compiled', **kw: Any) -> str:
+ _, col, _, tokens = list(element.clauses)
+ return "(%s @> %s)" % (compiler.process(col, **kw),
+ compiler.process(tokens, **kw))
+
+
+@compiles(LookupAll, 'sqlite')
+def _sqlite_lookup_all(element: LookupAll,
+                       compiler: 'sa.Compiled', **kw: Any) -> str:
+    # SQLite has no array operators: emulate the index lookup via the
+    # reverse_search_name table and re-check membership with
+    # array_contains().
+    place, col, colname, tokens = list(element.clauses)
+    return "(%s IN (SELECT CAST(value as bigint) FROM"\
+           " (SELECT array_intersect_fuzzy(places) as p FROM"\
+           " (SELECT places FROM reverse_search_name"\
+           " WHERE word IN (SELECT value FROM json_each('[' || %s || ']'))"\
+           " AND column = %s"\
+           " ORDER BY length(places)) as x) as u,"\
+           " json_each('[' || u.p || ']'))"\
+           " AND array_contains(%s, %s))"\
+           % (compiler.process(place, **kw),
+              compiler.process(tokens, **kw),
+              compiler.process(colname, **kw),
+              compiler.process(col, **kw),
+              compiler.process(tokens, **kw))
+
+
+class LookupAny(LookupType):
+    """ Find all entries that contain at least one of the given tokens.
+        Use an index for the search.
+    """
+    inherit_cache = True
+
+    def __init__(self, table: SaFromClause, column: str, tokens: List[int]) -> None:
+        # Same clause layout as LookupAll:
+        # (place_id column, token column, column name, token array).
+        super().__init__(table.c.place_id, getattr(table.c, column), column,
+                         sa.type_coerce(tokens, IntArray))
+
+
+@compiles(LookupAny)
+def _default_lookup_any(element: LookupAny,
+ compiler: 'sa.Compiled', **kw: Any) -> str:
+ _, col, _, tokens = list(element.clauses)
+ return "(%s && %s)" % (compiler.process(col, **kw),
+ compiler.process(tokens, **kw))
+
+
+@compiles(LookupAny, 'sqlite')
+def _sqlite_lookup_any(element: LookupAny,
+                       compiler: 'sa.Compiled', **kw: Any) -> str:
+    # SQLite emulation: union the place lists of all matching words from
+    # reverse_search_name and test membership of the place_id.
+    place, _, colname, tokens = list(element.clauses)
+    return "%s IN (SELECT CAST(value as bigint) FROM"\
+           " (SELECT array_union(places) as p FROM reverse_search_name"\
+           " WHERE word IN (SELECT value FROM json_each('[' || %s || ']'))"\
+           " AND column = %s) as u,"\
+           " json_each('[' || u.p || ']'))" % (compiler.process(place, **kw),
+                                               compiler.process(tokens, **kw),
+                                               compiler.process(colname, **kw))
+
+
+class Restrict(LookupType):
+    """ Find all entries that contain all of the given tokens.
+        Do not use an index for the search.
+    """
+    inherit_cache = True
+
+    def __init__(self, table: SaFromClause, column: str, tokens: List[int]) -> None:
+        # Only two clauses here: (token column, token array).
+        super().__init__(getattr(table.c, column),
+                         sa.type_coerce(tokens, IntArray))
+
+
+@compiles(Restrict)
+def _default_restrict(element: Restrict,
+                      compiler: 'sa.Compiled', **kw: Any) -> str:
+    # Wrapping the column in coalesce(null, ...) makes the expression
+    # opaque to the query planner, so no index is used.
+    arg1, arg2 = list(element.clauses)
+    return "(coalesce(null, %s) @> %s)" % (compiler.process(arg1, **kw),
+                                           compiler.process(arg2, **kw))
+
+
+@compiles(Restrict, 'sqlite')
+def _sqlite_restrict(element: Restrict,
+                     compiler: 'sa.Compiled', **kw: Any) -> str:
+    # Both clauses (column, token array) are passed straight through as
+    # the argument list of the custom array_contains() function.
+    return "array_contains(%s)" % compiler.process(element.clauses, **kw)
--- /dev/null
+# SPDX-License-Identifier: GPL-3.0-or-later
+#
+# This file is part of Nominatim. (https://nominatim.org)
+#
+# Copyright (C) 2024 by the Nominatim developer community.
+# For a full list of authors see the git log.
+"""
+Implementation of the actual database accesses for forward search.
+"""
+from typing import List, Tuple, AsyncIterator, Dict, Any, Callable, cast
+import abc
+
+import sqlalchemy as sa
+
+from ..typing import SaFromClause, SaScalarSelect, SaColumn, \
+ SaExpression, SaSelect, SaLambdaSelect, SaRow, SaBind
+from ..sql.sqlalchemy_types import Geometry, IntArray
+from ..connection import SearchConnection
+from ..types import SearchDetails, DataLayer, GeometryFormat, Bbox
+from .. import results as nres
+from .db_search_fields import SearchData, WeightedCategories
+
+
+def no_index(expr: SaColumn) -> SaColumn:
+    """ Wrap the given expression, so that the query planner will
+        refrain from using the expression for index lookup.
+    """
+    # coalesce(null, x) is equivalent to x but opaque to the planner.
+    return sa.func.coalesce(sa.null(), expr)
+
+
+def _details_to_bind_params(details: SearchDetails) -> Dict[str, Any]:
+ """ Create a dictionary from search parameters that can be used
+ as bind parameter for SQL execute.
+ """
+ return {'limit': details.max_results,
+ 'min_rank': details.min_rank,
+ 'max_rank': details.max_rank,
+ 'viewbox': details.viewbox,
+ 'viewbox2': details.viewbox_x2,
+ 'near': details.near,
+ 'near_radius': details.near_radius,
+ 'excluded': details.excluded,
+ 'countries': details.countries}
+
+
+# Shared bind parameters for the search queries; values are supplied
+# per execution by _details_to_bind_params().
+LIMIT_PARAM: SaBind = sa.bindparam('limit')
+MIN_RANK_PARAM: SaBind = sa.bindparam('min_rank')
+MAX_RANK_PARAM: SaBind = sa.bindparam('max_rank')
+VIEWBOX_PARAM: SaBind = sa.bindparam('viewbox', type_=Geometry)
+VIEWBOX2_PARAM: SaBind = sa.bindparam('viewbox2', type_=Geometry)
+NEAR_PARAM: SaBind = sa.bindparam('near', type_=Geometry)
+NEAR_RADIUS_PARAM: SaBind = sa.bindparam('near_radius')
+COUNTRIES_PARAM: SaBind = sa.bindparam('countries')
+
+
+def filter_by_area(sql: SaSelect, t: SaFromClause,
+                   details: SearchDetails, avoid_index: bool = False) -> SaSelect:
+    """ Apply SQL statements for filtering by viewbox and near point,
+        if applicable.
+    """
+    if details.near is not None and details.near_radius is not None:
+        # For small radii the index-backed within_distance may be used;
+        # otherwise fall back to a plain distance comparison.
+        if details.near_radius < 0.1 and not avoid_index:
+            sql = sql.where(t.c.geometry.within_distance(NEAR_PARAM, NEAR_RADIUS_PARAM))
+        else:
+            sql = sql.where(t.c.geometry.ST_Distance(NEAR_PARAM) <= NEAR_RADIUS_PARAM)
+    if details.viewbox is not None and details.bounded_viewbox:
+        # Only use the geometry index when the viewbox is small enough.
+        sql = sql.where(t.c.geometry.intersects(VIEWBOX_PARAM,
+                                                use_index=not avoid_index and
+                                                details.viewbox.area < 0.2))
+
+    return sql
+
+
+def _exclude_places(t: SaFromClause) -> Callable[[], SaExpression]:
+ return lambda: t.c.place_id.not_in(sa.bindparam('excluded'))
+
+
+def _select_placex(t: SaFromClause) -> SaSelect:
+    """ Create the select statement for the columns of the placex table
+        that all place results need.
+    """
+    return sa.select(t.c.place_id, t.c.osm_type, t.c.osm_id, t.c.name,
+                     t.c.class_, t.c.type,
+                     t.c.address, t.c.extratags,
+                     t.c.housenumber, t.c.postcode, t.c.country_code,
+                     t.c.wikipedia,
+                     t.c.parent_place_id, t.c.rank_address, t.c.rank_search,
+                     t.c.linked_place_id, t.c.admin_level,
+                     t.c.centroid,
+                     # ST_Expand(0) yields the geometry's bounding box.
+                     t.c.geometry.ST_Expand(0).label('bbox'))
+
+
+def _add_geometry_columns(sql: SaLambdaSelect, col: SaColumn, details: SearchDetails) -> SaSelect:
+    """ Add columns for the requested geometry output formats
+        (GeoJSON, text, KML, SVG) to the select statement, optionally
+        simplifying the geometry first.
+    """
+    out = []
+
+    if details.geometry_simplification > 0.0:
+        col = sa.func.ST_SimplifyPreserveTopology(col, details.geometry_simplification)
+
+    if details.geometry_output & GeometryFormat.GEOJSON:
+        out.append(sa.func.ST_AsGeoJSON(col, 7).label('geometry_geojson'))
+    if details.geometry_output & GeometryFormat.TEXT:
+        out.append(sa.func.ST_AsText(col).label('geometry_text'))
+    if details.geometry_output & GeometryFormat.KML:
+        out.append(sa.func.ST_AsKML(col, 7).label('geometry_kml'))
+    if details.geometry_output & GeometryFormat.SVG:
+        out.append(sa.func.ST_AsSVG(col, 0, 7).label('geometry_svg'))
+
+    return sql.add_columns(*out)
+
+
+def _make_interpolation_subquery(table: SaFromClause, inner: SaFromClause,
+                                 numerals: List[int], details: SearchDetails) -> SaScalarSelect:
+    """ Create a scalar subquery that collects the IDs of all
+        interpolation lines belonging to the inner place that cover one
+        of the requested house numbers.
+    """
+    all_ids = sa.func.ArrayAgg(table.c.place_id)
+    sql = sa.select(all_ids).where(table.c.parent_place_id == inner.c.place_id)
+
+    if len(numerals) == 1:
+        # A number matches when it lies in the interpolation range and
+        # is on the interpolation step grid.
+        sql = sql.where(sa.between(numerals[0], table.c.startnumber, table.c.endnumber))\
+                 .where((numerals[0] - table.c.startnumber) % table.c.step == 0)
+    else:
+        sql = sql.where(sa.or_(
+            *(sa.and_(sa.between(n, table.c.startnumber, table.c.endnumber),
+                      (n - table.c.startnumber) % table.c.step == 0)
+              for n in numerals)))
+
+    if details.excluded:
+        sql = sql.where(_exclude_places(table))
+
+    return sql.scalar_subquery()
+
+
+def _filter_by_layer(table: SaFromClause, layers: DataLayer) -> SaColumn:
+    """ Create a filter expression restricting results of the given table
+        to the given combination of data layers.
+        All rank_address comparisons are wrapped in no_index() so the
+        planner does not pick an index for them.
+    """
+    orexpr: List[SaExpression] = []
+    if layers & DataLayer.ADDRESS and layers & DataLayer.POI:
+        # Both address and POI requested: any addressable rank qualifies.
+        orexpr.append(no_index(table.c.rank_address).between(1, 30))
+    elif layers & DataLayer.ADDRESS:
+        orexpr.append(no_index(table.c.rank_address).between(1, 29))
+        orexpr.append(sa.func.IsAddressPoint(table))
+    elif layers & DataLayer.POI:
+        # POIs are rank 30 objects that are not mere places/buildings.
+        orexpr.append(sa.and_(no_index(table.c.rank_address) == 30,
+                              table.c.class_.not_in(('place', 'building'))))
+
+    if layers & DataLayer.MANMADE:
+        # Man-made layer: everything of rank 0 except the explicitly
+        # excluded railway/natural classes.
+        exclude = []
+        if not layers & DataLayer.RAILWAY:
+            exclude.append('railway')
+        if not layers & DataLayer.NATURAL:
+            exclude.extend(('natural', 'water', 'waterway'))
+        orexpr.append(sa.and_(table.c.class_.not_in(tuple(exclude)),
+                              no_index(table.c.rank_address) == 0))
+    else:
+        # No man-made layer: only include the requested special classes.
+        include = []
+        if layers & DataLayer.RAILWAY:
+            include.append('railway')
+        if layers & DataLayer.NATURAL:
+            include.extend(('natural', 'water', 'waterway'))
+        orexpr.append(sa.and_(table.c.class_.in_(tuple(include)),
+                              no_index(table.c.rank_address) == 0))
+
+    if len(orexpr) == 1:
+        return orexpr[0]
+
+    return sa.or_(*orexpr)
+
+
+def _interpolated_position(table: SaFromClause, nr: SaColumn) -> SaColumn:
+    """ Return an expression for the position of house number 'nr' on the
+        interpolation line, labelled 'centroid'.
+    """
+    pos = sa.cast(nr - table.c.startnumber, sa.Float) / (table.c.endnumber - table.c.startnumber)
+    return sa.case(
+        # Guard against division by zero for single-number interpolations.
+        (table.c.endnumber == table.c.startnumber, table.c.linegeo.ST_Centroid()),
+        else_=table.c.linegeo.ST_LineInterpolatePoint(pos)).label('centroid')
+
+
+async def _get_placex_housenumbers(conn: SearchConnection,
+                                   place_ids: List[int],
+                                   details: SearchDetails) -> AsyncIterator[nres.SearchResult]:
+    """ Asynchronously yield search results for the given placex
+        place IDs.
+    """
+    t = conn.t.placex
+    sql = _select_placex(t).add_columns(t.c.importance)\
+                           .where(t.c.place_id.in_(place_ids))
+
+    if details.geometry_output:
+        sql = _add_geometry_columns(sql, t.c.geometry, details)
+
+    for row in await conn.execute(sql):
+        result = nres.create_from_placex_row(row, nres.SearchResult)
+        assert result
+        result.bbox = Bbox.from_wkb(row.bbox)
+        yield result
+
+
+def _int_list_to_subquery(inp: List[int]) -> 'sa.Subquery':
+    """ Create a subselect that returns the given list of integers
+        as rows in the column 'nr'.
+    """
+    # Ship the list as a single JSON literal and expand it into rows
+    # with a table-valued function, then cast each value back to int
+    # (via text, since the JSON values arrive as JSON scalars).
+    vtab = sa.func.JsonArrayEach(sa.type_coerce(inp, sa.JSON))\
+        .table_valued(sa.column('value', type_=sa.JSON))
+    return sa.select(sa.cast(sa.cast(vtab.c.value, sa.Text), sa.Integer).label('nr')).subquery()
+
+
+async def _get_osmline(conn: SearchConnection, place_ids: List[int],
+                       numerals: List[int],
+                       details: SearchDetails) -> AsyncIterator[nres.SearchResult]:
+    """ Yield results for the given housenumbers from the interpolation
+        (osmline) table, one per matching interpolated number.
+    """
+    t = conn.t.osmline
+
+    # Join the requested numbers against the interpolation ranges.
+    values = _int_list_to_subquery(numerals)
+    sql = sa.select(t.c.place_id, t.c.osm_id,
+                    t.c.parent_place_id, t.c.address,
+                    values.c.nr.label('housenumber'),
+                    _interpolated_position(t, values.c.nr),
+                    t.c.postcode, t.c.country_code)\
+        .where(t.c.place_id.in_(place_ids))\
+        .join(values, values.c.nr.between(t.c.startnumber, t.c.endnumber))
+
+    if details.geometry_output:
+        sub = sql.subquery()
+        sql = _add_geometry_columns(sa.select(sub), sub.c.centroid, details)
+
+    for row in await conn.execute(sql):
+        result = nres.create_from_osmline_row(row, nres.SearchResult)
+        assert result
+        yield result
+
+
+async def _get_tiger(conn: SearchConnection, place_ids: List[int],
+                     numerals: List[int], osm_id: int,
+                     details: SearchDetails) -> AsyncIterator[nres.SearchResult]:
+    """ Yield results for the given housenumbers from the Tiger
+        interpolation table. 'osm_id' is the id of the parent way,
+        reported as the OSM object of the results.
+    """
+    t = conn.t.tiger
+    values = _int_list_to_subquery(numerals)
+    sql = sa.select(t.c.place_id, t.c.parent_place_id,
+                    sa.literal('W').label('osm_type'),
+                    sa.literal(osm_id).label('osm_id'),
+                    values.c.nr.label('housenumber'),
+                    _interpolated_position(t, values.c.nr),
+                    t.c.postcode)\
+        .where(t.c.place_id.in_(place_ids))\
+        .join(values, values.c.nr.between(t.c.startnumber, t.c.endnumber))
+
+    if details.geometry_output:
+        sub = sql.subquery()
+        sql = _add_geometry_columns(sa.select(sub), sub.c.centroid, details)
+
+    for row in await conn.execute(sql):
+        result = nres.create_from_tiger_row(row, nres.SearchResult)
+        assert result
+        yield result
+
+
+class AbstractSearch(abc.ABC):
+    """ Encapsulation of a single lookup in the database.
+    """
+    # Relative priority of this search type; searches with equal penalty
+    # are ordered by ascending SEARCH_PRIO.
+    SEARCH_PRIO: int = 2
+
+    def __init__(self, penalty: float) -> None:
+        self.penalty = penalty
+
+    @abc.abstractmethod
+    async def lookup(self, conn: SearchConnection,
+                     details: SearchDetails) -> nres.SearchResults:
+        """ Find results for the search in the database.
+        """
+
+
+class NearSearch(AbstractSearch):
+    """ Category search of a place type near the result of another search.
+    """
+    def __init__(self, penalty: float, categories: WeightedCategories,
+                 search: AbstractSearch) -> None:
+        super().__init__(penalty)
+        self.search = search
+        self.categories = categories
+
+    async def lookup(self, conn: SearchConnection,
+                     details: SearchDetails) -> nres.SearchResults:
+        """ Find results for the search in the database.
+        """
+        results = nres.SearchResults()
+        # First run the inner search to get the base places.
+        base = await self.search.lookup(conn, details)
+
+        if not base:
+            return results
+
+        base.sort(key=lambda r: (r.accuracy, r.rank_search))
+        max_accuracy = base[0].accuracy + 0.5
+        # Derive the acceptable address-rank window from the best result.
+        if base[0].rank_address == 0:
+            min_rank = 0
+            max_rank = 0
+        elif base[0].rank_address < 26:
+            min_rank = 1
+            max_rank = min(25, base[0].rank_address + 4)
+        else:
+            min_rank = 26
+            max_rank = 30
+        # Keep only placex results of similar accuracy with a reasonably
+        # small bounding box and a rank inside the window.
+        base = nres.SearchResults(r for r in base
+                                  if (r.source_table == nres.SourceTable.PLACEX
+                                      and r.accuracy <= max_accuracy
+                                      and r.bbox and r.bbox.area < 20
+                                      and r.rank_address >= min_rank
+                                      and r.rank_address <= max_rank))
+
+        if base:
+            # Search near at most the five best base places.
+            baseids = [b.place_id for b in base[:5] if b.place_id]
+
+            for category, penalty in self.categories:
+                await self.lookup_category(results, conn, baseids, category, penalty, details)
+                if len(results) >= details.max_results:
+                    break
+
+        return results
+
+    async def lookup_category(self, results: nres.SearchResults,
+                              conn: SearchConnection, ids: List[int],
+                              category: Tuple[str, str], penalty: float,
+                              details: SearchDetails) -> None:
+        """ Find places of the given category near the list of
+            place ids and add the results to 'results'.
+        """
+        table = await conn.get_class_table(*category)
+
+        tgeom = conn.t.placex.alias('pgeom')
+
+        if table is None:
+            # No classtype table available, do a simplified lookup in placex.
+            table = conn.t.placex
+            sql = sa.select(table.c.place_id,
+                            sa.func.min(tgeom.c.centroid.ST_Distance(table.c.centroid))
+                              .label('dist'))\
+                    .join(tgeom, table.c.geometry.intersects(tgeom.c.centroid.ST_Expand(0.01)))\
+                    .where(table.c.class_ == category[0])\
+                    .where(table.c.type == category[1])
+        else:
+            # Use classtype table. We can afford to use a larger
+            # radius for the lookup.
+            sql = sa.select(table.c.place_id,
+                            sa.func.min(tgeom.c.centroid.ST_Distance(table.c.centroid))
+                              .label('dist'))\
+                    .join(tgeom,
+                          table.c.centroid.ST_CoveredBy(
+                              sa.case((sa.and_(tgeom.c.rank_address > 9,
+                                               tgeom.c.geometry.is_area()),
+                                       tgeom.c.geometry),
+                                      else_=tgeom.c.centroid.ST_Expand(0.05))))
+
+        inner = sql.where(tgeom.c.place_id.in_(ids))\
+                   .group_by(table.c.place_id).subquery()
+
+        # Join candidates back to placex; closer to a base place means
+        # higher importance (negated distance).
+        t = conn.t.placex
+        sql = _select_placex(t).add_columns((-inner.c.dist).label('importance'))\
+                               .join(inner, inner.c.place_id == t.c.place_id)\
+                               .order_by(inner.c.dist)
+
+        sql = sql.where(no_index(t.c.rank_address).between(MIN_RANK_PARAM, MAX_RANK_PARAM))
+        if details.countries:
+            sql = sql.where(t.c.country_code.in_(COUNTRIES_PARAM))
+        if details.excluded:
+            sql = sql.where(_exclude_places(t))
+        if details.layers is not None:
+            sql = sql.where(_filter_by_layer(t, details.layers))
+
+        sql = sql.limit(LIMIT_PARAM)
+        for row in await conn.execute(sql, _details_to_bind_params(details)):
+            result = nres.create_from_placex_row(row, nres.SearchResult)
+            assert result
+            result.accuracy = self.penalty + penalty
+            result.bbox = Bbox.from_wkb(row.bbox)
+            results.append(result)
+
+
+class PoiSearch(AbstractSearch):
+    """ Category search in a geographic area.
+    """
+    def __init__(self, sdata: SearchData) -> None:
+        super().__init__(sdata.penalty)
+        self.qualifiers = sdata.qualifiers
+        self.countries = sdata.countries
+
+    async def lookup(self, conn: SearchConnection,
+                     details: SearchDetails) -> nres.SearchResults:
+        """ Find results for the search in the database.
+        """
+        bind_params = _details_to_bind_params(details)
+        t = conn.t.placex
+
+        rows: List[SaRow] = []
+
+        # For small search radii query placex directly, otherwise go
+        # through the per-category classtype tables.
+        if details.near and details.near_radius is not None and details.near_radius < 0.2:
+            # simply search in placex table
+            def _base_query() -> SaSelect:
+                return _select_placex(t) \
+                    .add_columns((-t.c.centroid.ST_Distance(NEAR_PARAM))
+                                 .label('importance'))\
+                    .where(t.c.linked_place_id == None) \
+                    .where(t.c.geometry.within_distance(NEAR_PARAM, NEAR_RADIUS_PARAM)) \
+                    .order_by(t.c.centroid.ST_Distance(NEAR_PARAM)) \
+                    .limit(LIMIT_PARAM)
+
+            classtype = self.qualifiers.values
+            if len(classtype) == 1:
+                # Single category: use a cached lambda statement.
+                cclass, ctype = classtype[0]
+                sql: SaLambdaSelect = sa.lambda_stmt(
+                    lambda: _base_query().where(t.c.class_ == cclass)
+                                         .where(t.c.type == ctype))
+            else:
+                sql = _base_query().where(sa.or_(*(sa.and_(t.c.class_ == cls, t.c.type == typ)
+                                                   for cls, typ in classtype)))
+
+            if self.countries:
+                sql = sql.where(t.c.country_code.in_(self.countries.values))
+
+            if details.viewbox is not None and details.bounded_viewbox:
+                sql = sql.where(t.c.geometry.intersects(VIEWBOX_PARAM))
+
+            rows.extend(await conn.execute(sql, bind_params))
+        else:
+            # use the class type tables
+            for category in self.qualifiers.values:
+                table = await conn.get_class_table(*category)
+                if table is not None:
+                    sql = _select_placex(t)\
+                        .add_columns(t.c.importance)\
+                        .join(table, t.c.place_id == table.c.place_id)\
+                        .where(t.c.class_ == category[0])\
+                        .where(t.c.type == category[1])
+
+                    if details.viewbox is not None and details.bounded_viewbox:
+                        sql = sql.where(table.c.centroid.intersects(VIEWBOX_PARAM))
+
+                    if details.near and details.near_radius is not None:
+                        sql = sql.order_by(table.c.centroid.ST_Distance(NEAR_PARAM))\
+                                 .where(table.c.centroid.within_distance(NEAR_PARAM,
+                                                                         NEAR_RADIUS_PARAM))
+
+                    if self.countries:
+                        sql = sql.where(t.c.country_code.in_(self.countries.values))
+
+                    sql = sql.limit(LIMIT_PARAM)
+                    rows.extend(await conn.execute(sql, bind_params))
+
+        results = nres.SearchResults()
+        for row in rows:
+            result = nres.create_from_placex_row(row, nres.SearchResult)
+            assert result
+            # Category-specific penalty on top of the search penalty.
+            result.accuracy = self.penalty + self.qualifiers.get_penalty((row.class_, row.type))
+            result.bbox = Bbox.from_wkb(row.bbox)
+            results.append(result)
+
+        return results
+
+
+class CountrySearch(AbstractSearch):
+    """ Search for a country name or country code.
+    """
+    # Country searches run before all other search types.
+    SEARCH_PRIO = 0
+
+    def __init__(self, sdata: SearchData) -> None:
+        super().__init__(sdata.penalty)
+        self.countries = sdata.countries
+
+    async def lookup(self, conn: SearchConnection,
+                     details: SearchDetails) -> nres.SearchResults:
+        """ Find results for the search in the database.
+        """
+        t = conn.t.placex
+
+        # Country objects in placex have rank_address 4.
+        ccodes = self.countries.values
+        sql = _select_placex(t)\
+            .add_columns(t.c.importance)\
+            .where(t.c.country_code.in_(ccodes))\
+            .where(t.c.rank_address == 4)
+
+        if details.geometry_output:
+            sql = _add_geometry_columns(sql, t.c.geometry, details)
+
+        if details.excluded:
+            sql = sql.where(_exclude_places(t))
+
+        sql = filter_by_area(sql, t, details)
+
+        results = nres.SearchResults()
+        for row in await conn.execute(sql, _details_to_bind_params(details)):
+            result = nres.create_from_placex_row(row, nres.SearchResult)
+            assert result
+            result.accuracy = self.penalty + self.countries.get_penalty(row.country_code, 5.0)
+            result.bbox = Bbox.from_wkb(row.bbox)
+            results.append(result)
+
+        if not results:
+            results = await self.lookup_in_country_table(conn, details)
+
+        if results:
+            # Subsequent searches should only produce sub-country objects.
+            details.min_rank = min(5, details.max_rank)
+            details.max_rank = min(25, details.max_rank)
+
+        return results
+
+    async def lookup_in_country_table(self, conn: SearchConnection,
+                                      details: SearchDetails) -> nres.SearchResults:
+        """ Look up the country in the fallback country tables.
+        """
+        # Avoid the fallback search when this is a more search. Country results
+        # usually are in the first batch of results and it is not possible
+        # to exclude these fallbacks.
+        if details.excluded:
+            return nres.SearchResults()
+
+        t = conn.t.country_name
+        tgrid = conn.t.country_grid
+
+        # Aggregate the grid cells per country into a centroid and bbox.
+        sql = sa.select(tgrid.c.country_code,
+                        tgrid.c.geometry.ST_Centroid().ST_Collect().ST_Centroid()
+                             .label('centroid'),
+                        tgrid.c.geometry.ST_Collect().ST_Expand(0).label('bbox'))\
+                .where(tgrid.c.country_code.in_(self.countries.values))\
+                .group_by(tgrid.c.country_code)
+
+        sql = filter_by_area(sql, tgrid, details, avoid_index=True)
+
+        sub = sql.subquery('grid')
+
+        sql = sa.select(t.c.country_code,
+                        t.c.name.merge(t.c.derived_name).label('name'),
+                        sub.c.centroid, sub.c.bbox)\
+                .join(sub, t.c.country_code == sub.c.country_code)
+
+        if details.geometry_output:
+            sql = _add_geometry_columns(sql, sub.c.centroid, details)
+
+        results = nres.SearchResults()
+        for row in await conn.execute(sql, _details_to_bind_params(details)):
+            result = nres.create_from_country_row(row, nres.SearchResult)
+            assert result
+            result.bbox = Bbox.from_wkb(row.bbox)
+            result.accuracy = self.penalty + self.countries.get_penalty(row.country_code, 5.0)
+            results.append(result)
+
+        return results
+
+
+class PostcodeSearch(AbstractSearch):
+    """ Search for a postcode.
+    """
+    def __init__(self, extra_penalty: float, sdata: SearchData) -> None:
+        super().__init__(sdata.penalty + extra_penalty)
+        self.countries = sdata.countries
+        self.postcodes = sdata.postcodes
+        self.lookups = sdata.lookups
+        self.rankings = sdata.rankings
+
+    async def lookup(self, conn: SearchConnection,
+                     details: SearchDetails) -> nres.SearchResults:
+        """ Find results for the search in the database.
+        """
+        t = conn.t.postcode
+        pcs = self.postcodes.values
+
+        sql = sa.select(t.c.place_id, t.c.parent_place_id,
+                        t.c.rank_search, t.c.rank_address,
+                        t.c.postcode, t.c.country_code,
+                        t.c.geometry.label('centroid'))\
+                .where(t.c.postcode.in_(pcs))
+
+        if details.geometry_output:
+            sql = _add_geometry_columns(sql, t.c.geometry, details)
+
+        penalty: SaExpression = sa.literal(self.penalty)
+
+        # Prefer results inside the (unbounded) viewbox.
+        if details.viewbox is not None and not details.bounded_viewbox:
+            penalty += sa.case((t.c.geometry.intersects(VIEWBOX_PARAM), 0.0),
+                               (t.c.geometry.intersects(VIEWBOX2_PARAM), 0.5),
+                               else_=1.0)
+
+        if details.near is not None:
+            sql = sql.order_by(t.c.geometry.ST_Distance(NEAR_PARAM))
+
+        sql = filter_by_area(sql, t, details)
+
+        if self.countries:
+            sql = sql.where(t.c.country_code.in_(self.countries.values))
+
+        if details.excluded:
+            sql = sql.where(_exclude_places(t))
+
+        # Any additional name terms must match the parent of the postcode.
+        if self.lookups:
+            assert len(self.lookups) == 1
+            tsearch = conn.t.search_name
+            sql = sql.where(tsearch.c.place_id == t.c.parent_place_id)\
+                     .where((tsearch.c.name_vector + tsearch.c.nameaddress_vector)
+                            .contains(sa.type_coerce(self.lookups[0].tokens,
+                                                     IntArray)))
+
+        for ranking in self.rankings:
+            penalty += ranking.sql_penalty(conn.t.search_name)
+        penalty += sa.case(*((t.c.postcode == v, p) for v, p in self.postcodes),
+                           else_=1.0)
+
+        sql = sql.add_columns(penalty.label('accuracy'))
+        sql = sql.order_by('accuracy').limit(LIMIT_PARAM)
+
+        results = nres.SearchResults()
+        for row in await conn.execute(sql, _details_to_bind_params(details)):
+            # Prefer a postal_code boundary from placex over the plain
+            # postcode point, if one exists.
+            p = conn.t.placex
+            placex_sql = _select_placex(p)\
+                .add_columns(p.c.importance)\
+                .where(sa.text("""class = 'boundary'
+                                  AND type = 'postal_code'
+                                  AND osm_type = 'R'"""))\
+                .where(p.c.country_code == row.country_code)\
+                .where(p.c.postcode == row.postcode)\
+                .limit(1)
+
+            if details.geometry_output:
+                placex_sql = _add_geometry_columns(placex_sql, p.c.geometry, details)
+
+            for prow in await conn.execute(placex_sql, _details_to_bind_params(details)):
+                result = nres.create_from_placex_row(prow, nres.SearchResult)
+                if result is not None:
+                    result.bbox = Bbox.from_wkb(prow.bbox)
+                break
+            else:
+                result = nres.create_from_postcode_row(row, nres.SearchResult)
+
+            assert result
+            if result.place_id not in details.excluded:
+                result.accuracy = row.accuracy
+                results.append(result)
+
+        return results
+
+
+class PlaceSearch(AbstractSearch):
+    """ Generic search for an address or named place.
+    """
+    SEARCH_PRIO = 1
+
+    def __init__(self, extra_penalty: float, sdata: SearchData, expected_count: int) -> None:
+        super().__init__(sdata.penalty + extra_penalty)
+        self.countries = sdata.countries
+        self.postcodes = sdata.postcodes
+        self.housenumbers = sdata.housenumbers
+        self.qualifiers = sdata.qualifiers
+        self.lookups = sdata.lookups
+        self.rankings = sdata.rankings
+        # Estimated number of rows the lookup terms will match.
+        self.expected_count = expected_count
+
+    def _inner_search_name_cte(self, conn: SearchConnection,
+                               details: SearchDetails) -> 'sa.CTE':
+        """ Create a subquery that preselects the rows in the search_name
+            table.
+        """
+        t = conn.t.search_name
+
+        penalty: SaExpression = sa.literal(self.penalty)
+        for ranking in self.rankings:
+            penalty += ranking.sql_penalty(t)
+
+        # Synthesize an importance from search_rank when none is set.
+        sql = sa.select(t.c.place_id, t.c.search_rank, t.c.address_rank,
+                        t.c.country_code, t.c.centroid,
+                        t.c.name_vector, t.c.nameaddress_vector,
+                        sa.case((t.c.importance > 0, t.c.importance),
+                                else_=0.40001-(sa.cast(t.c.search_rank, sa.Float())/75))
+                          .label('importance'),
+                        penalty.label('penalty'))
+
+        for lookup in self.lookups:
+            sql = sql.where(lookup.sql_condition(t))
+
+        if self.countries:
+            sql = sql.where(t.c.country_code.in_(self.countries.values))
+
+        if self.postcodes:
+            # if a postcode is given, don't search for state or country level objects
+            sql = sql.where(t.c.address_rank > 9)
+            if self.expected_count > 10000:
+                # Many results expected. Restrict by postcode.
+                tpc = conn.t.postcode
+                sql = sql.where(sa.select(tpc.c.postcode)
+                                  .where(tpc.c.postcode.in_(self.postcodes.values))
+                                  .where(t.c.centroid.within_distance(tpc.c.geometry, 0.4))
+                                  .exists())
+
+        if details.viewbox is not None:
+            if details.bounded_viewbox:
+                sql = sql.where(t.c.centroid
+                                 .intersects(VIEWBOX_PARAM,
+                                             use_index=details.viewbox.area < 0.2))
+            elif not self.postcodes and not self.housenumbers and self.expected_count >= 10000:
+                sql = sql.where(t.c.centroid
+                                 .intersects(VIEWBOX2_PARAM,
+                                             use_index=details.viewbox.area < 0.5))
+
+        if details.near is not None and details.near_radius is not None:
+            if details.near_radius < 0.1:
+                sql = sql.where(t.c.centroid.within_distance(NEAR_PARAM,
+                                                             NEAR_RADIUS_PARAM))
+            else:
+                sql = sql.where(t.c.centroid
+                                 .ST_Distance(NEAR_PARAM) < NEAR_RADIUS_PARAM)
+
+        if self.housenumbers:
+            sql = sql.where(t.c.address_rank.between(16, 30))
+        else:
+            if details.excluded:
+                sql = sql.where(_exclude_places(t))
+            if details.min_rank > 0:
+                sql = sql.where(sa.or_(t.c.address_rank >= MIN_RANK_PARAM,
+                                       t.c.search_rank >= MIN_RANK_PARAM))
+            if details.max_rank < 30:
+                sql = sql.where(sa.or_(t.c.address_rank <= MAX_RANK_PARAM,
+                                       t.c.search_rank <= MAX_RANK_PARAM))
+
+        inner = sql.limit(10000).order_by(sa.desc(sa.text('importance'))).subquery()
+
+        sql = sa.select(inner.c.place_id, inner.c.search_rank, inner.c.address_rank,
+                        inner.c.country_code, inner.c.centroid, inner.c.importance,
+                        inner.c.penalty)
+
+        # If the query is not an address search or has a geographic preference,
+        # preselect most important items to restrict the number of places
+        # that need to be looked up in placex.
+        if not self.housenumbers\
+           and (details.viewbox is None or details.bounded_viewbox)\
+           and (details.near is None or details.near_radius is not None)\
+           and not self.qualifiers:
+            sql = sql.add_columns(sa.func.first_value(inner.c.penalty - inner.c.importance)
+                                    .over(order_by=inner.c.penalty - inner.c.importance)
+                                    .label('min_penalty'))
+
+            inner = sql.subquery()
+
+            sql = sa.select(inner.c.place_id, inner.c.search_rank, inner.c.address_rank,
+                            inner.c.country_code, inner.c.centroid, inner.c.importance,
+                            inner.c.penalty)\
+                    .where(inner.c.penalty - inner.c.importance < inner.c.min_penalty + 0.5)
+
+        return sql.cte('searches')
+
+    async def lookup(self, conn: SearchConnection,
+                     details: SearchDetails) -> nres.SearchResults:
+        """ Find results for the search in the database.
+        """
+        t = conn.t.placex
+        tsearch = self._inner_search_name_cte(conn, details)
+
+        sql = _select_placex(t).join(tsearch, t.c.place_id == tsearch.c.place_id)
+
+        if details.geometry_output:
+            sql = _add_geometry_columns(sql, t.c.geometry, details)
+
+        penalty: SaExpression = tsearch.c.penalty
+
+        if self.postcodes:
+            # Penalise results by their distance to the wanted postcode.
+            tpc = conn.t.postcode
+            pcs = self.postcodes.values
+
+            pc_near = sa.select(sa.func.min(tpc.c.geometry.ST_Distance(t.c.centroid)))\
+                        .where(tpc.c.postcode.in_(pcs))\
+                        .scalar_subquery()
+            penalty += sa.case((t.c.postcode.in_(pcs), 0.0),
+                               else_=sa.func.coalesce(pc_near, cast(SaColumn, 2.0)))
+
+        if details.viewbox is not None and not details.bounded_viewbox:
+            penalty += sa.case((t.c.geometry.intersects(VIEWBOX_PARAM, use_index=False), 0.0),
+                               (t.c.geometry.intersects(VIEWBOX2_PARAM, use_index=False), 0.5),
+                               else_=1.0)
+
+        if details.near is not None:
+            # Order by distance to the near point instead of importance.
+            sql = sql.add_columns((-tsearch.c.centroid.ST_Distance(NEAR_PARAM))
+                                  .label('importance'))
+            sql = sql.order_by(sa.desc(sa.text('importance')))
+        else:
+            sql = sql.order_by(penalty - tsearch.c.importance)
+            sql = sql.add_columns(tsearch.c.importance)
+
+        sql = sql.add_columns(penalty.label('accuracy'))\
+                 .order_by(sa.text('accuracy'))
+
+        if self.housenumbers:
+            # Collect candidate housenumbers from placex, interpolations
+            # and the Tiger table for each matched parent place.
+            hnr_list = '|'.join(self.housenumbers.values)
+            inner = sql.where(sa.or_(tsearch.c.address_rank < 30,
+                                     sa.func.RegexpWord(hnr_list, t.c.housenumber)))\
+                       .subquery()
+
+            # Housenumbers from placex
+            thnr = conn.t.placex.alias('hnr')
+            pid_list = sa.func.ArrayAgg(thnr.c.place_id)
+            place_sql = sa.select(pid_list)\
+                          .where(thnr.c.parent_place_id == inner.c.place_id)\
+                          .where(sa.func.RegexpWord(hnr_list, thnr.c.housenumber))\
+                          .where(thnr.c.linked_place_id == None)\
+                          .where(thnr.c.indexed_status == 0)
+
+            if details.excluded:
+                place_sql = place_sql.where(thnr.c.place_id.not_in(sa.bindparam('excluded')))
+            if self.qualifiers:
+                place_sql = place_sql.where(self.qualifiers.sql_restrict(thnr))
+
+            # Interpolations only work for purely numeric housenumbers.
+            numerals = [int(n) for n in self.housenumbers.values
+                        if n.isdigit() and len(n) < 8]
+            interpol_sql: SaColumn
+            tiger_sql: SaColumn
+            if numerals and \
+               (not self.qualifiers or ('place', 'house') in self.qualifiers.values):
+                # Housenumbers from interpolations
+                interpol_sql = _make_interpolation_subquery(conn.t.osmline, inner,
+                                                            numerals, details)
+                # Housenumbers from Tiger
+                tiger_sql = sa.case((inner.c.country_code == 'us',
+                                     _make_interpolation_subquery(conn.t.tiger, inner,
+                                                                  numerals, details)
+                                     ), else_=None)
+            else:
+                interpol_sql = sa.null()
+                tiger_sql = sa.null()
+
+            # Prefer places with a direct placex housenumber, then
+            # interpolations, then Tiger data.
+            unsort = sa.select(inner, place_sql.scalar_subquery().label('placex_hnr'),
+                               interpol_sql.label('interpol_hnr'),
+                               tiger_sql.label('tiger_hnr')).subquery('unsort')
+            sql = sa.select(unsort)\
+                    .order_by(sa.case((unsort.c.placex_hnr != None, 1),
+                                      (unsort.c.interpol_hnr != None, 2),
+                                      (unsort.c.tiger_hnr != None, 3),
+                                      else_=4),
+                              unsort.c.accuracy)
+        else:
+            sql = sql.where(t.c.linked_place_id == None)\
+                     .where(t.c.indexed_status == 0)
+            if self.qualifiers:
+                sql = sql.where(self.qualifiers.sql_restrict(t))
+            if details.layers is not None:
+                sql = sql.where(_filter_by_layer(t, details.layers))
+
+        sql = sql.limit(LIMIT_PARAM)
+
+        results = nres.SearchResults()
+        for row in await conn.execute(sql, _details_to_bind_params(details)):
+            result = nres.create_from_placex_row(row, nres.SearchResult)
+            assert result
+            result.bbox = Bbox.from_wkb(row.bbox)
+            result.accuracy = row.accuracy
+            if self.housenumbers and row.rank_address < 30:
+                # Expand the street/place into its individual housenumbers.
+                if row.placex_hnr:
+                    subs = _get_placex_housenumbers(conn, row.placex_hnr, details)
+                elif row.interpol_hnr:
+                    subs = _get_osmline(conn, row.interpol_hnr, numerals, details)
+                elif row.tiger_hnr:
+                    subs = _get_tiger(conn, row.tiger_hnr, numerals, row.osm_id, details)
+                else:
+                    subs = None
+
+                if subs is not None:
+                    async for sub in subs:
+                        assert sub.housenumber
+                        sub.accuracy = result.accuracy
+                        if not any(nr in self.housenumbers.values
+                                   for nr in sub.housenumber.split(';')):
+                            sub.accuracy += 0.6
+                        results.append(sub)
+
+                # Only add the street as a result, if it meets all other
+                # filter conditions.
+                if (not details.excluded or result.place_id not in details.excluded)\
+                   and (not self.qualifiers or result.category in self.qualifiers.values)\
+                   and result.rank_address >= details.min_rank:
+                    result.accuracy += 1.0  # penalty for missing housenumber
+                    results.append(result)
+            else:
+                results.append(result)
+
+        return results
--- /dev/null
+# SPDX-License-Identifier: GPL-3.0-or-later
+#
+# This file is part of Nominatim. (https://nominatim.org)
+#
+# Copyright (C) 2024 by the Nominatim developer community.
+# For a full list of authors see the git log.
+"""
+Public interface to the search code.
+"""
+from typing import List, Any, Optional, Iterator, Tuple, Dict
+import itertools
+import re
+import datetime as dt
+import difflib
+
+from ..connection import SearchConnection
+from ..types import SearchDetails
+from ..results import SearchResult, SearchResults, add_result_details
+from ..logging import log
+from .token_assignment import yield_token_assignments
+from .db_search_builder import SearchBuilder, build_poi_search, wrap_near_search
+from .db_searches import AbstractSearch
+from .query_analyzer_factory import make_query_analyzer, AbstractQueryAnalyzer
+from .query import Phrase, QueryStruct
+
+
+class ForwardGeocoder:
+    """ Main class responsible for place search.
+    """
+
+    def __init__(self, conn: SearchConnection,
+                 params: SearchDetails, timeout: Optional[int]) -> None:
+        self.conn = conn
+        self.params = params
+        # No timeout given means effectively unlimited.
+        self.timeout = dt.timedelta(seconds=timeout or 1000000)
+        self.query_analyzer: Optional[AbstractQueryAnalyzer] = None
+
+    @property
+    def limit(self) -> int:
+        """ Return the configured maximum number of search results.
+        """
+        return self.params.max_results
+
+    async def build_searches(self,
+                             phrases: List[Phrase]) -> Tuple[QueryStruct, List[AbstractSearch]]:
+        """ Analyse the query and return the tokenized query and list of
+            possible searches over it.
+        """
+        if self.query_analyzer is None:
+            self.query_analyzer = await make_query_analyzer(self.conn)
+
+        query = await self.query_analyzer.analyze_query(phrases)
+
+        searches: List[AbstractSearch] = []
+        if query.num_token_slots() > 0:
+            # 2. Compute all possible search interpretations
+            log().section('Compute abstract searches')
+            search_builder = SearchBuilder(query, self.params)
+            num_searches = 0
+            for assignment in yield_token_assignments(query):
+                searches.extend(search_builder.build(assignment))
+                # Only log the searches newly added for this assignment.
+                if num_searches < len(searches):
+                    log().table_dump('Searches for assignment',
+                                     _dump_searches(searches, query, num_searches))
+                num_searches = len(searches)
+            searches.sort(key=lambda s: (s.penalty, s.SEARCH_PRIO))
+
+        return query, searches
+
+    async def execute_searches(self, query: QueryStruct,
+                               searches: List[AbstractSearch]) -> SearchResults:
+        """ Run the abstract searches against the database until a result
+            is found.
+        """
+        log().section('Execute database searches')
+        results: Dict[Any, SearchResult] = {}
+
+        end_time = dt.datetime.now() + self.timeout
+
+        min_ranking = searches[0].penalty + 2.0
+        prev_penalty = 0.0
+        for i, search in enumerate(searches):
+            # Stop once penalties get too high or too many searches ran.
+            if search.penalty > prev_penalty and (search.penalty > min_ranking or i > 20):
+                break
+            log().table_dump(f"{i + 1}. Search", _dump_searches([search], query))
+            log().var_dump('Params', self.params)
+            lookup_results = await search.lookup(self.conn, self.params)
+            for result in lookup_results:
+                # Deduplicate results, keeping the best accuracy.
+                rhash = (result.source_table, result.place_id,
+                         result.housenumber, result.country_code)
+                prevresult = results.get(rhash)
+                if prevresult:
+                    prevresult.accuracy = min(prevresult.accuracy, result.accuracy)
+                else:
+                    results[rhash] = result
+                min_ranking = min(min_ranking, result.accuracy * 1.2, 2.0)
+            log().result_dump('Results', ((r.accuracy, r) for r in lookup_results))
+            prev_penalty = search.penalty
+            if dt.datetime.now() >= end_time:
+                break
+
+        return SearchResults(results.values())
+
+    def pre_filter_results(self, results: SearchResults) -> SearchResults:
+        """ Remove results that are significantly worse than the
+            best match.
+        """
+        if results:
+            max_ranking = min(r.ranking for r in results) + 0.5
+            results = SearchResults(r for r in results if r.ranking < max_ranking)
+
+        return results
+
+    def sort_and_cut_results(self, results: SearchResults) -> SearchResults:
+        """ Remove badly matching results, sort by ranking and
+            limit to the configured number of results.
+        """
+        if results:
+            # Larger bounding boxes win on equal ranking.
+            results.sort(key=lambda r: (r.ranking, 0 if r.bbox is None else -r.bbox.area))
+            min_rank = results[0].rank_search
+            min_ranking = results[0].ranking
+            results = SearchResults(r for r in results
+                                    if (r.ranking + 0.03 * (r.rank_search - min_rank)
+                                        < min_ranking + 0.5))
+
+            results = SearchResults(results[:self.limit])
+
+        return results
+
+    def rerank_by_query(self, query: QueryStruct, results: SearchResults) -> None:
+        """ Adjust the accuracy of the localized result according to how well
+            they match the original query.
+        """
+        assert self.query_analyzer is not None
+        qwords = [word for phrase in query.source
+                  for word in re.split('[-,: ]+', phrase.text) if word]
+        if not qwords:
+            return
+
+        for result in results:
+            # Negative importance indicates ordering by distance, which is
+            # more important than word matching.
+            if not result.display_name\
+               or (result.importance is not None and result.importance < 0):
+                continue
+            distance = 0.0
+            norm = self.query_analyzer.normalize_text(' '.join((result.display_name,
+                                                                result.country_code or '')))
+            words = set((w for w in re.split('[-,: ]+', norm) if w))
+            if not words:
+                continue
+            # For each query word take the best fuzzy match against the
+            # result's words; poor matches add to the distance.
+            for qword in qwords:
+                wdist = max(difflib.SequenceMatcher(a=qword, b=w).quick_ratio() for w in words)
+                if wdist < 0.5:
+                    distance += len(qword)
+                else:
+                    distance += (1.0 - wdist) * len(qword)
+            # Compensate for the fact that country names do not get a
+            # match penalty yet by the tokenizer.
+            # Temporary hack that needs to be removed!
+            if result.rank_address == 4:
+                distance *= 2
+            result.accuracy += distance * 0.4 / sum(len(w) for w in qwords)
+
+    async def lookup_pois(self, categories: List[Tuple[str, str]],
+                          phrases: List[Phrase]) -> SearchResults:
+        """ Look up places by category. If phrase is given, a place search
+            over the phrase will be executed first and places close to the
+            results returned.
+        """
+        log().function('forward_lookup_pois', categories=categories, params=self.params)
+
+        if phrases:
+            query, searches = await self.build_searches(phrases)
+
+            if query:
+                # Wrap the phrase searches into near searches for the POIs.
+                searches = [wrap_near_search(categories, s) for s in searches[:50]]
+                results = await self.execute_searches(query, searches)
+                results = self.pre_filter_results(results)
+                await add_result_details(self.conn, results, self.params)
+                log().result_dump('Preliminary Results', ((r.accuracy, r) for r in results))
+                results = self.sort_and_cut_results(results)
+            else:
+                results = SearchResults()
+        else:
+            search = build_poi_search(categories, self.params.countries)
+            results = await search.lookup(self.conn, self.params)
+            await add_result_details(self.conn, results, self.params)
+
+        log().result_dump('Final Results', ((r.accuracy, r) for r in results))
+
+        return results
+
+    async def lookup(self, phrases: List[Phrase]) -> SearchResults:
+        """ Look up a single free-text query.
+        """
+        log().function('forward_lookup', phrases=phrases, params=self.params)
+        results = SearchResults()
+
+        if self.params.is_impossible():
+            return results
+
+        query, searches = await self.build_searches(phrases)
+
+        if searches:
+            # Execute SQL until an appropriate result is found.
+            results = await self.execute_searches(query, searches[:50])
+            results = self.pre_filter_results(results)
+            await add_result_details(self.conn, results, self.params)
+            log().result_dump('Preliminary Results', ((r.accuracy, r) for r in results))
+            self.rerank_by_query(query, results)
+            log().result_dump('Results after reranking', ((r.accuracy, r) for r in results))
+            results = self.sort_and_cut_results(results)
+            log().result_dump('Final Results', ((r.accuracy, r) for r in results))
+
+        return results
+
+
+def _dump_searches(searches: List[AbstractSearch], query: QueryStruct,
+ start: int = 0) -> Iterator[Optional[List[Any]]]:
+ yield ['Penalty', 'Lookups', 'Housenr', 'Postcode', 'Countries',
+ 'Qualifier', 'Catgeory', 'Rankings']
+
+ def tk(tl: List[int]) -> str:
+ tstr = [f"{query.find_lookup_word_by_id(t)}({t})" for t in tl]
+
+ return f"[{','.join(tstr)}]"
+
+ def fmt_ranking(f: Any) -> str:
+ if not f:
+ return ''
+ ranks = ','.join((f"{tk(r.tokens)}^{r.penalty:.3g}" for r in f.rankings))
+ if len(ranks) > 100:
+ ranks = ranks[:100] + '...'
+ return f"{f.column}({ranks},def={f.default:.3g})"
+
+ def fmt_lookup(lk: Any) -> str:
+ if not lk:
+ return ''
+
+ return f"{lk.lookup_type}({lk.column}{tk(lk.tokens)})"
+
+ def fmt_cstr(c: Any) -> str:
+ if not c:
+ return ''
+
+ return f'{c[0]}^{c[1]}'
+
+ for search in searches[start:]:
+ fields = ('lookups', 'rankings', 'countries', 'housenumbers',
+ 'postcodes', 'qualifiers')
+ if hasattr(search, 'search'):
+ iters = itertools.zip_longest([f"{search.penalty:.3g}"],
+ *(getattr(search.search, attr, []) for attr in fields),
+ getattr(search, 'categories', []),
+ fillvalue='')
+ else:
+ iters = itertools.zip_longest([f"{search.penalty:.3g}"],
+ *(getattr(search, attr, []) for attr in fields),
+ [],
+ fillvalue='')
+ for penalty, lookup, rank, cc, hnr, pc, qual, cat in iters:
+ yield [penalty, fmt_lookup(lookup), fmt_cstr(hnr),
+ fmt_cstr(pc), fmt_cstr(cc), fmt_cstr(qual), fmt_cstr(cat), fmt_ranking(rank)]
+ yield None
--- /dev/null
+# SPDX-License-Identifier: GPL-3.0-or-later
+#
+# This file is part of Nominatim. (https://nominatim.org)
+#
+# Copyright (C) 2024 by the Nominatim developer community.
+# For a full list of authors see the git log.
+"""
+Implementation of query analysis for the ICU tokenizer.
+"""
+from typing import Tuple, Dict, List, Optional, Iterator, Any, cast
+from collections import defaultdict
+import dataclasses
+import difflib
+import re
+from itertools import zip_longest
+
+from icu import Transliterator
+
+import sqlalchemy as sa
+
+from ..errors import UsageError
+from ..typing import SaRow
+from ..sql.sqlalchemy_types import Json
+from ..connection import SearchConnection
+from ..logging import log
+from . import query as qmod
+from ..query_preprocessing.config import QueryConfig
+from .query_analyzer_factory import AbstractQueryAnalyzer
+
+
# Mapping from the 'type' column of the word table to query token types.
# Special terms (type 'S') are not listed here; they are handled
# separately in ICUQueryAnalyzer.analyze_query().
DB_TO_TOKEN_TYPE = {
    'W': qmod.TokenType.WORD,
    'w': qmod.TokenType.PARTIAL,
    'H': qmod.TokenType.HOUSENUMBER,
    'P': qmod.TokenType.POSTCODE,
    'C': qmod.TokenType.COUNTRY
}

# Penalty applied per break type when a lookup word spans a break of that
# type (accumulated in yield_words()/split_query()).
PENALTY_IN_TOKEN_BREAK = {
    qmod.BreakType.START: 0.5,
    qmod.BreakType.END: 0.5,
    qmod.BreakType.PHRASE: 0.5,
    qmod.BreakType.SOFT_PHRASE: 0.5,
    qmod.BreakType.WORD: 0.1,
    qmod.BreakType.PART: 0.0,
    qmod.BreakType.TOKEN: 0.0
}
+
+
@dataclasses.dataclass
class QueryPart:
    """ Normalized and transliterated form of a single term in the query.
        When the term came out of a split during the transliteration,
        the normalized string is the full word before transliteration.
        The word number keeps track of the word before transliteration
        and can be used to identify partial transliterated terms.
        Penalty is the break penalty for the break following the token.
    """
    # Transliterated form of the term used for database lookup.
    token: str
    # Normalized form (the full word before transliteration).
    normalized: str
    # Index of the pre-transliteration word this part belongs to.
    word_number: int
    # Break penalty for the break following this token.
    penalty: float
+
+
# All transliterated terms of a query in query order.
QueryParts = List[QueryPart]
# Maps a lookup word to all token ranges in the query where it occurs.
WordDict = Dict[str, List[qmod.TokenRange]]
+
+
def yield_words(terms: List[QueryPart], start: int) -> Iterator[Tuple[str, qmod.TokenRange]]:
    """ Generate all lookup words that can be built from the terms list
        beginning at position 'start', each together with the token range
        it covers. Words are built from consecutive terms, up to 20 terms
        per word. The range carries the accumulated break penalty.
    """
    total = len(terms)
    for first in range(start, total):
        accumulated = terms[first].token
        penalty = PENALTY_IN_TOKEN_BREAK[qmod.BreakType.WORD]
        yield accumulated, qmod.TokenRange(first, first + 1, penalty=penalty)
        last = first + 1
        while last < min(first + 20, total):
            accumulated = ' '.join((accumulated, terms[last].token))
            penalty += terms[last - 1].penalty
            yield accumulated, qmod.TokenRange(first, last + 1, penalty=penalty)
            last += 1
+
+
@dataclasses.dataclass
class ICUToken(qmod.Token):
    """ Specialised token for ICU tokenizer.
    """
    # Transliterated form of the token as found in the word table.
    word_token: str
    # Raw 'info' column of the word table row (may be None).
    info: Optional[Dict[str, Any]]

    def get_category(self) -> Tuple[str, str]:
        """ Return the (class, type) restriction stored in the token info.
        """
        assert self.info
        return self.info.get('class', ''), self.info.get('type', '')

    def rematch(self, norm: str) -> None:
        """ Check how well the token matches the given normalized string
            and add a penalty, if necessary.
        """
        if not self.lookup_word:
            return

        seq = difflib.SequenceMatcher(a=self.lookup_word, b=norm)
        distance = 0
        for tag, afrom, ato, bfrom, bto in seq.get_opcodes():
            if tag in ('delete', 'insert') and (afrom == 0 or ato == len(self.lookup_word)):
                # Changes at the beginning or end count a flat 1,
                # independent of their length.
                distance += 1
            elif tag == 'replace':
                distance += max((ato-afrom), (bto-bfrom))
            elif tag != 'equal':
                # Insertions/deletions in the middle of the word.
                distance += abs((ato-afrom) - (bto-bfrom))
        # Normalise the distance by the length of the lookup word.
        self.penalty += (distance/len(self.lookup_word))

    @staticmethod
    def from_db_row(row: SaRow, base_penalty: float = 0.0) -> 'ICUToken':
        """ Create a ICUToken from the row of the word table.
        """
        # Missing info defaults both counts to 1.
        count = 1 if row.info is None else row.info.get('count', 1)
        addr_count = 1 if row.info is None else row.info.get('addr_count', 1)

        penalty = base_penalty
        if row.type == 'w':
            # Partial words get a flat penalty.
            penalty += 0.3
        elif row.type == 'W':
            # Penalise single-character full words that are their own token.
            if len(row.word_token) == 1 and row.word_token == row.word:
                penalty += 0.2 if row.word.isdigit() else 0.3
        elif row.type == 'H':
            # Housenumbers: penalise every non-digit character.
            penalty += sum(0.1 for c in row.word_token if c != ' ' and not c.isdigit())
            if all(not c.isdigit() for c in row.word_token):
                # Extra penalty for housenumbers without any digit at all.
                penalty += 0.2 * (len(row.word_token) - 1)
        elif row.type == 'C':
            # Penalise single-character country tokens.
            if len(row.word_token) == 1:
                penalty += 0.3

        if row.info is None:
            lookup_word = row.word
        else:
            lookup_word = row.info.get('lookup', row.word)
        if lookup_word:
            # Only the part before any '@' marker is used for lookup.
            lookup_word = lookup_word.split('@', 1)[0]
        else:
            lookup_word = row.word_token

        return ICUToken(penalty=penalty, token=row.word_id, count=max(1, count),
                        lookup_word=lookup_word,
                        word_token=row.word_token, info=row.info,
                        addr_count=max(1, addr_count))
+
+
class ICUQueryAnalyzer(AbstractQueryAnalyzer):
    """ Converter for query strings into a tokenized query
        using the tokens created by a ICU tokenizer.
    """
    def __init__(self, conn: SearchConnection) -> None:
        # Database connection used for word lookups and cached rules.
        self.conn = conn

    async def setup(self) -> None:
        """ Set up static data structures needed for the analysis.
        """
        async def _make_normalizer() -> Any:
            rules = await self.conn.get_property('tokenizer_import_normalisation')
            return Transliterator.createFromRules("normalization", rules)

        # Normalizer and transliterator are built from rules stored in the
        # database and shared via the connection's value cache.
        self.normalizer = await self.conn.get_cached_value('ICUTOK', 'normalizer',
                                                           _make_normalizer)

        async def _make_transliterator() -> Any:
            rules = await self.conn.get_property('tokenizer_import_transliteration')
            return Transliterator.createFromRules("transliteration", rules)

        self.transliterator = await self.conn.get_cached_value('ICUTOK', 'transliterator',
                                                               _make_transliterator)

        await self._setup_preprocessing()

        # Register the word table lazily with SQLAlchemy's metadata.
        if 'word' not in self.conn.t.meta.tables:
            sa.Table('word', self.conn.t.meta,
                     sa.Column('word_id', sa.Integer),
                     sa.Column('word_token', sa.Text, nullable=False),
                     sa.Column('type', sa.Text, nullable=False),
                     sa.Column('word', sa.Text),
                     sa.Column('info', Json))

    async def _setup_preprocessing(self) -> None:
        """ Load the rules for preprocessing and set up the handlers.

            Raises a UsageError when a rule has a missing or malformed
            'step' attribute.
        """

        rules = self.conn.config.load_sub_configuration('icu_tokenizer.yaml',
                                                        config='TOKENIZER_CONFIG')
        preprocessing_rules = rules.get('query-preprocessing', [])

        self.preprocessors = []

        for func in preprocessing_rules:
            if 'step' not in func:
                raise UsageError("Preprocessing rule is missing the 'step' attribute.")
            if not isinstance(func['step'], str):
                raise UsageError("'step' attribute must be a simple string.")

            # 'step' names the plugin module implementing the preprocessor.
            module = self.conn.config.load_plugin_module(
                func['step'], 'nominatim_api.query_preprocessing')
            self.preprocessors.append(
                module.create(QueryConfig(func).set_normalizer(self.normalizer)))

    async def analyze_query(self, phrases: List[qmod.Phrase]) -> qmod.QueryStruct:
        """ Analyze the given list of phrases and return the
            tokenized query.
        """
        log().section('Analyze query (using ICU tokenizer)')
        for func in self.preprocessors:
            phrases = func(phrases)
        query = qmod.QueryStruct(phrases)

        log().var_dump('Normalized query', query.source)
        if not query.source:
            # Preprocessing left nothing to search for.
            return query

        parts, words = self.split_query(query)
        log().var_dump('Transliterated query', lambda: _dump_transliterated(query, parts))

        for row in await self.lookup_in_db(list(words.keys())):
            for trange in words[row.word_token]:
                token = ICUToken.from_db_row(row, trange.penalty or 0.0)
                if row.type == 'S':
                    # Special terms ('S') are mapped to near-item or
                    # qualifier tokens depending on operator and position.
                    if row.info['op'] in ('in', 'near'):
                        # Operator terms only work as near-item at the
                        # beginning of the query.
                        if trange.start == 0:
                            query.add_token(trange, qmod.TokenType.NEAR_ITEM, token)
                    else:
                        if trange.start == 0 and trange.end == query.num_token_slots():
                            # Term covering the complete query.
                            query.add_token(trange, qmod.TokenType.NEAR_ITEM, token)
                        else:
                            query.add_token(trange, qmod.TokenType.QUALIFIER, token)
                else:
                    query.add_token(trange, DB_TO_TOKEN_TYPE[row.type], token)

        self.add_extra_tokens(query, parts)
        self.rerank_tokens(query, parts)

        log().table_dump('Word tokens', _dump_word_tokens(query))

        return query

    def normalize_text(self, text: str) -> str:
        """ Bring the given text into a normalized form. That is the
            standardized form search will work with. All information removed
            at this stage is inevitably lost.
        """
        return cast(str, self.normalizer.transliterate(text))

    def split_query(self, query: qmod.QueryStruct) -> Tuple[QueryParts, WordDict]:
        """ Transliterate the phrases and split them into tokens.

            Returns the list of transliterated tokens together with their
            normalized form and a dictionary of words for lookup together
            with their position.
        """
        parts: QueryParts = []
        phrase_start = 0
        words: WordDict = defaultdict(list)
        wordnr = 0
        for phrase in query.source:
            query.nodes[-1].ptype = phrase.ptype
            # Capturing group keeps the break characters in the result list.
            phrase_split = re.split('([ :-])', phrase.text)
            # The zip construct will give us the pairs of word/break from
            # the regular expression split. As the split array ends on the
            # final word, we simply use the fillvalue to even out the list and
            # add the phrase break at the end.
            for word, breakchar in zip_longest(*[iter(phrase_split)]*2, fillvalue=','):
                if not word:
                    continue
                trans = self.transliterator.transliterate(word)
                if trans:
                    # Transliteration may split a word into multiple terms.
                    for term in trans.split(' '):
                        if term:
                            parts.append(QueryPart(term, word, wordnr,
                                         PENALTY_IN_TOKEN_BREAK[qmod.BreakType.TOKEN]))
                            query.add_node(qmod.BreakType.TOKEN, phrase.ptype)
                    # The last term of the word gets the real break type.
                    query.nodes[-1].btype = qmod.BreakType(breakchar)
                    parts[-1].penalty = PENALTY_IN_TOKEN_BREAK[qmod.BreakType(breakchar)]
                wordnr += 1

            for word, wrange in yield_words(parts, phrase_start):
                words[word].append(wrange)

            phrase_start = len(parts)
        query.nodes[-1].btype = qmod.BreakType.END

        return parts, words

    async def lookup_in_db(self, words: List[str]) -> 'sa.Result[Any]':
        """ Return the token information from the database for the
            given word tokens.
        """
        t = self.conn.t.meta.tables['word']
        return await self.conn.execute(t.select().where(t.c.word_token.in_(words)))

    def add_extra_tokens(self, query: qmod.QueryStruct, parts: QueryParts) -> None:
        """ Add tokens to query that are not saved in the database.

            Currently adds synthetic housenumber tokens for all-digit
            terms of up to four digits.
        """
        # range() provides the node index for the token range.
        for part, node, i in zip(parts, query.nodes, range(1000)):
            if len(part.token) <= 4 and part.token.isdigit()\
               and not node.has_tokens(i+1, qmod.TokenType.HOUSENUMBER):
                query.add_token(qmod.TokenRange(i, i+1), qmod.TokenType.HOUSENUMBER,
                                ICUToken(penalty=0.5, token=0,
                                         count=1, addr_count=1, lookup_word=part.token,
                                         word_token=part.token, info=None))

    def rerank_tokens(self, query: qmod.QueryStruct, parts: QueryParts) -> None:
        """ Add penalties to tokens that depend on presence of other token.
        """
        for i, node, tlist in query.iter_token_lists():
            if tlist.ttype == qmod.TokenType.POSTCODE:
                # Penalise other interpretations of the same range,
                # except short housenumbers.
                for repl in node.starting:
                    if repl.end == tlist.end and repl.ttype != qmod.TokenType.POSTCODE \
                       and (repl.ttype != qmod.TokenType.HOUSENUMBER
                            or len(tlist.tokens[0].lookup_word) > 4):
                        repl.add_penalty(0.39)
            elif (tlist.ttype == qmod.TokenType.HOUSENUMBER
                  and len(tlist.tokens[0].lookup_word) <= 3):
                # Short housenumbers with digits take precedence over other
                # interpretations of the same range.
                if any(c.isdigit() for c in tlist.tokens[0].lookup_word):
                    for repl in node.starting:
                        if repl.end == tlist.end and repl.ttype != qmod.TokenType.HOUSENUMBER:
                            repl.add_penalty(0.5 - tlist.tokens[0].penalty)
            elif tlist.ttype not in (qmod.TokenType.COUNTRY, qmod.TokenType.PARTIAL):
                # Rebuild the normalized form of the span and check how well
                # each token actually matches it.
                norm = parts[i].normalized
                for j in range(i + 1, tlist.end):
                    if parts[j - 1].word_number != parts[j].word_number:
                        norm += ' ' + parts[j].normalized
                for token in tlist.tokens:
                    cast(ICUToken, token).rematch(norm)
+
+
def _dump_transliterated(query: qmod.QueryStruct, parts: QueryParts) -> str:
    """ Render the transliterated query as a single debug string with
        the break characters interleaved between the terms.
    """
    pieces = [part.token + node.btype.value
              for node, part in zip(query.nodes[1:], parts)]
    return query.nodes[0].btype.value + ''.join(pieces)
+
+
def _dump_word_tokens(query: qmod.QueryStruct) -> Iterator[List[Any]]:
    """ Generate the rows of a debug table listing all tokens of the
        query. The first row yielded is the header.
    """
    yield ['type', 'token', 'word_token', 'lookup_word', 'penalty', 'count', 'info']
    for node in query.nodes:
        for tlist in node.starting:
            for token in tlist.tokens:
                icu_token = cast(ICUToken, token)
                yield [tlist.ttype.name, icu_token.token,
                       icu_token.word_token or '',
                       icu_token.lookup_word or '',
                       icu_token.penalty, icu_token.count, icu_token.info]
+
+
async def create_query_analyzer(conn: SearchConnection) -> AbstractQueryAnalyzer:
    """ Create and set up a new query analyzer for a database based
        on the ICU tokenizer.
    """
    analyzer = ICUQueryAnalyzer(conn)
    await analyzer.setup()

    return analyzer
--- /dev/null
+# SPDX-License-Identifier: GPL-3.0-or-later
+#
+# This file is part of Nominatim. (https://nominatim.org)
+#
+# Copyright (C) 2024 by the Nominatim developer community.
+# For a full list of authors see the git log.
+"""
+Datastructures for a tokenized query.
+"""
+from typing import List, Tuple, Optional, Iterator
+from abc import ABC, abstractmethod
+import dataclasses
+import enum
+
+
class BreakType(enum.Enum):
    """ Type of break between tokens.

        The enum value is the single character used to encode the break
        (tokenizers construct members from it via 'BreakType(char)').
    """
    START = '<'
    """ Begin of the query. """
    END = '>'
    """ End of the query. """
    PHRASE = ','
    """ Hard break between two phrases. Address parts cannot cross hard
        phrase boundaries."""
    SOFT_PHRASE = ':'
    """ Likely break between two phrases. Address parts should not cross soft
        phrase boundaries. Soft breaks can be inserted by a preprocessor
        that is analysing the input string.
    """
    WORD = ' '
    """ Break between words. """
    PART = '-'
    """ Break inside a word, for example a hyphen or apostrophe. """
    TOKEN = '`'
    """ Break created as a result of tokenization.
        This may happen in languages without spaces between words.
    """
+
+
class TokenType(enum.Enum):
    """ Type of token.
    """
    WORD = enum.auto()
    """ Full name of a place. """
    PARTIAL = enum.auto()
    """ Word term without breaks, does not necessarily represent a full name. """
    HOUSENUMBER = enum.auto()
    """ Housenumber term. """
    POSTCODE = enum.auto()
    """ Postal code term. """
    COUNTRY = enum.auto()
    """ Country name or reference. """
    QUALIFIER = enum.auto()
    """ Special term used together with name (e.g. _Hotel_ Bellevue). """
    NEAR_ITEM = enum.auto()
    """ Special term used as searchable object (e.g. supermarket in ...). """
+
+
class PhraseType(enum.Enum):
    """ Designation of a phrase.
    """
    NONE = 0
    """ No specific designation (i.e. source is free-form query). """
    AMENITY = enum.auto()
    """ Contains name or type of a POI. """
    STREET = enum.auto()
    """ Contains a street name optionally with a housenumber. """
    CITY = enum.auto()
    """ Contains the postal city. """
    COUNTY = enum.auto()
    """ Contains the equivalent of a county. """
    STATE = enum.auto()
    """ Contains a state or province. """
    POSTCODE = enum.auto()
    """ Contains a postal code. """
    COUNTRY = enum.auto()
    """ Contains the country name or code. """

    def compatible_with(self, ttype: TokenType,
                        is_full_phrase: bool) -> bool:
        """ Check if the given token type can be used with the phrase type.

            'is_full_phrase' must be True when the token covers the
            complete phrase.
        """
        if self == PhraseType.NONE:
            # Free-form phrases take anything except a qualifier spanning
            # the complete phrase.
            return not is_full_phrase or ttype != TokenType.QUALIFIER
        if self == PhraseType.AMENITY:
            # Near-items must cover the whole phrase, qualifiers must not.
            return ttype in (TokenType.WORD, TokenType.PARTIAL)\
                   or (is_full_phrase and ttype == TokenType.NEAR_ITEM)\
                   or (not is_full_phrase and ttype == TokenType.QUALIFIER)
        if self == PhraseType.STREET:
            return ttype in (TokenType.WORD, TokenType.PARTIAL, TokenType.HOUSENUMBER)
        if self == PhraseType.POSTCODE:
            return ttype == TokenType.POSTCODE
        if self == PhraseType.COUNTRY:
            return ttype == TokenType.COUNTRY

        # Remaining phrase types (CITY, COUNTY, STATE) take name tokens only.
        return ttype in (TokenType.WORD, TokenType.PARTIAL)
+
+
@dataclasses.dataclass
class Token(ABC):
    """ Base type for tokens.
        Specific query analyzers must implement the concrete token class.
    """

    # Penalty for using this token in a search (lower is better).
    penalty: float
    # Internal ID of the token.
    token: int
    # Token frequency used for ranking — NOTE(review): assumed >= 1 by
    # implementations; confirm against the concrete analyzer.
    count: int
    # Frequency of the token in address context.
    addr_count: int
    # Word the token can be looked up under.
    lookup_word: str

    @abstractmethod
    def get_category(self) -> Tuple[str, str]:
        """ Return the category restriction for qualifier terms and
            category objects.
        """
+
+
@dataclasses.dataclass
class TokenRange:
    """ Indexes of query nodes over which a token spans.

        Ranges are only partially ordered: one range is 'less' than
        another when it lies completely before it. <= and >= are
        deliberately undefined.
    """
    start: int
    end: int
    penalty: Optional[float] = None

    def __lt__(self, other: 'TokenRange') -> bool:
        # True when this range ends before (or exactly where) the other starts.
        return self.end <= other.start

    def __le__(self, other: 'TokenRange') -> bool:
        # Only strict before/after comparisons are meaningful for spans.
        return NotImplemented

    def __gt__(self, other: 'TokenRange') -> bool:
        # True when this range starts after (or exactly where) the other ends.
        return self.start >= other.end

    def __ge__(self, other: 'TokenRange') -> bool:
        # Only strict before/after comparisons are meaningful for spans.
        return NotImplemented

    def replace_start(self, new_start: int) -> 'TokenRange':
        """ Return a new token range with the new start.
        """
        return TokenRange(new_start, self.end)

    def replace_end(self, new_end: int) -> 'TokenRange':
        """ Return a new token range with the new end.
        """
        return TokenRange(self.start, new_end)

    def split(self, index: int) -> Tuple['TokenRange', 'TokenRange']:
        """ Split the span into two spans at the given index.
            The index must be within the span.
        """
        return self.replace_end(index), self.replace_start(index)
+
+
@dataclasses.dataclass
class TokenList:
    """ List of all tokens of a given type going from one breakpoint to another.
    """
    # Index of the node where the tokens end (the start node owns the list).
    end: int
    # Type shared by all tokens in the list.
    ttype: TokenType
    # The actual tokens.
    tokens: List[Token]

    def add_penalty(self, penalty: float) -> None:
        """ Add the given penalty to all tokens in the list.
        """
        for token in self.tokens:
            token.penalty += penalty
+
+
@dataclasses.dataclass
class QueryNode:
    """ A node of the query representing a break between terms.
    """
    btype: BreakType
    ptype: PhraseType
    starting: List[TokenList] = dataclasses.field(default_factory=list)

    def has_tokens(self, end: int, *ttypes: TokenType) -> bool:
        """ Return True when at least one token list of one of the given
            types spans from this node to the node 'end'.
        """
        return any(tl.ttype in ttypes and tl.end == end for tl in self.starting)

    def get_tokens(self, end: int, ttype: TokenType) -> Optional[List[Token]]:
        """ Return the tokens of the given type spanning from this node
            to the node 'end', or None when no such tokens exist.
        """
        return next((tl.tokens for tl in self.starting
                     if tl.end == end and tl.ttype == ttype), None)
+
+
@dataclasses.dataclass
class Phrase:
    """ A normalized query part. Phrases may be typed which means that
        they then represent a specific part of the address.
    """
    # Designation of the phrase (NONE for free-form queries).
    ptype: PhraseType
    # Normalized text of the phrase.
    text: str
+
+
class QueryStruct:
    """ A tokenized search query together with the normalized source
        from which the tokens have been parsed.

        The query contains a list of nodes that represent the breaks
        between words. Tokens span between nodes, which don't necessarily
        need to be direct neighbours. Thus the query is represented as a
        directed acyclic graph.

        When created, a query contains a single node: the start of the
        query. Further nodes can be added by appending to 'nodes'.
    """

    def __init__(self, source: List[Phrase]) -> None:
        # The normalized phrases the query was built from.
        self.source = source
        # The start node takes the phrase type of the first phrase.
        self.nodes: List[QueryNode] = \
            [QueryNode(BreakType.START, source[0].ptype if source else PhraseType.NONE)]

    def num_token_slots(self) -> int:
        """ Return the length of the query in vertex steps.
        """
        return len(self.nodes) - 1

    def add_node(self, btype: BreakType, ptype: PhraseType) -> None:
        """ Append a new break node with the given break type.
            The phrase type denotes the type for any tokens starting
            at the node.
        """
        self.nodes.append(QueryNode(btype, ptype))

    def add_token(self, trange: TokenRange, ttype: TokenType, token: Token) -> None:
        """ Add a token to the query. 'start' and 'end' are the indexes of the
            nodes from which to which the token spans. The indexes must exist
            and are expected to be in the same phrase.
            'ttype' denotes the type of the token and 'token' the token to
            be inserted.

            If the token type is not compatible with the phrase it should
            be added to, then the token is silently dropped.
        """
        snode = self.nodes[trange.start]
        # The token covers a complete phrase when it both starts and ends
        # at a phrase boundary.
        full_phrase = snode.btype in (BreakType.START, BreakType.PHRASE)\
            and self.nodes[trange.end].btype in (BreakType.PHRASE, BreakType.END)
        if snode.ptype.compatible_with(ttype, full_phrase):
            tlist = snode.get_tokens(trange.end, ttype)
            if tlist is None:
                snode.starting.append(TokenList(trange.end, ttype, [token]))
            else:
                tlist.append(token)

    def get_tokens(self, trange: TokenRange, ttype: TokenType) -> List[Token]:
        """ Get the list of tokens of a given type, spanning the given
            nodes. The nodes must exist. If no tokens exist, an
            empty list is returned.
        """
        return self.nodes[trange.start].get_tokens(trange.end, ttype) or []

    def get_partials_list(self, trange: TokenRange) -> List[Token]:
        """ Create a list of partial tokens between the given nodes.
            The list is composed of the first token of type PARTIAL
            going to the subsequent node. Such PARTIAL tokens are
            assumed to exist.
        """
        return [next(iter(self.get_tokens(TokenRange(i, i+1), TokenType.PARTIAL)))
                for i in range(trange.start, trange.end)]

    def iter_token_lists(self) -> Iterator[Tuple[int, QueryNode, TokenList]]:
        """ Iterator over all token lists in the query.
            Yields tuples of (start node index, node, token list).
        """
        for i, node in enumerate(self.nodes):
            for tlist in node.starting:
                yield i, node, tlist

    def find_lookup_word_by_id(self, token: int) -> str:
        """ Find the first token with the given token ID and return
            its lookup word. Returns the string 'None' if no such token
            exists. The function is very slow and must only be used for
            debugging.
        """
        for node in self.nodes:
            for tlist in node.starting:
                for t in tlist.tokens:
                    if t.token == token:
                        return f"[{tlist.ttype.name[0]}]{t.lookup_word}"
        return 'None'
--- /dev/null
+# SPDX-License-Identifier: GPL-3.0-or-later
+#
+# This file is part of Nominatim. (https://nominatim.org)
+#
+# Copyright (C) 2024 by the Nominatim developer community.
+# For a full list of authors see the git log.
+"""
+Factory for creating a query analyzer for the configured tokenizer.
+"""
+from typing import List, cast, TYPE_CHECKING
+from abc import ABC, abstractmethod
+from pathlib import Path
+import importlib
+
+from ..logging import log
+from ..connection import SearchConnection
+
+if TYPE_CHECKING:
+ from .query import Phrase, QueryStruct
+
+
class AbstractQueryAnalyzer(ABC):
    """ Class for analysing incoming queries.

        Query analyzers are tied to the tokenizer used on import.
        Concrete instances are created by make_query_analyzer() via the
        tokenizer module's 'create_query_analyzer' function.
    """

    @abstractmethod
    async def analyze_query(self, phrases: List['Phrase']) -> 'QueryStruct':
        """ Analyze the given phrases and return the tokenized query.
        """

    @abstractmethod
    def normalize_text(self, text: str) -> str:
        """ Bring the given text into a normalized form. That is the
            standardized form search will work with. All information removed
            at this stage is inevitably lost.
        """
+
+
async def make_query_analyzer(conn: SearchConnection) -> AbstractQueryAnalyzer:
    """ Create a query analyzer for the tokenizer used by the database.

        Raises a RuntimeError when no tokenizer module matching the
        database property exists.
    """
    name = await conn.get_property('tokenizer')

    if not (Path(__file__).parent / f'{name}_tokenizer.py').is_file():
        log().comment(f"No tokenizer named '{name}' available. Database not set up properly.")
        raise RuntimeError('Tokenizer not found')

    module = importlib.import_module(f'nominatim_api.search.{name}_tokenizer')

    return cast(AbstractQueryAnalyzer, await module.create_query_analyzer(conn))
--- /dev/null
+# SPDX-License-Identifier: GPL-3.0-or-later
+#
+# This file is part of Nominatim. (https://nominatim.org)
+#
+# Copyright (C) 2024 by the Nominatim developer community.
+# For a full list of authors see the git log.
+"""
+Create query interpretations where each vertice in the query is assigned
+a specific function (expressed as a token type).
+"""
+from typing import Optional, List, Iterator
+import dataclasses
+
+from ..logging import log
+from . import query as qmod
+
+
@dataclasses.dataclass
class TypedRange:
    """ A token range for a specific type of tokens.
    """
    # Token type assigned to the range.
    ttype: qmod.TokenType
    # The range of query nodes covered.
    trange: qmod.TokenRange
+
+
# Penalty applied when a new token range is started after a break of the
# given type (see _TokenSequence.advance()).
PENALTY_TOKENCHANGE = {
    qmod.BreakType.START: 0.0,
    qmod.BreakType.END: 0.0,
    qmod.BreakType.PHRASE: 0.0,
    qmod.BreakType.SOFT_PHRASE: 0.0,
    qmod.BreakType.WORD: 0.1,
    qmod.BreakType.PART: 0.2,
    qmod.BreakType.TOKEN: 0.4
}

# A sequence of typed token ranges describing one query interpretation.
TypedRangeSeq = List[TypedRange]
+
+
@dataclasses.dataclass
class TokenAssignment:
    """ Representation of a possible assignment of token types
        to the tokens in a tokenized query.
    """
    penalty: float = 0.0
    name: Optional[qmod.TokenRange] = None
    address: List[qmod.TokenRange] = dataclasses.field(default_factory=list)
    housenumber: Optional[qmod.TokenRange] = None
    postcode: Optional[qmod.TokenRange] = None
    country: Optional[qmod.TokenRange] = None
    near_item: Optional[qmod.TokenRange] = None
    qualifier: Optional[qmod.TokenRange] = None

    @staticmethod
    def from_ranges(ranges: TypedRangeSeq) -> 'TokenAssignment':
        """ Create a new token assignment from a sequence of typed spans.

            PARTIAL spans are collected as address parts; every other
            known token type is stored in its dedicated field.
        """
        field_for_type = {qmod.TokenType.HOUSENUMBER: 'housenumber',
                          qmod.TokenType.POSTCODE: 'postcode',
                          qmod.TokenType.COUNTRY: 'country',
                          qmod.TokenType.NEAR_ITEM: 'near_item',
                          qmod.TokenType.QUALIFIER: 'qualifier'}

        out = TokenAssignment()
        for token in ranges:
            if token.ttype == qmod.TokenType.PARTIAL:
                out.address.append(token.trange)
            elif token.ttype in field_for_type:
                setattr(out, field_for_type[token.ttype], token.trange)
        return out
+
+
+class _TokenSequence:
+ """ Working state used to put together the token assignments.
+
+ Represents an intermediate state while traversing the tokenized
+ query.
+ """
    def __init__(self, seq: TypedRangeSeq,
                 direction: int = 0, penalty: float = 0.0) -> None:
        """ Create a new sequence over the given typed ranges with the
            given reading direction (-1: right-to-left, 0: undecided,
            1: left-to-right) and accumulated penalty.
        """
        self.seq = seq
        self.direction = direction
        self.penalty = penalty
+
+ def __str__(self) -> str:
+ seq = ''.join(f'[{r.trange.start} - {r.trange.end}: {r.ttype.name}]' for r in self.seq)
+ return f'{seq} (dir: {self.direction}, penalty: {self.penalty})'
+
+ @property
+ def end_pos(self) -> int:
+ """ Return the index of the global end of the current sequence.
+ """
+ return self.seq[-1].trange.end if self.seq else 0
+
+ def has_types(self, *ttypes: qmod.TokenType) -> bool:
+ """ Check if the current sequence contains any typed ranges of
+ the given types.
+ """
+ return any(s.ttype in ttypes for s in self.seq)
+
+ def is_final(self) -> bool:
+ """ Return true when the sequence cannot be extended by any
+ form of token anymore.
+ """
+ # Country and category must be the final term for left-to-right
+ return len(self.seq) > 1 and \
+ self.seq[-1].ttype in (qmod.TokenType.COUNTRY, qmod.TokenType.NEAR_ITEM)
+
    def appendable(self, ttype: qmod.TokenType) -> Optional[int]:
        """ Check if the give token type is appendable to the existing sequence.

            Returns None if the token type is not appendable, otherwise the
            new direction of the sequence after adding such a type. The
            token is not added.
        """
        # Full-word tokens are never part of an assignment sequence.
        if ttype == qmod.TokenType.WORD:
            return None

        if not self.seq:
            # Append unconditionally to the empty list
            if ttype == qmod.TokenType.COUNTRY:
                return -1
            if ttype in (qmod.TokenType.HOUSENUMBER, qmod.TokenType.QUALIFIER):
                return 1
            return self.direction

        # Name tokens are always acceptable and don't change direction
        if ttype == qmod.TokenType.PARTIAL:
            # qualifiers cannot appear in the middle of the query. They need
            # to be near the next phrase.
            if self.direction == -1 \
               and any(t.ttype == qmod.TokenType.QUALIFIER for t in self.seq[:-1]):
                return None
            return self.direction

        # Other tokens may only appear once
        if self.has_types(ttype):
            return None

        if ttype == qmod.TokenType.HOUSENUMBER:
            if self.direction == 1:
                if len(self.seq) == 1 and self.seq[0].ttype == qmod.TokenType.QUALIFIER:
                    return None
                if len(self.seq) > 2 \
                   or self.has_types(qmod.TokenType.POSTCODE, qmod.TokenType.COUNTRY):
                    return None  # direction left-to-right: housenumber must come before anything
            elif (self.direction == -1
                  or self.has_types(qmod.TokenType.POSTCODE, qmod.TokenType.COUNTRY)):
                return -1  # force direction right-to-left if after other terms

            return self.direction

        if ttype == qmod.TokenType.POSTCODE:
            if self.direction == -1:
                # Right-to-left: postcode may not follow housenumber/qualifier.
                if self.has_types(qmod.TokenType.HOUSENUMBER, qmod.TokenType.QUALIFIER):
                    return None
                return -1
            if self.direction == 1:
                # Left-to-right: postcode must come before the country.
                return None if self.has_types(qmod.TokenType.COUNTRY) else 1
            if self.has_types(qmod.TokenType.HOUSENUMBER, qmod.TokenType.QUALIFIER):
                return 1
            return self.direction

        if ttype == qmod.TokenType.COUNTRY:
            # Country tokens force left-to-right reading.
            return None if self.direction == -1 else 1

        if ttype == qmod.TokenType.NEAR_ITEM:
            return self.direction

        if ttype == qmod.TokenType.QUALIFIER:
            if self.direction == 1:
                # Left-to-right: qualifier only directly after the start
                # or after a leading near-item (+ optional name part).
                if (len(self.seq) == 1
                        and self.seq[0].ttype in (qmod.TokenType.PARTIAL, qmod.TokenType.NEAR_ITEM)) \
                   or (len(self.seq) == 2
                       and self.seq[0].ttype == qmod.TokenType.NEAR_ITEM
                       and self.seq[1].ttype == qmod.TokenType.PARTIAL):
                    return 1
                return None
            if self.direction == -1:
                return -1

            # Undecided direction: ignore a leading near-item for the check.
            tempseq = self.seq[1:] if self.seq[0].ttype == qmod.TokenType.NEAR_ITEM else self.seq
            if len(tempseq) == 0:
                return 1
            if len(tempseq) == 1 and self.seq[0].ttype == qmod.TokenType.HOUSENUMBER:
                return None
            if len(tempseq) > 1 or self.has_types(qmod.TokenType.POSTCODE, qmod.TokenType.COUNTRY):
                return -1
            return 0

        return None
+
    def advance(self, ttype: qmod.TokenType, end_pos: int,
                btype: qmod.BreakType) -> Optional['_TokenSequence']:
        """ Return a new token sequence state with the given token type
            extended.

            Returns None when a token of the given type cannot be
            appended (see appendable()). The current state is left
            unmodified.
        """
        newdir = self.appendable(ttype)
        if newdir is None:
            return None

        if not self.seq:
            newseq = [TypedRange(ttype, qmod.TokenRange(0, end_pos))]
            new_penalty = 0.0
        else:
            last = self.seq[-1]
            if btype != qmod.BreakType.PHRASE and last.ttype == ttype:
                # extend the existing range
                newseq = self.seq[:-1] + [TypedRange(ttype, last.trange.replace_end(end_pos))]
                new_penalty = 0.0
            else:
                # start a new range
                newseq = list(self.seq) + [TypedRange(ttype,
                                                      qmod.TokenRange(last.trange.end, end_pos))]
                new_penalty = PENALTY_TOKENCHANGE[btype]

        return _TokenSequence(newseq, newdir, self.penalty + new_penalty)
+
+ def _adapt_penalty_from_priors(self, priors: int, new_dir: int) -> bool:
+ if priors >= 2:
+ if self.direction == 0:
+ self.direction = new_dir
+ else:
+ if priors == 2:
+ self.penalty += 0.8
+ else:
+ return False
+
+ return True
+
    def recheck_sequence(self) -> bool:
        """ Check that the sequence is a fully valid token assignment
            and adapt direction and penalties further if necessary.

            This function catches some impossible assignments that need
            forward context and can therefore not be excluded when building
            the assignment.
        """
        # housenumbers may not be further than 2 words from the beginning.
        # If there are two words in front, give it a penalty.
        hnrpos = next((i for i, tr in enumerate(self.seq)
                       if tr.ttype == qmod.TokenType.HOUSENUMBER),
                      None)
        if hnrpos is not None:
            if self.direction != -1:
                priors = sum(1 for t in self.seq[:hnrpos] if t.ttype == qmod.TokenType.PARTIAL)
                if not self._adapt_penalty_from_priors(priors, -1):
                    return False
            if self.direction != 1:
                priors = sum(1 for t in self.seq[hnrpos+1:] if t.ttype == qmod.TokenType.PARTIAL)
                if not self._adapt_penalty_from_priors(priors, 1):
                    return False
            if any(t.ttype == qmod.TokenType.NEAR_ITEM for t in self.seq):
                # A housenumber combined with a near-item search is unlikely.
                self.penalty += 1.0

        return True
+
    def _get_assignments_postcode(self, base: TokenAssignment,
                                  query_len: int) -> Iterator[TokenAssignment]:
        """ Yield possible assignments of Postcode searches with an
            address component.

            Only postcodes at the very beginning or very end of the
            query can carry an address.
        """
        assert base.postcode is not None

        if (base.postcode.start == 0 and self.direction != -1)\
           or (base.postcode.end == query_len and self.direction != 1):
            log().comment('postcode search')
            # <address>,<postcode> should give preference to address search
            if base.postcode.start == 0:
                penalty = self.penalty
                self.direction = -1  # name searches are only possible backwards
            else:
                penalty = self.penalty + 0.1
                self.direction = 1  # name searches are only possible forwards
            yield dataclasses.replace(base, penalty=penalty)
+
    def _get_assignments_address_forward(self, base: TokenAssignment,
                                         query: qmod.QueryStruct) -> Iterator[TokenAssignment]:
        """ Yield possible assignments of address searches with
            left-to-right reading.

            The name is taken from the first address part; additionally
            splits of that first part into name + address are tried.
        """
        first = base.address[0]

        log().comment('first word = name')
        yield dataclasses.replace(base, penalty=self.penalty,
                                  name=first, address=base.address[1:])

        # To paraphrase:
        #  * if another name term comes after the first one and before the
        #    housenumber
        #  * a qualifier comes after the name
        #  * the containing phrase is strictly typed
        if (base.housenumber and first.end < base.housenumber.start)\
           or (base.qualifier and base.qualifier > first)\
           or (query.nodes[first.start].ptype != qmod.PhraseType.NONE):
            return

        penalty = self.penalty

        # Penalty for:
        #  * <name>, <street>, <housenumber> , ...
        #  * queries that are comma-separated
        if (base.housenumber and base.housenumber > first) or len(query.source) > 1:
            penalty += 0.25

        for i in range(first.start + 1, first.end):
            name, addr = first.split(i)
            log().comment(f'split first word = name ({i - first.start})')
            yield dataclasses.replace(base, name=name, address=[addr] + base.address[1:],
                                      penalty=penalty + PENALTY_TOKENCHANGE[query.nodes[i].btype])
+
def _get_assignments_address_backward(self, base: TokenAssignment,
                                      query: qmod.QueryStruct) -> Iterator[TokenAssignment]:
    """ Yield possible assignments of address searches with
        right-to-left reading.
    """
    tail = base.address[-1]

    if self.direction == -1 or len(base.address) > 1:
        log().comment('last word = name')
        yield dataclasses.replace(base, penalty=self.penalty,
                                  name=tail, address=base.address[:-1])

    # Splitting the last word is not allowed when:
    # * another name term comes before it but after the housenumber, or
    # * a qualifier comes before the name, or
    # * the containing phrase is strictly typed.
    if (base.housenumber and tail.start > base.housenumber.end) \
       or (base.qualifier and base.qualifier < tail) \
       or (query.nodes[tail.start].ptype != qmod.PhraseType.NONE):
        return

    split_penalty = self.penalty
    if base.housenumber and base.housenumber < tail:
        split_penalty += 0.4
    if len(query.source) > 1:
        # Penalty for comma-separated queries.
        split_penalty += 0.25

    for pos in range(tail.start + 1, tail.end):
        addr_part, name_part = tail.split(pos)
        log().comment(f'split last word = name ({pos - tail.start})')
        yield dataclasses.replace(base, name=name_part,
                                  address=base.address[:-1] + [addr_part],
                                  penalty=split_penalty
                                  + PENALTY_TOKENCHANGE[query.nodes[pos].btype])
+
def get_assignments(self, query: qmod.QueryStruct) -> Iterator[TokenAssignment]:
    """ Yield possible assignments for the current sequence.

        This function splits up general name assignments into name
        and address and yields all possible variants of that.
    """
    base = TokenAssignment.from_ranges(self.seq)

    # Cap the total number of address tokens to keep the search space small.
    if sum(t.end - t.start for t in base.address) > 50:
        return

    # Postcode search (postcode-only search is covered below)
    if base.postcode is not None and base.address:
        yield from self._get_assignments_postcode(base, query.num_token_slots())

    if base.address:
        # <postcode>,<address> should give preference to postcode search
        if base.postcode and base.postcode.start == 0:
            self.penalty += 0.1

        # Left-to-right reading of the address
        # (note: '_forward' reads left-to-right, see its docstring)
        if self.direction != -1:
            yield from self._get_assignments_address_forward(base, query)

        # Right-to-left reading of the address
        if self.direction != 1:
            yield from self._get_assignments_address_backward(base, query)

        # Variant for special housenumber searches.
        if base.housenumber and not base.qualifier:
            yield dataclasses.replace(base, penalty=self.penalty)
    elif not base.housenumber and (base.postcode or base.country or base.near_item):
        # Postcode or country-only search.
        log().comment('postcode/country search')
        yield dataclasses.replace(base, penalty=self.penalty)
+
+
def yield_token_assignments(query: qmod.QueryStruct) -> Iterator[TokenAssignment]:
    """ Return possible word type assignments to word positions.

        The assignments are computed from the concrete tokens listed
        in the tokenized query.

        The result includes the penalty for transitions from one word type to
        another. It does not include penalties for transitions within a
        type.
    """
    # A strictly typed first phrase forces forward reading from the start.
    initial_direction = 0 if query.source[0].ptype == qmod.PhraseType.NONE else 1
    stack = [_TokenSequence([], direction=initial_direction)]

    # Depth-first exploration of all partial token sequences.
    while stack:
        seq = stack.pop()
        node = query.nodes[seq.end_pos]

        for tlist in node.starting:
            successor = seq.advance(tlist.ttype, tlist.end, node.btype)
            if successor is None:
                continue
            if successor.end_pos == query.num_token_slots():
                # Sequence covers the full query: emit it if still valid.
                if successor.recheck_sequence():
                    log().var_dump('Assignment', successor)
                    yield from successor.get_assignments(query)
            elif not successor.is_final():
                stack.append(successor)
--- /dev/null
+# SPDX-License-Identifier: GPL-3.0-or-later
+#
+# This file is part of Nominatim. (https://nominatim.org)
+#
+# Copyright (C) 2024 by the Nominatim developer community.
+# For a full list of authors see the git log.
+"""
+Base abstraction for implementing based on different ASGI frameworks.
+"""
+from typing import Optional, Any, NoReturn, Callable
+import abc
+import math
+
+from ..config import Configuration
+from ..core import NominatimAPIAsync
+from ..result_formatting import FormatDispatcher
+from .content_types import CONTENT_TEXT
+
+
class ASGIAdaptor(abc.ABC):
    """ Adapter class for the different ASGI frameworks.
        Wraps functionality over concrete requests and responses.
    """
    # Content type used when building responses and error messages.
    content_type: str = CONTENT_TEXT

    @abc.abstractmethod
    def get(self, name: str, default: Optional[str] = None) -> Optional[str]:
        """ Return an input parameter as a string or 'default' when the
            parameter was not provided.
        """

    @abc.abstractmethod
    def get_header(self, name: str, default: Optional[str] = None) -> Optional[str]:
        """ Return an HTTP header as a string or 'default' when the
            header was not provided.
        """

    @abc.abstractmethod
    def error(self, msg: str, status: int = 400) -> Exception:
        """ Construct an appropriate exception from the given error message.
            The exception must result in an HTTP error with the given status.
        """

    @abc.abstractmethod
    def create_response(self, status: int, output: str, num_results: int) -> Any:
        """ Create a response from the given parameters. The result will
            be returned by the endpoint functions. The adaptor may also
            return None when the response is created internally with some
            different means.

            The response must return the given HTTP status code 'status',
            set the HTTP content-type header to the string provided and
            the body of the response to 'output'.
        """

    @abc.abstractmethod
    def base_uri(self) -> str:
        """ Return the URI of the original request.
        """

    @abc.abstractmethod
    def config(self) -> Configuration:
        """ Return the current configuration object.
        """

    @abc.abstractmethod
    def formatting(self) -> FormatDispatcher:
        """ Return the formatting object to use.
        """

    def get_int(self, name: str, default: Optional[int] = None) -> int:
        """ Return an input parameter as an int. Raises an exception when
            the parameter is present but not in an integer format.

            When the parameter is missing, 'default' is returned if given,
            otherwise an error is raised.
        """
        raw = self.get(name)

        if raw is None:
            if default is None:
                self.raise_error(f"Parameter '{name}' missing.")
            return default

        try:
            return int(raw)
        except ValueError:
            self.raise_error(f"Parameter '{name}' must be a number.")

    def get_float(self, name: str, default: Optional[float] = None) -> float:
        """ Return an input parameter as a floating-point number. Raises
            an exception when the parameter is present but not in a float
            format. NaN and infinity are rejected as well.

            When the parameter is missing, 'default' is returned if given,
            otherwise an error is raised.
        """
        raw = self.get(name)

        if raw is None:
            if default is None:
                self.raise_error(f"Parameter '{name}' missing.")
            return default

        try:
            result = float(raw)
        except ValueError:
            self.raise_error(f"Parameter '{name}' must be a number.")

        if math.isnan(result) or math.isinf(result):
            self.raise_error(f"Parameter '{name}' must be a number.")

        return result

    def get_bool(self, name: str, default: Optional[bool] = None) -> bool:
        """ Return an input parameter as bool. Only the string '0' is
            accepted as 'false'; all other inputs are interpreted as 'true'.

            When the parameter is missing, 'default' is returned if given,
            otherwise an error is raised.
        """
        raw = self.get(name)

        if raw is None:
            if default is None:
                self.raise_error(f"Parameter '{name}' missing.")
            return default

        return raw != '0'

    def raise_error(self, msg: str, status: int = 400) -> NoReturn:
        """ Raise an exception resulting in the given HTTP status and
            message. The message is formatted according to the output
            format chosen by the request.
        """
        formatted = self.formatting().format_error(self.content_type, msg, status)
        raise self.error(formatted, status)
+
+
+EndpointFunc = Callable[[NominatimAPIAsync, ASGIAdaptor], Any]
--- /dev/null
+# SPDX-License-Identifier: GPL-3.0-or-later
+#
+# This file is part of Nominatim. (https://nominatim.org)
+#
+# Copyright (C) 2024 by the Nominatim developer community.
+# For a full list of authors see the git log.
+"""
+Constants for various content types for server responses.
+"""
+
# Content-type header values (with charset) used for server responses.
CONTENT_TEXT = 'text/plain; charset=utf-8'
CONTENT_XML = 'text/xml; charset=utf-8'
CONTENT_HTML = 'text/html; charset=utf-8'
CONTENT_JSON = 'application/json; charset=utf-8'
--- /dev/null
+# SPDX-License-Identifier: GPL-3.0-or-later
+#
+# This file is part of Nominatim. (https://nominatim.org)
+#
+# Copyright (C) 2024 by the Nominatim developer community.
+# For a full list of authors see the git log.
+"""
+Server implementation using the falcon webserver framework.
+"""
+from typing import Optional, Mapping, Any, List
+from pathlib import Path
+import datetime as dt
+import asyncio
+
+from falcon.asgi import App, Request, Response
+
+from ...config import Configuration
+from ...core import NominatimAPIAsync
+from ... import v1 as api_impl
+from ...result_formatting import FormatDispatcher, load_format_dispatcher
+from ... import logging as loglib
+from ..asgi_adaptor import ASGIAdaptor, EndpointFunc
+
+
class HTTPNominatimError(Exception):
    """ A special exception class for errors raised during processing.

        Carries the pre-formatted error message together with the HTTP
        status and content type it should be delivered with.
    """
    def __init__(self, msg: str, status: int, content_type: str) -> None:
        # Pass the message on to the Exception base class so that str()
        # and tracebacks show it instead of an empty string.
        super().__init__(msg)
        self.msg = msg
        self.status = status
        self.content_type = content_type
+
+
async def nominatim_error_handler(req: Request, resp: Response,
                                  exception: HTTPNominatimError,
                                  _: Any) -> None:
    """ Special error handler that passes message and content type as
        per exception info.
    """
    # Copy status, body and content type straight from the exception;
    # the message in the exception is already formatted for output.
    resp.status = exception.status
    resp.text = exception.msg
    resp.content_type = exception.content_type
+
+
async def timeout_error_handler(req: Request, resp: Response,
                                exception: TimeoutError,
                                _: Any) -> None:
    """ Error handler for request timeouts. Responds with HTTP 503 and
        either the collected debug log (when debug output is enabled)
        or a plain-text message.
    """
    resp.status = 503

    loglib.log().comment('Aborted: Query took too long to process.')
    logdata = loglib.get_and_disable()

    if not logdata:
        resp.text = "Query took too long to process."
        resp.content_type = 'text/plain; charset=utf-8'
    else:
        resp.text = logdata
        resp.content_type = 'text/html; charset=utf-8'
+
+
class ParamWrapper(ASGIAdaptor):
    """ Adaptor class for server glue to Falcon framework.
    """

    def __init__(self, req: Request, resp: Response,
                 config: Configuration, formatter: FormatDispatcher) -> None:
        self.request = req
        self.response = resp
        self._config = config
        self._formatter = formatter

    def get(self, name: str, default: Optional[str] = None) -> Optional[str]:
        """ Return query parameter 'name' or 'default' when absent. """
        return self.request.get_param(name, default=default)

    def get_header(self, name: str, default: Optional[str] = None) -> Optional[str]:
        """ Return HTTP header 'name' or 'default' when absent. """
        return self.request.get_header(name, default=default)

    def error(self, msg: str, status: int = 400) -> HTTPNominatimError:
        """ Wrap the message into an exception that the registered
            Falcon error handler knows how to render.
        """
        return HTTPNominatimError(msg, status, self.content_type)

    def create_response(self, status: int, output: str, num_results: int) -> None:
        """ Fill in the Falcon response object. 'num_results' is stored
            in the response context for the logging middleware.
        """
        self.response.context.num_results = num_results
        self.response.status = status
        self.response.text = output
        self.response.content_type = self.content_type

    def base_uri(self) -> str:
        """ Return the forwarded prefix of the request as base URI. """
        return self.request.forwarded_prefix

    def config(self) -> Configuration:
        """ Return the configuration handed in at construction. """
        return self._config

    def formatting(self) -> FormatDispatcher:
        """ Return the formatter handed in at construction. """
        return self._formatter
+
+
class EndpointWrapper:
    """ Converter for server glue endpoint functions to Falcon request handlers.
    """

    def __init__(self, name: str, func: EndpointFunc, api: NominatimAPIAsync,
                 formatter: FormatDispatcher) -> None:
        self.name = name
        self.func = func
        self.api = api
        self.formatter = formatter

    async def on_get(self, req: Request, resp: Response) -> None:
        """ Handle GET requests by delegating to the wrapped endpoint
            function.
        """
        adaptor = ParamWrapper(req, resp, self.api.config, self.formatter)
        await self.func(self.api, adaptor)
+
+
class FileLoggingMiddleware:
    """ Middleware to log selected requests into a file.
    """

    def __init__(self, file_name: str):
        # Line buffering so each record is flushed to disk immediately.
        self.fd = open(file_name, 'a', buffering=1, encoding='utf8')

    async def process_request(self, req: Request, _: Response) -> None:
        """ Remember the time when request processing started.
        """
        req.context.start = dt.datetime.now(tz=dt.timezone.utc)

    async def process_response(self, req: Request, resp: Response,
                               resource: Optional[EndpointWrapper],
                               req_succeeded: bool) -> None:
        """ Write the log line after the request has finished. Only
            successful requests for search, reverse, lookup and details
            are logged.
        """
        if not req_succeeded or resource is None or resp.status != 200:
            return
        if resource.name not in ('reverse', 'search', 'lookup', 'details'):
            return

        finish = dt.datetime.now(tz=dt.timezone.utc)
        elapsed = (finish - req.context.start).total_seconds()
        qparams = req.scope['query_string'].decode('utf8')
        stamp = req.context.start.replace(tzinfo=None)\
                                 .isoformat(sep=' ', timespec='milliseconds')

        self.fd.write(f'[{stamp}] {elapsed:.4f} '
                      f"{getattr(resp.context, 'num_results', 0)} "
                      f'{resource.name} "{qparams}"\n')
+
+
class APIMiddleware:
    """ Middleware managing the Nominatim database connection.
    """

    def __init__(self, project_dir: Path, environ: Optional[Mapping[str, str]]) -> None:
        self.api = NominatimAPIAsync(project_dir, environ)
        self.app: Optional[App] = None

    @property
    def config(self) -> Configuration:
        """ Get the configuration for Nominatim.
        """
        return self.api.config

    def set_app(self, app: App) -> None:
        """ Set the Falcon application this middleware is connected to.
        """
        self.app = app

    async def process_startup(self, *_: Any) -> None:
        """ ASGI lifespan startup: register all API routes on the app.
        """
        assert self.app is not None
        serve_legacy = self.api.config.get_bool('SERVE_LEGACY_URLS')
        formatter = load_format_dispatcher('v1', self.api.config.project_dir)
        for name, func in await api_impl.get_routes(self.api):
            handler = EndpointWrapper(name, func, self.api, formatter)
            self.app.add_route(f"/{name}", handler)
            if serve_legacy:
                # Also expose the endpoint under its legacy PHP name.
                self.app.add_route(f"/{name}.php", handler)

    async def process_shutdown(self, *_: Any) -> None:
        """ ASGI lifespan shutdown: close the database connection.
        """
        await self.api.close()
+
+
def get_application(project_dir: Path,
                    environ: Optional[Mapping[str, str]] = None) -> App:
    """ Create a Nominatim Falcon ASGI application.
    """
    apimw = APIMiddleware(project_dir, environ)

    middleware: List[object] = [apimw]
    log_file = apimw.config.LOG_FILE
    if log_file:
        # Optional request logging into a file.
        middleware.append(FileLoggingMiddleware(log_file))

    app = App(cors_enable=apimw.config.get_bool('CORS_NOACCESSCONTROL'),
              middleware=middleware)
    apimw.set_app(app)

    app.add_error_handler(HTTPNominatimError, nominatim_error_handler)
    app.add_error_handler(TimeoutError, timeout_error_handler)
    # asyncio.TimeoutError is a distinct class in Python <= 3.10.
    app.add_error_handler(asyncio.TimeoutError, timeout_error_handler)  # type: ignore[arg-type]

    return app
+
+
def run_wsgi() -> App:
    """ Entry point for uvicorn.

        Make sure uvicorn is run from the project directory.
    """
    # The current working directory is taken as the project directory.
    return get_application(Path('.'))
--- /dev/null
+# SPDX-License-Identifier: GPL-3.0-or-later
+#
+# This file is part of Nominatim. (https://nominatim.org)
+#
+# Copyright (C) 2024 by the Nominatim developer community.
+# For a full list of authors see the git log.
+"""
+Server implementation using the starlette webserver framework.
+"""
+from typing import Any, Optional, Mapping, Callable, cast, Coroutine, Dict, \
+ Awaitable, AsyncIterator
+from pathlib import Path
+import datetime as dt
+import asyncio
+import contextlib
+
+from starlette.applications import Starlette
+from starlette.routing import Route
+from starlette.exceptions import HTTPException
+from starlette.responses import Response, PlainTextResponse, HTMLResponse
+from starlette.requests import Request
+from starlette.middleware import Middleware
+from starlette.middleware.base import BaseHTTPMiddleware, RequestResponseEndpoint
+from starlette.middleware.cors import CORSMiddleware
+
+from ...config import Configuration
+from ...core import NominatimAPIAsync
+from ... import v1 as api_impl
+from ...result_formatting import FormatDispatcher, load_format_dispatcher
+from ..asgi_adaptor import ASGIAdaptor, EndpointFunc
+from ... import logging as loglib
+
+
class ParamWrapper(ASGIAdaptor):
    """ Adaptor class for server glue to Starlette framework.
    """

    def __init__(self, request: Request) -> None:
        self.request = request

    def get(self, name: str, default: Optional[str] = None) -> Optional[str]:
        """ Look up a query parameter. """
        return self.request.query_params.get(name, default=default)

    def get_header(self, name: str, default: Optional[str] = None) -> Optional[str]:
        """ Look up an HTTP header. """
        return self.request.headers.get(name, default)

    def error(self, msg: str, status: int = 400) -> HTTPException:
        """ Wrap the error message into a Starlette HTTP exception. """
        return HTTPException(status, detail=msg,
                             headers={'content-type': self.content_type})

    def create_response(self, status: int, output: str, num_results: int) -> Response:
        """ Build the Starlette response. 'num_results' is saved in the
            request state for the logging middleware.
        """
        self.request.state.num_results = num_results
        return Response(output, status_code=status, media_type=self.content_type)

    def base_uri(self) -> str:
        """ Reassemble the base URI of the request, omitting default ports.
        """
        url = self.request.url
        root = self.request.scope['root_path']
        port = url.port
        if (url.scheme, port) in (('http', 80), ('https', 443)):
            port = None

        if port is None:
            return f"{url.scheme}://{url.hostname}{root}"

        return f"{url.scheme}://{url.hostname}:{port}{root}"

    def config(self) -> Configuration:
        """ Return the configuration from the application state. """
        return cast(Configuration, self.request.app.state.API.config)

    def formatting(self) -> FormatDispatcher:
        """ Return the formatter from the application state. """
        return cast(FormatDispatcher, self.request.app.state.formatter)
+
+
def _wrap_endpoint(func: EndpointFunc)\
        -> Callable[[Request], Coroutine[Any, Any, Response]]:
    # Bind 'func' into a Starlette-compatible request handler; the API
    # object and the request adaptor are supplied per request.
    async def _callback(request: Request) -> Response:
        return cast(Response, await func(request.app.state.API, ParamWrapper(request)))

    return _callback
+
+
class FileLoggingMiddleware(BaseHTTPMiddleware):
    """ Middleware to log selected requests into a file.
    """

    def __init__(self, app: Starlette, file_name: str = ''):
        super().__init__(app)
        # Line buffering so each record is flushed to disk immediately.
        self.fd = open(file_name, 'a', buffering=1, encoding='utf8')

    async def dispatch(self, request: Request,
                       call_next: RequestResponseEndpoint) -> Response:
        """ Time the request and append one log line for successful
            search, reverse, lookup and details requests.
        """
        started = dt.datetime.now(tz=dt.timezone.utc)
        response = await call_next(request)

        if response.status_code != 200:
            return response

        finished = dt.datetime.now(tz=dt.timezone.utc)

        qtype = next((e for e in ('reverse', 'search', 'lookup', 'details')
                      if request.url.path.startswith('/' + e)), None)
        if qtype is None:
            return response

        elapsed = (finished - started).total_seconds()
        qparams = request.scope['query_string'].decode('utf8')
        stamp = started.replace(tzinfo=None).isoformat(sep=' ', timespec='milliseconds')

        self.fd.write(f'[{stamp}] {elapsed:.4f} '
                      f"{getattr(request.state, 'num_results', 0)} "
                      f'{qtype} "{qparams}"\n')

        return response
+
+
async def timeout_error(request: Request,
                        _: Exception) -> Response:
    """ Error handler for query timeouts.

        Responds with HTTP 503. When debug logging is active, the
        collected log is returned as HTML, otherwise a plain-text
        message is sent.
    """
    loglib.log().comment('Aborted: Query took too long to process.')
    logdata = loglib.get_and_disable()

    if logdata:
        # Return the debug log with the same 503 status as the plain-text
        # variant (and as the falcon implementation), so that clients
        # still see the request as failed.
        return HTMLResponse(logdata, status_code=503)

    return PlainTextResponse("Query took too long to process.", status_code=503)
+
+
def get_application(project_dir: Path,
                    environ: Optional[Mapping[str, str]] = None,
                    debug: bool = True) -> Starlette:
    """ Create a Nominatim Starlette ASGI application.
    """
    config = Configuration(project_dir, environ)

    middleware = []
    if config.get_bool('CORS_NOACCESSCONTROL'):
        # Allow cross-origin GET requests from anywhere.
        middleware.append(Middleware(CORSMiddleware,
                                     allow_origins=['*'],
                                     allow_methods=['GET', 'OPTIONS'],
                                     max_age=86400))

    log_file = config.LOG_FILE
    if log_file:
        middleware.append(Middleware(FileLoggingMiddleware, file_name=log_file))

    # Both timeout exceptions go to the same handler;
    # asyncio.TimeoutError is a distinct class in Python <= 3.10.
    exceptions: Dict[Any, Callable[[Request, Exception], Awaitable[Response]]] = {
        TimeoutError: timeout_error,
        asyncio.TimeoutError: timeout_error
    }

    @contextlib.asynccontextmanager
    async def lifespan(app: Starlette) -> AsyncIterator[Any]:
        # Startup: open the API connection and register all routes.
        app.state.API = NominatimAPIAsync(project_dir, environ)
        config = app.state.API.config  # note: shadows the outer 'config'

        legacy_urls = config.get_bool('SERVE_LEGACY_URLS')
        for name, func in await api_impl.get_routes(app.state.API):
            endpoint = _wrap_endpoint(func)
            app.routes.append(Route(f"/{name}", endpoint=endpoint))
            if legacy_urls:
                # Also serve the endpoint under its legacy PHP name.
                app.routes.append(Route(f"/{name}.php", endpoint=endpoint))

        yield

        # Shutdown: close the API connection again.
        await app.state.API.close()

    app = Starlette(debug=debug, middleware=middleware,
                    exception_handlers=exceptions,
                    lifespan=lifespan)

    app.state.formatter = load_format_dispatcher('v1', project_dir)

    return app
+
+
def run_wsgi() -> Starlette:
    """ Entry point for uvicorn.
    """
    # Debug mode is switched off for production use.
    return get_application(Path('.'), debug=False)
--- /dev/null
+# SPDX-License-Identifier: GPL-3.0-or-later
+#
+# This file is part of Nominatim. (https://nominatim.org)
+#
+# Copyright (C) 2024 by the Nominatim developer community.
+# For a full list of authors see the git log.
+"""
+Import the base library to use with asynchronous SQLAlchemy.
+"""
+from typing import Any
+
+# flake8: noqa
+
# Select the database driver: prefer psycopg (version 3) when installed,
# otherwise fall back to asyncpg.
try:
    import sqlalchemy.dialects.postgresql.psycopg
    import psycopg
    # Identifier of the driver in use.
    PGCORE_LIB = 'psycopg'
    # Base error class of the selected driver.
    PGCORE_ERROR: Any = psycopg.Error
except ModuleNotFoundError:
    import sqlalchemy.dialects.postgresql.asyncpg
    import asyncpg
    PGCORE_LIB = 'asyncpg'
    PGCORE_ERROR = asyncpg.PostgresError
--- /dev/null
+# SPDX-License-Identifier: GPL-3.0-or-later
+#
+# This file is part of Nominatim. (https://nominatim.org)
+#
+# Copyright (C) 2024 by the Nominatim developer community.
+# For a full list of authors see the git log.
+"""
+Custom functions and expressions for SQLAlchemy.
+"""
+from __future__ import annotations
+from typing import Any
+
+import sqlalchemy as sa
+from sqlalchemy.ext.compiler import compiles
+
+from ..typing import SaColumn
+
+
class PlacexGeometryReverseLookuppolygon(sa.sql.functions.GenericFunction[Any]):
    """ Check for conditions that allow partial index use on
        'idx_placex_geometry_reverse_lookupPolygon'.

        Needs to be constant, so that the query planner picks them up
        correctly in prepared statements.
    """
    name = 'PlacexGeometryReverseLookuppolygon'
    inherit_cache = True  # let SQLAlchemy cache the generated SQL
+
+
@compiles(PlacexGeometryReverseLookuppolygon)
def _default_intersects(element: PlacexGeometryReverseLookuppolygon,
                        compiler: 'sa.Compiled', **kw: Any) -> str:
    # Default (PostgreSQL) rendering. The constant condition list
    # presumably mirrors the WHERE clause of the partial index named in
    # the class docstring — keep it in sync with the index definition.
    return ("(ST_GeometryType(placex.geometry) in ('ST_Polygon', 'ST_MultiPolygon')"
            " AND placex.rank_address between 4 and 25"
            " AND placex.type != 'postcode'"
            " AND placex.name is not null"
            " AND placex.indexed_status = 0"
            " AND placex.linked_place_id is null)")
+
+
@compiles(PlacexGeometryReverseLookuppolygon, 'sqlite')
def _sqlite_intersects(element: PlacexGeometryReverseLookuppolygon,
                       compiler: 'sa.Compiled', **kw: Any) -> str:
    # SQLite rendering: identical to the default except for the geometry
    # type names ('POLYGON'/'MULTIPOLYGON' instead of 'ST_Polygon'/...).
    return ("(ST_GeometryType(placex.geometry) in ('POLYGON', 'MULTIPOLYGON')"
            " AND placex.rank_address between 4 and 25"
            " AND placex.type != 'postcode'"
            " AND placex.name is not null"
            " AND placex.indexed_status = 0"
            " AND placex.linked_place_id is null)")
+
+
class IntersectsReverseDistance(sa.sql.functions.GenericFunction[Any]):
    """ Check if a geometry intersects the search area of a place-node
        row, where the radius of the area depends on the row's search
        rank. Rendered differently per dialect.
    """
    name = 'IntersectsReverseDistance'
    inherit_cache = True

    def __init__(self, table: sa.Table, geom: SaColumn) -> None:
        # Clause order (geometry column, search rank, probe geometry)
        # is relied upon by the dialect-specific compile functions.
        super().__init__(table.c.geometry,
                         table.c.rank_search, geom)
        # Save the table name so the compilers can qualify extra columns.
        self.tablename = table.name
+
+
@compiles(IntersectsReverseDistance)
def default_reverse_place_diameter(element: IntersectsReverseDistance,
                                   compiler: 'sa.Compiled', **kw: Any) -> str:
    """ Default (PostgreSQL) rendering: buffer the place geometry by the
        rank-dependent reverse_place_diameter() and check for overlap.
    """
    table = element.tablename
    geom, rank, frame = (compiler.process(c, **kw) for c in element.clauses)

    return (f"({table}.rank_address between 4 and 25"
            f" AND {table}.type != 'postcode'"
            f" AND {table}.name is not null"
            f" AND {table}.linked_place_id is null"
            f" AND {table}.osm_type = 'N'"
            f" AND ST_Buffer({geom}, reverse_place_diameter({rank})) && {frame})")
+
+
@compiles(IntersectsReverseDistance, 'sqlite')
def sqlite_reverse_place_diameter(element: IntersectsReverseDistance,
                                  compiler: 'sa.Compiled', **kw: Any) -> str:
    """ SQLite rendering of IntersectsReverseDistance.

        The rank-dependent radius (14.0 * exp(-0.2 * rank) - 0.03) is
        inlined and the spatial filter is routed explicitly through the
        Spatialite SpatialIndex virtual table.
    """
    geom1, rank, geom2 = list(element.clauses)
    table = element.tablename

    return (f"({table}.rank_address between 4 and 25"
            f" AND {table}.type != 'postcode'"
            f" AND {table}.name is not null"
            f" AND {table}.linked_place_id is null"
            f" AND {table}.osm_type = 'N'"
            " AND MbrIntersects(%s, ST_Expand(%s, 14.0 * exp(-0.2 * %s) - 0.03))"
            f" AND {table}.place_id IN"
            " (SELECT place_id FROM placex_place_node_areas"
            " WHERE ROWID IN (SELECT ROWID FROM SpatialIndex"
            " WHERE f_table_name = 'placex_place_node_areas'"
            " AND search_frame = %s)))") % (
        compiler.process(geom1, **kw),
        compiler.process(geom2, **kw),
        compiler.process(rank, **kw),
        compiler.process(geom2, **kw))
+
+
class IsBelowReverseDistance(sa.sql.functions.GenericFunction[Any]):
    """ Check if the distance in parameter 1 is smaller than the
        rank-dependent search diameter derived from parameter 2.
    """
    name = 'IsBelowReverseDistance'
    inherit_cache = True
+
+
@compiles(IsBelowReverseDistance)
def default_is_below_reverse_distance(element: IsBelowReverseDistance,
                                      compiler: 'sa.Compiled', **kw: Any) -> str:
    """ Default (PostgreSQL) rendering: delegate the diameter computation
        to the reverse_place_diameter() SQL function.
    """
    dist, rank = list(element.clauses)
    return (f"{compiler.process(dist, **kw)}"
            f" < reverse_place_diameter({compiler.process(rank, **kw)})")
+
+
@compiles(IsBelowReverseDistance, 'sqlite')
def sqlite_is_below_reverse_distance(element: IsBelowReverseDistance,
                                     compiler: 'sa.Compiled', **kw: Any) -> str:
    """ SQLite rendering: inline the rank-dependent diameter formula.
    """
    dist, rank = list(element.clauses)
    return (f"{compiler.process(dist, **kw)}"
            f" < 14.0 * exp(-0.2 * {compiler.process(rank, **kw)}) - 0.03")
+
+
class IsAddressPoint(sa.sql.functions.GenericFunction[Any]):
    """ Check if a row is an address point: a rank-30 object with
        either a housenumber or an addr:housename entry in its name.
    """
    name = 'IsAddressPoint'
    inherit_cache = True

    def __init__(self, table: sa.Table) -> None:
        # Clause order (rank, housenumber, name) is relied upon by the
        # dialect-specific compile functions.
        super().__init__(table.c.rank_address,
                         table.c.housenumber, table.c.name)
+
+
@compiles(IsAddressPoint)
def default_is_address_point(element: IsAddressPoint,
                             compiler: 'sa.Compiled', **kw: Any) -> str:
    """ Default (PostgreSQL) rendering, using the '?' key-exists operator
        to look for addr:housename in the name column.
    """
    rank, hnr, name = list(element.clauses)
    return (f"({compiler.process(rank, **kw)} = 30"
            f" AND ({compiler.process(hnr, **kw)} IS NOT NULL"
            f" OR {compiler.process(name, **kw)} ? 'addr:housename'))")
+
+
@compiles(IsAddressPoint, 'sqlite')
def sqlite_is_address_point(element: IsAddressPoint,
                            compiler: 'sa.Compiled', **kw: Any) -> str:
    """ SQLite rendering, using json_extract() to look for
        addr:housename in the name column.
    """
    rank, hnr, name = list(element.clauses)
    return (f"({compiler.process(rank, **kw)} = 30"
            f" AND coalesce({compiler.process(hnr, **kw)},"
            f" json_extract({compiler.process(name, **kw)}, '$.addr:housename'))"
            " IS NOT NULL)")
+
+
class CrosscheckNames(sa.sql.functions.GenericFunction[Any]):
    """ Check if any of the names from the JSON array in parameter 2
        are contained in the list of names in parameter 1.
    """
    name = 'CrosscheckNames'
    inherit_cache = True
+
+
@compiles(CrosscheckNames)
def compile_crosscheck_names(element: CrosscheckNames,
                             compiler: 'sa.Compiled', **kw: Any) -> str:
    """ Default (PostgreSQL) rendering, using avals() and the array
        overlap operator '&&'.
    """
    names, variants = list(element.clauses)
    return (f"coalesce(avals({compiler.process(names, **kw)})"
            f" && ARRAY(SELECT * FROM json_array_elements_text("
            f"{compiler.process(variants, **kw)})), false)")
+
+
@compiles(CrosscheckNames, 'sqlite')
def compile_sqlite_crosscheck_names(element: CrosscheckNames,
                                    compiler: 'sa.Compiled', **kw: Any) -> str:
    """ SQLite rendering: join the two JSON value sets and test for
        a common value.
    """
    names, variants = list(element.clauses)
    return (f"EXISTS(SELECT * FROM json_each({compiler.process(names, **kw)}) as name,"
            f" json_each({compiler.process(variants, **kw)}) as match_name"
            " WHERE name.value = match_name.value)")
+
+
class JsonArrayEach(sa.sql.functions.GenericFunction[Any]):
    """ Return elements of a json array as a set.
        (Rendered as json_array_elements() on PostgreSQL and
        json_each() on SQLite.)
    """
    name = 'JsonArrayEach'
    inherit_cache = True
+
+
@compiles(JsonArrayEach)
def default_json_array_each(element: JsonArrayEach, compiler: 'sa.Compiled', **kw: Any) -> str:
    """ Default (PostgreSQL) rendering. """
    return f"json_array_elements({compiler.process(element.clauses, **kw)})"
+
+
@compiles(JsonArrayEach, 'sqlite')
def sqlite_json_array_each(element: JsonArrayEach, compiler: 'sa.Compiled', **kw: Any) -> str:
    """ SQLite rendering. """
    return f"json_each({compiler.process(element.clauses, **kw)})"
+
+
class Greatest(sa.sql.functions.GenericFunction[Any]):
    """ Function to compute maximum of all its input parameters.
        (Renders as the built-in greatest() by default; SQLite uses
        max() instead.)
    """
    name = 'greatest'
    inherit_cache = True
+
+
@compiles(Greatest, 'sqlite')
def sqlite_greatest(element: Greatest, compiler: 'sa.Compiled', **kw: Any) -> str:
    """ SQLite rendering: the variadic max() function. """
    return f"max({compiler.process(element.clauses, **kw)})"
+
+
class RegexpWord(sa.sql.functions.GenericFunction[Any]):
    """ Check if a full word is in a given string.

        Parameter 1 is the word (or alternation of words) to look for,
        parameter 2 the string to search in.
    """
    name = 'RegexpWord'
    inherit_cache = True
+
+
@compiles(RegexpWord, 'postgresql')
def postgres_regexp_nocase(element: RegexpWord, compiler: 'sa.Compiled', **kw: Any) -> str:
    """ PostgreSQL rendering: case-insensitive match (~*) with the
        word-boundary markers \\m and \\M.
    """
    pattern, value = list(element.clauses)
    return (f"{compiler.process(value, **kw)}"
            f" ~* ('\\m(' || {compiler.process(pattern, **kw)} || ')\\M')::text")
+
+
@compiles(RegexpWord, 'sqlite')
def sqlite_regexp_nocase(element: RegexpWord, compiler: 'sa.Compiled', **kw: Any) -> str:
    """ SQLite rendering: use the regexp() function with \\b word
        boundaries around the pattern.
    """
    pattern, value = list(element.clauses)
    return (f"regexp('\\b(' || {compiler.process(pattern, **kw)} || ')\\b',"
            f" {compiler.process(value, **kw)})")
--- /dev/null
+# SPDX-License-Identifier: GPL-3.0-or-later
+#
+# This file is part of Nominatim. (https://nominatim.org)
+#
+# Copyright (C) 2024 by the Nominatim developer community.
+# For a full list of authors see the git log.
+"""
+SQLAlchemy definitions for all tables used by the frontend.
+"""
+import sqlalchemy as sa
+
+from .sqlalchemy_types import Geometry, KeyValueStore, IntArray
+
+
class SearchTables:
    """ Data class that holds the tables of the Nominatim database.

        This schema strictly reflects the read-access view of the database.
        Any data used for updates only will not be visible.
    """

    def __init__(self, meta: sa.MetaData) -> None:
        # All tables are registered against the given MetaData object.
        self.meta = meta

        # Import state: date of the last data import plus replication state.
        self.import_status = sa.Table(
            'import_status', meta,
            sa.Column('lastimportdate', sa.DateTime(True), nullable=False),
            sa.Column('sequence_id', sa.Integer),
            sa.Column('indexed', sa.Boolean))

        # Global key/value properties of the database.
        self.properties = sa.Table(
            'nominatim_properties', meta,
            sa.Column('property', sa.Text, nullable=False),
            sa.Column('value', sa.Text))

        # Main table of all indexed places.
        self.placex = sa.Table(
            'placex', meta,
            sa.Column('place_id', sa.BigInteger, nullable=False),
            sa.Column('parent_place_id', sa.BigInteger),
            sa.Column('linked_place_id', sa.BigInteger),
            sa.Column('importance', sa.Float),
            sa.Column('indexed_date', sa.DateTime),
            sa.Column('rank_address', sa.SmallInteger),
            sa.Column('rank_search', sa.SmallInteger),
            sa.Column('indexed_status', sa.SmallInteger),
            sa.Column('osm_type', sa.String(1), nullable=False),
            sa.Column('osm_id', sa.BigInteger, nullable=False),
            # 'class' is a reserved word in Python; expose it as '.class_'.
            sa.Column('class', sa.Text, nullable=False, key='class_'),
            sa.Column('type', sa.Text, nullable=False),
            sa.Column('admin_level', sa.SmallInteger),
            sa.Column('name', KeyValueStore),
            sa.Column('address', KeyValueStore),
            sa.Column('extratags', KeyValueStore),
            sa.Column('geometry', Geometry, nullable=False),
            sa.Column('wikipedia', sa.Text),
            sa.Column('country_code', sa.String(2)),
            sa.Column('housenumber', sa.Text),
            sa.Column('postcode', sa.Text),
            sa.Column('centroid', Geometry))

        # Address parts that make up the address of a place.
        self.addressline = sa.Table(
            'place_addressline', meta,
            sa.Column('place_id', sa.BigInteger),
            sa.Column('address_place_id', sa.BigInteger),
            sa.Column('distance', sa.Float),
            sa.Column('fromarea', sa.Boolean),
            sa.Column('isaddress', sa.Boolean))

        # Postcodes with their computed centroids.
        self.postcode = sa.Table(
            'location_postcode', meta,
            sa.Column('place_id', sa.BigInteger),
            sa.Column('parent_place_id', sa.BigInteger),
            sa.Column('rank_search', sa.SmallInteger),
            sa.Column('rank_address', sa.SmallInteger),
            sa.Column('indexed_status', sa.SmallInteger),
            sa.Column('indexed_date', sa.DateTime),
            sa.Column('country_code', sa.String(2)),
            sa.Column('postcode', sa.Text),
            sa.Column('geometry', Geometry))

        # Address interpolation lines (startnumber..endnumber with step).
        self.osmline = sa.Table(
            'location_property_osmline', meta,
            sa.Column('place_id', sa.BigInteger, nullable=False),
            sa.Column('osm_id', sa.BigInteger),
            sa.Column('parent_place_id', sa.BigInteger),
            sa.Column('indexed_date', sa.DateTime),
            sa.Column('startnumber', sa.Integer),
            sa.Column('endnumber', sa.Integer),
            sa.Column('step', sa.SmallInteger),
            sa.Column('indexed_status', sa.SmallInteger),
            sa.Column('linegeo', Geometry),
            sa.Column('address', KeyValueStore),
            sa.Column('postcode', sa.Text),
            sa.Column('country_code', sa.String(2)))

        # Names of countries, keyed by country code.
        self.country_name = sa.Table(
            'country_name', meta,
            sa.Column('country_code', sa.String(2)),
            sa.Column('name', KeyValueStore),
            sa.Column('derived_name', KeyValueStore),
            sa.Column('partition', sa.Integer))

        # Fallback country geometries.
        self.country_grid = sa.Table(
            'country_osm_grid', meta,
            sa.Column('country_code', sa.String(2)),
            sa.Column('area', sa.Float),
            sa.Column('geometry', Geometry))

        # The following tables are not necessarily present.

        # Search terms for forward search (token vectors per place).
        self.search_name = sa.Table(
            'search_name', meta,
            sa.Column('place_id', sa.BigInteger),
            sa.Column('importance', sa.Float),
            sa.Column('search_rank', sa.SmallInteger),
            sa.Column('address_rank', sa.SmallInteger),
            sa.Column('name_vector', IntArray),
            sa.Column('nameaddress_vector', IntArray),
            sa.Column('country_code', sa.String(2)),
            sa.Column('centroid', Geometry))

        # House number interpolations from the US TIGER data.
        self.tiger = sa.Table(
            'location_property_tiger', meta,
            sa.Column('place_id', sa.BigInteger),
            sa.Column('parent_place_id', sa.BigInteger),
            sa.Column('startnumber', sa.Integer),
            sa.Column('endnumber', sa.Integer),
            sa.Column('step', sa.SmallInteger),
            sa.Column('linegeo', Geometry),
            sa.Column('postcode', sa.Text))
--- /dev/null
+# SPDX-License-Identifier: GPL-3.0-or-later
+#
+# This file is part of Nominatim. (https://nominatim.org)
+#
+# Copyright (C) 2024 by the Nominatim developer community.
+# For a full list of authors see the git log.
+"""
+Module with custom types for SQLAlchemy
+"""
+
+from .geometry import (Geometry as Geometry)
+from .int_array import (IntArray as IntArray)
+from .key_value import (KeyValueStore as KeyValueStore)
+from .json import (Json as Json)
--- /dev/null
+# SPDX-License-Identifier: GPL-3.0-or-later
+#
+# This file is part of Nominatim. (https://nominatim.org)
+#
+# Copyright (C) 2024 by the Nominatim developer community.
+# For a full list of authors see the git log.
+"""
+Custom types for SQLAlchemy.
+"""
+from __future__ import annotations
+from typing import Callable, Any, cast
+
+import sqlalchemy as sa
+from sqlalchemy.ext.compiler import compiles
+from sqlalchemy import types
+
+from ...typing import SaColumn, SaBind
+
+
class Geometry_DistanceSpheroid(sa.sql.expression.FunctionElement[float]):
    """ Function computing the distance between two geometries in meters
        on the WGS84 spheroid.
    """
    type = sa.Float()
    name = 'Geometry_DistanceSpheroid'
    inherit_cache = True


@compiles(Geometry_DistanceSpheroid)
def _default_distance_spheroid(element: Geometry_DistanceSpheroid,
                               compiler: 'sa.Compiled', **kw: Any) -> str:
    """ PostGIS: delegate to ST_DistanceSpheroid with the WGS84 spheroid. """
    args = compiler.process(element.clauses, **kw)
    return ("ST_DistanceSpheroid(%s,"
            " 'SPHEROID[\"WGS 84\",6378137,298.257223563,"
            " AUTHORITY[\"EPSG\",\"7030\"]]')" % args)


@compiles(Geometry_DistanceSpheroid, 'sqlite')
def _spatialite_distance_spheroid(element: Geometry_DistanceSpheroid,
                                  compiler: 'sa.Compiled', **kw: Any) -> str:
    """ SpatiaLite: use Distance() in spheroid mode, mapping NULL to 0.0. """
    args = compiler.process(element.clauses, **kw)
    return "COALESCE(Distance(%s, true), 0.0)" % args
+
+
class Geometry_IsLineLike(sa.sql.expression.FunctionElement[Any]):
    """ Test whether the geometry is a line or multiline.
    """
    name = 'Geometry_IsLineLike'
    inherit_cache = True


@compiles(Geometry_IsLineLike)
def _default_is_line_like(element: Geometry_IsLineLike,
                          compiler: 'sa.Compiled', **kw: Any) -> str:
    """ PostGIS spells geometry type names with an ST_ prefix. """
    geom = compiler.process(element.clauses, **kw)
    return "ST_GeometryType(%s) IN ('ST_LineString', 'ST_MultiLineString')" % geom


@compiles(Geometry_IsLineLike, 'sqlite')
def _sqlite_is_line_like(element: Geometry_IsLineLike,
                         compiler: 'sa.Compiled', **kw: Any) -> str:
    """ SpatiaLite returns unprefixed, upper-case geometry type names. """
    geom = compiler.process(element.clauses, **kw)
    return "ST_GeometryType(%s) IN ('LINESTRING', 'MULTILINESTRING')" % geom
+
+
class Geometry_IsAreaLike(sa.sql.expression.FunctionElement[Any]):
    """ Check if the geometry is a polygon or multipolygon.
    """
    # Fixed copy-paste error: the name previously read 'Geometry_IsLineLike'.
    # The name is only descriptive metadata here because custom compilers
    # below emit the actual SQL, but the duplicate was misleading.
    name = 'Geometry_IsAreaLike'
    inherit_cache = True


@compiles(Geometry_IsAreaLike)
def _default_is_area_like(element: Geometry_IsAreaLike,
                          compiler: 'sa.Compiled', **kw: Any) -> str:
    # PostGIS spells geometry type names with an ST_ prefix.
    return "ST_GeometryType(%s) IN ('ST_Polygon', 'ST_MultiPolygon')" % \
        compiler.process(element.clauses, **kw)


@compiles(Geometry_IsAreaLike, 'sqlite')
def _sqlite_is_area_like(element: Geometry_IsAreaLike,
                         compiler: 'sa.Compiled', **kw: Any) -> str:
    # SpatiaLite returns unprefixed, upper-case geometry type names.
    return "ST_GeometryType(%s) IN ('POLYGON', 'MULTIPOLYGON')" % \
        compiler.process(element.clauses, **kw)
+
+
class Geometry_IntersectsBbox(sa.sql.expression.FunctionElement[Any]):
    """ Test whether the bounding boxes of the given geometries overlap.
    """
    name = 'Geometry_IntersectsBbox'
    inherit_cache = True


@compiles(Geometry_IntersectsBbox)
def _default_intersects(element: Geometry_IntersectsBbox,
                        compiler: 'sa.Compiled', **kw: Any) -> str:
    """ PostGIS: the && bounding-box overlap operator. """
    left, right = element.clauses
    return "%s && %s" % (compiler.process(left, **kw), compiler.process(right, **kw))


@compiles(Geometry_IntersectsBbox, 'sqlite')
def _sqlite_intersects(element: Geometry_IntersectsBbox,
                       compiler: 'sa.Compiled', **kw: Any) -> str:
    """ SpatiaLite: plain MbrIntersects() without index support. """
    args = compiler.process(element.clauses, **kw)
    return "MbrIntersects(%s) = 1" % args
+
+
class Geometry_ColumnIntersectsBbox(sa.sql.expression.FunctionElement[Any]):
    """ Check if the bounding box of the geometry intersects with the
        given table column, using the spatial index for the column.

        The first argument must be the indexed table column, the second
        the probe geometry.

        The index must exist or the query may return nothing.
    """
    name = 'Geometry_ColumnIntersectsBbox'
    inherit_cache = True


@compiles(Geometry_ColumnIntersectsBbox)
def default_intersects_column(element: Geometry_ColumnIntersectsBbox,
                              compiler: 'sa.Compiled', **kw: Any) -> str:
    # PostGIS: the && operator uses the spatial index on its own.
    arg1, arg2 = list(element.clauses)
    return "%s && %s" % (compiler.process(arg1, **kw), compiler.process(arg2, **kw))


@compiles(Geometry_ColumnIntersectsBbox, 'sqlite')
def spatialite_intersects_column(element: Geometry_ColumnIntersectsBbox,
                                 compiler: 'sa.Compiled', **kw: Any) -> str:
    # SpatiaLite does not consult spatial indexes transparently: restrict
    # the candidate rows explicitly through the SpatialIndex virtual table
    # for the column's table, using the probe geometry as search frame.
    arg1, arg2 = list(element.clauses)
    return "MbrIntersects(%s, %s) = 1 and "\
           "%s.ROWID IN (SELECT ROWID FROM SpatialIndex "\
           " WHERE f_table_name = '%s' AND f_geometry_column = '%s' "\
           " AND search_frame = %s)"\
        % (compiler.process(arg1, **kw),
           compiler.process(arg2, **kw),
           arg1.table.name, arg1.table.name, arg1.name,
           compiler.process(arg2, **kw))
+
+
class Geometry_ColumnDWithin(sa.sql.expression.FunctionElement[Any]):
    """ Check if the geometry is within the distance of the
        given table column, using the spatial index for the column.

        Arguments: (indexed table column, probe geometry, distance).

        The index must exist or the query may return nothing.
    """
    name = 'Geometry_ColumnDWithin'
    inherit_cache = True


@compiles(Geometry_ColumnDWithin)
def default_dwithin_column(element: Geometry_ColumnDWithin,
                           compiler: 'sa.Compiled', **kw: Any) -> str:
    # PostGIS: ST_DWithin is index-aware by itself.
    return "ST_DWithin(%s)" % compiler.process(element.clauses, **kw)


@compiles(Geometry_ColumnDWithin, 'sqlite')
def spatialite_dwithin_column(element: Geometry_ColumnDWithin,
                              compiler: 'sa.Compiled', **kw: Any) -> str:
    # SpatiaLite needs an explicit subquery against the SpatialIndex
    # virtual table; the search frame is the probe geometry expanded
    # by the requested distance.
    geom1, geom2, dist = list(element.clauses)
    return "ST_Distance(%s, %s) < %s and "\
           "%s.ROWID IN (SELECT ROWID FROM SpatialIndex "\
           " WHERE f_table_name = '%s' AND f_geometry_column = '%s' "\
           " AND search_frame = ST_Expand(%s, %s))"\
        % (compiler.process(geom1, **kw),
           compiler.process(geom2, **kw),
           compiler.process(dist, **kw),
           geom1.table.name, geom1.table.name, geom1.name,
           compiler.process(geom2, **kw),
           compiler.process(dist, **kw))
+
+
class Geometry(types.UserDefinedType):  # type: ignore[type-arg]
    """ Simplified type decorator for PostGIS geometry. This type
        only supports geometries in 4326 projection.
    """
    cache_ok = True

    def __init__(self, subtype: str = 'Geometry'):
        # subtype is the geometry subtype used in the column definition
        # (e.g. 'POINT'); defaults to the generic 'Geometry'.
        self.subtype = subtype

    def get_col_spec(self) -> str:
        # DDL column type with the SRID fixed to WGS84.
        return f'GEOMETRY({self.subtype}, 4326)'

    def bind_processor(self, dialect: 'sa.Dialect') -> Callable[[Any], str]:
        # Convert bound values to WKT; plain strings are passed through
        # unchanged (assumed to be WKT already — TODO confirm).
        def process(value: Any) -> str:
            if isinstance(value, str):
                return value

            return cast(str, value.to_wkt())
        return process

    def result_processor(self, dialect: 'sa.Dialect', coltype: object) -> Callable[[Any], str]:
        # Results arrive as strings (see column_expression) and are
        # handed through unparsed.
        def process(value: Any) -> str:
            assert isinstance(value, str)
            return value
        return process

    def column_expression(self, col: SaColumn) -> SaColumn:
        # Always fetch geometry columns as EWKB.
        return sa.func.ST_AsEWKB(col)

    def bind_expression(self, bindvalue: SaBind) -> SaColumn:
        # Bound values are WKT (see bind_processor); parse them
        # server-side with the fixed WGS84 SRID.
        return sa.func.ST_GeomFromText(bindvalue, sa.text('4326'), type_=self)

    class comparator_factory(types.UserDefinedType.Comparator):  # type: ignore[type-arg]
        # Comparison helpers and PostGIS-style functions available on
        # Geometry-typed columns and expressions.

        def intersects(self, other: SaColumn, use_index: bool = True) -> 'sa.Operators':
            # Bbox intersection test. With use_index=False the column is
            # wrapped in a no-op coalesce() — presumably to keep the
            # planner from using the spatial index; confirm before changing.
            if not use_index:
                return Geometry_IntersectsBbox(sa.func.coalesce(sa.null(), self.expr), other)

            # Plain columns can use the index-backed variant.
            if isinstance(self.expr, sa.Column):
                return Geometry_ColumnIntersectsBbox(self.expr, other)

            return Geometry_IntersectsBbox(self.expr, other)

        def is_line_like(self) -> SaColumn:
            # True for LineString/MultiLineString geometries.
            return Geometry_IsLineLike(self)

        def is_area(self) -> SaColumn:
            # True for Polygon/MultiPolygon geometries.
            return Geometry_IsAreaLike(self)

        def within_distance(self, other: SaColumn, distance: SaColumn) -> SaColumn:
            # Use the index-backed variant when applied to a plain column.
            if isinstance(self.expr, sa.Column):
                return Geometry_ColumnDWithin(self.expr, other, distance)

            return self.ST_Distance(other) < distance

        def ST_Distance(self, other: SaColumn) -> SaColumn:
            return sa.func.ST_Distance(self, other, type_=sa.Float)

        def ST_Contains(self, other: SaColumn) -> SaColumn:
            return sa.func.ST_Contains(self, other, type_=sa.Boolean)

        def ST_CoveredBy(self, other: SaColumn) -> SaColumn:
            return sa.func.ST_CoveredBy(self, other, type_=sa.Boolean)

        def ST_ClosestPoint(self, other: SaColumn) -> SaColumn:
            # Fall back to 'other' itself when the function yields NULL.
            return sa.func.coalesce(sa.func.ST_ClosestPoint(self, other, type_=Geometry),
                                    other)

        def ST_Buffer(self, other: SaColumn) -> SaColumn:
            return sa.func.ST_Buffer(self, other, type_=Geometry)

        def ST_Expand(self, other: SaColumn) -> SaColumn:
            return sa.func.ST_Expand(self, other, type_=Geometry)

        def ST_Collect(self) -> SaColumn:
            return sa.func.ST_Collect(self, type_=Geometry)

        def ST_Centroid(self) -> SaColumn:
            return sa.func.ST_Centroid(self, type_=Geometry)

        def ST_LineInterpolatePoint(self, other: SaColumn) -> SaColumn:
            return sa.func.ST_LineInterpolatePoint(self, other, type_=Geometry)

        def ST_LineLocatePoint(self, other: SaColumn) -> SaColumn:
            return sa.func.ST_LineLocatePoint(self, other, type_=sa.Float)

        def distance_spheroid(self, other: SaColumn) -> SaColumn:
            # Distance in meters on the WGS84 spheroid.
            return Geometry_DistanceSpheroid(self, other)
+
+
@compiles(Geometry, 'sqlite')
def get_col_spec(self, *args, **kwargs):  # type: ignore[no-untyped-def]
    # SQLite/SpatiaLite has no typed geometry columns; always use the
    # generic GEOMETRY type in DDL.
    return 'GEOMETRY'
+
+
# Aliases from PostGIS function names to their SpatiaLite counterparts:
# (generic name, SQL return type, SpatiaLite function name).
SQLITE_FUNCTION_ALIAS = (
    ('ST_AsEWKB', sa.Text, 'AsEWKB'),
    ('ST_GeomFromEWKT', Geometry, 'GeomFromEWKT'),
    ('ST_AsGeoJSON', sa.Text, 'AsGeoJSON'),
    ('ST_AsKML', sa.Text, 'AsKML'),
    ('ST_AsSVG', sa.Text, 'AsSVG'),
    ('ST_LineLocatePoint', sa.Float, 'ST_Line_Locate_Point'),
    # NOTE(review): sa.Float looks odd for a point-interpolation result;
    # callers override type_ explicitly, so it may be inconsequential — confirm.
    ('ST_LineInterpolatePoint', sa.Float, 'ST_Line_Interpolate_Point'),
)
+
+
def _add_function_alias(func: str, ftype: type, alias: str) -> None:
    # Register a generic SQLAlchemy function 'func' with the given return
    # type and compile it to 'alias' on SQLite. A GenericFunction subclass
    # is created dynamically so that sa.func.<func>() picks it up.
    _FuncDef = type(func, (sa.sql.functions.GenericFunction, ), {
        "type": ftype(),
        "name": func,
        "identifier": func,
        "inherit_cache": True})

    func_templ = f"{alias}(%s)"

    def _sqlite_impl(element: Any, compiler: Any, **kw: Any) -> Any:
        return func_templ % compiler.process(element.clauses, **kw)

    compiles(_FuncDef, 'sqlite')(_sqlite_impl)


# Install all known aliases at import time.
for alias in SQLITE_FUNCTION_ALIAS:
    _add_function_alias(*alias)
--- /dev/null
+# SPDX-License-Identifier: GPL-3.0-or-later
+#
+# This file is part of Nominatim. (https://nominatim.org)
+#
+# Copyright (C) 2024 by the Nominatim developer community.
+# For a full list of authors see the git log.
+"""
+Custom type for an array of integers.
+"""
+from typing import Any, List, Optional
+
+import sqlalchemy as sa
+from sqlalchemy.ext.compiler import compiles
+from sqlalchemy.dialects.postgresql import ARRAY
+
+from ...typing import SaDialect, SaColumn
+
+
class IntList(sa.types.TypeDecorator[Any]):
    """ A list of integers saved as a text of comma-separated numbers.
    """
    impl = sa.types.Unicode
    cache_ok = True

    def process_bind_param(self, value: Optional[Any], dialect: 'sa.Dialect') -> Optional[str]:
        """ Serialise the list into its comma-separated text form. """
        if value is None:
            return None

        assert isinstance(value, list)
        return ','.join(str(item) for item in value)

    def process_result_value(self, value: Optional[Any],
                             dialect: SaDialect) -> Optional[List[int]]:
        """ Parse the comma-separated text back into a list of ints. """
        if value is None:
            return None

        return [int(part) for part in value.split(',')]

    def copy(self, **kw: Any) -> 'IntList':
        """ Create a copy of this type instance. """
        return IntList(self.impl.length)
+
+
class IntArray(sa.types.TypeDecorator[Any]):
    """ Dialect-independent list of integers.

        Stored as a native integer array on PostgreSQL and as
        comma-separated text (see IntList) everywhere else.
    """
    impl = IntList
    cache_ok = True

    def load_dialect_impl(self, dialect: SaDialect) -> sa.types.TypeEngine[Any]:
        if dialect.name == 'postgresql':
            return ARRAY(sa.Integer())

        return IntList()

    class comparator_factory(sa.types.UserDefinedType.Comparator):  # type: ignore[type-arg]

        def __add__(self, other: SaColumn) -> 'sa.ColumnOperators':
            """ Concatenate the array with the given array. If one of the
                operands is null, the value of the other will be returned.
            """
            return ArrayCat(self.expr, other)

        def contains(self, other: SaColumn, **kwargs: Any) -> 'sa.ColumnOperators':
            """ Return true if the array contains all the values of the
                argument array.
            """
            return ArrayContains(self.expr, other)
+
+
class ArrayAgg(sa.sql.functions.GenericFunction[Any]):
    """ Aggregate function collecting all input values into an array.
    """
    type = IntArray()
    identifier = 'ArrayAgg'
    name = 'array_agg'
    inherit_cache = True


@compiles(ArrayAgg, 'sqlite')
def sqlite_array_agg(element: ArrayAgg, compiler: 'sa.Compiled', **kw: Any) -> str:
    """ SQLite: emulate array_agg with group_concat over comma lists. """
    inner = compiler.process(element.clauses, **kw)
    return "group_concat(%s, ',')" % inner
+
+
class ArrayContains(sa.sql.expression.FunctionElement[Any]):
    """ Function testing if the first array fully contains the second.
    """
    name = 'ArrayContains'
    inherit_cache = True


@compiles(ArrayContains)
def generic_array_contains(element: ArrayContains, compiler: 'sa.Compiled', **kw: Any) -> str:
    """ PostgreSQL: the native @> containment operator. """
    container, containee = element.clauses
    return "(%s @> %s)" % (compiler.process(container, **kw),
                           compiler.process(containee, **kw))


@compiles(ArrayContains, 'sqlite')
def sqlite_array_contains(element: ArrayContains, compiler: 'sa.Compiled', **kw: Any) -> str:
    """ SQLite: delegate to the custom array_contains() function. """
    return "array_contains(%s)" % compiler.process(element.clauses, **kw)
+
+
class ArrayCat(sa.sql.expression.FunctionElement[Any]):
    """ Function to concatenate two arrays.
    """
    type = IntArray()
    identifier = 'ArrayCat'
    inherit_cache = True


@compiles(ArrayCat)
def generic_array_cat(element: ArrayCat, compiler: 'sa.Compiled', **kw: Any) -> str:
    # PostgreSQL: native array_cat() function.
    return "array_cat(%s)" % compiler.process(element.clauses, **kw)


@compiles(ArrayCat, 'sqlite')
def sqlite_array_cat(element: ArrayCat, compiler: 'sa.Compiled', **kw: Any) -> str:
    # SQLite: arrays are comma-separated strings, so concatenation is
    # simple string concatenation with a joining comma.
    arg1, arg2 = list(element.clauses)
    return "(%s || ',' || %s)" % (compiler.process(arg1, **kw), compiler.process(arg2, **kw))
--- /dev/null
+# SPDX-License-Identifier: GPL-3.0-or-later
+#
+# This file is part of Nominatim. (https://nominatim.org)
+#
+# Copyright (C) 2024 by the Nominatim developer community.
+# For a full list of authors see the git log.
+"""
+Common json type for different dialects.
+"""
+from typing import Any
+
+import sqlalchemy as sa
+from sqlalchemy.dialects.postgresql import JSONB
+from sqlalchemy.dialects.sqlite import JSON as sqlite_json
+
+from ...typing import SaDialect
+
+
class Json(sa.types.TypeDecorator[Any]):
    """ Dialect-independent type for JSON.

        Uses JSONB on PostgreSQL and the generic JSON type on SQLite.
    """
    impl = sa.types.JSON
    cache_ok = True

    def load_dialect_impl(self, dialect: SaDialect) -> sa.types.TypeEngine[Any]:
        # none_as_null: store SQL NULL for None instead of JSON 'null'.
        if dialect.name == 'postgresql':
            return JSONB(none_as_null=True)  # type: ignore[no-untyped-call]

        return sqlite_json(none_as_null=True)
--- /dev/null
+# SPDX-License-Identifier: GPL-3.0-or-later
+#
+# This file is part of Nominatim. (https://nominatim.org)
+#
+# Copyright (C) 2024 by the Nominatim developer community.
+# For a full list of authors see the git log.
+"""
+A custom type that implements a simple key-value store of strings.
+"""
+from typing import Any
+
+import sqlalchemy as sa
+from sqlalchemy.ext.compiler import compiles
+from sqlalchemy.dialects.postgresql import HSTORE
+from sqlalchemy.dialects.sqlite import JSON as sqlite_json
+
+from ...typing import SaDialect, SaColumn
+
+
class KeyValueStore(sa.types.TypeDecorator[Any]):
    """ Dialect-independent type of a simple key-value store of strings.

        Implemented as HSTORE on PostgreSQL and as a JSON object on SQLite.
    """
    impl = HSTORE
    cache_ok = True

    def load_dialect_impl(self, dialect: SaDialect) -> sa.types.TypeEngine[Any]:
        # Pick the native backing type per dialect.
        if dialect.name == 'postgresql':
            return HSTORE()  # type: ignore[no-untyped-call]

        return sqlite_json(none_as_null=True)

    class comparator_factory(sa.types.UserDefinedType.Comparator):  # type: ignore[type-arg]

        def merge(self, other: SaColumn) -> 'sa.Operators':
            """ Merge the values from the given KeyValueStore into this
                one, overwriting values where necessary. When the argument
                is null, nothing happens.
            """
            return KeyValueConcat(self.expr, other)
+
+
class KeyValueConcat(sa.sql.expression.FunctionElement[Any]):
    """ Return the merged key-value store from the input parameters.
    """
    type = KeyValueStore()
    name = 'JsonConcat'
    inherit_cache = True


@compiles(KeyValueConcat)
def default_json_concat(element: KeyValueConcat, compiler: 'sa.Compiled', **kw: Any) -> str:
    """ PostgreSQL: hstore '||' merge; a NULL overlay merges as empty. """
    base, overlay = element.clauses
    return "(%s || coalesce(%s, ''::hstore))"\
        % (compiler.process(base, **kw), compiler.process(overlay, **kw))


@compiles(KeyValueConcat, 'sqlite')
def sqlite_json_concat(element: KeyValueConcat, compiler: 'sa.Compiled', **kw: Any) -> str:
    """ SQLite: json_patch() merge; a NULL overlay merges as '{}'. """
    base, overlay = element.clauses
    return "json_patch(%s, coalesce(%s, '{}'))"\
        % (compiler.process(base, **kw), compiler.process(overlay, **kw))
--- /dev/null
+# SPDX-License-Identifier: GPL-3.0-or-later
+#
+# This file is part of Nominatim. (https://nominatim.org)
+#
+# Copyright (C) 2024 by the Nominatim developer community.
+# For a full list of authors see the git log.
+"""
+Custom functions for SQLite.
+"""
+from typing import cast, Optional, Set, Any
+import json
+
+
def weigh_search(search_vector: Optional[str], rankings: str, default: float) -> float:
    """ Custom weight function for search results.

        'rankings' is a JSON list of [weight, [token, ...]] pairs. The
        weight of the first ranking whose tokens are all present in the
        comma-separated 'search_vector' is returned; 'default' otherwise.
    """
    if search_vector is None:
        return default

    present = {int(token) for token in search_vector.split(',')}
    for ranking in json.loads(rankings):
        if present.issuperset(ranking[1]):
            return cast(float, ranking[0])

    return default
+
+
class ArrayIntersectFuzzy:
    """ Compute the array of common elements of all input integer arrays.
        Very large input parameters may be ignored to speed up
        computation. Therefore, the result is a superset of common elements.

        Input and output arrays are given as comma-separated lists.
    """
    def __init__(self) -> None:
        self.first = ''
        self.values: Optional[Set[int]] = None

    def step(self, value: Optional[str]) -> None:
        """ Add the next array to the intersection.
        """
        if value is None:
            return

        if not self.first:
            # Remember the first array verbatim; it is only parsed
            # when a second array arrives.
            self.first = value
            return

        if len(value) >= 10000000:
            # Skip oversized inputs for speed (result stays a superset).
            return

        if self.values is None:
            self.values = {int(x) for x in self.first.split(',')}
        self.values.intersection_update(int(x) for x in value.split(','))

    def finalize(self) -> str:
        """ Return the final result.
        """
        if self.values is None:
            return self.first

        return ','.join(map(str, self.values))
+
+
class ArrayUnion:
    """ Compute the set of all elements of the input integer arrays.

        Input and output arrays are given as strings of comma-separated lists.
    """
    def __init__(self) -> None:
        self.values: Optional[Set[str]] = None

    def step(self, value: Optional[str]) -> None:
        """ Add the next array to the union.
        """
        if value is None:
            return

        parts = value.split(',')
        if self.values is None:
            self.values = set(parts)
        else:
            self.values.update(parts)

    def finalize(self) -> str:
        """ Return the final result.
        """
        return '' if self.values is None else ','.join(self.values)
+
+
def array_contains(container: Optional[str], containee: Optional[str]) -> Optional[bool]:
    """ Is the array 'containee' completely contained in array 'container'.

        Returns None (SQL NULL) when either argument is None.
    """
    if container is None or containee is None:
        return None

    haystack = set(container.split(','))
    return haystack.issuperset(containee.split(','))
+
+
def array_pair_contains(container1: Optional[str], container2: Optional[str],
                        containee: Optional[str]) -> Optional[bool]:
    """ Is the array 'containee' completely contained in the union of
        array 'container1' and array 'container2'.

        Returns None (SQL NULL) when any argument is None.
    """
    if container1 is None or container2 is None or containee is None:
        return None

    combined = set(container1.split(','))
    combined.update(container2.split(','))
    return combined.issuperset(containee.split(','))
+
+
def install_custom_functions(conn: Any) -> None:
    """ Install helper functions for Nominatim into the given SQLite
        database connection.
    """
    # Scalar functions can be registered directly; aggregates need the
    # helper below to reach the raw driver connection.
    conn.create_function('weigh_search', 3, weigh_search, deterministic=True)
    conn.create_function('array_contains', 2, array_contains, deterministic=True)
    conn.create_function('array_pair_contains', 3, array_pair_contains, deterministic=True)
    _create_aggregate(conn, 'array_intersect_fuzzy', 1, ArrayIntersectFuzzy)
    _create_aggregate(conn, 'array_union', 1, ArrayUnion)


async def _make_aggregate(aioconn: Any, *args: Any) -> None:
    # Run create_aggregate on the raw sqlite3 connection via the async
    # adapter's executor. NOTE(review): relies on private attributes of
    # the async driver connection (presumably aiosqlite) — confirm.
    await aioconn._execute(aioconn._conn.create_aggregate, *args)


def _create_aggregate(conn: Any, name: str, nargs: int, aggregate: Any) -> None:
    # Register an aggregate class on an async SQLAlchemy connection,
    # routing any failure through the connection's own exception handling.
    try:
        conn.await_(_make_aggregate(conn._connection, name, nargs, aggregate))
    except Exception as error:
        conn._handle_exception(error)
--- /dev/null
+# SPDX-License-Identifier: GPL-3.0-or-later
+#
+# This file is part of Nominatim. (https://nominatim.org)
+#
+# Copyright (C) 2024 by the Nominatim developer community.
+# For a full list of authors see the git log.
+"""
+Classes and function related to status call.
+"""
+from typing import Optional
+import datetime as dt
+import dataclasses
+
+import sqlalchemy as sa
+
+from .connection import SearchConnection
+from .version import NOMINATIM_API_VERSION
+
+
@dataclasses.dataclass
class StatusResult:
    """ Result of a call to the status API.
    """
    status: int
    message: str
    # Deliberately unannotated: this is a class attribute, not a dataclass
    # field, so it is excluded from __init__ — presumably because the
    # software version is fixed per release. Confirm before annotating.
    software_version = NOMINATIM_API_VERSION
    data_updated: Optional[dt.datetime] = None
    database_version: Optional[str] = None
+
+
async def get_status(conn: SearchConnection) -> StatusResult:
    """ Execute a status API call.

        Returns a StatusResult with status 0 ('OK'), the last data
        update date (UTC) and the database version, when available.
    """
    status = StatusResult(0, 'OK')

    # Last update date
    sql = sa.select(conn.t.import_status.c.lastimportdate).limit(1)
    status.data_updated = await conn.scalar(sql)

    if status.data_updated is not None:
        # Normalise to timezone-aware UTC; naive timestamps are
        # assumed to already be in UTC.
        if status.data_updated.tzinfo is None:
            status.data_updated = status.data_updated.replace(tzinfo=dt.timezone.utc)
        else:
            status.data_updated = status.data_updated.astimezone(dt.timezone.utc)

    # Database version
    try:
        status.database_version = await conn.get_property('database_version')
    except ValueError:
        # Presumably raised when the property is missing — the field
        # then simply stays None. Confirm against get_property().
        pass

    return status
--- /dev/null
+# SPDX-License-Identifier: GPL-3.0-or-later
+#
+# This file is part of Nominatim. (https://nominatim.org)
+#
+# Copyright (C) 2024 by the Nominatim developer community.
+# For a full list of authors see the git log.
+"""
+Complex datatypes used by the Nominatim API.
+"""
+from typing import Optional, Union, Tuple, NamedTuple, TypeVar, Type, Dict, \
+ Any, List, Sequence
+from collections import abc
+import dataclasses
+import enum
+import math
+from struct import unpack
+from binascii import unhexlify
+
+from .errors import UsageError
+from .localization import Locales
+
+
@dataclasses.dataclass
class PlaceID:
    """ Reference a place by Nominatim's internal ID.

        A PlaceID may reference a place from the main table placex, from
        the interpolation tables or the postcode tables. Place IDs are not
        stable between installations. You may therefore use this type only
        with place IDs obtained from the same database.
    """
    place_id: int
    """
    The internal ID of the place to reference.
    """
+
+
@dataclasses.dataclass
class OsmID:
    """ Reference a place by its OSM ID and potentially the basic category.

        The OSM ID may refer to places in the main table placex and OSM
        interpolation lines.
    """
    osm_type: str
    """ OSM type of the object. Must be one of `N`(node), `W`(way) or
        `R`(relation).
    """
    osm_id: int
    """ The OSM ID of the object.
    """
    osm_class: Optional[str] = None
    """ The same OSM object may appear multiple times in the database under
        different categories. The optional class parameter allows to distinguish
        the different categories and corresponds to the key part of the category.
        If there are multiple objects in the database and `osm_class` is
        left out, then one of the objects is returned at random.
    """

    def __post_init__(self) -> None:
        # Validate the type code as soon as the object is created.
        if self.osm_type not in {'N', 'W', 'R'}:
            raise ValueError(f"Illegal OSM type '{self.osm_type}'. Must be one of N, W, R.")

    def class_as_housenumber(self) -> Optional[int]:
        """ Interpret the class property as a housenumber and return it.

            If the OSM ID points to an interpolation, then the class may be
            a number pointing to the exact number requested. This function
            returns the housenumber as an int, if class is set and is a number.
        """
        category = self.osm_class
        return int(category) if category and category.isdigit() else None
+
+
+PlaceRef = Union[PlaceID, OsmID]
+
+
class Point(NamedTuple):
    """ A geographic point in WGS84 projection.
    """
    x: float
    y: float

    @property
    def lat(self) -> float:
        """ Return the latitude of the point.
        """
        return self.y

    @property
    def lon(self) -> float:
        """ Return the longitude of the point.
        """
        return self.x

    def to_geojson(self) -> str:
        """ Return the point in GeoJSON format.
        """
        return f'{{"type": "Point","coordinates": [{self.x}, {self.y}]}}'

    @staticmethod
    def from_wkb(wkb: Union[str, bytes]) -> 'Point':
        """ Create a point from EWKB as returned from the database.

            The data may be given as bytes or as a hex-encoded string.
            Only 2D points in WGS84 (SRID 4326) are accepted.

            Raises:
                ValueError: when the data is not a WGS84 point.
        """
        if isinstance(wkb, str):
            wkb = unhexlify(wkb)
        # An EWKB point with SRID is exactly 25 bytes:
        # 1 byte endianness + 4 bytes type + 4 bytes SRID + 2 doubles.
        if len(wkb) != 25:
            raise ValueError(f"Point wkb has unexpected length {len(wkb)}")
        if wkb[0] == 0:  # big endian
            gtype, srid, x, y = unpack('>iidd', wkb[1:])
        elif wkb[0] == 1:  # little endian
            gtype, srid, x, y = unpack('<iidd', wkb[1:])
        else:
            raise ValueError("WKB has unknown endian value.")

        # 0x20000001 = point geometry with embedded SRID.
        if gtype != 0x20000001:
            raise ValueError("WKB must be a point geometry.")
        if srid != 4326:
            raise ValueError("Only WGS84 WKB supported.")

        return Point(x, y)

    @staticmethod
    def from_param(inp: Any) -> 'Point':
        """ Create a point from an input parameter. The parameter
            may be given as a point, a string or a sequence of
            strings or floats. Raises a UsageError if the format is
            not correct.
        """
        if isinstance(inp, Point):
            return inp

        seq: Sequence[str]
        if isinstance(inp, str):
            seq = inp.split(',')
        elif isinstance(inp, abc.Sequence):
            seq = inp
        else:
            # Fix: previously 'seq' stayed unbound for unsupported input
            # types and the function died with a confusing NameError.
            raise UsageError('Point parameter needs to be a string '
                             'or a sequence of numbers.')

        if len(seq) != 2:
            raise UsageError('Point parameter needs 2 coordinates.')
        try:
            # Non-finite values (NaN/inf) are dropped by filter() and
            # surface as a ValueError from the unpacking.
            x, y = filter(math.isfinite, map(float, seq))
        except ValueError as exc:
            raise UsageError('Point parameter needs to be numbers.') from exc

        if x < -180.0 or x > 180.0 or y < -90.0 or y > 90.0:
            raise UsageError('Point coordinates invalid.')

        return Point(x, y)

    def to_wkt(self) -> str:
        """ Return the WKT representation of the point.
        """
        return f'POINT({self.x} {self.y})'
+
+
# A point given either as Point or as a plain (x, y) tuple.
AnyPoint = Union[Point, Tuple[float, float]]

# EWKB headers of a WGS84 polygon with a single ring of five points,
# i.e. the rectangle shape produced for bounding boxes (see Bbox.from_wkb).
WKB_BBOX_HEADER_LE = b'\x01\x03\x00\x00\x20\xE6\x10\x00\x00\x01\x00\x00\x00\x05\x00\x00\x00'
WKB_BBOX_HEADER_BE = b'\x00\x20\x00\x00\x03\x00\x00\x10\xe6\x00\x00\x00\x01\x00\x00\x00\x05'
+
+
+class Bbox:
+ """ A bounding box in WGS84 projection.
+
+ The coordinates are available as an array in the 'coord'
+ property in the order (minx, miny, maxx, maxy).
+ """
+ def __init__(self, minx: float, miny: float, maxx: float, maxy: float) -> None:
+ """ Create a new bounding box with the given coordinates in WGS84
+ projection.
+ """
+ self.coords = (minx, miny, maxx, maxy)
+
+ @property
+ def minlat(self) -> float:
+ """ Southern-most latitude, corresponding to the minimum y coordinate.
+ """
+ return self.coords[1]
+
+ @property
+ def maxlat(self) -> float:
+ """ Northern-most latitude, corresponding to the maximum y coordinate.
+ """
+ return self.coords[3]
+
+ @property
+ def minlon(self) -> float:
+ """ Western-most longitude, corresponding to the minimum x coordinate.
+ """
+ return self.coords[0]
+
+ @property
+ def maxlon(self) -> float:
+ """ Eastern-most longitude, corresponding to the maximum x coordinate.
+ """
+ return self.coords[2]
+
+ @property
+ def area(self) -> float:
+ """ Return the area of the box in WGS84.
+ """
+ return (self.coords[2] - self.coords[0]) * (self.coords[3] - self.coords[1])
+
+ def contains(self, pt: Point) -> bool:
+ """ Check if the point is inside or on the boundary of the box.
+ """
+ return self.coords[0] <= pt[0] and self.coords[1] <= pt[1]\
+ and self.coords[2] >= pt[0] and self.coords[3] >= pt[1]
+
+ def to_wkt(self) -> str:
+ """ Return the WKT representation of the Bbox. This
+ is a simple polygon with four points.
+ """
+ return 'POLYGON(({0} {1},{0} {3},{2} {3},{2} {1},{0} {1}))'\
+ .format(*self.coords)
+
+ @staticmethod
+ def from_wkb(wkb: Union[None, str, bytes]) -> 'Optional[Bbox]':
+ """ Create a Bbox from a bounding box polygon as returned by
+ the database. Returns `None` if the input value is None.
+ """
+ if wkb is None:
+ return None
+
+ if isinstance(wkb, str):
+ wkb = unhexlify(wkb)
+
+ if len(wkb) != 97:
+ raise ValueError("WKB must be a bounding box polygon")
+ if wkb.startswith(WKB_BBOX_HEADER_LE):
+ x1, y1, _, _, x2, y2 = unpack('<dddddd', wkb[17:65])
+ elif wkb.startswith(WKB_BBOX_HEADER_BE):
+ x1, y1, _, _, x2, y2 = unpack('>dddddd', wkb[17:65])
+ else:
+ raise ValueError("WKB has wrong header")
+
+ return Bbox(min(x1, x2), min(y1, y2), max(x1, x2), max(y1, y2))
+
+ @staticmethod
+ def from_point(pt: Point, buffer: float) -> 'Bbox':
+ """ Return a Bbox around the point with the buffer added to all sides.
+ """
+ return Bbox(pt[0] - buffer, pt[1] - buffer,
+ pt[0] + buffer, pt[1] + buffer)
+
+ @staticmethod
+ def from_param(inp: Any) -> 'Bbox':
+ """ Return a Bbox from an input parameter. The box may be
+ given as a Bbox, a string or a list or strings or integer.
+ Raises a UsageError if the format is incorrect.
+ """
+ if isinstance(inp, Bbox):
+ return inp
+
+ seq: Sequence[str]
+ if isinstance(inp, str):
+ seq = inp.split(',')
+ elif isinstance(inp, abc.Sequence):
+ seq = inp
+
+ if len(seq) != 4:
+ raise UsageError('Bounding box parameter needs 4 coordinates.')
+ try:
+ x1, y1, x2, y2 = filter(math.isfinite, map(float, seq))
+ except ValueError as exc:
+ raise UsageError('Bounding box parameter needs to be numbers.') from exc
+
+ x1 = min(180, max(-180, x1))
+ x2 = min(180, max(-180, x2))
+ y1 = min(90, max(-90, y1))
+ y2 = min(90, max(-90, y2))
+
+ if x1 == x2 or y1 == y2:
+ raise UsageError('Bounding box with invalid parameters.')
+
+ return Bbox(min(x1, x2), min(y1, y2), max(x1, x2), max(y1, y2))
+
+
class GeometryFormat(enum.Flag):
    """ All search functions support returning the full geometry of a place in
        various formats. The internal geometry is converted by PostGIS to
        the desired format and then returned as a string. It is possible to
        request multiple formats at the same time.
    """
    NONE = 0
    """ No geometry requested. Alias for an empty flag.
    """
    GEOJSON = enum.auto()
    """
    [GeoJSON](https://geojson.org/) format
    """
    KML = enum.auto()
    """
    [KML](https://en.wikipedia.org/wiki/Keyhole_Markup_Language) format
    """
    SVG = enum.auto()
    """
    [SVG](http://www.w3.org/TR/SVG/paths.html) format
    """
    TEXT = enum.auto()
    """
    [WKT](https://en.wikipedia.org/wiki/Well-known_text_representation_of_geometry) format
    """
+
+
class DataLayer(enum.Flag):
    """ The `DataLayer` flag type defines the layers that can be selected
        for reverse and forward search. Layers may be combined with the
        bitwise-or operator as usual for flag types.
    """
    ADDRESS = enum.auto()
    """ The address layer contains all places relevant for addresses:
        fully qualified addresses with a house number (or a house name equivalent,
        for some addresses) and places that can be part of an address like
        roads, cities, states.
    """
    POI = enum.auto()
    """ Layer for points of interest like shops, restaurants but also
        recycling bins or postboxes.
    """
    RAILWAY = enum.auto()
    """ Layer with railway features including tracks and other infrastructure.
        Note that in Nominatim's standard configuration, only very few railway
        features are imported into the database. Thus a custom configuration
        is required to make full use of this layer.
    """
    NATURAL = enum.auto()
    """ Layer with natural features like rivers, lakes and mountains.
    """
    MANMADE = enum.auto()
    """ Layer with other human-made features and boundaries. This layer is
        the catch-all and includes all features not covered by the other
        layers. A typical example for this layer are national park boundaries.
    """
+
+
def format_country(cc: Any) -> List[str]:
    """ Extract a list of country codes from the input which may be either
        a string or list of strings. Filters out all values that are not
        a two-letter string. Country codes are returned in lower case.

        Raises a UsageError when the input is neither a string nor a
        sequence.
    """
    clist: Sequence[str]
    if isinstance(cc, str):
        clist = cc.split(',')
    elif isinstance(cc, abc.Sequence):
        clist = cc
    else:
        raise UsageError("Parameter 'country' needs to be a comma-separated list "
                         "or a Python list of strings.")

    # Use a distinct loop variable: the original comprehension shadowed
    # the 'cc' parameter, which is confusing and trips linters.
    return [code.lower() for code in clist if isinstance(code, str) and len(code) == 2]
+
+
def format_excluded(ids: Any) -> List[int]:
    """ Extract a list of place ids from the input which may be either
        a string or a list of strings or ints. Ignores empty values but
        throws a UsageError on anything that cannot be converted to int.

        Returns [0] when no ids remain, so downstream filters always
        have a (non-matching) value to work with.
    """
    plist: Sequence[str]
    if isinstance(ids, str):
        plist = [s.strip() for s in ids.split(',')]
    elif isinstance(ids, abc.Sequence):
        plist = ids
    else:
        raise UsageError("Parameter 'excluded' needs to be a comma-separated list "
                         "or a Python list of numbers.")
    # Accept ints and digit-only strings; empty strings are allowed here
    # and dropped in the final comprehension.
    if not all(isinstance(i, int) or
               (isinstance(i, str) and (not i or i.isdigit())) for i in plist):
        raise UsageError("Parameter 'excluded' only takes place IDs.")

    # 'pid' instead of 'id': avoid shadowing the builtin id().
    return [int(pid) for pid in plist if pid] or [0]
+
+
def format_categories(categories: List[Tuple[str, str]]) -> List[Tuple[str, str]]:
    """ Extract a list of categories. Currently a noop.
    """
    # Placeholder transform hook for the 'categories' field of
    # SearchDetails; returns the input unchanged.
    return categories
+
+
+TParam = TypeVar('TParam', bound='LookupDetails')
+
+
@dataclasses.dataclass
class LookupDetails:
    """ Collection of parameters that define which kind of details are
        returned with a lookup or details result.
    """
    geometry_output: GeometryFormat = GeometryFormat.NONE
    """ Add the full geometry of the place to the result. Multiple
        formats may be selected. Note that geometries can become quite large.
    """
    address_details: bool = False
    """ Get detailed information on the places that make up the address
        for the result.
    """
    linked_places: bool = False
    """ Get detailed information on the places that link to the result.
    """
    parented_places: bool = False
    """ Get detailed information on all places that this place is a parent
        for, i.e. all places for which it provides the address details.
        Only POI places can have parents.
    """
    keywords: bool = False
    """ Add information about the search terms used for this place.
    """
    geometry_simplification: float = 0.0
    """ Simplification factor for a geometry in degrees WGS. A factor of
        0.0 means the original geometry is kept. The higher the value, the
        more the geometry gets simplified.
    """
    # NOTE(review): a single shared Locales instance serves as the default
    # for all instances of this dataclass — confirm Locales is immutable
    # or never mutated through this field.
    locales: Locales = Locales()
    """ Preferred languages for localization of results.
    """

    @classmethod
    def from_kwargs(cls: Type[TParam], kwargs: Dict[str, Any]) -> TParam:
        """ Load the data fields of the class from a dictionary.
            Unknown entries in the dictionary are ignored, missing ones
            get the default setting.

            The function supports type checking and throws a UsageError
            when the value does not fit.
        """
        def _check_field(v: Any, field: 'dataclasses.Field[Any]') -> Any:
            # None means 'not given': fall back to the field default.
            if v is None:
                return field.default_factory() \
                       if field.default_factory != dataclasses.MISSING \
                       else field.default
            # A 'transform' in the field metadata converts and validates
            # in one step and takes precedence over the isinstance check.
            if field.metadata and 'transform' in field.metadata:
                return field.metadata['transform'](v)
            if not isinstance(v, field.type):  # type: ignore[arg-type]
                raise UsageError(f"Parameter '{field.name}' needs to be of {field.type!s}.")
            return v

        # Only fields present in 'kwargs' are passed on; the rest keep
        # their dataclass defaults.
        return cls(**{f.name: _check_field(kwargs[f.name], f)
                      for f in dataclasses.fields(cls) if f.name in kwargs})
+
+
@dataclasses.dataclass
class ReverseDetails(LookupDetails):
    """ Collection of parameters for the reverse call.

        Inherits all generic lookup parameters from `LookupDetails`.
    """

    max_rank: int = dataclasses.field(default=30,
                                      metadata={'transform': lambda v: max(0, min(v, 30))})
    """ Highest address rank to return. Clamped to the range 0-30.
    """

    layers: DataLayer = DataLayer.ADDRESS | DataLayer.POI
    """ Filter which kind of data to include.
    """
+
+
@dataclasses.dataclass
class SearchDetails(LookupDetails):
    """ Collection of parameters for the search call.
    """
    max_results: int = 10
    """ Maximum number of results to be returned. The actual number of results
        may be less.
    """

    min_rank: int = dataclasses.field(default=0,
                                      metadata={'transform': lambda v: max(0, min(v, 30))})
    """ Lowest address rank to return. Clamped to the range 0-30.
    """

    max_rank: int = dataclasses.field(default=30,
                                      metadata={'transform': lambda v: max(0, min(v, 30))})
    """ Highest address rank to return. Clamped to the range 0-30.
    """

    layers: Optional[DataLayer] = dataclasses.field(default=None,
                                                    metadata={'transform': lambda r: r})
    """ Filter which kind of data to include. When 'None' (the default) then
        filtering by layers is disabled.
    """

    countries: List[str] = dataclasses.field(default_factory=list,
                                             metadata={'transform': format_country})
    """ Restrict search results to the given countries. An empty list (the
        default) will disable this filter.
    """

    excluded: List[int] = dataclasses.field(default_factory=list,
                                            metadata={'transform': format_excluded})
    """ List of OSM objects to exclude from the results. Currently only
        works when the internal place ID is given.
        An empty list (the default) will disable this filter.
    """

    viewbox: Optional[Bbox] = dataclasses.field(default=None,
                                                metadata={'transform': Bbox.from_param})
    """ Focus the search on a given map area.
    """

    bounded_viewbox: bool = False
    """ Use 'viewbox' as a filter and restrict results to places within the
        given area.
    """

    near: Optional[Point] = dataclasses.field(default=None,
                                              metadata={'transform': Point.from_param})
    """ Order results by distance to the given point.
    """

    near_radius: Optional[float] = dataclasses.field(default=None,
                                                     metadata={'transform': lambda r: r})
    """ Use near point as a filter and drop results outside the given
        radius. Radius is given in degrees WSG84.
    """

    categories: List[Tuple[str, str]] = dataclasses.field(default_factory=list,
                                                          metadata={'transform': format_categories})
    """ Restrict search to places with one of the given class/type categories.
        An empty list (the default) will disable this filter.
    """

    viewbox_x2: Optional[Bbox] = None
    """ Derived field: the viewbox grown by half its extent on every side.
        Computed in __post_init__; not meant to be set by the caller.
    """

    def __post_init__(self) -> None:
        if self.viewbox is not None:
            # Precompute a box of twice the size around the viewbox.
            xext = (self.viewbox.maxlon - self.viewbox.minlon)/2
            yext = (self.viewbox.maxlat - self.viewbox.minlat)/2
            self.viewbox_x2 = Bbox(self.viewbox.minlon - xext, self.viewbox.minlat - yext,
                                   self.viewbox.maxlon + xext, self.viewbox.maxlat + yext)

    def restrict_min_max_rank(self, new_min: int, new_max: int) -> None:
        """ Change the min_rank and max_rank fields to respect the
            given boundaries.
        """
        assert new_min <= new_max
        self.min_rank = max(self.min_rank, new_min)
        self.max_rank = min(self.max_rank, new_max)

    def is_impossible(self) -> bool:
        """ Check if the parameter configuration is contradictory and
            cannot yield any results.
        """
        # Fix: a bounded viewbox combined with a near point *outside*
        # the box can never match. The check was previously inverted
        # ('contains' without 'not'), rejecting exactly the valid
        # combinations and accepting the impossible ones.
        return (self.min_rank > self.max_rank
                or (self.bounded_viewbox
                    and self.viewbox is not None and self.near is not None
                    and not self.viewbox.contains(self.near))
                or (self.layers is not None and not self.layers)
                or (self.max_rank <= 4 and
                    self.layers is not None and not self.layers & DataLayer.ADDRESS))

    def layer_enabled(self, layer: DataLayer) -> bool:
        """ Check if the given layer has been chosen. Also returns
            true when layer restriction has been disabled completely.
        """
        return self.layers is None or bool(self.layers & layer)
--- /dev/null
+# SPDX-License-Identifier: GPL-3.0-or-later
+#
+# This file is part of Nominatim. (https://nominatim.org)
+#
+# Copyright (C) 2024 by the Nominatim developer community.
+# For a full list of authors see the git log.
+"""
+Type definitions for typing annotations.
+
+Complex type definitions are moved here, to keep the source files readable.
+"""
+from typing import Union, TYPE_CHECKING
+
+# flake8: noqa
+
+# SQLAlchemy introduced generic types in version 2.0 making typing
+# incompatible with older versions. Add wrappers here so we don't have
+# to litter the code with bare-string types.
+
if TYPE_CHECKING:
    from typing import Any
    import sqlalchemy as sa
    import os
    from typing_extensions import (TypeAlias as TypeAlias,
                                   Protocol as Protocol)
else:
    # At runtime the typing helpers are not needed; plain placeholders
    # keep the module importable without typing_extensions installed.
    TypeAlias = str
    Protocol = object

# Path-like value accepted wherever a file name is expected.
StrPath = Union[str, 'os.PathLike[str]']

# The aliases are quoted strings so they are only evaluated by type
# checkers; this keeps the code compatible with SQLAlchemy versions
# where the generic forms (e.g. sa.Select[Any]) do not exist.
SaLambdaSelect: TypeAlias = 'Union[sa.Select[Any], sa.StatementLambdaElement]'
SaSelect: TypeAlias = 'sa.Select[Any]'
SaScalarSelect: TypeAlias = 'sa.ScalarSelect[Any]'
SaRow: TypeAlias = 'sa.Row[Any]'
SaColumn: TypeAlias = 'sa.ColumnElement[Any]'
SaExpression: TypeAlias = 'sa.ColumnElement[bool]'
SaLabel: TypeAlias = 'sa.Label[Any]'
SaFromClause: TypeAlias = 'sa.FromClause'
SaSelectable: TypeAlias = 'sa.Selectable'
SaBind: TypeAlias = 'sa.BindParameter[Any]'
SaDialect: TypeAlias = 'sa.Dialect'
--- /dev/null
+# SPDX-License-Identifier: GPL-3.0-or-later
+#
+# This file is part of Nominatim. (https://nominatim.org)
+#
+# Copyright (C) 2024 by the Nominatim developer community.
+# For a full list of authors see the git log.
+"""
+Streaming JSON encoder.
+"""
+from typing import Any, TypeVar, Optional, Callable
+import io
+try:
+ import ujson as json
+except ModuleNotFoundError:
+ import json # type: ignore[no-redef]
+
+T = TypeVar('T')
+
+
class JsonWriter:
    """ JSON encoder that renders the output directly into an output
        stream. This is a very simple writer which produces JSON in a
        compact as possible form.

        The writer does not check for syntactic correctness. It is the
        responsibility of the caller to call the write functions in an
        order that produces correct JSON.

        All functions return the writer object itself so that function
        calls can be chained.
    """

    def __init__(self) -> None:
        self.data = io.StringIO()
        # Single pending token (bracket, ':' or ',') whose fate depends
        # on what gets written next (e.g. a trailing comma is dropped).
        self.pending = ''

    def __call__(self) -> str:
        """ Return the rendered JSON content as a string.
            The writer remains usable after calling this function.
        """
        if self.pending:
            assert self.pending in (']', '}')
            self.data.write(self.pending)
            self.pending = ''
        return self.data.getvalue()

    def start_object(self) -> 'JsonWriter':
        """ Write the open bracket of a JSON object.
        """
        if self.pending:
            self.data.write(self.pending)
        self.pending = '{'
        return self

    def end_object(self) -> 'JsonWriter':
        """ Write the closing bracket of a JSON object.
        """
        # A pending ',' is a trailing comma and must be dropped; pending
        # closing brackets from nested structures must be flushed first.
        # Fix: the previous version only flushed '{' and asserted on '}',
        # so any object ending with a nested object lost a brace.
        assert self.pending in (',', '{', '}', ']', '')
        if self.pending not in (',', ''):
            self.data.write(self.pending)
        self.pending = '}'
        return self

    def start_array(self) -> 'JsonWriter':
        """ Write the opening bracket of a JSON array.
        """
        if self.pending:
            self.data.write(self.pending)
        self.pending = '['
        return self

    def end_array(self) -> 'JsonWriter':
        """ Write the closing bracket of a JSON array.
        """
        # Same flush/drop logic as end_object(). Fix: the assert listed
        # the impossible ')' instead of '}', making arrays that end with
        # an object fail the assertion.
        assert self.pending in (',', '[', ']', '}', '')
        if self.pending not in (',', ''):
            self.data.write(self.pending)
        self.pending = ']'
        return self

    def key(self, name: str) -> 'JsonWriter':
        """ Write the key string of a JSON object.
        """
        assert self.pending
        self.data.write(self.pending)
        self.data.write(json.dumps(name, ensure_ascii=False))
        self.pending = ':'
        return self

    def value(self, value: Any) -> 'JsonWriter':
        """ Write out a value as JSON. The function uses the json.dumps()
            function for encoding the JSON. Thus any value that can be
            encoded by that function is permissible here.
        """
        return self.raw(json.dumps(value, ensure_ascii=False))

    def float(self, value: float, precision: int) -> 'JsonWriter':
        """ Write out a float value with the given precision.
        """
        return self.raw(f"{value:0.{precision}f}")

    def next(self) -> 'JsonWriter':
        """ Write out a delimiter comma between JSON object or array elements.
        """
        if self.pending:
            self.data.write(self.pending)
        self.pending = ','
        return self

    def raw(self, raw_json: str) -> 'JsonWriter':
        """ Write out the given value as is. This function is useful if
            a value is already available in JSON format.
        """
        if self.pending:
            self.data.write(self.pending)
            self.pending = ''
        self.data.write(raw_json)
        return self

    def keyval(self, key: str, value: Any) -> 'JsonWriter':
        """ Write out an object element with the given key and value.
            This is a shortcut for calling 'key()', 'value()' and 'next()'.
        """
        self.key(key)
        self.value(value)
        return self.next()

    def keyval_not_none(self, key: str, value: Optional[T],
                        transform: Optional[Callable[[T], Any]] = None) -> 'JsonWriter':
        """ Write out an object element only if the value is not None.
            If 'transform' is given, it must be a function that takes the
            value type and returns a JSON encodable type. The transform
            function will be called before the value is written out.
        """
        if value is not None:
            self.key(key)
            self.value(transform(value) if transform else value)
            self.next()
        return self
--- /dev/null
+# SPDX-License-Identifier: GPL-3.0-or-later
+#
+# This file is part of Nominatim. (https://nominatim.org)
+#
+# Copyright (C) 2024 by the Nominatim developer community.
+# For a full list of authors see the git log.
+"""
+Implementation of API version v1 (aka the legacy version).
+"""
+
+from .server_glue import get_routes as get_routes
--- /dev/null
+# SPDX-License-Identifier: GPL-3.0-or-later
+#
+# This file is part of Nominatim. (https://nominatim.org)
+#
+# Copyright (C) 2024 by the Nominatim developer community.
+# For a full list of authors see the git log.
+"""
+Hard-coded information about tag categories.
+
+These tables have been copied verbatim from the old PHP code. For future
+version a more flexible formatting is required.
+"""
+from typing import Tuple, Optional, Mapping, Union
+
+from ..results import ReverseResult, SearchResult
+from ..types import Bbox
+
+
def get_label_tag(category: Tuple[str, str], extratags: Optional[Mapping[str, str]],
                  rank: int, country: Optional[str]) -> str:
    """ Create a label tag for the given place that can be used as an XML name.

        The label is taken from explicit place tags where available and
        otherwise derived from the category and address rank of the object.
    """
    # Explicit place designations win for higher-level objects.
    if rank < 26 and extratags:
        for key in ('place', 'linked_place'):
            if key in extratags:
                return extratags[key].lower().replace(' ', '_')

    if category == ('boundary', 'administrative'):
        # Prefer a country-specific label, then the generic one.
        label = (ADMIN_LABELS.get((country or '', int(rank/2)))
                 or ADMIN_LABELS.get(('', int(rank/2)))
                 or 'Administrative')
    elif category[1] == 'postal_code':
        label = 'postcode'
    elif rank < 26:
        label = category[1] if category[1] != 'yes' else category[0]
    elif rank < 28:
        label = 'road'
    elif category[0] == 'place' \
            and category[1] in ('house_number', 'house_name', 'country_code'):
        label = category[1]
    else:
        label = category[0]

    return label.lower().replace(' ', '_')
+
+
def bbox_from_result(result: Union[ReverseResult, SearchResult]) -> Bbox:
    """ Compute a bounding box for the result. For ways and relations
        a given boundingbox is used. For all other objects, a box is
        computed around the centroid according to dimensions derived
        from the search rank.
    """
    # Postcode areas get a rank-dependent box, shrinking with rank.
    if result.category == ('place', 'postcode') and result.bbox is None:
        return Bbox.from_point(result.centroid,
                               0.05 - 0.012 * (result.rank_search - 21))

    is_node = result.osm_object is not None and result.osm_object[0] == 'N'
    if is_node or result.bbox is None:
        return Bbox.from_point(result.centroid,
                               NODE_EXTENT.get(result.category, 0.00005))

    return result.bbox
+
+
+OSM_ATTRIBUTION = 'Data © OpenStreetMap contributors, ODbL 1.0. http://osm.org/copyright'
+
+
# Mapping from internal one-letter OSM type to the full type name.
OSM_TYPE_NAME = {
    'N': 'node',
    'W': 'way',
    'R': 'relation'
}
+
+
# Labels for administrative boundaries keyed by (country code, level),
# where the level is int(rank/2) as computed in get_label_tag(). The
# empty country code holds the generic defaults; country-specific
# entries override them.
ADMIN_LABELS = {
    ('', 1): 'Continent',
    ('', 2): 'Country',
    ('', 3): 'Region',
    ('', 4): 'State',
    ('', 5): 'State District',
    ('', 6): 'County',
    ('', 7): 'Municipality',
    ('', 8): 'City',
    ('', 9): 'City District',
    ('', 10): 'Suburb',
    ('', 11): 'Neighbourhood',
    ('', 12): 'City Block',
    ('no', 3): 'State',
    ('no', 4): 'County',
    ('se', 3): 'State',
    ('se', 4): 'County'
}
+
+
# Icon names per (class, type) category. When an 'icon_base_url' option
# is set, the name is expanded to '<base>/<name>.p.20.png' for the
# 'icon' field of details output (see _format_details_json).
ICONS = {
    ('boundary', 'administrative'): 'poi_boundary_administrative',
    ('place', 'city'): 'poi_place_city',
    ('place', 'town'): 'poi_place_town',
    ('place', 'village'): 'poi_place_village',
    ('place', 'hamlet'): 'poi_place_village',
    ('place', 'suburb'): 'poi_place_village',
    ('place', 'locality'): 'poi_place_village',
    ('place', 'airport'): 'transport_airport2',
    ('aeroway', 'aerodrome'): 'transport_airport2',
    ('railway', 'station'): 'transport_train_station2',
    ('amenity', 'place_of_worship'): 'place_of_worship_unknown3',
    ('amenity', 'pub'): 'food_pub',
    ('amenity', 'bar'): 'food_bar',
    ('amenity', 'university'): 'education_university',
    ('tourism', 'museum'): 'tourist_museum',
    ('amenity', 'arts_centre'): 'tourist_art_gallery2',
    ('tourism', 'zoo'): 'tourist_zoo',
    ('tourism', 'theme_park'): 'poi_point_of_interest',
    ('tourism', 'attraction'): 'poi_point_of_interest',
    ('leisure', 'golf_course'): 'sport_golf',
    ('historic', 'castle'): 'tourist_castle',
    ('amenity', 'hospital'): 'health_hospital',
    ('amenity', 'school'): 'education_school',
    ('amenity', 'theatre'): 'tourist_theatre',
    ('amenity', 'library'): 'amenity_library',
    ('amenity', 'fire_station'): 'amenity_firestation3',
    ('amenity', 'police'): 'amenity_police2',
    ('amenity', 'bank'): 'money_bank2',
    ('amenity', 'post_office'): 'amenity_post_office',
    ('tourism', 'hotel'): 'accommodation_hotel2',
    ('amenity', 'cinema'): 'tourist_cinema',
    ('tourism', 'artwork'): 'tourist_art_gallery2',
    ('historic', 'archaeological_site'): 'tourist_archaeological2',
    ('amenity', 'doctors'): 'health_doctors',
    ('leisure', 'sports_centre'): 'sport_leisure_centre',
    ('leisure', 'swimming_pool'): 'sport_swimming_outdoor',
    ('shop', 'supermarket'): 'shopping_supermarket',
    ('shop', 'convenience'): 'shopping_convenience',
    ('amenity', 'restaurant'): 'food_restaurant',
    ('amenity', 'fast_food'): 'food_fastfood',
    ('amenity', 'cafe'): 'food_cafe',
    ('tourism', 'guest_house'): 'accommodation_bed_and_breakfast',
    ('amenity', 'pharmacy'): 'health_pharmacy_dispensing',
    ('amenity', 'fuel'): 'transport_fuel',
    ('natural', 'peak'): 'poi_peak',
    ('natural', 'wood'): 'landuse_coniferous_and_deciduous',
    ('shop', 'bicycle'): 'shopping_bicycle',
    ('shop', 'clothes'): 'shopping_clothes',
    ('shop', 'hairdresser'): 'shopping_hairdresser',
    ('shop', 'doityourself'): 'shopping_diy',
    ('shop', 'estate_agent'): 'shopping_estateagent2',
    ('shop', 'car'): 'shopping_car',
    ('shop', 'garden_centre'): 'shopping_garden_centre',
    ('shop', 'car_repair'): 'shopping_car_repair',
    ('shop', 'bakery'): 'shopping_bakery',
    ('shop', 'butcher'): 'shopping_butcher',
    ('shop', 'apparel'): 'shopping_clothes',
    ('shop', 'laundry'): 'shopping_laundrette',
    ('shop', 'beverages'): 'shopping_alcohol',
    ('shop', 'alcohol'): 'shopping_alcohol',
    ('shop', 'optician'): 'health_opticians',
    ('shop', 'chemist'): 'health_pharmacy',
    ('shop', 'gallery'): 'tourist_art_gallery2',
    ('shop', 'jewelry'): 'shopping_jewelry',
    ('tourism', 'information'): 'amenity_information',
    ('historic', 'ruins'): 'tourist_ruin',
    ('amenity', 'college'): 'education_school',
    ('historic', 'monument'): 'tourist_monument',
    ('historic', 'memorial'): 'tourist_monument',
    ('historic', 'mine'): 'poi_mine',
    ('tourism', 'caravan_site'): 'accommodation_caravan_park',
    ('amenity', 'bus_station'): 'transport_bus_station',
    ('amenity', 'atm'): 'money_atm2',
    ('tourism', 'viewpoint'): 'tourist_view_point',
    ('tourism', 'guesthouse'): 'accommodation_bed_and_breakfast',
    ('railway', 'tram'): 'transport_tram_stop',
    ('amenity', 'courthouse'): 'amenity_court',
    ('amenity', 'recycling'): 'amenity_recycling',
    ('amenity', 'dentist'): 'health_dentist',
    ('natural', 'beach'): 'tourist_beach',
    ('railway', 'tram_stop'): 'transport_tram_stop',
    ('amenity', 'prison'): 'amenity_prison',
    ('highway', 'bus_stop'): 'transport_bus_stop2'
}
+
# Half-width in degrees of the box put around node results, keyed by
# (class, type). Used by bbox_from_result() when no database bounding
# box is available; unlisted categories default to 0.00005.
NODE_EXTENT = {
    ('place', 'continent'): 25,
    ('place', 'country'): 7,
    ('place', 'state'): 2.6,
    ('place', 'province'): 2.6,
    ('place', 'region'): 1.0,
    ('place', 'county'): 0.7,
    ('place', 'city'): 0.16,
    ('place', 'municipality'): 0.16,
    ('place', 'island'): 0.32,
    ('place', 'postcode'): 0.16,
    ('place', 'town'): 0.04,
    ('place', 'village'): 0.02,
    ('place', 'hamlet'): 0.02,
    ('place', 'district'): 0.02,
    ('place', 'borough'): 0.02,
    ('place', 'suburb'): 0.02,
    ('place', 'locality'): 0.01,
    ('place', 'neighbourhood'): 0.01,
    ('place', 'quarter'): 0.01,
    ('place', 'city_block'): 0.01,
    ('landuse', 'farm'): 0.01,
    ('place', 'farm'): 0.01,
    ('place', 'airport'): 0.015,
    ('aeroway', 'aerodrome'): 0.015,
    ('railway', 'station'): 0.005
}
--- /dev/null
+# SPDX-License-Identifier: GPL-3.0-or-later
+#
+# This file is part of Nominatim. (https://nominatim.org)
+#
+# Copyright (C) 2024 by the Nominatim developer community.
+# For a full list of authors see the git log.
+"""
+Output formatters for API version v1.
+"""
+from typing import List, Dict, Mapping, Any
+import collections
+import datetime as dt
+
+from ..utils.json_writer import JsonWriter
+from ..status import StatusResult
+from ..results import DetailedResult, ReverseResults, SearchResults, \
+ AddressLines, AddressLine
+from ..localization import Locales
+from ..result_formatting import FormatDispatcher
+from .classtypes import ICONS
+from . import format_json, format_xml
+from .. import logging as loglib
+from ..server import content_types as ct
+
+
class RawDataList(List[Dict[str, Any]]):
    """ Data type for formatting raw data lists 'as is' in json.

        A plain list subclass: the distinct type lets the format
        dispatcher select a dedicated formatter for raw data.
    """
+
+
# Format dispatcher for API version v1. The mapping assigns response
# content types to format names; formatters register themselves below
# via the dispatch decorators.
dispatch = FormatDispatcher({'text': ct.CONTENT_TEXT,
                             'xml': ct.CONTENT_XML,
                             'debug': ct.CONTENT_HTML})
+
+
@dispatch.error_format_func
def _format_error(content_type: str, msg: str, status: int) -> str:
    """ Render the error message 'msg' in the requested content type.

        'status' is the HTTP status code that accompanies the error.
    """
    if content_type == ct.CONTENT_XML:
        # NOTE(review): 'msg' is not XML-escaped here; confirm callers
        # never pass text containing markup characters.
        return f"""<?xml version="1.0" encoding="UTF-8" ?>
<error>
  <code>{status}</code>
  <message>{msg}</message>
</error>
"""

    if content_type == ct.CONTENT_JSON:
        # Fix: build the JSON with the encoder instead of an f-string so
        # that quotes and control characters in 'msg' cannot produce a
        # syntactically invalid body. Compact separators keep the output
        # identical to the previous format for plain messages.
        import json
        return json.dumps({'error': {'code': status, 'message': msg}},
                          ensure_ascii=False, separators=(',', ':'))

    if content_type == ct.CONTENT_HTML:
        loglib.log().section('Execution error')
        loglib.log().var_dump('Status', status)
        loglib.log().var_dump('Message', msg)
        return loglib.get_and_disable()

    # Fallback for plain-text and unknown content types.
    return f"ERROR {status}: {msg}"
+
+
@dispatch.format_func(StatusResult, 'text')
def _format_status_text(result: StatusResult, _: Mapping[str, Any]) -> str:
    """ Format a status result as plain text: 'OK' or an error message.
    """
    # A non-zero status code signals an error state.
    return f"ERROR: {result.message}" if result.status else 'OK'
+
+
@dispatch.format_func(StatusResult, 'json')
def _format_status_json(result: StatusResult, _: Mapping[str, Any]) -> str:
    """ Format a status result as a compact JSON object.
    """
    out = JsonWriter()

    out.start_object()
    out.keyval('status', result.status)
    out.keyval('message', result.message)
    # Optional fields are skipped entirely when unset.
    out.keyval_not_none('data_updated', result.data_updated,
                        lambda v: v.isoformat())
    out.keyval('software_version', str(result.software_version))
    out.keyval_not_none('database_version', result.database_version, str)
    out.end_object()

    return out()
+
+
def _add_address_row(writer: JsonWriter, row: AddressLine,
                     locales: Locales) -> None:
    """ Write a single address line as a JSON object.
    """
    writer.start_object()
    writer.keyval('localname', locales.display_name(row.names))
    writer.keyval_not_none('place_id', row.place_id)

    if row.osm_object is not None:
        osm_type, osm_id = row.osm_object
        # Keep the historic field order: id before type.
        writer.keyval('osm_id', osm_id)
        writer.keyval('osm_type', osm_type)

    if row.extratags:
        writer.keyval_not_none('place_type', row.extratags.get('place_type'))

    writer.keyval('class', row.category[0])
    writer.keyval('type', row.category[1])
    writer.keyval_not_none('admin_level', row.admin_level)
    writer.keyval('rank_address', row.rank_address)
    writer.keyval('distance', row.distance)
    writer.keyval('isaddress', row.isaddress)
    writer.end_object()
+
+
def _add_address_rows(writer: JsonWriter, section: str, rows: AddressLines,
                      locales: Locales) -> None:
    """ Write a named JSON array of address lines into the current object.
    """
    writer.key(section)
    writer.start_array()
    for line in rows:
        _add_address_row(writer, line, locales)
        writer.next()
    writer.end_array()
    writer.next()
+
+
def _add_parent_rows_grouped(writer: JsonWriter, rows: AddressLines,
                             locales: Locales) -> None:
    """ Write the parent places as a 'hierarchy' object with the rows
        grouped by the type part of their category.
    """
    # Render each row separately so the groups can be sorted afterwards.
    groups: Dict[str, List[str]] = collections.defaultdict(list)
    for row in rows:
        line_writer = JsonWriter()
        _add_address_row(line_writer, row, locales)
        groups[row.category[1]].append(line_writer())

    writer.key('hierarchy')
    writer.start_object()
    for group_name, rendered_rows in groups.items():
        writer.key(group_name)
        writer.start_array()
        # The JSON strings start with the localname field, so plain
        # string sorting effectively orders by local name.
        for rendered in sorted(rendered_rows):
            writer.raw(rendered).next()
        writer.end_array().next()

    writer.end_object().next()
+
+
@dispatch.format_func(DetailedResult, 'json')
def _format_details_json(result: DetailedResult, options: Mapping[str, Any]) -> str:
    """ Format a details result as a single JSON object.

        Recognised options: 'locales' (localization of names),
        'icon_base_url' (enables the 'icon' field) and 'group_hierarchy'
        (groups parented places by category type).
    """
    locales = options.get('locales', Locales())
    geom = result.geometry.get('geojson')
    centroid = result.centroid.to_geojson()

    out = JsonWriter()
    out.start_object()\
       .keyval_not_none('place_id', result.place_id)\
       .keyval_not_none('parent_place_id', result.parent_place_id)

    if result.osm_object is not None:
        out.keyval('osm_type', result.osm_object[0])\
           .keyval('osm_id', result.osm_object[1])

    # 'isarea': substring test matches both 'Polygon' and 'MultiPolygon'.
    out.keyval('category', result.category[0])\
       .keyval('type', result.category[1])\
       .keyval('admin_level', result.admin_level)\
       .keyval('localname', result.locale_name or '')\
       .keyval('names', result.names or {})\
       .keyval('addresstags', result.address or {})\
       .keyval_not_none('housenumber', result.housenumber)\
       .keyval_not_none('calculated_postcode', result.postcode)\
       .keyval_not_none('country_code', result.country_code)\
       .keyval_not_none('indexed_date', result.indexed_date, lambda v: v.isoformat())\
       .keyval_not_none('importance', result.importance)\
       .keyval('calculated_importance', result.calculated_importance())\
       .keyval('extratags', result.extratags or {})\
       .keyval_not_none('calculated_wikipedia', result.wikipedia)\
       .keyval('rank_address', result.rank_address)\
       .keyval('rank_search', result.rank_search)\
       .keyval('isarea', 'Polygon' in (geom or result.geometry.get('type') or ''))\
       .key('centroid').raw(centroid).next()\
       .key('geometry').raw(geom or centroid).next()

    if options.get('icon_base_url', None):
        icon = ICONS.get(result.category)
        if icon:
            out.keyval('icon', f"{options['icon_base_url']}/{icon}.p.20.png")

    if result.address_rows is not None:
        _add_address_rows(out, 'address', result.address_rows, locales)

    if result.linked_rows:
        _add_address_rows(out, 'linked_places', result.linked_rows, locales)

    # Keywords are emitted as soon as either list was requested.
    if result.name_keywords is not None or result.address_keywords is not None:
        out.key('keywords').start_object()

        for sec, klist in (('name', result.name_keywords), ('address', result.address_keywords)):
            out.key(sec).start_array()
            for word in (klist or []):
                out.start_object()\
                   .keyval('id', word.word_id)\
                   .keyval('token', word.word_token)\
                   .end_object().next()
            out.end_array().next()

        out.end_object().next()

    if result.parented_rows is not None:
        if options.get('group_hierarchy', False):
            _add_parent_rows_grouped(out, result.parented_rows, locales)
        else:
            _add_address_rows(out, 'hierarchy', result.parented_rows, locales)

    out.end_object()

    return out()
+
+
@dispatch.format_func(ReverseResults, 'xml')
def _format_reverse_xml(results: ReverseResults, options: Mapping[str, Any]) -> str:
    """ Format reverse geocoding results as XML ('reversegeocode' root).
    """
    return format_xml.format_base_xml(results,
                                      options, True, 'reversegecode'
                                      if False else 'reversegeocode',
                                      {'querystring': options.get('query', '')}) \
        if False else \
        format_xml.format_base_xml(results,
                                   options, True, 'reversegeocode',
                                   {'querystring': options.get('query', '')})
+
+
@dispatch.format_func(ReverseResults, 'geojson')
def _format_reverse_geojson(results: ReverseResults,
                            options: Mapping[str, Any]) -> str:
    """ Format reverse geocoding results as GeoJSON.
    """
    return format_json.format_base_geojson(results, options, True)
+
+
@dispatch.format_func(ReverseResults, 'geocodejson')
def _format_reverse_geocodejson(results: ReverseResults,
                                options: Mapping[str, Any]) -> str:
    """ Format reverse geocoding results as GeocodeJSON.
    """
    return format_json.format_base_geocodejson(results, options, True)
+
+
@dispatch.format_func(ReverseResults, 'json')
def _format_reverse_json(results: ReverseResults,
                         options: Mapping[str, Any]) -> str:
    """ Format reverse geocoding results in the legacy JSON format,
        which labels the category field 'class'.
    """
    return format_json.format_base_json(results, options, True,
                                        class_label='class')
+
+
@dispatch.format_func(ReverseResults, 'jsonv2')
def _format_reverse_jsonv2(results: ReverseResults,
                           options: Mapping[str, Any]) -> str:
    """ Format reverse geocoding results in the jsonv2 format, which
        labels the category field 'category'.
    """
    return format_json.format_base_json(results, options, True,
                                        class_label='category')
+
+
@dispatch.format_func(SearchResults, 'xml')
def _format_search_xml(results: SearchResults, options: Mapping[str, Any]) -> str:
    """ Format search results as XML ('searchresults' root), passing
        through the extra attributes more_url, exclude_place_ids and
        viewbox when they are set in the options.
    """
    extra = {'querystring': options.get('query', '')}
    for attr in ('more_url', 'exclude_place_ids', 'viewbox'):
        if options.get(attr):
            extra[attr] = options[attr]
    return format_xml.format_base_xml(results, options, False, 'searchresults',
                                      extra)
+
+
@dispatch.format_func(SearchResults, 'geojson')
def _format_search_geojson(results: SearchResults,
                           options: Mapping[str, Any]) -> str:
    """ Format search results as GeoJSON.
    """
    return format_json.format_base_geojson(results, options, False)
+
+
@dispatch.format_func(SearchResults, 'geocodejson')
def _format_search_geocodejson(results: SearchResults,
                               options: Mapping[str, Any]) -> str:
    """ Format search results as GeocodeJSON.
    """
    return format_json.format_base_geocodejson(results, options, False)
+
+
+@dispatch.format_func(SearchResults, 'json')
+def _format_search_json(results: SearchResults,
+ options: Mapping[str, Any]) -> str:
+ """ Format search results in Nominatim's json format; the main
+ category is reported under the legacy key 'class'.
+ """
+ return format_json.format_base_json(results, options, False,
+ class_label='class')
+
+
+@dispatch.format_func(SearchResults, 'jsonv2')
+def _format_search_jsonv2(results: SearchResults,
+ options: Mapping[str, Any]) -> str:
+ """ Format search results in Nominatim's jsonv2 format; the main
+ category is reported under the key 'category'.
+ """
+ return format_json.format_base_json(results, options, False,
+ class_label='category')
+
+
+@dispatch.format_func(RawDataList, 'json')
+def _format_raw_data_json(results: RawDataList, _: Mapping[str, Any]) -> str:
+ """ Serialise a list of raw data rows as a JSON array of objects.
+ Options are ignored for this format.
+ """
+ out = JsonWriter()
+ out.start_array()
+ for res in results:
+ out.start_object()
+ for k, v in res.items():
+ # datetime values are not directly JSON-serialisable;
+ # render them as 'YYYY-MM-DD HH:MM:SS'.
+ if isinstance(v, dt.datetime):
+ out.keyval(k, v.isoformat(sep=' ', timespec='seconds'))
+ else:
+ out.keyval(k, v)
+ out.end_object().next()
+
+ out.end_array()
+
+ return out()
--- /dev/null
+# SPDX-License-Identifier: GPL-3.0-or-later
+#
+# This file is part of Nominatim. (https://nominatim.org)
+#
+# Copyright (C) 2024 by the Nominatim developer community.
+# For a full list of authors see the git log.
+"""
+Helper functions for output of results in json formats.
+"""
+from typing import Mapping, Any, Optional, Tuple, Union
+
+from ..utils.json_writer import JsonWriter
+from ..results import AddressLines, ReverseResults, SearchResults
+from . import classtypes as cl
+
+
+def _write_osm_id(out: JsonWriter, osm_object: Optional[Tuple[str, int]]) -> None:
+ """ Write 'osm_type' and 'osm_id' keys for the given (type letter, id)
+ pair. Does nothing when no OSM object is given. 'osm_type' is
+ omitted when the type letter is not in cl.OSM_TYPE_NAME.
+ """
+ if osm_object is not None:
+ out.keyval_not_none('osm_type', cl.OSM_TYPE_NAME.get(osm_object[0], None))\
+ .keyval('osm_id', osm_object[1])
+
+
+def _write_typed_address(out: JsonWriter, address: Optional[AddressLines],
+ country_code: Optional[str]) -> None:
+ """ Write the address parts of a result keyed by their label tag
+ (house_number, road, city, ...). Only lines flagged as 'isaddress'
+ are used; when several lines map to the same label, the first one
+ wins. ISO3166-2 codes are added under 'ISO3166-2-lvl<admin_level>'
+ keys and the country code, if known, is appended last.
+ """
+ parts = {}
+ for line in (address or []):
+ if line.isaddress:
+ if line.local_name:
+ label = cl.get_label_tag(line.category, line.extratags,
+ line.rank_address, country_code)
+ # first line for a label takes precedence
+ if label not in parts:
+ parts[label] = line.local_name
+ if line.names and 'ISO3166-2' in line.names and line.admin_level:
+ parts[f"ISO3166-2-lvl{line.admin_level}"] = line.names['ISO3166-2']
+
+ for k, v in parts.items():
+ out.keyval(k, v)
+
+ if country_code:
+ out.keyval('country_code', country_code)
+
+
+def _write_geocodejson_address(out: JsonWriter,
+ address: Optional[AddressLines],
+ obj_place_id: Optional[int],
+ country_code: Optional[str]) -> None:
+ """ Write address parts using the geocodejson vocabulary: 'postcode'
+ and 'housenumber' are written directly, all other address lines
+ with rank 4..27 are keyed by their GEOCODEJSON_RANKS name. The
+ line describing the result object itself (same place_id) is
+ skipped; for equal rank names the first line wins.
+ """
+ extra = {}
+ for line in (address or []):
+ if line.isaddress and line.local_name:
+ if line.category[1] in ('postcode', 'postal_code'):
+ out.keyval('postcode', line.local_name)
+ elif line.category[1] == 'house_number':
+ out.keyval('housenumber', line.local_name)
+ elif ((obj_place_id is None or obj_place_id != line.place_id)
+ and line.rank_address >= 4 and line.rank_address < 28):
+ rank_name = GEOCODEJSON_RANKS[line.rank_address]
+ if rank_name not in extra:
+ extra[rank_name] = line.local_name
+
+ for k, v in extra.items():
+ out.keyval(k, v)
+
+ if country_code:
+ out.keyval('country_code', country_code)
+
+
+def format_base_json(results: Union[ReverseResults, SearchResults],
+ options: Mapping[str, Any], simple: bool,
+ class_label: str) -> str:
+ """ Return the result list as a simple json string in custom Nominatim format.
+ """
+ out = JsonWriter()
+
+ if simple:
+ if not results:
+ return '{"error":"Unable to geocode"}'
+ else:
+ out.start_array()
+
+ for result in results:
+ out.start_object()\
+ .keyval_not_none('place_id', result.place_id)\
+ .keyval('licence', cl.OSM_ATTRIBUTION)\
+
+ _write_osm_id(out, result.osm_object)
+
+ out.keyval('lat', f"{result.centroid.lat}")\
+ .keyval('lon', f"{result.centroid.lon}")\
+ .keyval(class_label, result.category[0])\
+ .keyval('type', result.category[1])\
+ .keyval('place_rank', result.rank_search)\
+ .keyval('importance', result.calculated_importance())\
+ .keyval('addresstype', cl.get_label_tag(result.category, result.extratags,
+ result.rank_address,
+ result.country_code))\
+ .keyval('name', result.locale_name or '')\
+ .keyval('display_name', result.display_name or '')
+
+ if options.get('icon_base_url', None):
+ icon = cl.ICONS.get(result.category)
+ if icon:
+ out.keyval('icon', f"{options['icon_base_url']}/{icon}.p.20.png")
+
+ if options.get('addressdetails', False):
+ out.key('address').start_object()
+ _write_typed_address(out, result.address_rows, result.country_code)
+ out.end_object().next()
+
+ if options.get('extratags', False):
+ out.keyval('extratags', result.extratags)
+
+ if options.get('namedetails', False):
+ out.keyval('namedetails', result.names)
+
+ bbox = cl.bbox_from_result(result)
+ out.key('boundingbox').start_array()\
+ .value(f"{bbox.minlat:0.7f}").next()\
+ .value(f"{bbox.maxlat:0.7f}").next()\
+ .value(f"{bbox.minlon:0.7f}").next()\
+ .value(f"{bbox.maxlon:0.7f}").next()\
+ .end_array().next()
+
+ if result.geometry:
+ for key in ('text', 'kml'):
+ out.keyval_not_none('geo' + key, result.geometry.get(key))
+ if 'geojson' in result.geometry:
+ out.key('geojson').raw(result.geometry['geojson']).next()
+ out.keyval_not_none('svg', result.geometry.get('svg'))
+
+ out.end_object()
+
+ if simple:
+ return out()
+
+ out.next()
+
+ out.end_array()
+
+ return out()
+
+
+def format_base_geojson(results: Union[ReverseResults, SearchResults],
+ options: Mapping[str, Any],
+ simple: bool) -> str:
+ """ Return the result list as a geojson string.
+
+ Results are written as a FeatureCollection; with 'simple' set
+ an empty result list yields an error object instead. Optional
+ address, extratags and namedetails sections are controlled via
+ 'options'.
+ """
+ if not results and simple:
+ return '{"error":"Unable to geocode"}'
+
+ out = JsonWriter()
+
+ out.start_object()\
+ .keyval('type', 'FeatureCollection')\
+ .keyval('licence', cl.OSM_ATTRIBUTION)\
+ .key('features').start_array()
+
+ for result in results:
+ out.start_object()\
+ .keyval('type', 'Feature')\
+ .key('properties').start_object()
+
+ out.keyval_not_none('place_id', result.place_id)
+
+ _write_osm_id(out, result.osm_object)
+
+ out.keyval('place_rank', result.rank_search)\
+ .keyval('category', result.category[0])\
+ .keyval('type', result.category[1])\
+ .keyval('importance', result.calculated_importance())\
+ .keyval('addresstype', cl.get_label_tag(result.category, result.extratags,
+ result.rank_address,
+ result.country_code))\
+ .keyval('name', result.locale_name or '')\
+ .keyval('display_name', result.display_name or '')
+
+ if options.get('addressdetails', False):
+ out.key('address').start_object()
+ _write_typed_address(out, result.address_rows, result.country_code)
+ out.end_object().next()
+
+ if options.get('extratags', False):
+ out.keyval('extratags', result.extratags)
+
+ if options.get('namedetails', False):
+ out.keyval('namedetails', result.names)
+
+ out.end_object().next() # properties
+
+ # geojson-style bbox: [minlon, minlat, maxlon, maxlat]
+ out.key('bbox').start_array()
+ for coord in cl.bbox_from_result(result).coords:
+ out.float(coord, 7).next()
+ out.end_array().next()
+
+ # use the full polygon when requested, the centroid otherwise
+ out.key('geometry').raw(result.geometry.get('geojson')
+ or result.centroid.to_geojson()).next()
+
+ out.end_object().next()
+
+ out.end_array().next().end_object()
+
+ return out()
+
+
+def format_base_geocodejson(results: Union[ReverseResults, SearchResults],
+ options: Mapping[str, Any], simple: bool) -> str:
+ """ Return the result list as a geocodejson string.
+ """
+ if not results and simple:
+ return '{"error":"Unable to geocode"}'
+
+ out = JsonWriter()
+
+ out.start_object()\
+ .keyval('type', 'FeatureCollection')\
+ .key('geocoding').start_object()\
+ .keyval('version', '0.1.0')\
+ .keyval('attribution', cl.OSM_ATTRIBUTION)\
+ .keyval('licence', 'ODbL')\
+ .keyval_not_none('query', options.get('query'))\
+ .end_object().next()\
+ .key('features').start_array()
+
+ for result in results:
+ out.start_object()\
+ .keyval('type', 'Feature')\
+ .key('properties').start_object()\
+ .key('geocoding').start_object()
+
+ out.keyval_not_none('place_id', result.place_id)
+
+ _write_osm_id(out, result.osm_object)
+
+ out.keyval('osm_key', result.category[0])\
+ .keyval('osm_value', result.category[1])\
+ .keyval('type', GEOCODEJSON_RANKS[max(3, min(28, result.rank_address))])\
+ .keyval_not_none('accuracy', getattr(result, 'distance', None), transform=int)\
+ .keyval('label', result.display_name or '')\
+ .keyval_not_none('name', result.locale_name or None)\
+
+ if options.get('addressdetails', False):
+ _write_geocodejson_address(out, result.address_rows, result.place_id,
+ result.country_code)
+
+ out.key('admin').start_object()
+ if result.address_rows:
+ for line in result.address_rows:
+ if line.isaddress and (line.admin_level or 15) < 15 and line.local_name \
+ and line.category[0] == 'boundary' and line.category[1] == 'administrative':
+ out.keyval(f"level{line.admin_level}", line.local_name)
+ out.end_object().next()
+
+ if options.get('extratags', False):
+ out.keyval('extra', result.extratags)
+
+ out.end_object().next().end_object().next()
+
+ out.key('geometry').raw(result.geometry.get('geojson')
+ or result.centroid.to_geojson()).next()
+
+ out.end_object().next()
+
+ out.end_array().next().end_object()
+
+ return out()
+
+
+# Mapping from Nominatim address rank (3-28) to the geocodejson
+# place 'type' vocabulary used in format_base_geocodejson and
+# _write_geocodejson_address.
+GEOCODEJSON_RANKS = {
+ 3: 'locality',
+ 4: 'country',
+ 5: 'state', 6: 'state', 7: 'state', 8: 'state', 9: 'state',
+ 10: 'county', 11: 'county', 12: 'county',
+ 13: 'city', 14: 'city', 15: 'city', 16: 'city',
+ 17: 'district', 18: 'district', 19: 'district', 20: 'district', 21: 'district',
+ 22: 'locality', 23: 'locality', 24: 'locality',
+ 25: 'street', 26: 'street', 27: 'street', 28: 'house'}
--- /dev/null
+# SPDX-License-Identifier: GPL-3.0-or-later
+#
+# This file is part of Nominatim. (https://nominatim.org)
+#
+# Copyright (C) 2024 by the Nominatim developer community.
+# For a full list of authors see the git log.
+"""
+Helper functions for output of results in XML format.
+"""
+from typing import Mapping, Any, Optional, Union
+import datetime as dt
+import xml.etree.ElementTree as ET
+
+from ..results import AddressLines, ReverseResult, ReverseResults, \
+ SearchResult, SearchResults
+from . import classtypes as cl
+
+
+def _write_xml_address(root: ET.Element, address: AddressLines,
+ country_code: Optional[str]) -> None:
+ """ Add a sub-element per address part to 'root', using the label tag
+ as the element name. Only lines flagged 'isaddress' are used; the
+ first line for a label wins. ISO3166-2 codes become elements named
+ 'ISO3166-2-lvl<admin_level>' and the country code, if known, is
+ appended last.
+ """
+ parts = {}
+ for line in address:
+ if line.isaddress:
+ if line.local_name:
+ label = cl.get_label_tag(line.category, line.extratags,
+ line.rank_address, country_code)
+ # first line for a label takes precedence
+ if label not in parts:
+ parts[label] = line.local_name
+ if line.names and 'ISO3166-2' in line.names and line.admin_level:
+ parts[f"ISO3166-2-lvl{line.admin_level}"] = line.names['ISO3166-2']
+
+ for k, v in parts.items():
+ ET.SubElement(root, k).text = v
+
+ if country_code:
+ ET.SubElement(root, 'country_code').text = country_code
+
+
+def _create_base_entry(result: Union[ReverseResult, SearchResult],
+ root: ET.Element, simple: bool) -> ET.Element:
+ """ Append a place element for 'result' to 'root' and return it.
+
+ In simple mode the element is named 'result' and the display name
+ goes into the element text; otherwise it is named 'place' and the
+ display name plus class/type/importance become attributes.
+ """
+ place = ET.SubElement(root, 'result' if simple else 'place')
+ if result.place_id is not None:
+ place.set('place_id', str(result.place_id))
+ if result.osm_object:
+ osm_type = cl.OSM_TYPE_NAME.get(result.osm_object[0], None)
+ if osm_type is not None:
+ place.set('osm_type', osm_type)
+ place.set('osm_id', str(result.osm_object[1]))
+ if result.names and 'ref' in result.names:
+ place.set('ref', result.names['ref'])
+ elif result.locale_name:
+ # bug reproduced from PHP
+ place.set('ref', result.locale_name)
+ place.set('lat', f"{result.centroid.lat:.7f}")
+ place.set('lon', f"{result.centroid.lon:.7f}")
+
+ bbox = cl.bbox_from_result(result)
+ place.set('boundingbox',
+ f"{bbox.minlat:.7f},{bbox.maxlat:.7f},{bbox.minlon:.7f},{bbox.maxlon:.7f}")
+
+ place.set('place_rank', str(result.rank_search))
+ place.set('address_rank', str(result.rank_address))
+
+ if result.geometry:
+ for key in ('text', 'svg'):
+ if key in result.geometry:
+ place.set('geo' + key, result.geometry[key])
+ if 'kml' in result.geometry:
+ # kml is XML itself, so parse and nest it as a subtree
+ ET.SubElement(root if simple else place, 'geokml')\
+ .append(ET.fromstring(result.geometry['kml']))
+ if 'geojson' in result.geometry:
+ place.set('geojson', result.geometry['geojson'])
+
+ if simple:
+ place.text = result.display_name or ''
+ else:
+ place.set('display_name', result.display_name or '')
+ place.set('class', result.category[0])
+ place.set('type', result.category[1])
+ place.set('importance', str(result.calculated_importance()))
+
+ return place
+
+
+def format_base_xml(results: Union[ReverseResults, SearchResults],
+ options: Mapping[str, Any],
+ simple: bool, xml_root_tag: str,
+ xml_extra_info: Mapping[str, str]) -> str:
+ """ Format the result into an XML response. With 'simple' exactly one
+ result will be output, otherwise a list.
+ """
+ root = ET.Element(xml_root_tag)
+ root.set('timestamp', dt.datetime.utcnow().strftime('%a, %d %b %Y %H:%M:%S +00:00'))
+ root.set('attribution', cl.OSM_ATTRIBUTION)
+ for k, v in xml_extra_info.items():
+ root.set(k, v)
+
+ if simple and not results:
+ ET.SubElement(root, 'error').text = 'Unable to geocode'
+
+ for result in results:
+ place = _create_base_entry(result, root, simple)
+
+ if not simple and options.get('icon_base_url', None):
+ icon = cl.ICONS.get(result.category)
+ if icon:
+ place.set('icon', icon)
+
+ if options.get('addressdetails', False) and result.address_rows:
+ _write_xml_address(ET.SubElement(root, 'addressparts') if simple else place,
+ result.address_rows, result.country_code)
+
+ if options.get('extratags', False):
+ eroot = ET.SubElement(root if simple else place, 'extratags')
+ if result.extratags:
+ for k, v in result.extratags.items():
+ ET.SubElement(eroot, 'tag', attrib={'key': k, 'value': v})
+
+ if options.get('namedetails', False):
+ eroot = ET.SubElement(root if simple else place, 'namedetails')
+ if result.names:
+ for k, v in result.names.items():
+ ET.SubElement(eroot, 'name', attrib={'desc': k}).text = v
+
+ return '<?xml version="1.0" encoding="UTF-8" ?>\n' + ET.tostring(root, encoding='unicode')
--- /dev/null
+# SPDX-License-Identifier: GPL-3.0-or-later
+#
+# This file is part of Nominatim. (https://nominatim.org)
+#
+# Copyright (C) 2024 by the Nominatim developer community.
+# For a full list of authors see the git log.
+"""
+Helper function for parsing parameters and and outputting data
+specifically for the v1 version of the API.
+"""
+from typing import Tuple, Optional, Any, Dict, Iterable
+from itertools import chain
+import re
+
+from ..results import SearchResult, SearchResults, SourceTable
+from ..types import SearchDetails, GeometryFormat
+
+
+# Lookup table from map zoom level (list index 0-18) to the maximum
+# address rank considered by reverse geocoding (see zoom_to_rank()).
+REVERSE_MAX_RANKS = [2, 2, 2, # 0-2 Continent/Sea
+ 4, 4, # 3-4 Country
+ 8, # 5 State
+ 10, 10, # 6-7 Region
+ 12, 12, # 8-9 County
+ 16, 17, # 10-11 City
+ 18, # 12 Town
+ 19, # 13 Village/Suburb
+ 22, # 14 Hamlet/Neighbourhood
+ 25, # 15 Localities
+ 26, # 16 Major Streets
+ 27, # 17 Minor Streets
+ 30 # 18 Building
+ ]
+
+
+def zoom_to_rank(zoom: int) -> int:
+ """ Convert a zoom parameter into a rank according to the v1 API spec.
+ Zoom levels outside the supported 0-18 range are clamped.
+ """
+ return REVERSE_MAX_RANKS[max(0, min(18, zoom))]
+
+
+# Address-rank range (min, max) searched for each 'featureType'
+# parameter value of the v1 search API.
+FEATURE_TYPE_TO_RANK: Dict[Optional[str], Tuple[int, int]] = {
+ 'country': (4, 4),
+ 'state': (8, 8),
+ 'city': (14, 16),
+ 'settlement': (8, 20)
+}
+
+
+def feature_type_to_rank(feature_type: Optional[str]) -> Tuple[int, int]:
+ """ Convert a feature type parameter to a tuple of
+ minimum rank and maximum rank. Unknown or missing feature
+ types yield the full range (0, 30).
+ """
+ return FEATURE_TYPE_TO_RANK.get(feature_type, (0, 30))
+
+
+def extend_query_parts(queryparts: Dict[str, Any], details: Dict[str, Any],
+ feature_type: Optional[str],
+ namedetails: bool, extratags: bool,
+ excluded: Iterable[str]) -> None:
+ """ Add parameters from details dictionary to the query parts
+ dictionary which is suitable as URL parameter dictionary.
+
+ 'queryparts' is modified in place. The details are normalised
+ through SearchDetails.from_kwargs first, so only non-default
+ values end up in the URL.
+ """
+ parsed = SearchDetails.from_kwargs(details)
+ if parsed.geometry_output != GeometryFormat.NONE:
+ if GeometryFormat.GEOJSON & parsed.geometry_output:
+ queryparts['polygon_geojson'] = '1'
+ if GeometryFormat.KML & parsed.geometry_output:
+ queryparts['polygon_kml'] = '1'
+ if GeometryFormat.SVG & parsed.geometry_output:
+ queryparts['polygon_svg'] = '1'
+ if GeometryFormat.TEXT & parsed.geometry_output:
+ queryparts['polygon_text'] = '1'
+ if parsed.address_details:
+ queryparts['addressdetails'] = '1'
+ if namedetails:
+ queryparts['namedetails'] = '1'
+ if extratags:
+ queryparts['extratags'] = '1'
+ if parsed.geometry_simplification > 0.0:
+ queryparts['polygon_threshold'] = f"{parsed.geometry_simplification:.6g}"
+ if parsed.max_results != 10:
+ queryparts['limit'] = str(parsed.max_results)
+ if parsed.countries:
+ queryparts['countrycodes'] = ','.join(parsed.countries)
+ # merge caller-supplied exclusions with positive parsed ones
+ queryparts['exclude_place_ids'] = \
+ ','.join(chain(excluded, map(str, (e for e in parsed.excluded if e > 0))))
+ if parsed.viewbox:
+ queryparts['viewbox'] = ','.join(f"{c:.7g}" for c in parsed.viewbox.coords)
+ if parsed.bounded_viewbox:
+ queryparts['bounded'] = '1'
+ if not details['dedupe']:
+ queryparts['dedupe'] = '0'
+ if feature_type in FEATURE_TYPE_TO_RANK:
+ queryparts['featureType'] = feature_type
+
+
+def deduplicate_results(results: SearchResults, max_results: int) -> SearchResults:
+ """ Remove results that look like duplicates.
+
+ Two results are considered the same if they have the same OSM ID
+ or if they have the same category, display name and rank.
+ At most 'max_results' results are returned.
+ """
+ osm_ids_done = set()
+ classification_done = set()
+ deduped = SearchResults()
+ for result in results:
+ if result.source_table == SourceTable.POSTCODE:
+ assert result.names and 'ref' in result.names
+ # drop the artificial postcode point when a real postal_code
+ # boundary relation for the same code is in the result set
+ if any(_is_postcode_relation_for(r, result.names['ref']) for r in results):
+ continue
+ if result.source_table == SourceTable.PLACEX:
+ classification = (result.osm_object[0] if result.osm_object else None,
+ result.category,
+ result.display_name,
+ result.rank_address)
+ if result.osm_object not in osm_ids_done \
+ and classification not in classification_done:
+ deduped.append(result)
+ osm_ids_done.add(result.osm_object)
+ classification_done.add(classification)
+ else:
+ # results from other tables are never deduplicated
+ deduped.append(result)
+ if len(deduped) >= max_results:
+ break
+
+ return deduped
+
+
+def _is_postcode_relation_for(result: SearchResult, postcode: str) -> bool:
+ """ Check if the result is a postal_code boundary relation whose
+ 'ref' name equals the given postcode.
+ """
+ return result.source_table == SourceTable.PLACEX \
+ and result.osm_object is not None \
+ and result.osm_object[0] == 'R' \
+ and result.category == ('boundary', 'postal_code') \
+ and result.names is not None \
+ and result.names.get('ref') == postcode
+
+
+def _deg(axis: str) -> str:
+ """ Regex fragment for decimal degrees, captured as '<axis>_deg'. """
+ return f"(?P<{axis}_deg>\\d+\\.\\d+)°?"
+
+
+def _deg_min(axis: str) -> str:
+ """ Regex fragment for degrees + minutes, captured as '<axis>_deg'/'<axis>_min'. """
+ return f"(?P<{axis}_deg>\\d+)[°\\s]+(?P<{axis}_min>[\\d.]+)[′']*"
+
+
+def _deg_min_sec(axis: str) -> str:
+ """ Regex fragment for degrees + minutes + seconds, captured per axis. """
+ return f"(?P<{axis}_deg>\\d+)[°\\s]+(?P<{axis}_min>\\d+)[′'\\s]+(?P<{axis}_sec>[\\d.]+)[\"″]*"
+
+
+# Patterns for coordinates embedded in a query, with hemisphere letter
+# before or after the value, in decimal, deg/min or deg/min/sec notation,
+# plus a plain signed 'lat, lon' pair. The surrounding text is captured
+# as 'pre' and 'post'.
+COORD_REGEX = [re.compile(r'(?:(?P<pre>.*?)\s+)??' + r + r'(?:\s+(?P<post>.*))?') for r in (
+ r"(?P<ns>[NS])\s*" + _deg('lat') + r"[\s,]+" + r"(?P<ew>[EW])\s*" + _deg('lon'),
+ _deg('lat') + r"\s*(?P<ns>[NS])[\s,]+" + _deg('lon') + r"\s*(?P<ew>[EW])",
+ r"(?P<ns>[NS])\s*" + _deg_min('lat') + r"[\s,]+" + r"(?P<ew>[EW])\s*" + _deg_min('lon'),
+ _deg_min('lat') + r"\s*(?P<ns>[NS])[\s,]+" + _deg_min('lon') + r"\s*(?P<ew>[EW])",
+ r"(?P<ns>[NS])\s*" + _deg_min_sec('lat') + r"[\s,]+" + r"(?P<ew>[EW])\s*" + _deg_min_sec('lon'),
+ _deg_min_sec('lat') + r"\s*(?P<ns>[NS])[\s,]+" + _deg_min_sec('lon') + r"\s*(?P<ew>[EW])",
+ r"\[?(?P<lat_deg>[+-]?\d+\.\d+)[\s,]+(?P<lon_deg>[+-]?\d+\.\d+)\]?"
+)]
+
+
+def extract_coords_from_query(query: str) -> Tuple[str, Optional[float], Optional[float]]:
+ """ Look for something that is formatted like a coordinate at the
+ beginning or end of the query. If found, extract the coordinate and
+ return the remaining query (or the empty string if the query
+ consisted of nothing but a coordinate).
+
+ Only the first match will be returned. Returns (query, x, y)
+ where x is the longitude and y the latitude, or (query, None,
+ None) when no coordinate was found.
+ """
+ for regex in COORD_REGEX:
+ match = regex.fullmatch(query)
+ if match is None:
+ continue
+ groups = match.groupdict()
+ # only accept the coordinate at the start or end of the query,
+ # i.e. when at most one side has surrounding text
+ if not groups['pre'] or not groups['post']:
+ # minute/second groups are absent for the decimal patterns,
+ # hence the defaults
+ x = float(groups['lon_deg']) \
+ + float(groups.get('lon_min', 0.0)) / 60.0 \
+ + float(groups.get('lon_sec', 0.0)) / 3600.0
+ if groups.get('ew') == 'W':
+ x = -x
+ y = float(groups['lat_deg']) \
+ + float(groups.get('lat_min', 0.0)) / 60.0 \
+ + float(groups.get('lat_sec', 0.0)) / 3600.0
+ if groups.get('ns') == 'S':
+ y = -y
+ return groups['pre'] or groups['post'] or '', x, y
+
+ return query, None, None
+
+
+# Hidden '[key=value]' category filter anywhere in the query string.
+CATEGORY_REGEX = re.compile(r'(?P<pre>.*?)\[(?P<cls>[a-zA-Z_]+)=(?P<typ>[a-zA-Z_]+)\](?P<post>.*)')
+
+
+def extract_category_from_query(query: str) -> Tuple[str, Optional[str], Optional[str]]:
+ """ Extract a hidden category specification of the form '[key=value]' from
+ the query. If found, extract key and value and
+ return the remaining query (or the empty string if the query
+ consisted of nothing but a category).
+
+ Only the first match will be returned.
+ """
+ match = CATEGORY_REGEX.search(query)
+ if match is not None:
+ # stitch the text before and after the bracket expression back together
+ return (match.group('pre').strip() + ' ' + match.group('post').strip()).strip(), \
+ match.group('cls'), match.group('typ')
+
+ return query, None, None
--- /dev/null
+# SPDX-License-Identifier: GPL-3.0-or-later
+#
+# This file is part of Nominatim. (https://nominatim.org)
+#
+# Copyright (C) 2024 by the Nominatim developer community.
+# For a full list of authors see the git log.
+"""
+Generic part of the server implementation of the v1 API.
+Combine with the scaffolding provided for the various Python ASGI frameworks.
+"""
+from typing import Optional, Any, Type, Dict, cast, Sequence, Tuple
+from functools import reduce
+import dataclasses
+from urllib.parse import urlencode
+
+import sqlalchemy as sa
+
+from ..errors import UsageError
+from .. import logging as loglib
+from ..core import NominatimAPIAsync
+from .format import RawDataList
+from ..types import DataLayer, GeometryFormat, PlaceRef, PlaceID, OsmID, Point
+from ..status import StatusResult
+from ..results import DetailedResult, ReverseResults, SearchResult, SearchResults
+from ..localization import Locales
+from . import helpers
+from ..server import content_types as ct
+from ..server.asgi_adaptor import ASGIAdaptor, EndpointFunc
+from ..sql.async_core_library import PGCORE_ERROR
+
+
+def build_response(adaptor: ASGIAdaptor, output: str, status: int = 200,
+ num_results: int = 0) -> Any:
+ """ Create a response from the given output. Wraps a JSONP function
+ around the response, if necessary.
+ """
+ # JSONP only applies to successful JSON responses
+ if adaptor.content_type == ct.CONTENT_JSON and status == 200:
+ jsonp = adaptor.get('json_callback')
+ if jsonp is not None:
+ # the callback must be a dotted chain of identifiers to
+ # avoid script injection through the parameter
+ if any(not part.isidentifier() for part in jsonp.split('.')):
+ adaptor.raise_error('Invalid json_callback value')
+ output = f"{jsonp}({output})"
+ adaptor.content_type = 'application/javascript; charset=utf-8'
+
+ return adaptor.create_response(status, output, num_results)
+
+
+def get_accepted_languages(adaptor: ASGIAdaptor) -> str:
+ """ Return the accepted languages, taking the 'accept-language'
+ parameter over the HTTP header over the configured default.
+ """
+ return adaptor.get('accept-language')\
+ or adaptor.get_header('accept-language')\
+ or adaptor.config().DEFAULT_LANGUAGE
+
+
+def setup_debugging(adaptor: ASGIAdaptor) -> bool:
+ """ Set up collection of debug information if requested.
+
+ Return True when debugging was requested. Debug output is
+ delivered as HTML, so the content type is switched accordingly.
+ """
+ if adaptor.get_bool('debug', False):
+ loglib.set_log_output('html')
+ adaptor.content_type = ct.CONTENT_HTML
+ return True
+
+ return False
+
+
+def get_layers(adaptor: ASGIAdaptor) -> Optional[DataLayer]:
+ """ Return a parsed version of the layer parameter.
+ The parameter is a comma-separated list of layer names which are
+ or-ed into a single DataLayer flag value. Returns None when the
+ parameter is absent.
+ """
+ param = adaptor.get('layer', None)
+ if param is None:
+ return None
+
+ return cast(DataLayer,
+ reduce(DataLayer.__or__,
+ (getattr(DataLayer, s.upper()) for s in param.split(','))))
+
+
+def parse_format(adaptor: ASGIAdaptor, result_type: Type[Any], default: str) -> str:
+ """ Get and check the 'format' parameter and prepare the formatter.
+ `result_type` is the type of result to be returned by the function
+ and `default` the format value to assume when no parameter is present.
+
+ Raises a usage error for unsupported formats and sets the
+ response content type as a side effect.
+ """
+ fmt = adaptor.get('format', default=default)
+ assert fmt is not None
+
+ formatting = adaptor.formatting()
+
+ if not formatting.supports_format(result_type, fmt):
+ adaptor.raise_error("Parameter 'format' must be one of: " +
+ ', '.join(formatting.list_formats(result_type)))
+
+ adaptor.content_type = formatting.get_content_type(fmt)
+ return fmt
+
+
+def parse_geometry_details(adaptor: ASGIAdaptor, fmt: str) -> Dict[str, Any]:
+ """ Create details structure from the supplied geometry parameters.
+
+ Raises a usage error when more polygon output types are requested
+ than the configured maximum allows.
+ """
+ numgeoms = 0
+ output = GeometryFormat.NONE
+ if adaptor.get_bool('polygon_geojson', False):
+ output |= GeometryFormat.GEOJSON
+ numgeoms += 1
+ # geojson-based formats only ever emit geojson geometry, so the
+ # other polygon parameters are ignored for them
+ if fmt not in ('geojson', 'geocodejson'):
+ if adaptor.get_bool('polygon_text', False):
+ output |= GeometryFormat.TEXT
+ numgeoms += 1
+ if adaptor.get_bool('polygon_kml', False):
+ output |= GeometryFormat.KML
+ numgeoms += 1
+ if adaptor.get_bool('polygon_svg', False):
+ output |= GeometryFormat.SVG
+ numgeoms += 1
+
+ if numgeoms > adaptor.config().get_int('POLYGON_OUTPUT_MAX_TYPES'):
+ adaptor.raise_error('Too many polygon output options selected.')
+
+ return {'address_details': True,
+ 'geometry_simplification': adaptor.get_float('polygon_threshold', 0.0),
+ 'geometry_output': output
+ }
+
+
+async def status_endpoint(api: NominatimAPIAsync, params: ASGIAdaptor) -> Any:
+ """ Server glue for /status endpoint. See API docs for details.
+ """
+ result = await api.status()
+
+ fmt = parse_format(params, StatusResult, 'text')
+
+ # in text mode a non-zero status is reported via the HTTP code
+ if fmt == 'text' and result.status:
+ status_code = 500
+ else:
+ status_code = 200
+
+ return build_response(params, params.formatting().format_result(result, fmt, {}),
+ status=status_code)
+
+
+async def details_endpoint(api: NominatimAPIAsync, params: ASGIAdaptor) -> Any:
+ """ Server glue for /details endpoint. See API docs for details.
+ """
+ fmt = parse_format(params, DetailedResult, 'json')
+ place_id = params.get_int('place_id', 0)
+ place: PlaceRef
+ # a place may be referenced by internal place_id or by OSM type/id
+ if place_id:
+ place = PlaceID(place_id)
+ else:
+ osmtype = params.get('osmtype')
+ if osmtype is None:
+ params.raise_error("Missing ID parameter 'place_id' or 'osmtype'.")
+ place = OsmID(osmtype, params.get_int('osmid'), params.get('class'))
+
+ debug = setup_debugging(params)
+
+ locales = Locales.from_accept_languages(get_accepted_languages(params))
+
+ result = await api.details(place,
+ address_details=params.get_bool('addressdetails', False),
+ linked_places=params.get_bool('linkedplaces', True),
+ parented_places=params.get_bool('hierarchy', False),
+ keywords=params.get_bool('keywords', False),
+ geometry_output=(GeometryFormat.GEOJSON
+ if params.get_bool('polygon_geojson', False)
+ else GeometryFormat.NONE),
+ locales=locales
+ )
+
+ if debug:
+ return build_response(params, loglib.get_and_disable())
+
+ if result is None:
+ params.raise_error('No place with that OSM ID found.', status=404)
+
+ output = params.formatting().format_result(
+ result, fmt,
+ {'locales': locales,
+ 'group_hierarchy': params.get_bool('group_hierarchy', False),
+ 'icon_base_url': params.config().MAPICON_URL})
+
+ return build_response(params, output, num_results=1)
+
+
+async def reverse_endpoint(api: NominatimAPIAsync, params: ASGIAdaptor) -> Any:
+ """ Server glue for /reverse endpoint. See API docs for details.
+ """
+ fmt = parse_format(params, ReverseResults, 'xml')
+ debug = setup_debugging(params)
+ coord = Point(params.get_float('lon'), params.get_float('lat'))
+
+ details = parse_geometry_details(params, fmt)
+ details['max_rank'] = helpers.zoom_to_rank(params.get_int('zoom', 18))
+ details['layers'] = get_layers(params)
+ details['locales'] = Locales.from_accept_languages(get_accepted_languages(params))
+
+ result = await api.reverse(coord, **details)
+
+ if debug:
+ return build_response(params, loglib.get_and_disable(), num_results=1 if result else 0)
+
+ # the xml formatter echoes the original request as 'querystring'
+ if fmt == 'xml':
+ queryparts = {'lat': str(coord.lat), 'lon': str(coord.lon), 'format': 'xml'}
+ zoom = params.get('zoom', None)
+ if zoom:
+ queryparts['zoom'] = zoom
+ query = urlencode(queryparts)
+ else:
+ query = ''
+
+ fmt_options = {'query': query,
+ 'extratags': params.get_bool('extratags', False),
+ 'namedetails': params.get_bool('namedetails', False),
+ 'addressdetails': params.get_bool('addressdetails', True)}
+
+ output = params.formatting().format_result(ReverseResults([result] if result else []),
+ fmt, fmt_options)
+
+ return build_response(params, output, num_results=1 if result else 0)
+
+
+async def lookup_endpoint(api: NominatimAPIAsync, params: ASGIAdaptor) -> Any:
+ """ Server glue for /lookup endpoint. See API docs for details.
+ """
+ fmt = parse_format(params, SearchResults, 'xml')
+ debug = setup_debugging(params)
+ details = parse_geometry_details(params, fmt)
+ details['locales'] = Locales.from_accept_languages(get_accepted_languages(params))
+
+ # parse the comma-separated list of OSM ids of the form [RNW]<digits>;
+ # malformed entries are silently dropped
+ places = []
+ for oid in (params.get('osm_ids') or '').split(','):
+ oid = oid.strip()
+ if len(oid) > 1 and oid[0] in 'RNWrnw' and oid[1:].isdigit():
+ places.append(OsmID(oid[0].upper(), int(oid[1:])))
+
+ if len(places) > params.config().get_int('LOOKUP_MAX_COUNT'):
+ params.raise_error('Too many object IDs.')
+
+ if places:
+ results = await api.lookup(places, **details)
+ else:
+ results = SearchResults()
+
+ if debug:
+ return build_response(params, loglib.get_and_disable(), num_results=len(results))
+
+ fmt_options = {'extratags': params.get_bool('extratags', False),
+ 'namedetails': params.get_bool('namedetails', False),
+ 'addressdetails': params.get_bool('addressdetails', True)}
+
+ output = params.formatting().format_result(results, fmt, fmt_options)
+
+ return build_response(params, output, num_results=len(results))
+
+
+async def _unstructured_search(query: str, api: NominatimAPIAsync,
+ details: Dict[str, Any]) -> SearchResults:
+ """ Run a free-text search, handling the special coordinate and
+ '[key=value]' category syntaxes embedded in the query.
+ """
+ if not query:
+ return SearchResults()
+
+ # Extract special format for coordinates from query.
+ query, x, y = helpers.extract_coords_from_query(query)
+ if x is not None:
+ assert y is not None
+ details['near'] = Point(x, y)
+ details['near_radius'] = 0.1
+
+ # If no query is left, revert to reverse search.
+ if x is not None and not query:
+ result = await api.reverse(details['near'], **details)
+ if not result:
+ return SearchResults()
+
+ # repackage the reverse result as a search result, copying
+ # over all fields the SearchResult dataclass shares with it
+ return SearchResults(
+ [SearchResult(**{f.name: getattr(result, f.name)
+ for f in dataclasses.fields(SearchResult)
+ if hasattr(result, f.name)})])
+
+ query, cls, typ = helpers.extract_category_from_query(query)
+ if cls is not None:
+ assert typ is not None
+ return await api.search_category([(cls, typ)], near_query=query, **details)
+
+ return await api.search(query, **details)
+
+
+async def search_endpoint(api: NominatimAPIAsync, params: ASGIAdaptor) -> Any:
+ """ Server glue for /search endpoint. See API docs for details.
+ """
+ fmt = parse_format(params, SearchResults, 'jsonv2')
+ debug = setup_debugging(params)
+ details = parse_geometry_details(params, fmt)
+
+ details['countries'] = params.get('countrycodes', None)
+ details['excluded'] = params.get('exclude_place_ids', None)
+ details['viewbox'] = params.get('viewbox', None) or params.get('viewboxlbrt', None)
+ details['bounded_viewbox'] = params.get_bool('bounded', False)
+ details['dedupe'] = params.get_bool('dedupe', True)
+
+ max_results = max(1, min(50, params.get_int('limit', 10)))
+ details['max_results'] = (max_results + min(10, max_results)
+ if details['dedupe'] else max_results)
+
+ details['min_rank'], details['max_rank'] = \
+ helpers.feature_type_to_rank(params.get('featureType', ''))
+ if params.get('featureType', None) is not None:
+ details['layers'] = DataLayer.ADDRESS
+ else:
+ details['layers'] = get_layers(params)
+
+ details['locales'] = Locales.from_accept_languages(get_accepted_languages(params))
+
+ # unstructured query parameters
+ query = params.get('q', None)
+ # structured query parameters
+ queryparts = {}
+ for key in ('amenity', 'street', 'city', 'county', 'state', 'postalcode', 'country'):
+ details[key] = params.get(key, None)
+ if details[key]:
+ queryparts[key] = details[key]
+
+ try:
+ if query is not None:
+ if queryparts:
+ params.raise_error("Structured query parameters "
+ "(amenity, street, city, county, state, postalcode, country)"
+ " cannot be used together with 'q' parameter.")
+ queryparts['q'] = query
+ results = await _unstructured_search(query, api, details)
+ else:
+ query = ', '.join(queryparts.values())
+
+ results = await api.search_address(**details)
+ except UsageError as err:
+ params.raise_error(str(err))
+
+ if details['dedupe'] and len(results) > 1:
+ results = helpers.deduplicate_results(results, max_results)
+
+ if debug:
+ return build_response(params, loglib.get_and_disable(), num_results=len(results))
+
+ if fmt == 'xml':
+ helpers.extend_query_parts(queryparts, details,
+ params.get('featureType', ''),
+ params.get_bool('namedetails', False),
+ params.get_bool('extratags', False),
+ (str(r.place_id) for r in results if r.place_id))
+ queryparts['format'] = fmt
+
+ moreurl = params.base_uri() + '/search?' + urlencode(queryparts)
+ else:
+ moreurl = ''
+
+ fmt_options = {'query': query, 'more_url': moreurl,
+ 'exclude_place_ids': queryparts.get('exclude_place_ids'),
+ 'viewbox': queryparts.get('viewbox'),
+ 'extratags': params.get_bool('extratags', False),
+ 'namedetails': params.get_bool('namedetails', False),
+ 'addressdetails': params.get_bool('addressdetails', False)}
+
+ output = params.formatting().format_result(results, fmt, fmt_options)
+
+ return build_response(params, output, num_results=len(results))
+
+
+async def deletable_endpoint(api: NominatimAPIAsync, params: ASGIAdaptor) -> Any:
+ """ Server glue for /deletable endpoint.
+ This is a special endpoint that shows polygons that have been
+ deleted or are broken in the OSM data but are kept in the
+ Nominatim database to minimize disruption.
+ """
+ fmt = parse_format(params, RawDataList, 'json')
+
+ async with api.begin() as conn:
+ sql = sa.text(""" SELECT p.place_id, country_code,
+ name->'name' as name, i.*
+ FROM placex p, import_polygon_delete i
+ WHERE p.osm_id = i.osm_id AND p.osm_type = i.osm_type
+ AND p.class = i.class AND p.type = i.type
+ """)
+ results = RawDataList(r._asdict() for r in await conn.execute(sql))
+
+ return build_response(params, params.formatting().format_result(results, fmt, {}))
+
+
+async def polygons_endpoint(api: NominatimAPIAsync, params: ASGIAdaptor) -> Any:
+ """ Server glue for /polygons endpoint.
+ This is a special endpoint that shows polygons that have changed
+ their size but are kept in the Nominatim database with their
+ old area to minimize disruption.
+ """
+ fmt = parse_format(params, RawDataList, 'json')
+ sql_params: Dict[str, Any] = {
+ 'days': params.get_int('days', -1),
+ 'cls': params.get('class')
+ }
+ reduced = params.get_bool('reduced', False)
+
+ async with api.begin() as conn:
+ sql = sa.select(sa.text("""osm_type, osm_id, class, type,
+ name->'name' as name,
+ country_code, errormessage, updated"""))\
+ .select_from(sa.text('import_polygon_error'))
+ if sql_params['days'] > 0:
+ sql = sql.where(sa.text("updated > 'now'::timestamp - make_interval(days => :days)"))
+ if reduced:
+ sql = sql.where(sa.text("errormessage like 'Area reduced%'"))
+ if sql_params['cls'] is not None:
+ sql = sql.where(sa.text("class = :cls"))
+
+ sql = sql.order_by(sa.literal_column('updated').desc()).limit(1000)
+
+ results = RawDataList(r._asdict() for r in await conn.execute(sql, sql_params))
+
+ return build_response(params, params.formatting().format_result(results, fmt, {}))
+
+
+async def get_routes(api: NominatimAPIAsync) -> Sequence[Tuple[str, EndpointFunc]]:
+ routes = [
+ ('status', status_endpoint),
+ ('details', details_endpoint),
+ ('reverse', reverse_endpoint),
+ ('lookup', lookup_endpoint),
+ ('deletable', deletable_endpoint),
+ ('polygons', polygons_endpoint),
+ ]
+
+ def has_search_name(conn: sa.engine.Connection) -> bool:
+ insp = sa.inspect(conn)
+ return insp.has_table('search_name')
+
+ try:
+ async with api.begin() as conn:
+ if await conn.connection.run_sync(has_search_name):
+ routes.append(('search', search_endpoint))
+ except (PGCORE_ERROR, sa.exc.OperationalError):
+ pass # ignored
+
+ return routes
--- /dev/null
+# SPDX-License-Identifier: GPL-3.0-or-later
+#
+# This file is part of Nominatim. (https://nominatim.org)
+#
+# Copyright (C) 2024 by the Nominatim developer community.
+# For a full list of authors see the git log.
+"""
+Version information for the Nominatim API.
+"""
+
+NOMINATIM_API_VERSION = '5.0.0'
-# SPDX-License-Identifier: GPL-2.0-only
+# SPDX-License-Identifier: GPL-3.0-or-later
#
# This file is part of Nominatim. (https://nominatim.org)
#
-# Copyright (C) 2022 by the Nominatim developer community.
+# Copyright (C) 2024 by the Nominatim developer community.
# For a full list of authors see the git log.
"""
Command-line interface to the Nominatim functions for import, update,
database administration and querying.
"""
-from typing import Optional, Any, List, Union
+from typing import Optional, Any
+import importlib
import logging
import os
import sys
import argparse
+import asyncio
from pathlib import Path
-from nominatim.config import Configuration
-from nominatim.tools.exec_utils import run_legacy_script, run_php_server
-from nominatim.errors import UsageError
-from nominatim import clicmd
-from nominatim import version
-from nominatim.clicmd.args import NominatimArgs, Subcommand
+from .config import Configuration
+from .errors import UsageError
+from . import clicmd
+from . import version
+from .clicmd.args import NominatimArgs, Subcommand
LOG = logging.getLogger()
+
class CommandlineParser:
""" Wraps some of the common functions for parsing the command line
and setting up subcommands.
group.add_argument('-j', '--threads', metavar='NUM', type=int,
help='Number of parallel threads to use')
-
def nominatim_version_text(self) -> str:
""" Program name and version number as string
"""
- text = f'Nominatim version {version.version_str()}'
+ text = f'Nominatim version {version.NOMINATIM_VERSION!s}'
if version.GIT_COMMIT_HASH is not None:
text += f' ({version.GIT_COMMIT_HASH})'
return text
-
def add_subcommand(self, name: str, cmd: Subcommand) -> None:
""" Add a subcommand to the parser. The subcommand must be a class
with a function add_args() that adds the parameters for the
parser.set_defaults(command=cmd)
cmd.add_args(parser)
-
def run(self, **kwargs: Any) -> int:
""" Parse the command line arguments of the program and execute the
appropriate subcommand.
self.parser.print_help()
return 1
- for arg in ('module_dir', 'osm2pgsql_path', 'phplib_dir', 'sqllib_dir',
- 'data_dir', 'config_dir', 'phpcgi_path'):
- setattr(args, arg, Path(kwargs[arg]))
args.project_dir = Path(args.project_dir).resolve()
if 'cli_args' not in kwargs:
datefmt='%Y-%m-%d %H:%M:%S',
level=max(4 - args.verbose, 1) * 10)
- args.config = Configuration(args.project_dir, args.config_dir,
+ args.config = Configuration(args.project_dir,
environ=kwargs.get('environ', os.environ))
- args.config.set_libdirs(module=args.module_dir,
- osm2pgsql=args.osm2pgsql_path,
- php=args.phplib_dir,
- sql=args.sqllib_dir,
- data=args.data_dir)
+ args.config.set_libdirs(osm2pgsql=kwargs['osm2pgsql_path'])
log = logging.getLogger()
log.warning('Using project directory: %s', str(args.project_dir))
try:
- return args.command.run(args)
+ ret = args.command.run(args)
+
+ return ret
except UsageError as exception:
if log.isEnabledFor(logging.DEBUG):
- raise # use Python's exception printing
+ raise # use Python's exception printing
log.fatal('FATAL: %s', exception)
# If we get here, then execution has failed in some way.
# a subcommand.
#
# No need to document the functions each time.
-# pylint: disable=C0111
-class QueryExport:
- """\
- Export addresses as CSV file from the database.
- """
-
- def add_args(self, parser: argparse.ArgumentParser) -> None:
- group = parser.add_argument_group('Output arguments')
- group.add_argument('--output-type', default='street',
- choices=('continent', 'country', 'state', 'county',
- 'city', 'suburb', 'street', 'path'),
- help='Type of places to output (default: street)')
- group.add_argument('--output-format',
- default='street;suburb;city;county;state;country',
- help=("Semicolon-separated list of address types "
- "(see --output-type). Multiple ranks can be "
- "merged into one column by simply using a "
- "comma-separated list."))
- group.add_argument('--output-all-postcodes', action='store_true',
- help=("List all postcodes for address instead of "
- "just the most likely one"))
- group.add_argument('--language',
- help=("Preferred language for output "
- "(use local name, if omitted)"))
- group = parser.add_argument_group('Filter arguments')
- group.add_argument('--restrict-to-country', metavar='COUNTRY_CODE',
- help='Export only objects within country')
- group.add_argument('--restrict-to-osm-node', metavar='ID', type=int,
- help='Export only children of this OSM node')
- group.add_argument('--restrict-to-osm-way', metavar='ID', type=int,
- help='Export only children of this OSM way')
- group.add_argument('--restrict-to-osm-relation', metavar='ID', type=int,
- help='Export only children of this OSM relation')
-
-
- def run(self, args: NominatimArgs) -> int:
- params: List[Union[int, str]] = [
- '--output-type', args.output_type,
- '--output-format', args.output_format]
- if args.output_all_postcodes:
- params.append('--output-all-postcodes')
- if args.language:
- params.extend(('--language', args.language))
- if args.restrict_to_country:
- params.extend(('--restrict-to-country', args.restrict_to_country))
- if args.restrict_to_osm_node:
- params.extend(('--restrict-to-osm-node', args.restrict_to_osm_node))
- if args.restrict_to_osm_way:
- params.extend(('--restrict-to-osm-way', args.restrict_to_osm_way))
- if args.restrict_to_osm_relation:
- params.extend(('--restrict-to-osm-relation', args.restrict_to_osm_relation))
-
- return run_legacy_script('export.php', *params, nominatim_env=args)
-
-
class AdminServe:
"""\
Start a simple web server for serving the API.
- This command starts the built-in PHP webserver to serve the website
+ This command starts a built-in webserver to serve the website
from the current project directory. This webserver is only suitable
for testing and development. Do not use it in production setups!
+ There are two different webserver implementations for Python available:
+ falcon (the default) and starlette. You need to make sure the
+ appropriate Python packages as well as the uvicorn package are
+ installed to use this function.
+
+ By default, the webserver can be accessed at: http://127.0.0.1:8088
"""
group = parser.add_argument_group('Server arguments')
group.add_argument('--server', default='127.0.0.1:8088',
help='The address the server will listen to.')
-
+ group.add_argument('--engine', default='falcon',
+ choices=('falcon', 'starlette'),
+ help='Webserver framework to run. (default: falcon)')
def run(self, args: NominatimArgs) -> int:
- run_php_server(args.server, args.project_dir / 'website')
+ asyncio.run(self.run_uvicorn(args))
+
return 0
+ async def run_uvicorn(self, args: NominatimArgs) -> None:
+ import uvicorn
+
+ server_info = args.server.split(':', 1)
+ host = server_info[0]
+ if len(server_info) > 1:
+ if not server_info[1].isdigit():
+ raise UsageError('Invalid format for --server parameter. Use <host>:<port>')
+ port = int(server_info[1])
+ else:
+ port = 8088
-def get_set_parser(**kwargs: Any) -> CommandlineParser:
+ server_module = importlib.import_module(f'nominatim_api.server.{args.engine}.server')
+
+ app = server_module.get_application(args.project_dir)
+
+ config = uvicorn.Config(app, host=host, port=port)
+ server = uvicorn.Server(config)
+ await server.serve()
+
+
+def get_set_parser() -> CommandlineParser:
"""\
Initializes the parser and adds various subcommands for
nominatim cli.
parser.add_subcommand('admin', clicmd.AdminFuncs())
- parser.add_subcommand('export', QueryExport())
- parser.add_subcommand('serve', AdminServe())
-
- if kwargs.get('phpcgi_path'):
- parser.add_subcommand('search', clicmd.APISearch())
- parser.add_subcommand('reverse', clicmd.APIReverse())
- parser.add_subcommand('lookup', clicmd.APILookup())
- parser.add_subcommand('details', clicmd.APIDetails())
- parser.add_subcommand('status', clicmd.APIStatus())
- else:
- parser.parser.epilog = 'php-cgi not found. Query commands not available.'
+ try:
+ exportcmd = importlib.import_module('nominatim_db.clicmd.export')
+ apicmd = importlib.import_module('nominatim_db.clicmd.api')
+ convertcmd = importlib.import_module('nominatim_db.clicmd.convert')
+
+ parser.add_subcommand('export', exportcmd.QueryExport())
+ parser.add_subcommand('convert', convertcmd.ConvertDB())
+ parser.add_subcommand('serve', AdminServe())
+
+ parser.add_subcommand('search', apicmd.APISearch())
+ parser.add_subcommand('reverse', apicmd.APIReverse())
+ parser.add_subcommand('lookup', apicmd.APILookup())
+ parser.add_subcommand('details', apicmd.APIDetails())
+ parser.add_subcommand('status', apicmd.APIStatus())
+ except ModuleNotFoundError as ex:
+ if not ex.name or 'nominatim_api' not in ex.name:
+ raise ex
+
+ parser.parser.epilog = \
+ f'\n\nNominatim API package not found (was looking for module: {ex.name}).'\
+ '\nThe following commands are not available:'\
+ '\n export, convert, serve, search, reverse, lookup, details, status'\
+ "\n\nRun 'pip install nominatim-api' to install the package."
return parser
Command-line tools for importing, updating, administrating and
querying the Nominatim database.
"""
- parser = get_set_parser(**kwargs)
-
- return parser.run(**kwargs)
+ return get_set_parser().run(**kwargs)
--- /dev/null
+# SPDX-License-Identifier: GPL-2.0-only
+#
+# This file is part of Nominatim. (https://nominatim.org)
+#
+# Copyright (C) 2023 by the Nominatim developer community.
+# For a full list of authors see the git log.
+"""
+Subcommand definitions for the command-line tool.
+"""
+
+from .setup import SetupAll as SetupAll
+from .replication import UpdateReplication as UpdateReplication
+from .index import UpdateIndex as UpdateIndex
+from .refresh import UpdateRefresh as UpdateRefresh
+from .add_data import UpdateAddData as UpdateAddData
+from .admin import AdminFuncs as AdminFuncs
+from .freeze import SetupFreeze as SetupFreeze
+from .special_phrases import ImportSpecialPhrases as ImportSpecialPhrases
-# SPDX-License-Identifier: GPL-2.0-only
+# SPDX-License-Identifier: GPL-3.0-or-later
#
# This file is part of Nominatim. (https://nominatim.org)
#
-# Copyright (C) 2022 by the Nominatim developer community.
+# Copyright (C) 2024 by the Nominatim developer community.
# For a full list of authors see the git log.
"""
Implementation of the 'add-data' subcommand.
from typing import cast
import argparse
import logging
+import asyncio
import psutil
-from nominatim.clicmd.args import NominatimArgs
+from .args import NominatimArgs
+from ..db.connection import connect
+from ..tools.freeze import is_frozen
-# Do not repeat documentation of subcommand classes.
-# pylint: disable=C0111
-# Using non-top-level imports to avoid eventually unused imports.
-# pylint: disable=E0012,C0415
LOG = logging.getLogger()
+
class UpdateAddData:
"""\
Add additional data from a file or an online source.
The command can also be used to add external non-OSM data to the
database. At the moment the only supported format is TIGER housenumber
data. See the online documentation at
- https://nominatim.org/release-docs/latest/admin/Import/#installing-tiger-housenumber-data-for-the-us
+ https://nominatim.org/release-docs/latest/customize/Tiger/
for more information.
"""
group2.add_argument('--socket-timeout', dest='socket_timeout', type=int, default=60,
help='Set timeout for file downloads')
-
def run(self, args: NominatimArgs) -> int:
- from nominatim.tokenizer import factory as tokenizer_factory
- from nominatim.tools import tiger_data, add_osm_data
+ from ..tools import add_osm_data
+
+ with connect(args.config.get_libpq_dsn()) as conn:
+ if is_frozen(conn):
+ print('Database is marked frozen. New data can\'t be added.')
+ return 1
if args.tiger_data:
- tokenizer = tokenizer_factory.get_tokenizer_for_db(args.config)
- return tiger_data.add_tiger_data(args.tiger_data,
- args.config,
- args.threads or psutil.cpu_count() or 1,
- tokenizer)
+ return asyncio.run(self._add_tiger_data(args))
osm2pgsql_params = args.osm2pgsql_options(default_cache=1000, default_threads=1)
if args.file or args.diff:
- return add_osm_data.add_data_from_file(cast(str, args.file or args.diff),
+ return add_osm_data.add_data_from_file(args.config.get_libpq_dsn(),
+ cast(str, args.file or args.diff),
osm2pgsql_params)
if args.node:
- return add_osm_data.add_osm_object('node', args.node,
+ return add_osm_data.add_osm_object(args.config.get_libpq_dsn(),
+ 'node', args.node,
args.use_main_api,
osm2pgsql_params)
if args.way:
- return add_osm_data.add_osm_object('way', args.way,
+ return add_osm_data.add_osm_object(args.config.get_libpq_dsn(),
+ 'way', args.way,
args.use_main_api,
osm2pgsql_params)
if args.relation:
- return add_osm_data.add_osm_object('relation', args.relation,
+ return add_osm_data.add_osm_object(args.config.get_libpq_dsn(),
+ 'relation', args.relation,
args.use_main_api,
osm2pgsql_params)
return 0
+
+ async def _add_tiger_data(self, args: NominatimArgs) -> int:
+ from ..tokenizer import factory as tokenizer_factory
+ from ..tools import tiger_data
+
+ assert args.tiger_data
+
+ tokenizer = tokenizer_factory.get_tokenizer_for_db(args.config)
+ return await tiger_data.add_tiger_data(args.tiger_data,
+ args.config,
+ args.threads or psutil.cpu_count() or 1,
+ tokenizer)
-# SPDX-License-Identifier: GPL-2.0-only
+# SPDX-License-Identifier: GPL-3.0-or-later
#
# This file is part of Nominatim. (https://nominatim.org)
#
-# Copyright (C) 2022 by the Nominatim developer community.
+# Copyright (C) 2024 by the Nominatim developer community.
# For a full list of authors see the git log.
"""
Implementation of the 'admin' subcommand.
"""
import logging
import argparse
+import random
-from nominatim.tools.exec_utils import run_legacy_script
-from nominatim.clicmd.args import NominatimArgs
+from ..errors import UsageError
+from ..db.connection import connect, table_exists
+from .args import NominatimArgs
-# Do not repeat documentation of subcommand classes.
-# pylint: disable=C0111
-# Using non-top-level imports to avoid eventually unused imports.
-# pylint: disable=E0012,C0415
LOG = logging.getLogger()
help='Print performance analysis of the indexing process')
objs.add_argument('--collect-os-info', action="store_true",
help="Generate a report about the host system information")
+ objs.add_argument('--clean-deleted', action='store', metavar='AGE',
+ help='Clean up deleted relations')
group = parser.add_argument_group('Arguments for cache warming')
group.add_argument('--search-only', action='store_const', dest='target',
const='search',
collect_os_info.report_system_information(args.config)
return 0
+ if args.clean_deleted:
+ LOG.warning('Cleaning up deleted relations')
+ from ..tools import admin
+ admin.clean_deleted_relations(args.config, age=args.clean_deleted)
+ return 0
+
return 1
def _warm(self, args: NominatimArgs) -> int:
+ try:
+ import nominatim_api as napi
+ except ModuleNotFoundError as exp:
+ raise UsageError("Warming requires nominatim API. "
+ "Install with 'pip install nominatim-api'.") from exp
LOG.warning('Warming database caches')
- params = ['warm.php']
- if args.target == 'reverse':
- params.append('--reverse-only')
- if args.target == 'search':
- params.append('--search-only')
- return run_legacy_script(*params, nominatim_env=args)
+
+ api = napi.NominatimAPI(args.project_dir)
+
+ try:
+ if args.target != 'search':
+ for _ in range(1000):
+ api.reverse((random.uniform(-90, 90), random.uniform(-180, 180)),
+ address_details=True)
+
+ if args.target != 'reverse':
+ from ..tokenizer import factory as tokenizer_factory
+
+ tokenizer = tokenizer_factory.get_tokenizer_for_db(args.config)
+ with connect(args.config.get_libpq_dsn()) as conn:
+ if table_exists(conn, 'search_name'):
+ words = tokenizer.most_frequent_words(conn, 1000)
+ else:
+ words = []
+
+ for word in words:
+ api.search(word)
+ finally:
+ api.close()
+
+ return 0
--- /dev/null
+# SPDX-License-Identifier: GPL-3.0-or-later
+#
+# This file is part of Nominatim. (https://nominatim.org)
+#
+# Copyright (C) 2024 by the Nominatim developer community.
+# For a full list of authors see the git log.
+"""
+Subcommand definitions for API calls from the command line.
+"""
+from typing import Dict, Any, Optional, Type, Mapping
+import argparse
+import logging
+import json
+import sys
+import pprint
+from functools import reduce
+
+import nominatim_api as napi
+from nominatim_api.v1.helpers import zoom_to_rank, deduplicate_results
+from nominatim_api.server.content_types import CONTENT_JSON
+import nominatim_api.logging as loglib
+from ..errors import UsageError
+from .args import NominatimArgs
+
+
+LOG = logging.getLogger()
+
+
+STRUCTURED_QUERY = (
+ ('amenity', 'name and/or type of POI'),
+ ('street', 'housenumber and street'),
+ ('city', 'city, town or village'),
+ ('county', 'county'),
+ ('state', 'state'),
+ ('country', 'country'),
+ ('postalcode', 'postcode')
+)
+
+
+EXTRADATA_PARAMS = (
+ ('addressdetails', 'Include a breakdown of the address into elements'),
+ ('extratags', ("Include additional information if available "
+ "(e.g. wikipedia link, opening hours)")),
+ ('namedetails', 'Include a list of alternative names')
+)
+
+
+def _add_list_format(parser: argparse.ArgumentParser) -> None:
+ group = parser.add_argument_group('Other options')
+ group.add_argument('--list-formats', action='store_true',
+ help='List supported output formats and exit.')
+
+
+def _add_api_output_arguments(parser: argparse.ArgumentParser) -> None:
+ group = parser.add_argument_group('Output formatting')
+ group.add_argument('--format', type=str, default='jsonv2',
+ help='Format of result (use --list-formats to see supported formats)')
+ for name, desc in EXTRADATA_PARAMS:
+ group.add_argument('--' + name, action='store_true', help=desc)
+
+ group.add_argument('--lang', '--accept-language', metavar='LANGS',
+ help='Preferred language order for presenting search results')
+ group.add_argument('--polygon-output',
+ choices=['geojson', 'kml', 'svg', 'text'],
+ help='Output geometry of results as a GeoJSON, KML, SVG or WKT')
+ group.add_argument('--polygon-threshold', type=float, default=0.0,
+ metavar='TOLERANCE',
+ help=("Simplify output geometry. "
+ "Parameter is difference tolerance in degrees."))
+
+
+def _get_geometry_output(args: NominatimArgs) -> napi.GeometryFormat:
+ """ Get the requested geometry output format in a API-compatible
+ format.
+ """
+ if not args.polygon_output:
+ return napi.GeometryFormat.NONE
+ if args.polygon_output == 'geojson':
+ return napi.GeometryFormat.GEOJSON
+ if args.polygon_output == 'kml':
+ return napi.GeometryFormat.KML
+ if args.polygon_output == 'svg':
+ return napi.GeometryFormat.SVG
+ if args.polygon_output == 'text':
+ return napi.GeometryFormat.TEXT
+
+ try:
+ return napi.GeometryFormat[args.polygon_output.upper()]
+ except KeyError as exp:
+ raise UsageError(f"Unknown polygon output format '{args.polygon_output}'.") from exp
+
+
+def _get_locales(args: NominatimArgs, default: Optional[str]) -> napi.Locales:
+ """ Get the locales from the language parameter.
+ """
+ if args.lang:
+ return napi.Locales.from_accept_languages(args.lang)
+ if default:
+ return napi.Locales.from_accept_languages(default)
+
+ return napi.Locales()
+
+
+def _get_layers(args: NominatimArgs, default: napi.DataLayer) -> Optional[napi.DataLayer]:
+ """ Get the list of selected layers as a DataLayer enum.
+ """
+ if not args.layers:
+ return default
+
+ return reduce(napi.DataLayer.__or__,
+ (napi.DataLayer[s.upper()] for s in args.layers))
+
+
+def _list_formats(formatter: napi.FormatDispatcher, rtype: Type[Any]) -> int:
+ for fmt in formatter.list_formats(rtype):
+ print(fmt)
+ print('debug')
+ print('raw')
+
+ return 0
+
+
+def _print_output(formatter: napi.FormatDispatcher, result: Any,
+ fmt: str, options: Mapping[str, Any]) -> None:
+
+ if fmt == 'raw':
+ pprint.pprint(result)
+ else:
+ output = formatter.format_result(result, fmt, options)
+ if formatter.get_content_type(fmt) == CONTENT_JSON:
+ # reformat the result, so it is pretty-printed
+ try:
+ json.dump(json.loads(output), sys.stdout, indent=4, ensure_ascii=False)
+ except json.decoder.JSONDecodeError as err:
+ # Catch the error here, so that data can be debugged,
+ # when people are developing custom result formatters.
+ LOG.fatal("Parsing json failed: %s\nUnformatted output:\n%s", err, output)
+ else:
+ sys.stdout.write(output)
+ sys.stdout.write('\n')
+
+
+class APISearch:
+ """\
+ Execute a search query.
+
+ This command works exactly the same as if calling the /search endpoint on
+ the web API. See the online documentation for more details on the
+ various parameters:
+ https://nominatim.org/release-docs/latest/api/Search/
+ """
+
+ def add_args(self, parser: argparse.ArgumentParser) -> None:
+ group = parser.add_argument_group('Query arguments')
+ group.add_argument('--query',
+ help='Free-form query string')
+ for name, desc in STRUCTURED_QUERY:
+ group.add_argument('--' + name, help='Structured query: ' + desc)
+
+ _add_api_output_arguments(parser)
+
+ group = parser.add_argument_group('Result limitation')
+ group.add_argument('--countrycodes', metavar='CC,..',
+ help='Limit search results to one or more countries')
+ group.add_argument('--exclude_place_ids', metavar='ID,..',
+ help='List of search objects to be excluded')
+ group.add_argument('--limit', type=int, default=10,
+ help='Limit the number of returned results')
+ group.add_argument('--viewbox', metavar='X1,Y1,X2,Y2',
+ help='Preferred area to find search results')
+ group.add_argument('--bounded', action='store_true',
+ help='Strictly restrict results to viewbox area')
+ group.add_argument('--no-dedupe', action='store_false', dest='dedupe',
+ help='Do not remove duplicates from the result list')
+ _add_list_format(parser)
+
+ def run(self, args: NominatimArgs) -> int:
+ formatter = napi.load_format_dispatcher('v1', args.project_dir)
+
+ if args.list_formats:
+ return _list_formats(formatter, napi.SearchResults)
+
+ if args.format in ('debug', 'raw'):
+ loglib.set_log_output('text')
+ elif not formatter.supports_format(napi.SearchResults, args.format):
+ raise UsageError(f"Unsupported format '{args.format}'. "
+ 'Use --list-formats to see supported formats.')
+
+ try:
+ with napi.NominatimAPI(args.project_dir) as api:
+ params: Dict[str, Any] = {'max_results': args.limit + min(args.limit, 10),
+ 'address_details': True, # needed for display name
+ 'geometry_output': _get_geometry_output(args),
+ 'geometry_simplification': args.polygon_threshold,
+ 'countries': args.countrycodes,
+ 'excluded': args.exclude_place_ids,
+ 'viewbox': args.viewbox,
+ 'bounded_viewbox': args.bounded,
+ 'locales': _get_locales(args, api.config.DEFAULT_LANGUAGE)
+ }
+
+ if args.query:
+ results = api.search(args.query, **params)
+ else:
+ results = api.search_address(amenity=args.amenity,
+ street=args.street,
+ city=args.city,
+ county=args.county,
+ state=args.state,
+ postalcode=args.postalcode,
+ country=args.country,
+ **params)
+ except napi.UsageError as ex:
+ raise UsageError(ex) from ex
+
+ if args.dedupe and len(results) > 1:
+ results = deduplicate_results(results, args.limit)
+
+ if args.format == 'debug':
+ print(loglib.get_and_disable())
+ return 0
+
+ _print_output(formatter, results, args.format,
+ {'extratags': args.extratags,
+ 'namedetails': args.namedetails,
+ 'addressdetails': args.addressdetails})
+ return 0
+
+
+class APIReverse:
+ """\
+ Execute API reverse query.
+
+ This command works exactly the same as if calling the /reverse endpoint on
+ the web API. See the online documentation for more details on the
+ various parameters:
+ https://nominatim.org/release-docs/latest/api/Reverse/
+ """
+
+ def add_args(self, parser: argparse.ArgumentParser) -> None:
+ group = parser.add_argument_group('Query arguments')
+ group.add_argument('--lat', type=float,
+ help='Latitude of coordinate to look up (in WGS84)')
+ group.add_argument('--lon', type=float,
+ help='Longitude of coordinate to look up (in WGS84)')
+ group.add_argument('--zoom', type=int,
+ help='Level of detail required for the address')
+ group.add_argument('--layer', metavar='LAYER',
+ choices=[n.name.lower() for n in napi.DataLayer if n.name],
+ action='append', required=False, dest='layers',
+ help='Only include results from the given layer (may be repeated)')
+
+ _add_api_output_arguments(parser)
+ _add_list_format(parser)
+
+ def run(self, args: NominatimArgs) -> int:
+ formatter = napi.load_format_dispatcher('v1', args.project_dir)
+
+ if args.list_formats:
+ return _list_formats(formatter, napi.ReverseResults)
+
+ if args.format in ('debug', 'raw'):
+ loglib.set_log_output('text')
+ elif not formatter.supports_format(napi.ReverseResults, args.format):
+ raise UsageError(f"Unsupported format '{args.format}'. "
+ 'Use --list-formats to see supported formats.')
+
+ if args.lat is None or args.lon is None:
+ raise UsageError("'lat' and 'lon' parameters are required.")
+
+ layers = _get_layers(args, napi.DataLayer.ADDRESS | napi.DataLayer.POI)
+
+ try:
+ with napi.NominatimAPI(args.project_dir) as api:
+ result = api.reverse(napi.Point(args.lon, args.lat),
+ max_rank=zoom_to_rank(args.zoom or 18),
+ layers=layers,
+ address_details=True, # needed for display name
+ geometry_output=_get_geometry_output(args),
+ geometry_simplification=args.polygon_threshold,
+ locales=_get_locales(args, api.config.DEFAULT_LANGUAGE))
+ except napi.UsageError as ex:
+ raise UsageError(ex) from ex
+
+ if args.format == 'debug':
+ print(loglib.get_and_disable())
+ return 0
+
+ if result:
+ _print_output(formatter, napi.ReverseResults([result]), args.format,
+ {'extratags': args.extratags,
+ 'namedetails': args.namedetails,
+ 'addressdetails': args.addressdetails})
+
+ return 0
+
+ LOG.error("Unable to geocode.")
+ return 42
+
+
+class APILookup:
+ """\
+ Execute API lookup query.
+
+ This command works exactly the same as if calling the /lookup endpoint on
+ the web API. See the online documentation for more details on the
+ various parameters:
+ https://nominatim.org/release-docs/latest/api/Lookup/
+ """
+
+ def add_args(self, parser: argparse.ArgumentParser) -> None:
+ group = parser.add_argument_group('Query arguments')
+ group.add_argument('--id', metavar='OSMID',
+ action='append', dest='ids',
+ help='OSM id to lookup in format <NRW><id> (may be repeated)')
+
+ _add_api_output_arguments(parser)
+ _add_list_format(parser)
+
+ def run(self, args: NominatimArgs) -> int:
+ formatter = napi.load_format_dispatcher('v1', args.project_dir)
+
+ if args.list_formats:
+ return _list_formats(formatter, napi.ReverseResults)
+
+ if args.format in ('debug', 'raw'):
+ loglib.set_log_output('text')
+ elif not formatter.supports_format(napi.ReverseResults, args.format):
+ raise UsageError(f"Unsupported format '{args.format}'. "
+ 'Use --list-formats to see supported formats.')
+
+ if args.ids is None:
+ raise UsageError("'id' parameter required.")
+
+ places = [napi.OsmID(o[0], int(o[1:])) for o in args.ids]
+
+ try:
+ with napi.NominatimAPI(args.project_dir) as api:
+ results = api.lookup(places,
+ address_details=True, # needed for display name
+ geometry_output=_get_geometry_output(args),
+ geometry_simplification=args.polygon_threshold or 0.0,
+ locales=_get_locales(args, api.config.DEFAULT_LANGUAGE))
+ except napi.UsageError as ex:
+ raise UsageError(ex) from ex
+
+ if args.format == 'debug':
+ print(loglib.get_and_disable())
+ return 0
+
+ _print_output(formatter, results, args.format,
+ {'extratags': args.extratags,
+ 'namedetails': args.namedetails,
+ 'addressdetails': args.addressdetails})
+ return 0
+
+
+class APIDetails:
+ """\
+ Execute API details query.
+
+ This command works exactly the same as if calling the /details endpoint on
+ the web API. See the online documentation for more details on the
+ various parameters:
+ https://nominatim.org/release-docs/latest/api/Details/
+ """
+
+ def add_args(self, parser: argparse.ArgumentParser) -> None:
+ group = parser.add_argument_group('Query arguments')
+ group.add_argument('--node', '-n', type=int,
+ help="Look up the OSM node with the given ID.")
+ group.add_argument('--way', '-w', type=int,
+ help="Look up the OSM way with the given ID.")
+ group.add_argument('--relation', '-r', type=int,
+ help="Look up the OSM relation with the given ID.")
+ group.add_argument('--place_id', '-p', type=int,
+ help='Database internal identifier of the OSM object to look up')
+ group.add_argument('--class', dest='object_class',
+ help=("Class type to disambiguate multiple entries "
+ "of the same object."))
+
+ group = parser.add_argument_group('Output arguments')
+ group.add_argument('--format', type=str, default='json',
+ help='Format of result (use --list-formats to see supported formats)')
+ group.add_argument('--addressdetails', action='store_true',
+ help='Include a breakdown of the address into elements')
+ group.add_argument('--keywords', action='store_true',
+ help='Include a list of name keywords and address keywords')
+ group.add_argument('--linkedplaces', action='store_true',
+ help='Include details of places that are linked with this one')
+ group.add_argument('--hierarchy', action='store_true',
+ help='Include details of places lower in the address hierarchy')
+ group.add_argument('--group_hierarchy', action='store_true',
+ help='Group the places by type')
+ group.add_argument('--polygon_geojson', action='store_true',
+ help='Include geometry of result')
+ group.add_argument('--lang', '--accept-language', metavar='LANGS',
+ help='Preferred language order for presenting search results')
+ _add_list_format(parser)
+
+ def run(self, args: NominatimArgs) -> int:
+ formatter = napi.load_format_dispatcher('v1', args.project_dir)
+
+ if args.list_formats:
+ return _list_formats(formatter, napi.DetailedResult)
+
+ # 'debug' and 'raw' are pseudo-formats handled locally; anything
+ # else must be supported by the format dispatcher.
+ if args.format in ('debug', 'raw'):
+ loglib.set_log_output('text')
+ elif not formatter.supports_format(napi.DetailedResult, args.format):
+ raise UsageError(f"Unsupported format '{args.format}'. "
+ 'Use --list-formats to see supported formats.')
+
+ # Exactly one way of identifying the object must be given.
+ place: napi.PlaceRef
+ if args.node:
+ place = napi.OsmID('N', args.node, args.object_class)
+ elif args.way:
+ place = napi.OsmID('W', args.way, args.object_class)
+ elif args.relation:
+ place = napi.OsmID('R', args.relation, args.object_class)
+ elif args.place_id is not None:
+ place = napi.PlaceID(args.place_id)
+ else:
+ raise UsageError('One of the arguments --node/-n --way/-w '
+ '--relation/-r --place_id/-p is required.')
+
+ try:
+ with napi.NominatimAPI(args.project_dir) as api:
+ locales = _get_locales(args, api.config.DEFAULT_LANGUAGE)
+ result = api.details(place,
+ address_details=args.addressdetails,
+ linked_places=args.linkedplaces,
+ parented_places=args.hierarchy,
+ keywords=args.keywords,
+ geometry_output=(napi.GeometryFormat.GEOJSON
+ if args.polygon_geojson
+ else napi.GeometryFormat.NONE),
+ locales=locales)
+ except napi.UsageError as ex:
+ raise UsageError(ex) from ex
+
+ if args.format == 'debug':
+ print(loglib.get_and_disable())
+ return 0
+
+ if result:
+ _print_output(formatter, result, args.format or 'json',
+ {'locales': locales,
+ 'group_hierarchy': args.group_hierarchy})
+ return 0
+
+ # Non-zero, distinctive exit code for 'object not found'.
+ LOG.error("Object not found in database.")
+ return 42
+
+
+class APIStatus:
+ """
+ Execute API status query.
+
+ This command works exactly the same as if calling the /status endpoint on
+ the web API. See the online documentation for more details on the
+ various parameters:
+ https://nominatim.org/release-docs/latest/api/Status/
+ """
+
+ def add_args(self, parser: argparse.ArgumentParser) -> None:
+ group = parser.add_argument_group('API parameters')
+ group.add_argument('--format', type=str, default='text',
+ help='Format of result (use --list-formats to see supported formats)')
+ _add_list_format(parser)
+
+ def run(self, args: NominatimArgs) -> int:
+ formatter = napi.load_format_dispatcher('v1', args.project_dir)
+
+ if args.list_formats:
+ return _list_formats(formatter, napi.StatusResult)
+
+ # 'debug' and 'raw' are pseudo-formats handled locally; anything
+ # else must be supported by the format dispatcher.
+ if args.format in ('debug', 'raw'):
+ loglib.set_log_output('text')
+ elif not formatter.supports_format(napi.StatusResult, args.format):
+ raise UsageError(f"Unsupported format '{args.format}'. "
+ 'Use --list-formats to see supported formats.')
+
+ try:
+ with napi.NominatimAPI(args.project_dir) as api:
+ status = api.status()
+ except napi.UsageError as ex:
+ raise UsageError(ex) from ex
+
+ if args.format == 'debug':
+ print(loglib.get_and_disable())
+ return 0
+
+ _print_output(formatter, status, args.format, {})
+
+ return 0
-# SPDX-License-Identifier: GPL-2.0-only
+# SPDX-License-Identifier: GPL-3.0-or-later
#
# This file is part of Nominatim. (https://nominatim.org)
#
-# Copyright (C) 2022 by the Nominatim developer community.
+# Copyright (C) 2024 by the Nominatim developer community.
# For a full list of authors see the git log.
"""
Provides custom functions over command-line arguments.
import logging
from pathlib import Path
-from nominatim.errors import UsageError
-from nominatim.config import Configuration
-from nominatim.typing import Protocol
+from ..errors import UsageError
+from ..config import Configuration
+from ..typing import Protocol
+
LOG = logging.getLogger()
+
class Subcommand(Protocol):
"""
Interface to be implemented by classes implementing a CLI subcommand.
# Basic environment set by root program.
config: Configuration
project_dir: Path
- module_dir: Path
- osm2pgsql_path: Path
- phplib_dir: Path
- sqllib_dir: Path
- data_dir: Path
- config_dir: Path
- phpcgi_path: Path
# Global switches
version: bool
check_database: bool
migrate: bool
collect_os_info: bool
+ clean_deleted: str
analyse_indexing: bool
target: Optional[str]
osm_id: Optional[str]
offline: bool
ignore_errors: bool
index_noanalyse: bool
+ prepare_database: bool
# Arguments to 'index'
boundaries_only: bool
output_all_postcodes: bool
language: Optional[str]
restrict_to_country: Optional[str]
- restrict_to_osm_node: Optional[int]
- restrict_to_osm_way: Optional[int]
- restrict_to_osm_relation: Optional[int]
+
+ # Arguments to 'convert'
+ output: Path
# Arguments to 'refresh'
postcodes: bool
# Arguments to 'serve'
server: str
+ engine: str
 # Arguments to 'special-phrases'
import_from_wiki: bool
# Arguments to all query functions
format: str
+ list_formats: bool
addressdetails: bool
extratags: bool
namedetails: bool
# Arguments to 'search'
query: Optional[str]
+ amenity: Optional[str]
street: Optional[str]
city: Optional[str]
county: Optional[str]
postalcode: Optional[str]
countrycodes: Optional[str]
exclude_place_ids: Optional[str]
- limit: Optional[int]
+ limit: int
viewbox: Optional[str]
bounded: bool
dedupe: bool
lat: float
lon: float
zoom: Optional[int]
+ layers: Optional[Sequence[str]]
# Arguments to 'lookup'
ids: Sequence[str]
# Arguments to 'details'
object_class: Optional[str]
-
+ linkedplaces: bool
+ hierarchy: bool
+ keywords: bool
+ polygon_geojson: bool
+ group_hierarchy: bool
def osm2pgsql_options(self, default_cache: int,
default_threads: int) -> Dict[str, Any]:
from the command line arguments. The resulting dict can be
further customized and then used in `run_osm2pgsql()`.
"""
- return dict(osm2pgsql=self.config.OSM2PGSQL_BINARY or self.osm2pgsql_path,
+ return dict(osm2pgsql=self.config.OSM2PGSQL_BINARY or self.config.lib_dir.osm2pgsql,
osm2pgsql_cache=self.osm2pgsql_cache or default_cache,
osm2pgsql_style=self.config.get_import_style_file(),
+ osm2pgsql_style_path=self.config.lib_dir.lua,
threads=self.threads or default_threads,
dsn=self.config.get_libpq_dsn(),
flatnode_file=str(self.config.get_path('FLATNODE_FILE') or ''),
slim_index=self.config.TABLESPACE_OSM_INDEX,
main_data=self.config.TABLESPACE_PLACE_DATA,
main_index=self.config.TABLESPACE_PLACE_INDEX
- )
- )
-
+ )
+ )
def get_osm_file_list(self) -> Optional[List[Path]]:
""" Return the --osm-file argument as a list of Paths or None
--- /dev/null
+# SPDX-License-Identifier: GPL-3.0-or-later
+#
+# This file is part of Nominatim. (https://nominatim.org)
+#
+# Copyright (C) 2024 by the Nominatim developer community.
+# For a full list of authors see the git log.
+"""
+Implementation of the 'convert' subcommand.
+"""
+from typing import Set, Any, Union, Optional, Sequence
+import argparse
+import asyncio
+from pathlib import Path
+
+from ..errors import UsageError
+from .args import NominatimArgs
+
+
+class WithAction(argparse.Action):
+ """ Special action that saves a list of flags, given on the command-line
+ as `--with-foo` or `--without-foo`.
+
+ For every declared option `--foo`, both `--with-foo` and `--without-foo`
+ are registered. The names of the enabled flags are collected in the set
+ handed in via the `dest_set` keyword; nothing is stored on the argparse
+ namespace itself.
+ """
+ def __init__(self, option_strings: Sequence[str], dest: Any,
+ default: bool = True, **kwargs: Any) -> None:
+ if 'nargs' in kwargs:
+ raise ValueError("nargs not allowed.")
+ if option_strings is None:
+ raise ValueError("Positional parameter not allowed.")
+
+ # Shared set that accumulates the names of the enabled flags.
+ self.dest_set = kwargs.pop('dest_set')
+ full_option_strings = []
+ for opt in option_strings:
+ if not opt.startswith('--'):
+ raise ValueError("short-form options not allowed")
+ if default:
+ # Flag is enabled by default; --without-<opt> removes it again.
+ self.dest_set.add(opt[2:])
+ full_option_strings.append(f"--with-{opt[2:]}")
+ full_option_strings.append(f"--without-{opt[2:]}")
+
+ # dest=SUPPRESS: the result lives in dest_set, not on the namespace.
+ super().__init__(full_option_strings, argparse.SUPPRESS, nargs=0, **kwargs)
+
+ def __call__(self, parser: argparse.ArgumentParser, namespace: argparse.Namespace,
+ values: Union[str, Sequence[Any], None],
+ option_string: Optional[str] = None) -> None:
+ assert option_string
+ # Strip the '--with-'/'--without-' prefix to recover the flag name.
+ if option_string.startswith('--with-'):
+ self.dest_set.add(option_string[7:])
+ if option_string.startswith('--without-'):
+ self.dest_set.discard(option_string[10:])
+
+
+class ConvertDB:
+ """ Convert an existing database into a different format. (EXPERIMENTAL)
+
+ Dump a read-only version of the database in a different format.
+ At the moment only a SQLite database suitable for reverse lookup
+ can be created.
+ """
+
+ def __init__(self) -> None:
+ # Set of enabled feature flags, maintained by WithAction.
+ self.options: Set[str] = set()
+
+ def add_args(self, parser: argparse.ArgumentParser) -> None:
+ parser.add_argument('--format', default='sqlite',
+ choices=('sqlite', ),
+ help='Format of the output database (must be sqlite currently)')
+ parser.add_argument('--output', '-o', required=True, type=Path,
+ help='File to write the database to.')
+ group = parser.add_argument_group('Switches to define database layout '
+ '(currently no effect)')
+ group.add_argument('--reverse', action=WithAction, dest_set=self.options, default=True,
+ help='Enable/disable support for reverse and lookup API'
+ ' (default: enabled)')
+ # default=False so that the behaviour matches the documented
+ # default of 'disabled'.
+ group.add_argument('--search', action=WithAction, dest_set=self.options, default=False,
+ help='Enable/disable support for search API (default: disabled)')
+ group.add_argument('--details', action=WithAction, dest_set=self.options, default=True,
+ help='Enable/disable support for details API (default: enabled)')
+
+ def run(self, args: NominatimArgs) -> int:
+ # Refuse to overwrite an existing output file.
+ if args.output.exists():
+ raise UsageError(f"File '{args.output}' already exists. Refusing to overwrite.")
+
+ if args.format == 'sqlite':
+ from ..tools import convert_sqlite
+
+ asyncio.run(convert_sqlite.convert(args.project_dir, args.output, self.options))
+ return 0
+
+ # Unreachable with the current 'choices' restriction; kept as a
+ # defensive fallback for future output formats.
+ return 1
--- /dev/null
+# SPDX-License-Identifier: GPL-3.0-or-later
+#
+# This file is part of Nominatim. (https://nominatim.org)
+#
+# Copyright (C) 2024 by the Nominatim developer community.
+# For a full list of authors see the git log.
+"""
+Implementation of the 'export' subcommand.
+"""
+from typing import Optional, List, cast
+import logging
+import argparse
+import asyncio
+import csv
+import sys
+
+import nominatim_api as napi
+from nominatim_api.results import create_from_placex_row, ReverseResult, add_result_details
+from nominatim_api.types import LookupDetails
+
+import sqlalchemy as sa
+
+from ..errors import UsageError
+from .args import NominatimArgs
+
+
+LOG = logging.getLogger()
+
+
+# Inclusive (min, max) address-rank range selected for each --output-type.
+RANK_RANGE_MAP = {
+ 'country': (4, 4),
+ 'state': (5, 9),
+ 'county': (10, 12),
+ 'city': (13, 16),
+ 'suburb': (17, 21),
+ 'street': (26, 26),
+ 'path': (27, 27)
+}
+
+
+# Reverse mapping: address rank of an address line -> CSV column name.
+RANK_TO_OUTPUT_MAP = {
+ 4: 'country',
+ 5: 'state', 6: 'state', 7: 'state', 8: 'state', 9: 'state',
+ 10: 'county', 11: 'county', 12: 'county',
+ 13: 'city', 14: 'city', 15: 'city', 16: 'city',
+ 17: 'suburb', 18: 'suburb', 19: 'suburb', 20: 'suburb', 21: 'suburb',
+ 26: 'street', 27: 'path'}
+
+
+class QueryExport:
+ """\
+ Export places as CSV file from the database.
+ """
+
+ def add_args(self, parser: argparse.ArgumentParser) -> None:
+ group = parser.add_argument_group('Output arguments')
+ group.add_argument('--output-type', default='street',
+ choices=('country', 'state', 'county',
+ 'city', 'suburb', 'street', 'path'),
+ help='Type of places to output (default: street)')
+ group.add_argument('--output-format',
+ default='street;suburb;city;county;state;country',
+ help=("Semicolon-separated list of address types "
+ "(see --output-type). Additionally accepts: "
+ "placeid,postcode"))
+ group.add_argument('--language',
+ help=("Preferred language for output "
+ "(use local name, if omitted)"))
+ group = parser.add_argument_group('Filter arguments')
+ group.add_argument('--restrict-to-country', metavar='COUNTRY_CODE',
+ help='Export only objects within country')
+ group.add_argument('--restrict-to-osm-node', metavar='ID', type=int,
+ dest='node',
+ help='Export only children of this OSM node')
+ group.add_argument('--restrict-to-osm-way', metavar='ID', type=int,
+ dest='way',
+ help='Export only children of this OSM way')
+ group.add_argument('--restrict-to-osm-relation', metavar='ID', type=int,
+ dest='relation',
+ help='Export only children of this OSM relation')
+
+ def run(self, args: NominatimArgs) -> int:
+ # The actual work happens in the asynchronous export() helper.
+ return asyncio.run(export(args))
+
+
+async def export(args: NominatimArgs) -> int:
+ """ The actual export as an asynchronous function.
+ """
+
+ api = napi.NominatimAPIAsync(args.project_dir)
+
+ try:
+ output_range = RANK_RANGE_MAP[args.output_type]
+
+ writer = init_csv_writer(args.output_format)
+
+ # Two connections: 'conn' streams the main result set while
+ # 'detail_conn' resolves address details for each batch.
+ async with api.begin() as conn, api.begin() as detail_conn:
+ t = conn.t.placex
+
+ # Note: '== None' (not 'is None') is required for SQLAlchemy
+ # expressions.
+ sql = sa.select(t.c.place_id, t.c.parent_place_id,
+ t.c.osm_type, t.c.osm_id, t.c.name,
+ t.c.class_, t.c.type, t.c.admin_level,
+ t.c.address, t.c.extratags,
+ t.c.housenumber, t.c.postcode, t.c.country_code,
+ t.c.importance, t.c.wikipedia, t.c.indexed_date,
+ t.c.rank_address, t.c.rank_search,
+ t.c.centroid)\
+ .where(t.c.linked_place_id == None)\
+ .where(t.c.rank_address.between(*output_range))
+
+ parent_place_id = await get_parent_id(conn, args.node, args.way, args.relation)
+ if parent_place_id:
+ # Restrict to places that carry the given object in their address.
+ taddr = conn.t.addressline
+
+ sql = sql.join(taddr, taddr.c.place_id == t.c.place_id)\
+ .where(taddr.c.address_place_id == parent_place_id)\
+ .where(taddr.c.isaddress)
+
+ if args.restrict_to_country:
+ sql = sql.where(t.c.country_code == args.restrict_to_country.lower())
+
+ results = []
+ for row in await conn.execute(sql):
+ result = create_from_placex_row(row, ReverseResult)
+ if result is not None:
+ results.append(result)
+
+ # Flush in batches of 1000 to bound memory use.
+ if len(results) == 1000:
+ await dump_results(detail_conn, results, writer, args.language)
+ results = []
+
+ if results:
+ await dump_results(detail_conn, results, writer, args.language)
+ finally:
+ await api.close()
+
+ return 0
+
+
+def init_csv_writer(output_format: str) -> 'csv.DictWriter[str]':
+ """ Create a CSV writer on stdout for the semicolon-separated field
+ names in 'output_format' and emit the header line. Keys in later
+ rows that are not listed are silently dropped (extrasaction).
+ """
+ fields = output_format.split(';')
+ writer = csv.DictWriter(sys.stdout, fieldnames=fields, extrasaction='ignore')
+ writer.writeheader()
+
+ return writer
+
+
+async def dump_results(conn: napi.SearchConnection,
+ results: List[ReverseResult],
+ writer: 'csv.DictWriter[str]',
+ lang: Optional[str]) -> None:
+ """ Resolve address details for the given batch of results and write
+ one CSV row per result.
+ """
+ # Falls back to local names when no language was given.
+ locale = napi.Locales([lang] if lang else None)
+ await add_result_details(conn, results,
+ LookupDetails(address_details=True, locales=locale))
+
+ for result in results:
+ data = {'placeid': result.place_id,
+ 'postcode': result.postcode}
+
+ # Map each address line to the CSV column for its address rank;
+ # a postcode line overrides the postcode from the placex row.
+ for line in (result.address_rows or []):
+ if line.isaddress and line.local_name:
+ if line.category[1] == 'postcode':
+ data['postcode'] = line.local_name
+ elif line.rank_address in RANK_TO_OUTPUT_MAP:
+ data[RANK_TO_OUTPUT_MAP[line.rank_address]] = line.local_name
+
+ writer.writerow(data)
+
+
+async def get_parent_id(conn: napi.SearchConnection, node_id: Optional[int],
+ way_id: Optional[int],
+ relation_id: Optional[int]) -> Optional[int]:
+ """ Get the place ID for the given OSM object.
+
+ Returns None when no OSM id was given at all and raises a
+ UsageError when the object cannot be found in the database.
+ """
+ if node_id is not None:
+ osm_type, osm_id = 'N', node_id
+ elif way_id is not None:
+ osm_type, osm_id = 'W', way_id
+ elif relation_id is not None:
+ osm_type, osm_id = 'R', relation_id
+ else:
+ return None
+
+ t = conn.t.placex
+ # When the OSM object maps to several placex rows, prefer the one
+ # with the lowest address rank; rows with rank_address 0 are skipped.
+ sql = sa.select(t.c.place_id).limit(1)\
+ .where(t.c.osm_type == osm_type)\
+ .where(t.c.osm_id == osm_id)\
+ .where(t.c.rank_address > 0)\
+ .order_by(t.c.rank_address)
+
+ for result in await conn.execute(sql):
+ return cast(int, result[0])
+
+ raise UsageError(f'Cannot find a place {osm_type}{osm_id}.')
-# SPDX-License-Identifier: GPL-2.0-only
+# SPDX-License-Identifier: GPL-3.0-or-later
#
# This file is part of Nominatim. (https://nominatim.org)
#
-# Copyright (C) 2022 by the Nominatim developer community.
+# Copyright (C) 2024 by the Nominatim developer community.
# For a full list of authors see the git log.
"""
Implementation of the 'freeze' subcommand.
"""
import argparse
-from nominatim.db.connection import connect
-from nominatim.clicmd.args import NominatimArgs
+from ..db.connection import connect
+from .args import NominatimArgs
-# Do not repeat documentation of subcommand classes.
-# pylint: disable=C0111
-# Using non-top-level imports to avoid eventually unused imports.
-# pylint: disable=E0012,C0415
class SetupFreeze:
"""\
"""
def add_args(self, parser: argparse.ArgumentParser) -> None:
- pass # No options
-
+ pass # No options
def run(self, args: NominatimArgs) -> int:
from ..tools import freeze
-# SPDX-License-Identifier: GPL-2.0-only
+# SPDX-License-Identifier: GPL-3.0-or-later
#
# This file is part of Nominatim. (https://nominatim.org)
#
-# Copyright (C) 2022 by the Nominatim developer community.
+# Copyright (C) 2024 by the Nominatim developer community.
# For a full list of authors see the git log.
"""
Implementation of the 'index' subcommand.
"""
import argparse
+import asyncio
import psutil
-from nominatim.db import status
-from nominatim.db.connection import connect
-from nominatim.clicmd.args import NominatimArgs
-
-# Do not repeat documentation of subcommand classes.
-# pylint: disable=C0111
-# Using non-top-level imports to avoid eventually unused imports.
-# pylint: disable=E0012,C0415
+from ..db import status
+from ..db.connection import connect
+from .args import NominatimArgs
class UpdateIndex:
group.add_argument('--maxrank', '-R', type=int, metavar='RANK', default=30,
help='Maximum/finishing rank')
-
def run(self, args: NominatimArgs) -> int:
- from ..indexer.indexer import Indexer
- from ..tokenizer import factory as tokenizer_factory
-
- tokenizer = tokenizer_factory.get_tokenizer_for_db(args.config)
-
- indexer = Indexer(args.config.get_libpq_dsn(), tokenizer,
- args.threads or psutil.cpu_count() or 1)
-
- if not args.no_boundaries:
- indexer.index_boundaries(args.minrank, args.maxrank)
- if not args.boundaries_only:
- indexer.index_by_rank(args.minrank, args.maxrank)
+ asyncio.run(self._do_index(args))
if not args.no_boundaries and not args.boundaries_only \
and args.minrank == 0 and args.maxrank == 30:
status.set_indexed(conn, True)
return 0
+
+ async def _do_index(self, args: NominatimArgs) -> None:
+ """ Run the indexer repeatedly until no pending places remain.
+ """
+ # Non-top-level imports to avoid eventually unused imports.
+ from ..tokenizer import factory as tokenizer_factory
+
+ tokenizer = tokenizer_factory.get_tokenizer_for_db(args.config)
+ from ..indexer.indexer import Indexer
+
+ indexer = Indexer(args.config.get_libpq_dsn(), tokenizer,
+ args.threads or psutil.cpu_count() or 1)
+
+ has_pending = True # run at least once
+ while has_pending:
+ if not args.no_boundaries:
+ await indexer.index_boundaries(args.minrank, args.maxrank)
+ if not args.boundaries_only:
+ await indexer.index_by_rank(args.minrank, args.maxrank)
+ await indexer.index_postcodes()
+ has_pending = indexer.has_pending()
-# SPDX-License-Identifier: GPL-2.0-only
+# SPDX-License-Identifier: GPL-3.0-or-later
#
# This file is part of Nominatim. (https://nominatim.org)
#
-# Copyright (C) 2022 by the Nominatim developer community.
+# Copyright (C) 2024 by the Nominatim developer community.
# For a full list of authors see the git log.
"""
Implementation of 'refresh' subcommand.
import argparse
import logging
from pathlib import Path
+import asyncio
-from nominatim.config import Configuration
-from nominatim.db.connection import connect
-from nominatim.tokenizer.base import AbstractTokenizer
-from nominatim.clicmd.args import NominatimArgs
+from ..config import Configuration
+from ..db.connection import connect, table_exists
+from ..tokenizer.base import AbstractTokenizer
+from .args import NominatimArgs
-# Do not repeat documentation of subcommand classes.
-# pylint: disable=C0111
-# Using non-top-level imports to avoid eventually unused imports.
-# pylint: disable=E0012,C0415
LOG = logging.getLogger()
+
def _parse_osm_object(obj: str) -> Tuple[str, int]:
""" Parse the given argument into a tuple of OSM type and ID.
Raises an ArgumentError if the format is not recognized.
group.add_argument('--importance', action='store_true',
help='Recompute place importances (expensive!)')
group.add_argument('--website', action='store_true',
- help='Refresh the directory that serves the scripts for the web API')
+ help='DEPRECATED. This function has no function anymore'
+ ' and will be removed in a future version.')
group.add_argument('--data-object', action='append',
type=_parse_osm_object, metavar='OBJECT',
help='Mark the given OSM object as requiring an update'
group.add_argument('--enable-debug-statements', action='store_true',
help='Enable debug warning statements in functions')
-
- def run(self, args: NominatimArgs) -> int: #pylint: disable=too-many-branches, too-many-statements
+ def run(self, args: NominatimArgs) -> int:
from ..tools import refresh, postcodes
from ..indexer.indexer import Indexer
+ need_function_refresh = args.functions
if args.postcodes:
if postcodes.can_compute(args.config.get_libpq_dsn()):
args.project_dir, tokenizer)
indexer = Indexer(args.config.get_libpq_dsn(), tokenizer,
args.threads or 1)
- indexer.index_postcodes()
+ asyncio.run(indexer.index_postcodes())
else:
LOG.error("The place table doesn't exist. "
"Postcode updates on a frozen database is not possible.")
if args.word_counts:
LOG.warning('Recompute word statistics')
- self._get_tokenizer(args.config).update_statistics()
+ self._get_tokenizer(args.config).update_statistics(args.config,
+ threads=args.threads or 1)
if args.address_levels:
LOG.warning('Updating address levels')
with connect(args.config.get_libpq_dsn()) as conn:
# If the table did not exist before, then the importance code
# needs to be enabled.
- if not conn.table_exists('secondary_importance'):
+ if not table_exists(conn, 'secondary_importance'):
args.functions = True
LOG.warning('Import secondary importance raster data from %s', args.project_dir)
if refresh.import_secondary_importance(args.config.get_libpq_dsn(),
- args.project_dir) > 0:
- LOG.fatal('FATAL: Cannot update sendary importance raster data')
+ args.project_dir) > 0:
+ LOG.fatal('FATAL: Cannot update secondary importance raster data')
return 1
-
- if args.functions:
- LOG.warning('Create functions')
- with connect(args.config.get_libpq_dsn()) as conn:
- refresh.create_functions(conn, args.config,
- args.diffs, args.enable_debug_statements)
- self._get_tokenizer(args.config).update_sql_functions(args.config)
+ need_function_refresh = True
if args.wiki_data:
data_path = Path(args.config.WIKIPEDIA_DATA_PATH
or args.project_dir)
- LOG.warning('Import wikipdia article importance from %s', data_path)
+ LOG.warning('Import wikipedia article importance from %s', data_path)
if refresh.import_wikipedia_articles(args.config.get_libpq_dsn(),
data_path) > 0:
- LOG.fatal('FATAL: Wikipedia importance dump file not found')
+ LOG.fatal('FATAL: Wikipedia importance file not found in %s', data_path)
return 1
+ need_function_refresh = True
- # Attention: importance MUST come after wiki data import.
+ if need_function_refresh:
+ LOG.warning('Create functions')
+ with connect(args.config.get_libpq_dsn()) as conn:
+ refresh.create_functions(conn, args.config,
+ args.diffs, args.enable_debug_statements)
+ self._get_tokenizer(args.config).update_sql_functions(args.config)
+
+ # Attention: importance MUST come after wiki data import and after functions.
if args.importance:
LOG.warning('Update importance values for database')
with connect(args.config.get_libpq_dsn()) as conn:
refresh.recompute_importance(conn)
if args.website:
- webdir = args.project_dir / 'website'
- LOG.warning('Setting up website directory at %s', webdir)
- # This is a little bit hacky: call the tokenizer setup, so that
- # the tokenizer directory gets repopulated as well, in case it
- # wasn't there yet.
- self._get_tokenizer(args.config)
- with connect(args.config.get_libpq_dsn()) as conn:
- refresh.setup_website(webdir, args.config, conn)
+ LOG.error('WARNING: Website setup is no longer required. '
+ 'This function will be removed in future version of Nominatim.')
if args.data_object or args.data_area:
with connect(args.config.get_libpq_dsn()) as conn:
return 0
-
def _get_tokenizer(self, config: Configuration) -> AbstractTokenizer:
if self.tokenizer is None:
from ..tokenizer import factory as tokenizer_factory
-# SPDX-License-Identifier: GPL-2.0-only
+# SPDX-License-Identifier: GPL-3.0-or-later
#
# This file is part of Nominatim. (https://nominatim.org)
#
-# Copyright (C) 2022 by the Nominatim developer community.
+# Copyright (C) 2024 by the Nominatim developer community.
# For a full list of authors see the git log.
"""
Implementation of the 'replication' sub-command.
import logging
import socket
import time
+import asyncio
-from nominatim.db import status
-from nominatim.db.connection import connect
-from nominatim.errors import UsageError
-from nominatim.clicmd.args import NominatimArgs
+from ..db import status
+from ..db.connection import connect
+from ..errors import UsageError
+from .args import NominatimArgs
LOG = logging.getLogger()
-# Do not repeat documentation of subcommand classes.
-# pylint: disable=C0111
-# Using non-top-level imports to make pyosmium optional for replication only.
-# pylint: disable=C0415
class UpdateReplication:
"""\
group.add_argument('--socket-timeout', dest='socket_timeout', type=int, default=60,
help='Set timeout for file downloads')
-
def _init_replication(self, args: NominatimArgs) -> int:
from ..tools import replication, refresh
LOG.warning("Initialising replication updates")
with connect(args.config.get_libpq_dsn()) as conn:
- replication.init_replication(conn, base_url=args.config.REPLICATION_URL)
+ replication.init_replication(conn, base_url=args.config.REPLICATION_URL,
+ socket_timeout=args.socket_timeout)
if args.update_functions:
LOG.warning("Create functions")
refresh.create_functions(conn, args.config, True, False)
return 0
-
def _check_for_updates(self, args: NominatimArgs) -> int:
from ..tools import replication
with connect(args.config.get_libpq_dsn()) as conn:
- return replication.check_for_updates(conn, base_url=args.config.REPLICATION_URL)
-
+ return replication.check_for_updates(conn, base_url=args.config.REPLICATION_URL,
+ socket_timeout=args.socket_timeout)
def _report_update(self, batchdate: dt.datetime,
start_import: dt.datetime,
round_time(end - start_import),
round_time(end - batchdate))
-
def _compute_update_interval(self, args: NominatimArgs) -> int:
if args.catch_up:
return 0
and update_interval < 86400:
LOG.fatal("Update interval too low for download.geofabrik.de.\n"
"Please check install documentation "
- "(https://nominatim.org/release-docs/latest/admin/Import-and-Update#"
+ "(https://nominatim.org/release-docs/latest/admin/Update/#"
"setting-up-the-update-process).")
raise UsageError("Invalid replication update interval setting.")
return update_interval
-
- def _update(self, args: NominatimArgs) -> None:
- # pylint: disable=too-many-locals
+ async def _update(self, args: NominatimArgs) -> None:
from ..tools import replication
from ..indexer.indexer import Indexer
from ..tokenizer import factory as tokenizer_factory
if not args.do_index:
LOG.fatal("Indexing cannot be disabled when running updates continuously.")
raise UsageError("Bad argument '--no-index'.")
- recheck_interval = args.config.get_int('REPLICATION_RECHECK_INTERVAL')
+ recheck_interval = args.config.get_int('REPLICATION_RECHECK_INTERVAL')
tokenizer = tokenizer_factory.get_tokenizer_for_db(args.config)
indexer = Indexer(args.config.get_libpq_dsn(), tokenizer, args.threads or 1)
+ dsn = args.config.get_libpq_dsn()
+
while True:
- with connect(args.config.get_libpq_dsn()) as conn:
- start = dt.datetime.now(dt.timezone.utc)
- state = replication.update(conn, params)
+ start = dt.datetime.now(dt.timezone.utc)
+ state = replication.update(dsn, params, socket_timeout=args.socket_timeout)
+
+ with connect(dsn) as conn:
if state is not replication.UpdateState.NO_CHANGES:
status.log_status(conn, start, 'import')
batchdate, _, _ = status.get_status(conn)
if state is not replication.UpdateState.NO_CHANGES and args.do_index:
index_start = dt.datetime.now(dt.timezone.utc)
- indexer.index_full(analyse=False)
+ await indexer.index_full(analyse=False)
- with connect(args.config.get_libpq_dsn()) as conn:
+ with connect(dsn) as conn:
status.set_indexed(conn, True)
status.log_status(conn, index_start, 'index')
conn.commit()
if state is replication.UpdateState.NO_CHANGES and \
args.catch_up or update_interval > 40*60:
- while indexer.has_pending():
- indexer.index_full(analyse=False)
+ await indexer.index_full(analyse=False)
if LOG.isEnabledFor(logging.WARNING):
assert batchdate is not None
LOG.warning("No new changes. Sleeping for %d sec.", recheck_interval)
time.sleep(recheck_interval)
-
def run(self, args: NominatimArgs) -> int:
socket.setdefaulttimeout(args.socket_timeout)
if args.check_for_updates:
return self._check_for_updates(args)
- self._update(args)
+ asyncio.run(self._update(args))
return 0
-# SPDX-License-Identifier: GPL-2.0-only
+# SPDX-License-Identifier: GPL-3.0-or-later
#
# This file is part of Nominatim. (https://nominatim.org)
#
-# Copyright (C) 2022 by the Nominatim developer community.
+# Copyright (C) 2024 by the Nominatim developer community.
# For a full list of authors see the git log.
"""
Implementation of the 'import' subcommand.
import argparse
import logging
from pathlib import Path
+import asyncio
import psutil
-from nominatim.config import Configuration
-from nominatim.db.connection import connect
-from nominatim.db import status, properties
-from nominatim.tokenizer.base import AbstractTokenizer
-from nominatim.version import version_str
-from nominatim.clicmd.args import NominatimArgs
-from nominatim.errors import UsageError
+from ..errors import UsageError
+from ..config import Configuration
+from ..db.connection import connect
+from ..db import status, properties
+from ..tokenizer.base import AbstractTokenizer
+from ..version import NOMINATIM_VERSION
+from .args import NominatimArgs
-# Do not repeat documentation of subcommand classes.
-# pylint: disable=C0111
-# Using non-top-level imports to avoid eventually unused imports.
-# pylint: disable=C0415
LOG = logging.getLogger()
+
class SetupAll:
"""\
Create a new Nominatim database from an OSM file.
"""
def add_args(self, parser: argparse.ArgumentParser) -> None:
- group_name = parser.add_argument_group('Required arguments')
- group1 = group_name.add_mutually_exclusive_group(required=True)
+ group1 = parser.add_argument_group('Required arguments')
group1.add_argument('--osm-file', metavar='FILE', action='append',
- help='OSM file to be imported'
- ' (repeat for importing multiple files)')
+ help='OSM file to be imported'
+ ' (repeat for importing multiple files)',
+ default=None)
group1.add_argument('--continue', dest='continue_at',
- choices=['load-data', 'indexing', 'db-postprocess'],
- help='Continue an import that was interrupted')
+ choices=['import-from-file', 'load-data', 'indexing', 'db-postprocess'],
+ help='Continue an import that was interrupted',
+ default=None)
group2 = parser.add_argument_group('Optional arguments')
group2.add_argument('--osm2pgsql-cache', metavar='SIZE', type=int,
- help='Size of cache to be used by osm2pgsql (in MB)')
+ help='Size of cache to be used by osm2pgsql (in MB)')
group2.add_argument('--reverse-only', action='store_true',
- help='Do not create tables and indexes for searching')
+ help='Do not create tables and indexes for searching')
group2.add_argument('--no-partitions', action='store_true',
- help=("Do not partition search indices "
- "(speeds up import of single country extracts)"))
+ help="Do not partition search indices "
+ "(speeds up import of single country extracts)")
group2.add_argument('--no-updates', action='store_true',
- help="Do not keep tables that are only needed for "
- "updating the database later")
+ help="Do not keep tables that are only needed for "
+ "updating the database later")
group2.add_argument('--offline', action='store_true',
help="Do not attempt to load any additional data from the internet")
group3 = parser.add_argument_group('Expert options')
group3.add_argument('--ignore-errors', action='store_true',
- help='Continue import even when errors in SQL are present')
+ help='Continue import even when errors in SQL are present')
group3.add_argument('--index-noanalyse', action='store_true',
- help='Do not perform analyse operations during index (expert only)')
+ help='Do not perform analyse operations during index (expert only)')
+ group3.add_argument('--prepare-database', action='store_true',
+ help='Create the database but do not import any data')
+
+ def run(self, args: NominatimArgs) -> int:
+ if args.osm_file is None and args.continue_at is None and not args.prepare_database:
+ raise UsageError("No input files (use --osm-file).")
+
+ if args.osm_file is not None and args.continue_at not in ('import-from-file', None):
+ raise UsageError(f"Cannot use --continue {args.continue_at} and --osm-file together.")
+
+ if args.continue_at is not None and args.prepare_database:
+ raise UsageError(
+ "Cannot use --continue and --prepare-database together."
+ )
+ return asyncio.run(self.async_run(args))
- def run(self, args: NominatimArgs) -> int: # pylint: disable=too-many-statements
+ async def async_run(self, args: NominatimArgs) -> int:
from ..data import country_info
- from ..tools import database_import, refresh, postcodes, freeze
+ from ..tools import database_import, postcodes, freeze
from ..indexer.indexer import Indexer
num_threads = args.threads or psutil.cpu_count() or 1
-
country_info.setup_country_config(args.config)
- if args.continue_at is None:
- files = args.get_osm_file_list()
- if not files:
- raise UsageError("No input files (use --osm-file).")
-
+ if args.prepare_database or args.continue_at is None:
LOG.warning('Creating database')
database_import.setup_database_skeleton(args.config.get_libpq_dsn(),
rouser=args.config.DATABASE_WEBUSER)
+ if args.prepare_database:
+ return 0
- LOG.warning('Setting up country tables')
- country_info.setup_country_tables(args.config.get_libpq_dsn(),
- args.data_dir,
- args.no_partitions)
-
- LOG.warning('Importing OSM data file')
- database_import.import_osm_data(files,
- args.osm2pgsql_options(0, 1),
- drop=args.no_updates,
- ignore_errors=args.ignore_errors)
-
- LOG.warning('Importing wikipedia importance data')
- data_path = Path(args.config.WIKIPEDIA_DATA_PATH or args.project_dir)
- if refresh.import_wikipedia_articles(args.config.get_libpq_dsn(),
- data_path) > 0:
- LOG.error('Wikipedia importance dump file not found. '
- 'Calculating importance values of locations will not '
- 'use Wikipedia importance data.')
-
- LOG.warning('Importing secondary importance raster data')
- if refresh.import_secondary_importance(args.config.get_libpq_dsn(),
- args.project_dir) != 0:
- LOG.error('Secondary importance file not imported. '
- 'Falling back to default ranking.')
-
- self._setup_tables(args.config, args.reverse_only)
+ if args.continue_at in (None, 'import-from-file'):
+ self._base_import(args)
- if args.continue_at is None or args.continue_at == 'load-data':
+ if args.continue_at in ('import-from-file', 'load-data', None):
LOG.warning('Initialise tables')
with connect(args.config.get_libpq_dsn()) as conn:
database_import.truncate_data_tables(conn)
LOG.warning('Load data into placex table')
- database_import.load_data(args.config.get_libpq_dsn(), num_threads)
+ await database_import.load_data(args.config.get_libpq_dsn(), num_threads)
LOG.warning("Setting up tokenizer")
tokenizer = self._get_tokenizer(args.continue_at, args.config)
- if args.continue_at is None or args.continue_at == 'load-data':
+ if args.continue_at in ('import-from-file', 'load-data', None):
LOG.warning('Calculate postcodes')
postcodes.update_postcodes(args.config.get_libpq_dsn(),
args.project_dir, tokenizer)
- if args.continue_at is None or args.continue_at in ('load-data', 'indexing'):
+ if args.continue_at in ('import-from-file', 'load-data', 'indexing', None):
LOG.warning('Indexing places')
indexer = Indexer(args.config.get_libpq_dsn(), tokenizer, num_threads)
- indexer.index_full(analyse=not args.index_noanalyse)
+ await indexer.index_full(analyse=not args.index_noanalyse)
LOG.warning('Post-process tables')
with connect(args.config.get_libpq_dsn()) as conn:
- database_import.create_search_indices(conn, args.config,
- drop=args.no_updates,
- threads=num_threads)
+ conn.autocommit = True
+ await database_import.create_search_indices(conn, args.config,
+ drop=args.no_updates,
+ threads=num_threads)
LOG.warning('Create search index for default country names.')
+ conn.autocommit = False
country_info.create_country_names(conn, tokenizer,
args.config.get_str_list('LANGUAGES'))
if args.no_updates:
+ conn.autocommit = True
freeze.drop_update_tables(conn)
tokenizer.finalize_import(args.config)
LOG.warning('Recompute word counts')
- tokenizer.update_statistics()
-
- webdir = args.project_dir / 'website'
- LOG.warning('Setup website at %s', webdir)
- with connect(args.config.get_libpq_dsn()) as conn:
- refresh.setup_website(webdir, args.config, conn)
+ tokenizer.update_statistics(args.config, threads=num_threads)
self._finalize_database(args.config.get_libpq_dsn(), args.offline)
return 0
+ def _base_import(self, args: NominatimArgs) -> None:
+ from ..tools import database_import, refresh
+ from ..data import country_info
+
+ files = args.get_osm_file_list()
+ if not files:
+ raise UsageError("No input files (use --osm-file).")
+
+ if args.continue_at in ('import-from-file', None):
+ # Check if the correct plugins are installed
+ database_import.check_existing_database_plugins(args.config.get_libpq_dsn())
+ LOG.warning('Setting up country tables')
+ country_info.setup_country_tables(args.config.get_libpq_dsn(),
+ args.config.lib_dir.data,
+ args.no_partitions)
+
+ LOG.warning('Importing OSM data file')
+ database_import.import_osm_data(files,
+ args.osm2pgsql_options(0, 1),
+ drop=args.no_updates,
+ ignore_errors=args.ignore_errors)
+
+ LOG.warning('Importing wikipedia importance data')
+ data_path = Path(args.config.WIKIPEDIA_DATA_PATH or args.project_dir)
+ if refresh.import_wikipedia_articles(args.config.get_libpq_dsn(),
+ data_path) > 0:
+ LOG.error('Wikipedia importance dump file not found. '
+ 'Calculating importance values of locations will not '
+ 'use Wikipedia importance data.')
+
+ LOG.warning('Importing secondary importance raster data')
+ if refresh.import_secondary_importance(args.config.get_libpq_dsn(),
+ args.project_dir) != 0:
+ LOG.error('Secondary importance file not imported. '
+ 'Falling back to default ranking.')
+
+ self._setup_tables(args.config, args.reverse_only)
def _setup_tables(self, config: Configuration, reverse_only: bool) -> None:
""" Set up the basic database layout: tables, indexes and functions.
from ..tools import database_import, refresh
with connect(config.get_libpq_dsn()) as conn:
+ conn.autocommit = True
LOG.warning('Create functions (1st pass)')
refresh.create_functions(conn, config, False, False)
LOG.warning('Create tables')
LOG.warning('Create functions (3rd pass)')
refresh.create_functions(conn, config, False, False)
-
def _get_tokenizer(self, continue_at: Optional[str],
config: Configuration) -> AbstractTokenizer:
""" Set up a new tokenizer or load an already initialised one.
"""
from ..tokenizer import factory as tokenizer_factory
- if continue_at is None or continue_at == 'load-data':
+ if continue_at in ('import-from-file', 'load-data', None):
# (re)initialise the tokenizer data
return tokenizer_factory.create_tokenizer(config)
# just load the tokenizer
return tokenizer_factory.get_tokenizer_for_db(config)
-
def _finalize_database(self, dsn: str, offline: bool) -> None:
""" Determine the database date and set the status accordingly.
"""
with connect(dsn) as conn:
- if not offline:
- try:
- dbdate = status.compute_database_date(conn)
- status.set_status(conn, dbdate)
- LOG.info('Database is at %s.', dbdate)
- except Exception as exc: # pylint: disable=broad-except
- LOG.error('Cannot determine date of database: %s', exc)
-
- properties.set_property(conn, 'database_version', version_str())
+ properties.set_property(conn, 'database_version', str(NOMINATIM_VERSION))
+
+ try:
+ dbdate = status.compute_database_date(conn, offline)
+ status.set_status(conn, dbdate)
+ LOG.info('Database is at %s.', dbdate)
+ except Exception as exc:
+ LOG.error('Cannot determine date of database: %s', exc)
-# SPDX-License-Identifier: GPL-2.0-only
+# SPDX-License-Identifier: GPL-3.0-or-later
#
# This file is part of Nominatim. (https://nominatim.org)
#
-# Copyright (C) 2022 by the Nominatim developer community.
+# Copyright (C) 2024 by the Nominatim developer community.
# For a full list of authors see the git log.
"""
Implementation of the 'special-phrases' command.
import logging
from pathlib import Path
-from nominatim.errors import UsageError
-from nominatim.db.connection import connect
-from nominatim.tools.special_phrases.sp_importer import SPImporter, SpecialPhraseLoader
-from nominatim.tools.special_phrases.sp_wiki_loader import SPWikiLoader
-from nominatim.tools.special_phrases.sp_csv_loader import SPCsvLoader
-from nominatim.clicmd.args import NominatimArgs
+from ..errors import UsageError
+from ..db.connection import connect
+from ..tools.special_phrases.sp_importer import SPImporter, SpecialPhraseLoader
+from ..tools.special_phrases.sp_wiki_loader import SPWikiLoader
+from ..tools.special_phrases.sp_csv_loader import SPCsvLoader
+from .args import NominatimArgs
+
LOG = logging.getLogger()
-# Do not repeat documentation of subcommand classes.
-# pylint: disable=C0111
-# Using non-top-level imports to avoid eventually unused imports.
-# pylint: disable=E0012,C0415
class ImportSpecialPhrases:
"""\
group.add_argument('--no-replace', action='store_true',
help='Keep the old phrases and only add the new ones')
-
def run(self, args: NominatimArgs) -> int:
if args.import_from_wiki:
return 0
-
def start_import(self, args: NominatimArgs, loader: SpecialPhraseLoader) -> None:
"""
Create the SPImporter object containing the right
-# SPDX-License-Identifier: GPL-2.0-only
+# SPDX-License-Identifier: GPL-3.0-or-later
#
# This file is part of Nominatim. (https://nominatim.org)
#
-# Copyright (C) 2022 by the Nominatim developer community.
+# Copyright (C) 2024 by the Nominatim developer community.
# For a full list of authors see the git log.
"""
Nominatim configuration accessor.
"""
-from typing import Dict, Any, List, Mapping, Optional
+from typing import Union, Dict, Any, List, Mapping, Optional
import importlib.util
import logging
import os
from dotenv import dotenv_values
-from nominatim.typing import StrPath
-from nominatim.errors import UsageError
+from psycopg.conninfo import conninfo_to_dict
+
+from .typing import StrPath
+from .errors import UsageError
+from . import paths
LOG = logging.getLogger()
-CONFIG_CACHE : Dict[str, Any] = {}
+CONFIG_CACHE: Dict[str, Any] = {}
+
def flatten_config_list(content: Any, section: str = '') -> List[Any]:
""" Flatten YAML configuration lists that contain include sections
class Configuration:
- """ Load and manage the project configuration.
-
- Nominatim uses dotenv to configure the software. Configuration options
- are resolved in the following order:
-
- * from the OS environment (or the dirctionary given in `environ`
- * from the .env file in the project directory of the installation
- * from the default installation in the configuration directory
+ """ This class wraps access to the configuration settings
+ for the Nominatim instance in use.
All Nominatim configuration options are prefixed with 'NOMINATIM_' to
- avoid conflicts with other environment variables.
+ avoid conflicts with other environment variables. All settings can
+ be accessed as properties of the class under the same name as the
+ setting but with the `NOMINATIM_` prefix removed. In addition, there
+ are accessor functions that convert the setting values to types
+ other than string.
"""
- def __init__(self, project_dir: Path, config_dir: Path,
+ def __init__(self, project_dir: Optional[Union[Path, str]],
environ: Optional[Mapping[str, str]] = None) -> None:
- self.environ = environ or os.environ
- self.project_dir = project_dir
- self.config_dir = config_dir
- self._config = dotenv_values(str((config_dir / 'env.defaults').resolve()))
- if project_dir is not None and (project_dir / '.env').is_file():
- self._config.update(dotenv_values(str((project_dir / '.env').resolve())))
+ self.environ = os.environ if environ is None else environ
+ self.config_dir = paths.CONFIG_DIR
+ self._config = dotenv_values(str(self.config_dir / 'env.defaults'))
+ if project_dir is not None:
+ self.project_dir: Optional[Path] = Path(project_dir).resolve()
+ if (self.project_dir / '.env').is_file():
+ self._config.update(dotenv_values(str(self.project_dir / '.env')))
+ else:
+ self.project_dir = None
class _LibDirs:
- module: Path
osm2pgsql: Path
- php: Path
- sql: Path
- data: Path
+ sql = paths.SQLLIB_DIR
+ lua = paths.LUALIB_DIR
+ data = paths.DATA_DIR
self.lib_dir = _LibDirs()
self._private_plugins: Dict[str, object] = {}
-
def set_libdirs(self, **kwargs: StrPath) -> None:
""" Set paths to library functions and data.
"""
for key, value in kwargs.items():
- setattr(self.lib_dir, key, Path(value).resolve())
-
+ setattr(self.lib_dir, key, None if value is None else Path(value))
def __getattr__(self, name: str) -> str:
name = 'NOMINATIM_' + name
return self._config[name] or ''
-
def get_bool(self, name: str) -> bool:
""" Return the given configuration parameter as a boolean.
- Values of '1', 'yes' and 'true' are accepted as truthy values,
- everything else is interpreted as false.
+
+ Parameters:
+ name: Name of the configuration parameter with the NOMINATIM_
+ prefix removed.
+
+ Returns:
+ `True` for values of '1', 'yes' and 'true', `False` otherwise.
"""
return getattr(self, name).lower() in ('1', 'yes', 'true')
-
def get_int(self, name: str) -> int:
""" Return the given configuration parameter as an int.
+
+ Parameters:
+ name: Name of the configuration parameter with the NOMINATIM_
+ prefix removed.
+
+ Returns:
+ The configuration value converted to int.
+
+ Raises:
+ ValueError: when the value is not a number.
"""
try:
return int(getattr(self, name))
LOG.fatal("Invalid setting NOMINATIM_%s. Needs to be a number.", name)
raise UsageError("Configuration error.") from exp
-
def get_str_list(self, name: str) -> Optional[List[str]]:
""" Return the given configuration parameter as a list of strings.
The values are assumed to be given as a comma-separated list and
- will be stripped before returning them. On empty values None
- is returned.
+ will be stripped before returning them.
+
+ Parameters:
+ name: Name of the configuration parameter with the NOMINATIM_
+ prefix removed.
+
+ Returns:
+ (List[str]): The comma-split parameter as a list. The
+ elements are stripped of leading and final spaces before
+ being returned.
+ (None): The configuration parameter was unset or empty.
"""
raw = getattr(self, name)
return [v.strip() for v in raw.split(',')] if raw else None
-
def get_path(self, name: str) -> Optional[Path]:
""" Return the given configuration parameter as a Path.
- If a relative path is configured, then the function converts this
- into an absolute path with the project directory as root path.
- If the configuration is unset, None is returned.
+
+ Parameters:
+ name: Name of the configuration parameter with the NOMINATIM_
+ prefix removed.
+
+ Returns:
+ (Path): A Path object of the parameter value.
+ If a relative path is configured, then the function converts this
+ into an absolute path with the project directory as root path.
+ (None): The configuration parameter was unset or empty.
"""
value = getattr(self, name)
if not value:
cfgpath = Path(value)
if not cfgpath.is_absolute():
+ assert self.project_dir is not None
cfgpath = self.project_dir / cfgpath
return cfgpath.resolve()
-
def get_libpq_dsn(self) -> str:
""" Get configured database DSN converted into the key/value format
understood by libpq and psycopg.
return dsn
+ def get_database_params(self) -> Mapping[str, Union[str, int, None]]:
+ """ Get the configured parameters for the database connection
+ as a mapping.
+ """
+ dsn = self.DATABASE_DSN
+
+ if dsn.startswith('pgsql:'):
+ return dict((p.split('=', 1) for p in dsn[6:].split(';')))
+
+ return conninfo_to_dict(dsn)
def get_import_style_file(self) -> Path:
""" Return the import style file as a path object. Translates the
style = getattr(self, 'IMPORT_STYLE')
if style in ('admin', 'street', 'address', 'full', 'extratags'):
- return self.config_dir / f'import-{style}.style'
+ return self.lib_dir.lua / f'import-{style}.lua'
return self.find_config_file('', 'IMPORT_STYLE')
-
- def get_os_env(self) -> Dict[str, Optional[str]]:
+ def get_os_env(self) -> Dict[str, str]:
""" Return a copy of the OS environment with the Nominatim configuration
merged in.
"""
- env = dict(self._config)
+ env = {k: v for k, v in self._config.items() if v is not None}
env.update(self.environ)
return env
-
def load_sub_configuration(self, filename: StrPath,
config: Optional[str] = None) -> Any:
""" Load additional configuration from a file. `filename` is the name
CONFIG_CACHE[str(configfile)] = result
return result
-
def load_plugin_module(self, module_name: str, internal_path: str) -> Any:
""" Load a Python module as a plugin.
return sys.modules.get(module_name) or importlib.import_module(module_name)
-
def find_config_file(self, filename: StrPath,
config: Optional[str] = None) -> Path:
""" Resolve the location of a configuration file given a filename and
filename = cfg_filename
-
search_paths = [self.project_dir, self.config_dir]
for path in search_paths:
if path is not None and (path / filename).is_file():
filename, search_paths)
raise UsageError("Config file not found.")
-
def _load_from_yaml(self, cfgfile: Path) -> Any:
""" Load a YAML configuration file. This installs a special handler that
allows to include other YAML files using the '!include' operator.
Loader=yaml.SafeLoader)
return yaml.safe_load(cfgfile.read_text(encoding='utf-8'))
-
def _yaml_include_representer(self, loader: Any, node: yaml.Node) -> Any:
""" Handler for the '!include' operator in YAML files.
-# SPDX-License-Identifier: GPL-2.0-only
+# SPDX-License-Identifier: GPL-3.0-or-later
#
# This file is part of Nominatim. (https://nominatim.org)
#
-# Copyright (C) 2022 by the Nominatim developer community.
+# Copyright (C) 2024 by the Nominatim developer community.
# For a full list of authors see the git log.
"""
Functions for importing and managing static country information.
"""
from typing import Dict, Any, Iterable, Tuple, Optional, Container, overload
from pathlib import Path
-import psycopg2.extras
-from nominatim.db import utils as db_utils
-from nominatim.db.connection import connect, Connection
-from nominatim.errors import UsageError
-from nominatim.config import Configuration
-from nominatim.tokenizer.base import AbstractTokenizer
+from ..db import utils as db_utils
+from ..db.connection import connect, Connection, register_hstore
+from ..errors import UsageError
+from ..config import Configuration
+from ..tokenizer.base import AbstractTokenizer
+
def _flatten_name_list(names: Any) -> Dict[str, str]:
if names is None:
return flat
-
class _CountryInfo:
""" Caches country-specific properties from the configuration file.
"""
def __init__(self) -> None:
self._info: Dict[str, Dict[str, Any]] = {}
-
def load(self, config: Configuration) -> None:
""" Load the country properties from the configuration files,
if they are not loaded yet.
for x in prop['languages'].split(',')]
prop['names'] = _flatten_name_list(prop.get('names'))
-
def items(self) -> Iterable[Tuple[str, Dict[str, Any]]]:
""" Return tuples of (country_code, property dict) as iterable.
"""
return self._info.get(country_code, {})
-
_COUNTRY_INFO = _CountryInfo()
"""
_COUNTRY_INFO.load(config)
+
@overload
def iterate() -> Iterable[Tuple[str, Dict[str, Any]]]:
...
+
@overload
def iterate(prop: str) -> Iterable[Tuple[str, Any]]:
...
+
def iterate(prop: Optional[str] = None) -> Iterable[Tuple[str, Dict[str, Any]]]:
""" Iterate over country code and properties.
params.append((ccode, props['names'], lang, partition))
with connect(dsn) as conn:
+ register_hstore(conn)
with conn.cursor() as cur:
- psycopg2.extras.register_hstore(cur)
cur.execute(
""" CREATE TABLE public.country_name (
country_code character varying(2),
country_default_language_code text,
partition integer
); """)
- cur.execute_values(
+ cur.executemany(
""" INSERT INTO public.country_name
- (country_code, name, country_default_language_code, partition) VALUES %s
+ (country_code, name, country_default_language_code, partition)
+ VALUES (%s, %s, %s, %s)
""", params)
conn.commit()
return ':' not in key or not languages or \
key[key.index(':') + 1:] in languages
+ register_hstore(conn)
with conn.cursor() as cur:
- psycopg2.extras.register_hstore(cur)
cur.execute("""SELECT country_code, name FROM country_name
WHERE country_code is not null""")
# country names (only in languages as provided)
if name:
- names.update({k : v for k, v in name.items() if _include_key(k)})
+ names.update({k: v for k, v in name.items() if _include_key(k)})
analyzer.add_country_names(code, names)
-# SPDX-License-Identifier: GPL-2.0-only
+# SPDX-License-Identifier: GPL-3.0-or-later
#
# This file is part of Nominatim. (https://nominatim.org)
#
-# Copyright (C) 2022 by the Nominatim developer community.
+# Copyright (C) 2024 by the Nominatim developer community.
# For a full list of authors see the git log.
"""
Wrapper around place information the indexer gets from the database and hands to
"""
from typing import Optional, Mapping, Any, Tuple
+
class PlaceInfo:
""" This data class contains all information the tokenizer can access
about a place.
def __init__(self, info: Mapping[str, Any]) -> None:
self._info = info
-
@property
def name(self) -> Optional[Mapping[str, str]]:
""" A dictionary with the names of the place. Keys and values represent
"""
return self._info.get('name')
-
@property
def address(self) -> Optional[Mapping[str, str]]:
""" A dictionary with the address elements of the place. They key
"""
return self._info.get('address')
-
@property
def country_code(self) -> Optional[str]:
""" The country code of the country the place is in. Guaranteed
"""
return self._info.get('country_code')
-
@property
def rank_address(self) -> int:
- """ The [rank address][1] before ant rank correction is applied.
+ """ The [rank address][1] before any rank correction is applied.
[1]: ../customize/Ranking.md#address-rank
"""
return self._info.get('rank_address', 0)
-
@property
def centroid(self) -> Optional[Tuple[float, float]]:
""" A center point of the place in WGS84. May be None when the
x, y = self._info.get('centroid_x'), self._info.get('centroid_y')
return None if x is None or y is None else (x, y)
-
def is_a(self, key: str, value: str) -> bool:
""" Set to True when the place's primary tag corresponds to the given
key and value.
"""
return self._info.get('class') == key and self._info.get('type') == value
-
def is_country(self) -> bool:
""" Set to True when the place is a valid country boundary.
"""
return self.rank_address == 4 \
- and self.is_a('boundary', 'administrative') \
- and self.country_code is not None
+ and self.is_a('boundary', 'administrative') \
+ and self.country_code is not None
-# SPDX-License-Identifier: GPL-2.0-only
+# SPDX-License-Identifier: GPL-3.0-or-later
#
# This file is part of Nominatim. (https://nominatim.org)
#
-# Copyright (C) 2022 by the Nominatim developer community.
+# Copyright (C) 2024 by the Nominatim developer community.
# For a full list of authors see the git log.
"""
Data class for a single name of a place.
"""
from typing import Optional, Dict, Mapping
+
class PlaceName:
""" Each name and address part of a place is encapsulated in an object of
this class. It saves not only the name proper but also describes the
In addition to that, a name may have arbitrary additional attributes.
How attributes are used, depends on the sanitizers and token analysers.
- The exception is is the 'analyzer' attribute. This attribute determines
+ The exception is the 'analyzer' attribute. This attribute determines
which token analysis module will be used to finalize the treatment of
names.
"""
self.suffix = suffix
self.attr: Dict[str, str] = {}
-
def __repr__(self) -> str:
- return f"PlaceName(name='{self.name}',kind='{self.kind}',suffix='{self.suffix}')"
-
+ return f"PlaceName(name={self.name!r},kind={self.kind!r},suffix={self.suffix!r})"
def clone(self, name: Optional[str] = None,
kind: Optional[str] = None,
return newobj
-
def set_attr(self, key: str, value: str) -> None:
""" Add the given property to the name. If the property was already
set, then the value is overwritten.
"""
self.attr[key] = value
-
def get_attr(self, key: str, default: Optional[str] = None) -> Optional[str]:
""" Return the given property or the value of 'default' if it
is not set.
"""
return self.attr.get(key, default)
-
def has_attr(self, key: str) -> bool:
""" Check if the given attribute is set.
"""
-# SPDX-License-Identifier: GPL-2.0-only
+# SPDX-License-Identifier: GPL-3.0-or-later
#
# This file is part of Nominatim. (https://nominatim.org)
#
-# Copyright (C) 2022 by the Nominatim developer community.
+# Copyright (C) 2024 by the Nominatim developer community.
# For a full list of authors see the git log.
"""
Functions for formatting postcodes according to their country-specific
from typing import Any, Mapping, Optional, Set, Match
import re
-from nominatim.errors import UsageError
-from nominatim.data import country_info
+from ..errors import UsageError
+from . import country_info
+
class CountryPostcodeMatcher:
""" Matches and formats a postcode according to a format definition
pc_pattern = config['pattern'].replace('d', '[0-9]').replace('l', '[A-Z]')
- self.norm_pattern = re.compile(f'\\s*(?:{country_code.upper()}[ -]?)?(.*)\\s*')
+ self.norm_pattern = re.compile(f'\\s*(?:{country_code.upper()}[ -]?)?({pc_pattern})\\s*')
self.pattern = re.compile(pc_pattern)
self.output = config.get('output', r'\g<0>')
-
def match(self, postcode: str) -> Optional[Match[str]]:
""" Match the given postcode against the postcode pattern for this
matcher. Returns a `re.Match` object if the match was successful
return None
-
def normalize(self, match: Match[str]) -> str:
""" Return the default format of the postcode for the given match.
`match` must be a `re.Match` object previously returned by
else:
raise UsageError(f"Invalid entry 'postcode' for country '{ccode}'")
-
def set_default_pattern(self, pattern: str) -> None:
""" Set the postcode match pattern to use, when a country does not
have a specific pattern.
"""
self.default_matcher = CountryPostcodeMatcher('', {'pattern': pattern})
-
def get_matcher(self, country_code: Optional[str]) -> Optional[CountryPostcodeMatcher]:
""" Return the CountryPostcodeMatcher for the given country.
Returns None if the country doesn't have a postcode and the
return self.country_matcher.get(country_code, self.default_matcher)
-
def match(self, country_code: Optional[str], postcode: str) -> Optional[Match[str]]:
""" Match the given postcode against the postcode pattern for this
matcher. Returns a `re.Match` object if the country has a pattern
return self.country_matcher.get(country_code, self.default_matcher).match(postcode)
-
def normalize(self, country_code: str, match: Match[str]) -> str:
""" Return the default format of the postcode for the given match.
`match` must be a `re.Match` object previously returned by
--- /dev/null
+# SPDX-License-Identifier: GPL-3.0-or-later
+#
+# This file is part of Nominatim. (https://nominatim.org)
+#
+# Copyright (C) 2024 by the Nominatim developer community.
+# For a full list of authors see the git log.
+"""
+Specialised connection and cursor functions.
+"""
+from typing import Optional, Any, Dict, Tuple
+import logging
+import os
+
+import psycopg
+import psycopg.types.hstore
+from psycopg import sql as pysql
+
+from ..typing import SysEnv
+from ..errors import UsageError
+
+LOG = logging.getLogger()
+
+Cursor = psycopg.Cursor[Any]
+Connection = psycopg.Connection[Any]
+
+
+def execute_scalar(conn: Connection, sql: psycopg.abc.Query, args: Any = None) -> Any:
+ """ Execute query that returns a single value. The value is returned.
+ If the query yields more than one row, a RuntimeError is raised.
+ """
+ with conn.cursor(row_factory=psycopg.rows.tuple_row) as cur:
+ cur.execute(sql, args)
+
+ if cur.rowcount != 1:
+ raise RuntimeError("Query did not return a single row.")
+
+ result = cur.fetchone()
+
+ assert result is not None
+ return result[0]
+
+
+def table_exists(conn: Connection, table: str) -> bool:
+ """ Check that a table with the given name exists in the database.
+ """
+ num = execute_scalar(
+ conn,
+ """SELECT count(*) FROM pg_tables
+ WHERE tablename = %s and schemaname = 'public'""", (table, ))
+ return num == 1 if isinstance(num, int) else False
+
+
+def table_has_column(conn: Connection, table: str, column: str) -> bool:
+ """ Check if the table 'table' exists and has a column with name 'column'.
+ """
+ has_column = execute_scalar(conn,
+ """SELECT count(*) FROM information_schema.columns
+ WHERE table_name = %s and column_name = %s""",
+ (table, column))
+ return has_column > 0 if isinstance(has_column, int) else False
+
+
+def index_exists(conn: Connection, index: str, table: Optional[str] = None) -> bool:
+ """ Check that an index with the given name exists in the database.
+ If table is not None then the index must relate to the given
+ table.
+ """
+ with conn.cursor() as cur:
+ cur.execute("""SELECT tablename FROM pg_indexes
+ WHERE indexname = %s and schemaname = 'public'""", (index, ))
+ if cur.rowcount == 0:
+ return False
+
+ if table is not None:
+ row = cur.fetchone()
+ if row is None or not isinstance(row[0], str):
+ return False
+ return row[0] == table
+
+ return True
+
+
+def drop_tables(conn: Connection, *names: str,
+ if_exists: bool = True, cascade: bool = False) -> None:
+ """ Drop one or more tables with the given names.
+ Set `if_exists` to False if a non-existent table should raise
+ an exception instead of just being ignored. `cascade` will cause
+ dependent objects to be dropped as well.
+ The caller needs to take care of committing the change.
+ """
+ sql = pysql.SQL('DROP TABLE%s{}%s' % (
+ ' IF EXISTS ' if if_exists else ' ',
+ ' CASCADE' if cascade else ''))
+
+ with conn.cursor() as cur:
+ for name in names:
+ cur.execute(sql.format(pysql.Identifier(name)))
+
+
+def server_version_tuple(conn: Connection) -> Tuple[int, int]:
+ """ Return the server version as a tuple of (major, minor).
+ Converts correctly for pre-10 and post-10 PostgreSQL versions.
+ """
+ version = conn.info.server_version
+ if version < 100000:
+ return (int(version / 10000), int((version % 10000) / 100))
+
+ return (int(version / 10000), version % 10000)
+
+
+def postgis_version_tuple(conn: Connection) -> Tuple[int, int]:
+ """ Return the postgis version installed in the database as a
+ tuple of (major, minor). Assumes that the PostGIS extension
+ has been installed already.
+ """
+ version = execute_scalar(conn, 'SELECT postgis_lib_version()')
+
+ version_parts = version.split('.')
+ if len(version_parts) < 2:
+ raise UsageError(f"Error fetching Postgis version. Bad format: {version}")
+
+ return (int(version_parts[0]), int(version_parts[1]))
+
+
+def register_hstore(conn: Connection) -> None:
+ """ Register the hstore type with psycopg for the connection.
+ """
+ info = psycopg.types.TypeInfo.fetch(conn, "hstore")
+ if info is None:
+ raise RuntimeError('Hstore extension is requested but not installed.')
+ psycopg.types.hstore.register_hstore(info, conn)
+
+
+def connect(dsn: str, **kwargs: Any) -> Connection:
+ """ Open a connection to the database using the specialised connection
+ factory. The returned object may be used in conjunction with 'with'.
+ When used outside a context manager, use the `connection` attribute
+ to get the connection.
+ """
+ try:
+ return psycopg.connect(dsn, row_factory=psycopg.rows.namedtuple_row, **kwargs)
+ except psycopg.OperationalError as err:
+ raise UsageError(f"Cannot connect to database: {err}") from err
+
+
+# Translation from PG connection string parameters to PG environment variables.
+# Derived from https://www.postgresql.org/docs/current/libpq-envars.html.
+_PG_CONNECTION_STRINGS = {
+ 'host': 'PGHOST',
+ 'hostaddr': 'PGHOSTADDR',
+ 'port': 'PGPORT',
+ 'dbname': 'PGDATABASE',
+ 'user': 'PGUSER',
+ 'password': 'PGPASSWORD',
+ 'passfile': 'PGPASSFILE',
+ 'channel_binding': 'PGCHANNELBINDING',
+ 'service': 'PGSERVICE',
+ 'options': 'PGOPTIONS',
+ 'application_name': 'PGAPPNAME',
+ 'sslmode': 'PGSSLMODE',
+ 'requiressl': 'PGREQUIRESSL',
+ 'sslcompression': 'PGSSLCOMPRESSION',
+ 'sslcert': 'PGSSLCERT',
+ 'sslkey': 'PGSSLKEY',
+ 'sslrootcert': 'PGSSLROOTCERT',
+ 'sslcrl': 'PGSSLCRL',
+ 'requirepeer': 'PGREQUIREPEER',
+ 'ssl_min_protocol_version': 'PGSSLMINPROTOCOLVERSION',
+ 'ssl_max_protocol_version': 'PGSSLMAXPROTOCOLVERSION',
+ 'gssencmode': 'PGGSSENCMODE',
+ 'krbsrvname': 'PGKRBSRVNAME',
+ 'gsslib': 'PGGSSLIB',
+ 'connect_timeout': 'PGCONNECT_TIMEOUT',
+ 'target_session_attrs': 'PGTARGETSESSIONATTRS',
+}
+
+
+def get_pg_env(dsn: str,
+ base_env: Optional[SysEnv] = None) -> Dict[str, str]:
+ """ Return a copy of `base_env` with the environment variables for
+ PostgreSQL set up from the given database connection string.
+ If `base_env` is None, then the OS environment is used as a base
+ environment.
+ """
+ env = dict(base_env if base_env is not None else os.environ)
+
+ for param, value in psycopg.conninfo.conninfo_to_dict(dsn).items():
+ if param in _PG_CONNECTION_STRINGS:
+ env[_PG_CONNECTION_STRINGS[param]] = str(value)
+ else:
+ LOG.error("Unknown connection parameter '%s' ignored.", param)
+
+ return env
+
+
+async def run_async_query(dsn: str, query: psycopg.abc.Query) -> None:
+ """ Open a connection to the database and run a single query
+ asynchronously.
+ """
+ async with await psycopg.AsyncConnection.connect(dsn) as aconn:
+ await aconn.execute(query)
-# SPDX-License-Identifier: GPL-2.0-only
+# SPDX-License-Identifier: GPL-3.0-or-later
#
# This file is part of Nominatim. (https://nominatim.org)
#
-# Copyright (C) 2022 by the Nominatim developer community.
+# Copyright (C) 2024 by the Nominatim developer community.
# For a full list of authors see the git log.
"""
Query and access functions for the in-database property table.
"""
from typing import Optional, cast
-from nominatim.db.connection import Connection
+from .connection import Connection, table_exists
+
def set_property(conn: Connection, name: str, value: str) -> None:
""" Add or replace the property with the given name.
""" Return the current value of the given property or None if the property
is not set.
"""
- if not conn.table_exists('nominatim_properties'):
+ if not table_exists(conn, 'nominatim_properties'):
return None
with conn.cursor() as cur:
--- /dev/null
+# SPDX-License-Identifier: GPL-3.0-or-later
+#
+# This file is part of Nominatim. (https://nominatim.org)
+#
+# Copyright (C) 2024 by the Nominatim developer community.
+# For a full list of authors see the git log.
+"""
+A connection pool that executes incoming queries in parallel.
+"""
+from typing import Any, Tuple, Optional
+import asyncio
+import logging
+import time
+
+import psycopg
+
+LOG = logging.getLogger()
+
+QueueItem = Optional[Tuple[psycopg.abc.Query, Any]]
+
+
+class QueryPool:
+ """ Pool to run SQL queries in parallel asynchronous execution.
+
+ All queries are run in autocommit mode. If parallel execution leads
+ to a deadlock, then the query is repeated.
+ The results of the queries are discarded.
+ """
+ def __init__(self, dsn: str, pool_size: int = 1, **conn_args: Any) -> None:
+ self.wait_time = 0.0
+ self.query_queue: 'asyncio.Queue[QueueItem]' = asyncio.Queue(maxsize=2 * pool_size)
+
+ self.pool = [asyncio.create_task(self._worker_loop(dsn, **conn_args))
+ for _ in range(pool_size)]
+
+ async def put_query(self, query: psycopg.abc.Query, params: Any) -> None:
+ """ Schedule a query for execution.
+ """
+ tstart = time.time()
+ await self.query_queue.put((query, params))
+ self.wait_time += time.time() - tstart
+ await asyncio.sleep(0)
+
+ async def finish(self) -> None:
+ """ Wait for all queries to finish and close the pool.
+ """
+ for _ in self.pool:
+ await self.query_queue.put(None)
+
+ tstart = time.time()
+ await asyncio.wait(self.pool)
+ self.wait_time += time.time() - tstart
+
+ for task in self.pool:
+ excp = task.exception()
+ if excp is not None:
+ raise excp
+
+ async def _worker_loop(self, dsn: str, **conn_args: Any) -> None:
+ conn_args['autocommit'] = True
+ aconn = await psycopg.AsyncConnection.connect(dsn, **conn_args)
+ async with aconn:
+ async with aconn.cursor() as cur:
+ item = await self.query_queue.get()
+ while item is not None:
+ try:
+ if item[1] is None:
+ await cur.execute(item[0])
+ else:
+ await cur.execute(item[0], item[1])
+
+ item = await self.query_queue.get()
+ except psycopg.errors.DeadlockDetected:
+ assert item is not None
+ LOG.info("Deadlock detected (sql = %s, params = %s), retry.",
+ str(item[0]), str(item[1]))
+ # item is still valid here, causing a retry
+
+ async def __aenter__(self) -> 'QueryPool':
+ return self
+
+ async def __aexit__(self, exc_type: Any, exc_value: Any, traceback: Any) -> None:
+ await self.finish()
-# SPDX-License-Identifier: GPL-2.0-only
+# SPDX-License-Identifier: GPL-3.0-or-later
#
# This file is part of Nominatim. (https://nominatim.org)
#
-# Copyright (C) 2022 by the Nominatim developer community.
+# Copyright (C) 2024 by the Nominatim developer community.
# For a full list of authors see the git log.
"""
Preprocessing of SQL files.
"""
-from typing import Set, Dict, Any
+from typing import Set, Dict, Any, cast
+
import jinja2
-from nominatim.db.connection import Connection
-from nominatim.db.async_connection import WorkerPool
-from nominatim.config import Configuration
+from .connection import Connection
+from ..config import Configuration
+from ..db.query_pool import QueryPool
+
def _get_partitions(conn: Connection) -> Set[int]:
""" Get the set of partitions currently in use.
def _get_tables(conn: Connection) -> Set[str]:
""" Return the set of tables currently in use.
- Only includes non-partitioned
"""
with conn.cursor() as cur:
cur.execute("SELECT tablename FROM pg_tables WHERE schemaname = 'public'")
return set((row[0] for row in list(cur)))
+def _get_middle_db_format(conn: Connection, tables: Set[str]) -> str:
+ """ Returns the version of the slim middle tables.
+ """
+ if 'osm2pgsql_properties' not in tables:
+ return '1'
+
+ with conn.cursor() as cur:
+ cur.execute("SELECT value FROM osm2pgsql_properties WHERE property = 'db_format'")
+ row = cur.fetchone()
+
+ return cast(str, row[0]) if row is not None else '1'
+
+
def _setup_tablespace_sql(config: Configuration) -> Dict[str, str]:
""" Returns a dict with tablespace expressions for the different tablespace
kinds depending on whether a tablespace is configured or not.
""" Set up a dictionary with various optional Postgresql/Postgis features that
depend on the database version.
"""
- pg_version = conn.server_version_tuple()
- postgis_version = conn.postgis_version_tuple()
- return {
- 'has_index_non_key_column': pg_version >= (11, 0, 0),
- 'spgist_geom' : 'SPGIST' if postgis_version >= (3, 0) else 'GIST'
- }
+ return {}
+
class SQLPreprocessor:
""" An environment for preprocessing SQL files from the
db_info['tables'] = _get_tables(conn)
db_info['reverse_only'] = 'search_name' not in db_info['tables']
db_info['tablespace'] = _setup_tablespace_sql(config)
+ db_info['middle_db_format'] = _get_middle_db_format(conn, db_info['tables'])
self.env.globals['config'] = config
self.env.globals['db'] = db_info
self.env.globals['postgres'] = _setup_postgresql_features(conn)
+ def run_string(self, conn: Connection, template: str, **kwargs: Any) -> None:
+ """ Execute the given SQL template string on the connection.
+ The keyword arguments may supply additional parameters
+ for preprocessing.
+ """
+ sql = self.env.from_string(template).render(**kwargs)
+
+ with conn.cursor() as cur:
+ cur.execute(sql)
+ conn.commit()
def run_sql_file(self, conn: Connection, name: str, **kwargs: Any) -> None:
""" Execute the given SQL file on the connection. The keyword arguments
cur.execute(sql)
conn.commit()
-
- def run_parallel_sql_file(self, dsn: str, name: str, num_threads: int = 1,
- **kwargs: Any) -> None:
- """ Execure the given SQL files using parallel asynchronous connections.
+ async def run_parallel_sql_file(self, dsn: str, name: str, num_threads: int = 1,
+ **kwargs: Any) -> None:
+ """ Execute the given SQL files using parallel asynchronous connections.
The keyword arguments may supply additional parameters for
preprocessing.
parts = sql.split('\n---\n')
- with WorkerPool(dsn, num_threads) as pool:
+ async with QueryPool(dsn, num_threads) as pool:
for part in parts:
- pool.next_free_worker().perform(part)
+ await pool.put_query(part, None)
-# SPDX-License-Identifier: GPL-2.0-only
+# SPDX-License-Identifier: GPL-3.0-or-later
#
# This file is part of Nominatim. (https://nominatim.org)
#
-# Copyright (C) 2022 by the Nominatim developer community.
+# Copyright (C) 2024 by the Nominatim developer community.
# For a full list of authors see the git log.
"""
Access and helper functions for the status and status log table.
"""
-from typing import Optional, Tuple, cast
+from typing import Optional, Tuple
import datetime as dt
import logging
import re
-from nominatim.db.connection import Connection
-from nominatim.tools.exec_utils import get_url
-from nominatim.errors import UsageError
-from nominatim.typing import TypedDict
+from .connection import Connection, table_exists, execute_scalar
+from ..utils.url_utils import get_url
+from ..errors import UsageError
LOG = logging.getLogger()
ISODATE_FORMAT = '%Y-%m-%dT%H:%M:%S'
-class StatusRow(TypedDict):
- """ Dictionary of columns of the import_status table.
- """
- lastimportdate: dt.datetime
- sequence_id: Optional[int]
- indexed: Optional[bool]
-
-
-def compute_database_date(conn: Connection) -> dt.datetime:
+def compute_database_date(conn: Connection, offline: bool = False) -> dt.datetime:
""" Determine the date of the database from the newest object in the
database.
"""
- # First, find the node with the highest ID in the database
- with conn.cursor() as cur:
- if conn.table_exists('place'):
- osmid = cur.scalar("SELECT max(osm_id) FROM place WHERE osm_type='N'")
- else:
- osmid = cur.scalar("SELECT max(osm_id) FROM placex WHERE osm_type='N'")
-
- if osmid is None:
- LOG.fatal("No data found in the database.")
- raise UsageError("No data found in the database.")
+ # If there is a date from osm2pgsql available, use that.
+ if table_exists(conn, 'osm2pgsql_properties'):
+ with conn.cursor() as cur:
+ cur.execute(""" SELECT value FROM osm2pgsql_properties
+ WHERE property = 'current_timestamp' """)
+ row = cur.fetchone()
+ if row is not None:
+ return dt.datetime.strptime(row[0], "%Y-%m-%dT%H:%M:%SZ")\
+ .replace(tzinfo=dt.timezone.utc)
+
+ if offline:
+ raise UsageError("Cannot determine database date from data in offline mode.")
+
+ # Else, find the node with the highest ID in the database
+ if table_exists(conn, 'place'):
+ osmid = execute_scalar(conn, "SELECT max(osm_id) FROM place WHERE osm_type='N'")
+ else:
+ osmid = execute_scalar(conn, "SELECT max(osm_id) FROM placex WHERE osm_type='N'")
+
+ if osmid is None:
+ LOG.fatal("No data found in the database.")
+ raise UsageError("No data found in the database.")
LOG.info("Using node id %d for timestamp lookup", osmid)
# Get the node from the API to find the timestamp when it was created.
if cur.rowcount < 1:
return None, None, None
- row = cast(StatusRow, cur.fetchone())
- return row['lastimportdate'], row['sequence_id'], row['indexed']
+ row = cur.fetchone()
+ assert row
+ return row.lastimportdate, row.sequence_id, row.indexed
def set_indexed(conn: Connection, state: bool) -> None:
-# SPDX-License-Identifier: GPL-2.0-only
+# SPDX-License-Identifier: GPL-3.0-or-later
#
# This file is part of Nominatim. (https://nominatim.org)
#
-# Copyright (C) 2022 by the Nominatim developer community.
+# Copyright (C) 2024 by the Nominatim developer community.
# For a full list of authors see the git log.
"""
Helper functions for handling DB accesses.
"""
-from typing import IO, Optional, Union, Any, Iterable
+from typing import IO, Optional, Union
import subprocess
import logging
import gzip
-import io
from pathlib import Path
-from nominatim.db.connection import get_pg_env, Cursor
-from nominatim.errors import UsageError
+from .connection import get_pg_env
+from ..errors import UsageError
LOG = logging.getLogger()
+
def _pipe_to_proc(proc: 'subprocess.Popen[bytes]',
fdesc: Union[IO[bytes], gzip.GzipFile]) -> int:
assert proc.stdin is not None
return len(chunk)
+
def execute_file(dsn: str, fname: Path,
ignore_errors: bool = False,
pre_code: Optional[str] = None,
if ret != 0 or remain > 0:
raise UsageError("Failed to execute SQL file.")
-
-
-# List of characters that need to be quoted for the copy command.
-_SQL_TRANSLATION = {ord('\\'): '\\\\',
- ord('\t'): '\\t',
- ord('\n'): '\\n'}
-
-
-class CopyBuffer:
- """ Data collector for the copy_from command.
- """
-
- def __init__(self) -> None:
- self.buffer = io.StringIO()
-
-
- def __enter__(self) -> 'CopyBuffer':
- return self
-
-
- def __exit__(self, exc_type: Any, exc_value: Any, traceback: Any) -> None:
- if self.buffer is not None:
- self.buffer.close()
-
-
- def add(self, *data: Any) -> None:
- """ Add another row of data to the copy buffer.
- """
- first = True
- for column in data:
- if first:
- first = False
- else:
- self.buffer.write('\t')
- if column is None:
- self.buffer.write('\\N')
- else:
- self.buffer.write(str(column).translate(_SQL_TRANSLATION))
- self.buffer.write('\n')
-
-
- def copy_out(self, cur: Cursor, table: str, columns: Optional[Iterable[str]] = None) -> None:
- """ Copy all collected data into the given table.
- """
- if self.buffer.tell() > 0:
- self.buffer.seek(0)
- cur.copy_from(self.buffer, table, columns=columns) # type: ignore[no-untyped-call]
--- /dev/null
+# SPDX-License-Identifier: GPL-3.0-or-later
+#
+# This file is part of Nominatim. (https://nominatim.org)
+#
+# Copyright (C) 2024 by the Nominatim developer community.
+# For a full list of authors see the git log.
+"""
+Custom exception and error classes for Nominatim.
+"""
+
+
+class UsageError(Exception):
+ """ An error raised because of bad user input. This error will usually
+ not cause a stack trace to be printed unless debugging is enabled.
+ """
--- /dev/null
+# SPDX-License-Identifier: GPL-3.0-or-later
+#
+# This file is part of Nominatim. (https://nominatim.org)
+#
+# Copyright (C) 2024 by the Nominatim developer community.
+# For a full list of authors see the git log.
+"""
+Main work horse for indexing (computing addresses) the database.
+"""
+from typing import cast, List, Any, Optional
+import logging
+import time
+
+import psycopg
+
+from ..db.connection import connect, execute_scalar
+from ..db.query_pool import QueryPool
+from ..tokenizer.base import AbstractTokenizer
+from .progress import ProgressLogger
+from . import runners
+
+LOG = logging.getLogger()
+
+
+class Indexer:
+ """ Main indexing routine.
+ """
+
+ def __init__(self, dsn: str, tokenizer: AbstractTokenizer, num_threads: int):
+ self.dsn = dsn
+ self.tokenizer = tokenizer
+ self.num_threads = num_threads
+
+ def has_pending(self) -> bool:
+ """ Check if any data still needs indexing.
+ This function must only be used after the import has finished.
+ Otherwise it will be very expensive.
+ """
+ with connect(self.dsn) as conn:
+ with conn.cursor() as cur:
+ cur.execute("SELECT 'a' FROM placex WHERE indexed_status > 0 LIMIT 1")
+ return cur.rowcount > 0
+
+ async def index_full(self, analyse: bool = True) -> None:
+ """ Index the complete database. This will first index boundaries
+ followed by all other objects. When `analyse` is True, then the
+ database will be analysed at the appropriate places to
+ ensure that database statistics are updated.
+ """
+ with connect(self.dsn) as conn:
+ conn.autocommit = True
+
+ def _analyze() -> None:
+ if analyse:
+ with conn.cursor() as cur:
+ cur.execute('ANALYZE')
+
+ while True:
+ if await self.index_by_rank(0, 4) > 0:
+ _analyze()
+
+ if await self.index_boundaries(0, 30) > 100:
+ _analyze()
+
+ if await self.index_by_rank(5, 25) > 100:
+ _analyze()
+
+ if await self.index_by_rank(26, 30) > 1000:
+ _analyze()
+
+ if await self.index_postcodes() > 100:
+ _analyze()
+
+ if not self.has_pending():
+ break
+
+ async def index_boundaries(self, minrank: int, maxrank: int) -> int:
+ """ Index only administrative boundaries within the given rank range.
+ """
+ total = 0
+ LOG.warning("Starting indexing boundaries using %s threads",
+ self.num_threads)
+
+ minrank = max(minrank, 4)
+ maxrank = min(maxrank, 25)
+
+ # Precompute number of rows to process for all rows
+ with connect(self.dsn) as conn:
+ hstore_info = psycopg.types.TypeInfo.fetch(conn, "hstore")
+ if hstore_info is None:
+ raise RuntimeError('Hstore extension is requested but not installed.')
+ psycopg.types.hstore.register_hstore(hstore_info)
+
+ with conn.cursor() as cur:
+ cur = conn.execute(""" SELECT rank_search, count(*)
+ FROM placex
+ WHERE rank_search between %s and %s
+ AND class = 'boundary' and type = 'administrative'
+ AND indexed_status > 0
+ GROUP BY rank_search""",
+ (minrank, maxrank))
+ total_tuples = {row.rank_search: row.count for row in cur}
+
+ with self.tokenizer.name_analyzer() as analyzer:
+ for rank in range(minrank, maxrank + 1):
+ total += await self._index(runners.BoundaryRunner(rank, analyzer),
+ total_tuples=total_tuples.get(rank, 0))
+
+ return total
+
+ async def index_by_rank(self, minrank: int, maxrank: int) -> int:
+ """ Index all entries of placex in the given rank range (inclusive)
+ in order of their address rank.
+
+ When rank 30 is requested then also interpolations and
+ places with address rank 0 will be indexed.
+ """
+ total = 0
+ maxrank = min(maxrank, 30)
+ LOG.warning("Starting indexing rank (%i to %i) using %i threads",
+ minrank, maxrank, self.num_threads)
+
+ # Precompute number of rows to process for all rows
+ with connect(self.dsn) as conn:
+ hstore_info = psycopg.types.TypeInfo.fetch(conn, "hstore")
+ if hstore_info is None:
+ raise RuntimeError('Hstore extension is requested but not installed.')
+ psycopg.types.hstore.register_hstore(hstore_info)
+
+ with conn.cursor() as cur:
+ cur = conn.execute(""" SELECT rank_address, count(*)
+ FROM placex
+ WHERE rank_address between %s and %s
+ AND indexed_status > 0
+ GROUP BY rank_address""",
+ (minrank, maxrank))
+ total_tuples = {row.rank_address: row.count for row in cur}
+
+ with self.tokenizer.name_analyzer() as analyzer:
+ for rank in range(max(1, minrank), maxrank + 1):
+ if rank >= 30:
+ batch = 20
+ elif rank >= 26:
+ batch = 5
+ else:
+ batch = 1
+ total += await self._index(runners.RankRunner(rank, analyzer),
+ batch=batch, total_tuples=total_tuples.get(rank, 0))
+
+ if maxrank == 30:
+ total += await self._index(runners.RankRunner(0, analyzer))
+ total += await self._index(runners.InterpolationRunner(analyzer), batch=20)
+
+ return total
+
+ async def index_postcodes(self) -> int:
+ """Index the entries of the location_postcode table.
+ """
+ LOG.warning("Starting indexing postcodes using %s threads", self.num_threads)
+
+ return await self._index(runners.PostcodeRunner(), batch=20)
+
+ def update_status_table(self) -> None:
+ """ Update the status in the status table to 'indexed'.
+ """
+ with connect(self.dsn) as conn:
+ with conn.cursor() as cur:
+ cur.execute('UPDATE import_status SET indexed = true')
+
+ conn.commit()
+
+ async def _index(self, runner: runners.Runner, batch: int = 1,
+ total_tuples: Optional[int] = None) -> int:
+ """ Index a single rank or table. `runner` describes the SQL to use
+ for indexing. `batch` describes the number of objects that
+ should be processed with a single SQL statement.
+
+ `total_tuples` may contain the total number of rows to process.
+ When not supplied, the value will be computed using the
+ appropriate runner function.
+ """
+ LOG.warning("Starting %s (using batch size %s)", runner.name(), batch)
+
+ if total_tuples is None:
+ total_tuples = self._prepare_indexing(runner)
+
+ progress = ProgressLogger(runner.name(), total_tuples)
+
+ if total_tuples > 0:
+ async with await psycopg.AsyncConnection.connect(
+ self.dsn, row_factory=psycopg.rows.dict_row) as aconn, \
+ QueryPool(self.dsn, self.num_threads, autocommit=True) as pool:
+ fetcher_time = 0.0
+ tstart = time.time()
+ async with aconn.cursor(name='places') as cur:
+ query = runner.index_places_query(batch)
+ params: List[Any] = []
+ num_places = 0
+ async for place in cur.stream(runner.sql_get_objects()):
+ fetcher_time += time.time() - tstart
+
+ params.extend(runner.index_places_params(place))
+ num_places += 1
+
+ if num_places >= batch:
+ LOG.debug("Processing places: %s", str(params))
+ await pool.put_query(query, params)
+ progress.add(num_places)
+ params = []
+ num_places = 0
+
+ tstart = time.time()
+
+ if num_places > 0:
+ await pool.put_query(runner.index_places_query(num_places), params)
+
+ LOG.info("Wait time: fetcher: %.2fs, pool: %.2fs",
+ fetcher_time, pool.wait_time)
+
+ return progress.done()
+
+ def _prepare_indexing(self, runner: runners.Runner) -> int:
+ with connect(self.dsn) as conn:
+ hstore_info = psycopg.types.TypeInfo.fetch(conn, "hstore")
+ if hstore_info is None:
+ raise RuntimeError('Hstore extension is requested but not installed.')
+ psycopg.types.hstore.register_hstore(hstore_info)
+
+ total_tuples = execute_scalar(conn, runner.sql_count_objects())
+ LOG.debug("Total number of rows: %i", total_tuples)
+ return cast(int, total_tuples)
-# SPDX-License-Identifier: GPL-2.0-only
+# SPDX-License-Identifier: GPL-3.0-or-later
#
# This file is part of Nominatim. (https://nominatim.org)
#
-# Copyright (C) 2022 by the Nominatim developer community.
+# Copyright (C) 2024 by the Nominatim developer community.
# For a full list of authors see the git log.
"""
Helpers for progress logging.
INITIAL_PROGRESS = 10
+
class ProgressLogger:
""" Tracks and prints progress for the indexing process.
`name` is the name of the indexing step being tracked.
--- /dev/null
+# SPDX-License-Identifier: GPL-3.0-or-later
+#
+# This file is part of Nominatim. (https://nominatim.org)
+#
+# Copyright (C) 2024 by the Nominatim developer community.
+# For a full list of authors see the git log.
+"""
+Mix-ins that provide the actual commands for the indexer for various indexing
+tasks.
+"""
+from typing import Any, Sequence
+
+from psycopg import sql as pysql
+from psycopg.abc import Query
+from psycopg.rows import DictRow
+from psycopg.types.json import Json
+
+from ..typing import Protocol
+from ..data.place_info import PlaceInfo
+from ..tokenizer.base import AbstractAnalyzer
+
+
+def _mk_valuelist(template: str, num: int) -> pysql.Composed:
+ return pysql.SQL(',').join([pysql.SQL(template)] * num)
+
+
+def _analyze_place(place: DictRow, analyzer: AbstractAnalyzer) -> Json:
+ return Json(analyzer.process_place(PlaceInfo(place)))
+
+
+class Runner(Protocol):
+ def name(self) -> str: ...
+ def sql_count_objects(self) -> Query: ...
+ def sql_get_objects(self) -> Query: ...
+ def index_places_query(self, batch_size: int) -> Query: ...
+ def index_places_params(self, place: DictRow) -> Sequence[Any]: ...
+
+
+SELECT_SQL = pysql.SQL("""SELECT place_id, extra.*
+ FROM (SELECT * FROM placex {}) as px,
+ LATERAL placex_indexing_prepare(px) as extra """)
+UPDATE_LINE = "(%s, %s::hstore, %s::hstore, %s::int, %s::jsonb)"
+
+
+class AbstractPlacexRunner:
+ """ Returns SQL commands for indexing of the placex table.
+ """
+
+ def __init__(self, rank: int, analyzer: AbstractAnalyzer) -> None:
+ self.rank = rank
+ self.analyzer = analyzer
+
+ def index_places_query(self, batch_size: int) -> Query:
+ return pysql.SQL(
+ """ UPDATE placex
+ SET indexed_status = 0, address = v.addr, token_info = v.ti,
+ name = v.name, linked_place_id = v.linked_place_id
+ FROM (VALUES {}) as v(id, name, addr, linked_place_id, ti)
+ WHERE place_id = v.id
+ """).format(_mk_valuelist(UPDATE_LINE, batch_size))
+
+ def index_places_params(self, place: DictRow) -> Sequence[Any]:
+ return (place['place_id'],
+ place['name'],
+ place['address'],
+ place['linked_place_id'],
+ _analyze_place(place, self.analyzer))
+
+
+class RankRunner(AbstractPlacexRunner):
+ """ Returns SQL commands for indexing one rank within the placex table.
+ """
+
+ def name(self) -> str:
+ return f"rank {self.rank}"
+
+ def sql_count_objects(self) -> pysql.Composed:
+ return pysql.SQL("""SELECT count(*) FROM placex
+ WHERE rank_address = {} and indexed_status > 0
+ """).format(pysql.Literal(self.rank))
+
+ def sql_get_objects(self) -> pysql.Composed:
+ return SELECT_SQL.format(pysql.SQL(
+ """WHERE placex.indexed_status > 0 and placex.rank_address = {}
+ ORDER BY placex.geometry_sector
+ """).format(pysql.Literal(self.rank)))
+
+
+class BoundaryRunner(AbstractPlacexRunner):
+ """ Returns SQL commands for indexing the administrative boundaries
+ of a certain rank.
+ """
+
+ def name(self) -> str:
+ return f"boundaries rank {self.rank}"
+
+ def sql_count_objects(self) -> Query:
+ return pysql.SQL("""SELECT count(*) FROM placex
+ WHERE indexed_status > 0
+ AND rank_search = {}
+ AND class = 'boundary' and type = 'administrative'
+ """).format(pysql.Literal(self.rank))
+
+ def sql_get_objects(self) -> Query:
+ return SELECT_SQL.format(pysql.SQL(
+ """WHERE placex.indexed_status > 0 and placex.rank_search = {}
+ and placex.class = 'boundary' and placex.type = 'administrative'
+ ORDER BY placex.partition, placex.admin_level
+ """).format(pysql.Literal(self.rank)))
+
+
+class InterpolationRunner:
+ """ Returns SQL commands for indexing the address interpolation table
+ location_property_osmline.
+ """
+
+ def __init__(self, analyzer: AbstractAnalyzer) -> None:
+ self.analyzer = analyzer
+
+ def name(self) -> str:
+ return "interpolation lines (location_property_osmline)"
+
+ def sql_count_objects(self) -> Query:
+ return """SELECT count(*) FROM location_property_osmline
+ WHERE indexed_status > 0"""
+
+ def sql_get_objects(self) -> Query:
+ return """SELECT place_id, get_interpolation_address(address, osm_id) as address
+ FROM location_property_osmline
+ WHERE indexed_status > 0
+ ORDER BY geometry_sector"""
+
+ def index_places_query(self, batch_size: int) -> Query:
+ return pysql.SQL("""UPDATE location_property_osmline
+ SET indexed_status = 0, address = v.addr, token_info = v.ti
+ FROM (VALUES {}) as v(id, addr, ti)
+ WHERE place_id = v.id
+ """).format(_mk_valuelist("(%s, %s::hstore, %s::jsonb)", batch_size))
+
+ def index_places_params(self, place: DictRow) -> Sequence[Any]:
+ return (place['place_id'], place['address'],
+ _analyze_place(place, self.analyzer))
+
+
+class PostcodeRunner(Runner):
+ """ Provides the SQL commands for indexing the location_postcode table.
+ """
+
+ def name(self) -> str:
+ return "postcodes (location_postcode)"
+
+ def sql_count_objects(self) -> Query:
+ return 'SELECT count(*) FROM location_postcode WHERE indexed_status > 0'
+
+ def sql_get_objects(self) -> Query:
+ return """SELECT place_id FROM location_postcode
+ WHERE indexed_status > 0
+ ORDER BY country_code, postcode"""
+
+ def index_places_query(self, batch_size: int) -> Query:
+ return pysql.SQL("""UPDATE location_postcode SET indexed_status = 0
+ WHERE place_id IN ({})""")\
+ .format(pysql.SQL(',').join((pysql.Placeholder() for _ in range(batch_size))))
+
+ def index_places_params(self, place: DictRow) -> Sequence[Any]:
+ return (place['place_id'], )
--- /dev/null
+# SPDX-License-Identifier: GPL-3.0-or-later
+#
+# This file is part of Nominatim. (https://nominatim.org)
+#
+# Copyright (C) 2024 by the Nominatim developer community.
+# For a full list of authors see the git log.
+"""
+Path settings for extra data used by Nominatim.
+"""
+from pathlib import Path
+
+SQLLIB_DIR = (Path(__file__) / '..' / '..' / '..' / 'lib-sql').resolve()
+LUALIB_DIR = (Path(__file__) / '..' / '..' / '..' / 'lib-lua').resolve()
+DATA_DIR = (Path(__file__) / '..' / '..' / '..' / 'data').resolve()
+CONFIG_DIR = (Path(__file__) / '..' / '..' / '..' / 'settings').resolve()
-# SPDX-License-Identifier: GPL-2.0-only
+# SPDX-License-Identifier: GPL-3.0-or-later
#
# This file is part of Nominatim. (https://nominatim.org)
#
-# Copyright (C) 2022 by the Nominatim developer community.
+# Copyright (C) 2024 by the Nominatim developer community.
# For a full list of authors see the git log.
"""
Abstract class definitions for tokenizers. These base classes are here
from typing import List, Tuple, Dict, Any, Optional, Iterable
from pathlib import Path
-from nominatim.config import Configuration
-from nominatim.data.place_info import PlaceInfo
-from nominatim.typing import Protocol
+from ..typing import Protocol
+from ..config import Configuration
+from ..db.connection import Connection
+from ..data.place_info import PlaceInfo
+
class AbstractAnalyzer(ABC):
""" The analyzer provides the functions for analysing names and building
def __enter__(self) -> 'AbstractAnalyzer':
return self
-
def __exit__(self, exc_type: Any, exc_value: Any, traceback: Any) -> None:
self.close()
-
@abstractmethod
def close(self) -> None:
""" Free all resources used by the analyzer.
"""
-
@abstractmethod
def get_word_token_info(self, words: List[str]) -> List[Tuple[str, str, int]]:
""" Return token information for the given list of words.
Returns:
The function returns the list of all tuples that could be
- found for the given words. Each list entry is a tuple of
- (original word, word token, word id).
+ found for the given words. Each list entry is a tuple of
+ (original word, word token, word id).
"""
-
@abstractmethod
def normalize_postcode(self, postcode: str) -> str:
""" Convert the postcode to its standardized form.
The given postcode after normalization.
"""
-
@abstractmethod
def update_postcodes_from_db(self) -> None:
""" Update the tokenizer's postcode tokens from the current content
of the `location_postcode` table.
"""
-
@abstractmethod
def update_special_phrases(self,
phrases: Iterable[Tuple[str, str, str, str]],
ones that already exist.
"""
-
@abstractmethod
def add_country_names(self, country_code: str, names: Dict[str, str]) -> None:
""" Add the given names to the tokenizer's list of country tokens.
names: Dictionary of name type to name.
"""
-
@abstractmethod
def process_place(self, place: PlaceInfo) -> Any:
""" Extract tokens for the given place and compute the
Returns:
A JSON-serialisable structure that will be handed into
- the database via the `token_info` field.
+ the database via the `token_info` field.
"""
-
class AbstractTokenizer(ABC):
""" The tokenizer instance is the central instance of the tokenizer in
the system. There will only be a single instance of the tokenizer
tables should be skipped. This option is only required for
migration purposes and can be safely ignored by custom
tokenizers.
-
- TODO: can we move the init_db parameter somewhere else?
"""
-
@abstractmethod
def init_from_project(self, config: Configuration) -> None:
""" Initialise the tokenizer from an existing database setup.
config: Read-only object with configuration options.
"""
-
@abstractmethod
def finalize_import(self, config: Configuration) -> None:
""" This function is called at the very end of an import when all
config: Read-only object with configuration options.
"""
-
@abstractmethod
def update_sql_functions(self, config: Configuration) -> None:
""" Update the SQL part of the tokenizer. This function is called
config: Read-only object with configuration options.
"""
-
@abstractmethod
def check_database(self, config: Configuration) -> Optional[str]:
""" Check that the database is set up correctly and ready for being
Returns:
If an issue was found, return an error message with the
- description of the issue as well as hints for the user on
- how to resolve the issue. If everything is okay, return `None`.
+ description of the issue as well as hints for the user on
+ how to resolve the issue. If everything is okay, return `None`.
"""
-
@abstractmethod
- def update_statistics(self) -> None:
+ def update_statistics(self, config: Configuration, threads: int = 1) -> None:
""" Recompute any tokenizer statistics necessary for efficient lookup.
This function is meant to be called from time to time by the user
to improve performance. However, the tokenizer must not depend on
it to be called in order to work.
"""
-
@abstractmethod
def update_word_tokens(self) -> None:
""" Do house-keeping on the tokenizers internal data structures.
Remove unused word tokens, resort data etc.
"""
-
@abstractmethod
def name_analyzer(self) -> AbstractAnalyzer:
""" Create a new analyzer for tokenizing names and queries
call the close() function before destructing the analyzer.
"""
+ @abstractmethod
+ def most_frequent_words(self, conn: Connection, num: int) -> List[str]:
+ """ Return a list of the most frequent full words in the database.
+
+ Arguments:
+ conn: Open connection to the database which may be used to
+ retrieve the words.
+ num: Maximum number of words to return.
+ """
+
class TokenizerModule(Protocol):
""" Interface that must be exported by modules that implement their
-# SPDX-License-Identifier: GPL-2.0-only
+# SPDX-License-Identifier: GPL-3.0-or-later
#
# This file is part of Nominatim. (https://nominatim.org)
#
-# Copyright (C) 2022 by the Nominatim developer community.
+# Copyright (C) 2024 by the Nominatim developer community.
# For a full list of authors see the git log.
"""
Functions for creating a tokenizer or initialising the right one for an
This module provides the functions to create and configure a new tokenizer
as well as instantiating the appropriate tokenizer for updating an existing
database.
-
-A tokenizer usually also includes PHP code for querying. The appropriate PHP
-normalizer module is installed, when the tokenizer is created.
"""
from typing import Optional
import logging
import importlib
from pathlib import Path
-from nominatim.errors import UsageError
-from nominatim.db import properties
-from nominatim.db.connection import connect
-from nominatim.config import Configuration
-from nominatim.tokenizer.base import AbstractTokenizer, TokenizerModule
+from ..errors import UsageError
+from ..db import properties
+from ..db.connection import connect
+from ..config import Configuration
+from ..tokenizer.base import AbstractTokenizer, TokenizerModule
LOG = logging.getLogger()
+
def _import_tokenizer(name: str) -> TokenizerModule:
""" Load the tokenizer.py module from project directory.
"""
"Check the setting of NOMINATIM_TOKENIZER.", name)
raise UsageError('Tokenizer not found')
- return importlib.import_module('nominatim.tokenizer.' + name + '_tokenizer')
+ return importlib.import_module('nominatim_db.tokenizer.' + name + '_tokenizer')
def create_tokenizer(config: Configuration, init_db: bool = True,
module_name = config.TOKENIZER
# Create the directory for the tokenizer data
+ assert config.project_dir is not None
basedir = config.project_dir / 'tokenizer'
if not basedir.exists():
basedir.mkdir()
The function looks up the appropriate tokenizer in the database
and initialises it.
"""
+ assert config.project_dir is not None
basedir = config.project_dir / 'tokenizer'
if not basedir.is_dir():
# Directory will be repopulated by tokenizer below.
-# SPDX-License-Identifier: GPL-2.0-only
+# SPDX-License-Identifier: GPL-3.0-or-later
#
# This file is part of Nominatim. (https://nominatim.org)
#
-# Copyright (C) 2022 by the Nominatim developer community.
+# Copyright (C) 2024 by the Nominatim developer community.
# For a full list of authors see the git log.
"""
Helper class to create ICU rules from a configuration file.
from icu import Transliterator
-from nominatim.config import flatten_config_list, Configuration
-from nominatim.db.properties import set_property, get_property
-from nominatim.db.connection import Connection
-from nominatim.errors import UsageError
-from nominatim.tokenizer.place_sanitizer import PlaceSanitizer
-from nominatim.tokenizer.icu_token_analysis import ICUTokenAnalysis
-from nominatim.tokenizer.token_analysis.base import AnalysisModule, Analyzer
-import nominatim.data.country_info
+from ..config import flatten_config_list, Configuration
+from ..db.properties import set_property, get_property
+from ..db.connection import Connection
+from ..errors import UsageError
+from .place_sanitizer import PlaceSanitizer
+from .icu_token_analysis import ICUTokenAnalysis
+from .token_analysis.base import AnalysisModule, Analyzer
+from ..data import country_info
LOG = logging.getLogger()
config='TOKENIZER_CONFIG')
# Make sure country information is available to analyzers and sanitizers.
- nominatim.data.country_info.setup_country_config(config)
+ country_info.setup_country_config(config)
self.normalization_rules = self._cfg_to_icu_rules(rules, 'normalization')
self.transliteration_rules = self._cfg_to_icu_rules(rules, 'transliteration')
# Load optional sanitizer rule set.
self.sanitizer_rules = rules.get('sanitizers', [])
-
def load_config_from_db(self, conn: Connection) -> None:
""" Get previously saved parts of the configuration from the
database.
self.analysis_rules = []
self._setup_analysis()
-
def save_config_to_db(self, conn: Connection) -> None:
""" Save the part of the configuration that cannot be changed into
the database.
set_property(conn, DBCFG_IMPORT_TRANS_RULES, self.transliteration_rules)
set_property(conn, DBCFG_IMPORT_ANALYSIS_RULES, json.dumps(self.analysis_rules))
-
def make_sanitizer(self) -> PlaceSanitizer:
""" Create a place sanitizer from the configured rules.
"""
return PlaceSanitizer(self.sanitizer_rules, self.config)
-
def make_token_analysis(self) -> ICUTokenAnalysis:
        """ Create a token analyser from the previously loaded rules.
"""
return ICUTokenAnalysis(self.normalization_rules,
self.transliteration_rules, self.analysis)
-
def get_search_rules(self) -> str:
""" Return the ICU rules to be used during search.
The rules combine normalization and transliteration.
rules.write(self.transliteration_rules)
return rules.getvalue()
-
def get_normalization_rules(self) -> str:
""" Return rules for normalisation of a term.
"""
return self.normalization_rules
-
def get_transliteration_rules(self) -> str:
        """ Return the rules for converting a string into its ascii representation.
"""
return self.transliteration_rules
-
def _setup_analysis(self) -> None:
""" Process the rules used for creating the various token analyzers.
"""
- self.analysis: Dict[Optional[str], TokenAnalyzerRule] = {}
+ self.analysis: Dict[Optional[str], TokenAnalyzerRule] = {}
if not isinstance(self.analysis_rules, list):
raise UsageError("Configuration section 'token-analysis' must be a list.")
norm = Transliterator.createFromRules("rule_loader_normalization",
self.normalization_rules)
trans = Transliterator.createFromRules("rule_loader_transliteration",
- self.transliteration_rules)
+ self.transliteration_rules)
for section in self.analysis_rules:
name = section.get('id', None)
self.analysis[name] = TokenAnalyzerRule(section, norm, trans,
self.config)
-
@staticmethod
def _cfg_to_icu_rules(rules: Mapping[str, Any], section: str) -> str:
""" Load an ICU ruleset from the given section. If the section is a
raise UsageError("'analyzer' parameter needs to be simple string")
self._analysis_mod: AnalysisModule = \
- config.load_plugin_module(analyzer_name, 'nominatim.tokenizer.token_analysis')
+ config.load_plugin_module(analyzer_name, 'nominatim_db.tokenizer.token_analysis')
self.config = self._analysis_mod.configure(rules, normalizer,
transliterator)
-
def create(self, normalizer: Any, transliterator: Any) -> Analyzer:
""" Create a new analyser instance for the given rule.
"""
-# SPDX-License-Identifier: GPL-2.0-only
+# SPDX-License-Identifier: GPL-3.0-or-later
#
# This file is part of Nominatim. (https://nominatim.org)
#
-# Copyright (C) 2022 by the Nominatim developer community.
+# Copyright (C) 2024 by the Nominatim developer community.
# For a full list of authors see the git log.
"""
Container class collecting all components required to transform an OSM name
from typing import Mapping, Optional, TYPE_CHECKING
from icu import Transliterator
-from nominatim.tokenizer.token_analysis.base import Analyzer
+from .token_analysis.base import Analyzer
if TYPE_CHECKING:
- from typing import Any
- from nominatim.tokenizer.icu_rule_loader import TokenAnalyzerRule # pylint: disable=cyclic-import
+ from typing import Any # noqa
+ from .icu_rule_loader import TokenAnalyzerRule
+
class ICUTokenAnalysis:
""" Container class collecting the transliterators and token analysis
def __init__(self, norm_rules: str, trans_rules: str,
analysis_rules: Mapping[Optional[str], 'TokenAnalyzerRule']):
+ # additional break signs are not relevant during name analysis
+ norm_rules += ";[[:Space:][-:]]+ > ' ';"
self.normalizer = Transliterator.createFromRules("icu_normalization",
norm_rules)
trans_rules += ";[:Space:]+ > ' '"
self.analysis = {name: arules.create(self.normalizer, self.to_ascii)
for name, arules in analysis_rules.items()}
-
def get_analyzer(self, name: Optional[str]) -> Analyzer:
""" Return the given named analyzer. If no analyzer with that
name exists, return the default analyzer.
-# SPDX-License-Identifier: GPL-2.0-only
+# SPDX-License-Identifier: GPL-3.0-or-later
#
# This file is part of Nominatim. (https://nominatim.org)
#
-# Copyright (C) 2022 by the Nominatim developer community.
+# Copyright (C) 2024 by the Nominatim developer community.
# For a full list of authors see the git log.
"""
Tokenizer implementing normalisation as used before Nominatim 4 but using
from typing import Optional, Sequence, List, Tuple, Mapping, Any, cast, \
Dict, Set, Iterable
import itertools
-import json
import logging
from pathlib import Path
-from textwrap import dedent
-
-from nominatim.db.connection import connect, Connection, Cursor
-from nominatim.config import Configuration
-from nominatim.db.utils import CopyBuffer
-from nominatim.db.sql_preprocessor import SQLPreprocessor
-from nominatim.data.place_info import PlaceInfo
-from nominatim.tokenizer.icu_rule_loader import ICURuleLoader
-from nominatim.tokenizer.place_sanitizer import PlaceSanitizer
-from nominatim.data.place_name import PlaceName
-from nominatim.tokenizer.icu_token_analysis import ICUTokenAnalysis
-from nominatim.tokenizer.base import AbstractAnalyzer, AbstractTokenizer
+
+from psycopg.types.json import Jsonb
+from psycopg import sql as pysql
+
+from ..db.connection import connect, Connection, Cursor, \
+ drop_tables, table_exists, execute_scalar
+from ..config import Configuration
+from ..db.sql_preprocessor import SQLPreprocessor
+from ..data.place_info import PlaceInfo
+from ..data.place_name import PlaceName
+from .icu_rule_loader import ICURuleLoader
+from .place_sanitizer import PlaceSanitizer
+from .icu_token_analysis import ICUTokenAnalysis
+from .base import AbstractAnalyzer, AbstractTokenizer
DBCFG_TERM_NORMALIZATION = "tokenizer_term_normalization"
LOG = logging.getLogger()
+WORD_TYPES = (('country_names', 'C'),
+ ('postcodes', 'P'),
+ ('full_word', 'W'),
+ ('housenumbers', 'H'))
+
+
def create(dsn: str, data_dir: Path) -> 'ICUTokenizer':
""" Create a new instance of the tokenizer provided by this module.
"""
self.data_dir = data_dir
self.loader: Optional[ICURuleLoader] = None
-
def init_new_db(self, config: Configuration, init_db: bool = True) -> None:
""" Set up a new tokenizer for the database.
"""
self.loader = ICURuleLoader(config)
- self._install_php(config.lib_dir.php, overwrite=True)
self._save_config()
if init_db:
self.update_sql_functions(config)
- self._init_db_tables(config)
-
+ self._setup_db_tables(config)
+ self._create_base_indices(config, 'word')
def init_from_project(self, config: Configuration) -> None:
""" Initialise the tokenizer from the project directory.
with connect(self.dsn) as conn:
self.loader.load_config_from_db(conn)
- self._install_php(config.lib_dir.php, overwrite=False)
-
-
def finalize_import(self, config: Configuration) -> None:
""" Do any required postprocessing to make the tokenizer data ready
for use.
"""
- with connect(self.dsn) as conn:
- sqlp = SQLPreprocessor(conn, config)
- sqlp.run_sql_file(conn, 'tokenizer/legacy_tokenizer_indices.sql')
-
+ self._create_lookup_indices(config, 'word')
def update_sql_functions(self, config: Configuration) -> None:
""" Reimport the SQL functions for this tokenizer.
sqlp = SQLPreprocessor(conn, config)
sqlp.run_sql_file(conn, 'tokenizer/icu_tokenizer.sql')
-
def check_database(self, config: Configuration) -> None:
""" Check that the tokenizer is set up correctly.
"""
# Will throw an error if there is an issue.
self.init_from_project(config)
-
- def update_statistics(self) -> None:
+ def update_statistics(self, config: Configuration, threads: int = 2) -> None:
""" Recompute frequencies for all name words.
"""
with connect(self.dsn) as conn:
- if conn.table_exists('search_name'):
- with conn.cursor() as cur:
- cur.drop_table("word_frequencies")
- LOG.info("Computing word frequencies")
- cur.execute("""CREATE TEMP TABLE word_frequencies AS
- SELECT unnest(name_vector) as id, count(*)
- FROM search_name GROUP BY id""")
- cur.execute("CREATE INDEX ON word_frequencies(id)")
- LOG.info("Update word table with recomputed frequencies")
- cur.execute("""UPDATE word
- SET info = info || jsonb_build_object('count', count)
- FROM word_frequencies WHERE word_id = id""")
- cur.drop_table("word_frequencies")
- conn.commit()
+ if not table_exists(conn, 'search_name'):
+ return
+ with conn.cursor() as cur:
+ cur.execute('ANALYSE search_name')
+ if threads > 1:
+ cur.execute(pysql.SQL('SET max_parallel_workers_per_gather TO {}')
+ .format(pysql.Literal(min(threads, 6),)))
+
+ LOG.info('Computing word frequencies')
+ drop_tables(conn, 'word_frequencies')
+ cur.execute("""
+ CREATE TEMP TABLE word_frequencies AS
+ WITH word_freq AS MATERIALIZED (
+ SELECT unnest(name_vector) as id, count(*)
+ FROM search_name GROUP BY id),
+ addr_freq AS MATERIALIZED (
+ SELECT unnest(nameaddress_vector) as id, count(*)
+ FROM search_name GROUP BY id)
+ SELECT coalesce(a.id, w.id) as id,
+ (CASE WHEN w.count is null THEN '{}'::JSONB
+ ELSE jsonb_build_object('count', w.count) END
+ ||
+ CASE WHEN a.count is null THEN '{}'::JSONB
+ ELSE jsonb_build_object('addr_count', a.count) END) as info
+ FROM word_freq w FULL JOIN addr_freq a ON a.id = w.id;
+ """)
+ cur.execute('CREATE UNIQUE INDEX ON word_frequencies(id) INCLUDE(info)')
+ cur.execute('ANALYSE word_frequencies')
+ LOG.info('Update word table with recomputed frequencies')
+ drop_tables(conn, 'tmp_word')
+ cur.execute("""CREATE TABLE tmp_word AS
+ SELECT word_id, word_token, type, word,
+ (CASE WHEN wf.info is null THEN word.info
+ ELSE coalesce(word.info, '{}'::jsonb) || wf.info
+ END) as info
+ FROM word LEFT JOIN word_frequencies wf
+ ON word.word_id = wf.id
+ """)
+ drop_tables(conn, 'word_frequencies')
+
+ with conn.cursor() as cur:
+ cur.execute('SET max_parallel_workers_per_gather TO 0')
+
+ sqlp = SQLPreprocessor(conn, config)
+ sqlp.run_string(conn,
+ 'GRANT SELECT ON tmp_word TO "{{config.DATABASE_WEBUSER}}"')
+ conn.commit()
+ self._create_base_indices(config, 'tmp_word')
+ self._create_lookup_indices(config, 'tmp_word')
+ self._move_temporary_word_table('tmp_word')
def _cleanup_housenumbers(self) -> None:
""" Remove unused house numbers.
"""
with connect(self.dsn) as conn:
- if not conn.table_exists('search_name'):
+ if not table_exists(conn, 'search_name'):
return
with conn.cursor(name="hnr_counter") as cur:
cur.execute("""SELECT DISTINCT word_id, coalesce(info->>'lookup', word_token)
(list(candidates.values()), ))
conn.commit()
-
-
def update_word_tokens(self) -> None:
""" Remove unused tokens.
"""
self._cleanup_housenumbers()
LOG.warning("Tokenizer house-keeping done.")
-
def name_analyzer(self) -> 'ICUNameAnalyzer':
""" Create a new analyzer for tokenizing names and queries
            using this tokenizer. Analyzers are context managers and should
return ICUNameAnalyzer(self.dsn, self.loader.make_sanitizer(),
self.loader.make_token_analysis())
-
- def _install_php(self, phpdir: Path, overwrite: bool = True) -> None:
- """ Install the php script for the tokenizer.
+ def most_frequent_words(self, conn: Connection, num: int) -> List[str]:
+ """ Return a list of the `num` most frequent full words
+ in the database.
"""
- assert self.loader is not None
- php_file = self.data_dir / "tokenizer.php"
-
- if not php_file.exists() or overwrite:
- php_file.write_text(dedent(f"""\
- <?php
- @define('CONST_Max_Word_Frequency', 10000000);
- @define('CONST_Term_Normalization_Rules', "{self.loader.normalization_rules}");
- @define('CONST_Transliteration', "{self.loader.get_search_rules()}");
- require_once('{phpdir}/tokenizer/icu_tokenizer.php');"""), encoding='utf-8')
-
+ with conn.cursor() as cur:
+ cur.execute("""SELECT word, sum((info->>'count')::int) as count
+ FROM word WHERE type = 'W'
+ GROUP BY word
+ ORDER BY count DESC LIMIT %s""", (num,))
+ return list(s[0].split('@')[0] for s in cur)
def _save_config(self) -> None:
""" Save the configuration that needs to remain stable for the given
with connect(self.dsn) as conn:
self.loader.save_config_to_db(conn)
+ def _setup_db_tables(self, config: Configuration) -> None:
+ """ Set up the word table and fill it with pre-computed word
+ frequencies.
+ """
+ with connect(self.dsn) as conn:
+ drop_tables(conn, 'word')
+ sqlp = SQLPreprocessor(conn, config)
+ sqlp.run_string(conn, """
+ CREATE TABLE word (
+ word_id INTEGER,
+ word_token text NOT NULL,
+ type text NOT NULL,
+ word text,
+ info jsonb
+ ) {{db.tablespace.search_data}};
+ GRANT SELECT ON word TO "{{config.DATABASE_WEBUSER}}";
+
+ DROP SEQUENCE IF EXISTS seq_word;
+ CREATE SEQUENCE seq_word start 1;
+ GRANT SELECT ON seq_word to "{{config.DATABASE_WEBUSER}}";
+ """)
+ conn.commit()
- def _init_db_tables(self, config: Configuration) -> None:
+ def _create_base_indices(self, config: Configuration, table_name: str) -> None:
""" Set up the word table and fill it with pre-computed word
frequencies.
"""
with connect(self.dsn) as conn:
sqlp = SQLPreprocessor(conn, config)
- sqlp.run_sql_file(conn, 'tokenizer/icu_tokenizer_tables.sql')
+ sqlp.run_string(conn,
+ """CREATE INDEX idx_{{table_name}}_word_token ON {{table_name}}
+ USING BTREE (word_token) {{db.tablespace.search_index}}""",
+ table_name=table_name)
+ for name, ctype in WORD_TYPES:
+ sqlp.run_string(conn,
+ """CREATE INDEX idx_{{table_name}}_{{idx_name}} ON {{table_name}}
+ USING BTREE (word) {{db.tablespace.address_index}}
+ WHERE type = '{{column_type}}'
+ """,
+ table_name=table_name, idx_name=name,
+ column_type=ctype)
+ conn.commit()
+
+ def _create_lookup_indices(self, config: Configuration, table_name: str) -> None:
+ """ Create additional indexes used when running the API.
+ """
+ with connect(self.dsn) as conn:
+ sqlp = SQLPreprocessor(conn, config)
+ # Index required for details lookup.
+ sqlp.run_string(
+ conn,
+ """
+ CREATE INDEX IF NOT EXISTS idx_{{table_name}}_word_id
+ ON {{table_name}} USING BTREE (word_id) {{db.tablespace.search_index}}
+ """,
+ table_name=table_name)
+ conn.commit()
+
+ def _move_temporary_word_table(self, old: str) -> None:
+ """ Rename all tables and indexes used by the tokenizer.
+ """
+ with connect(self.dsn) as conn:
+ drop_tables(conn, 'word')
+ with conn.cursor() as cur:
+ cur.execute(f"ALTER TABLE {old} RENAME TO word")
+ for idx in ('word_token', 'word_id'):
+ cur.execute(f"""ALTER INDEX idx_{old}_{idx}
+ RENAME TO idx_word_{idx}""")
+ for name, _ in WORD_TYPES:
+ cur.execute(f"""ALTER INDEX idx_{old}_{name}
+ RENAME TO idx_word_{name}""")
conn.commit()
def __init__(self, dsn: str, sanitizer: PlaceSanitizer,
token_analysis: ICUTokenAnalysis) -> None:
- self.conn: Optional[Connection] = connect(dsn).connection
+ self.conn: Optional[Connection] = connect(dsn)
self.conn.autocommit = True
self.sanitizer = sanitizer
self.token_analysis = token_analysis
self._cache = _TokenCache()
-
def close(self) -> None:
""" Free all resources used by the analyzer.
"""
self.conn.close()
self.conn = None
-
def _search_normalized(self, name: str) -> str:
""" Return the search token transliteration of the given name.
"""
return cast(str, self.token_analysis.search.transliterate(name)).strip()
-
def _normalized(self, name: str) -> str:
""" Return the normalized version of the given name with all
non-relevant information removed.
"""
return cast(str, self.token_analysis.normalizer.transliterate(name)).strip()
-
def get_word_token_info(self, words: Sequence[str]) -> List[Tuple[str, str, int]]:
""" Return token information for the given list of words.
If a word starts with # it is assumed to be a full name
part_ids = {r[0]: r[1] for r in cur}
return [(k, v, full_ids.get(v, None)) for k, v in full_tokens.items()] \
- + [(k, v, part_ids.get(v, None)) for k, v in partial_tokens.items()]
-
+ + [(k, v, part_ids.get(v, None)) for k, v in partial_tokens.items()]
def normalize_postcode(self, postcode: str) -> str:
""" Convert the postcode to a standardized form.
"""
return postcode.strip().upper()
-
def update_postcodes_from_db(self) -> None:
""" Update postcode tokens in the word table from the location_postcode
table.
if terms:
with self.conn.cursor() as cur:
- cur.execute_values("""SELECT create_postcode_word(pc, var)
- FROM (VALUES %s) AS v(pc, var)""",
- terms)
-
-
-
+ cur.executemany("""SELECT create_postcode_word(%s, %s)""", terms)
def update_special_phrases(self, phrases: Iterable[Tuple[str, str, str, str]],
should_replace: bool) -> None:
LOG.info("Total phrases: %s. Added: %s. Deleted: %s",
len(norm_phrases), added, deleted)
-
def _add_special_phrases(self, cursor: Cursor,
new_phrases: Set[Tuple[str, str, str, str]],
existing_phrases: Set[Tuple[str, str, str, str]]) -> int:
to_add = new_phrases - existing_phrases
added = 0
- with CopyBuffer() as copystr:
+ with cursor.copy('COPY word(word_token, type, word, info) FROM STDIN') as copy:
for word, cls, typ, oper in to_add:
term = self._search_normalized(word)
if term:
- copystr.add(term, 'S', word,
- json.dumps({'class': cls, 'type': typ,
- 'op': oper if oper in ('in', 'near') else None}))
+ copy.write_row((term, 'S', word,
+ Jsonb({'class': cls, 'type': typ,
+ 'op': oper if oper in ('in', 'near') else None})))
added += 1
- copystr.copy_out(cursor, 'word',
- columns=['word_token', 'type', 'word', 'info'])
-
return added
-
def _remove_special_phrases(self, cursor: Cursor,
- new_phrases: Set[Tuple[str, str, str, str]],
- existing_phrases: Set[Tuple[str, str, str, str]]) -> int:
+ new_phrases: Set[Tuple[str, str, str, str]],
+ existing_phrases: Set[Tuple[str, str, str, str]]) -> int:
""" Remove all phrases from the database that are no longer in the
new phrase list.
"""
to_delete = existing_phrases - new_phrases
if to_delete:
- cursor.execute_values(
- """ DELETE FROM word USING (VALUES %s) as v(name, in_class, in_type, op)
- WHERE type = 'S' and word = name
- and info->>'class' = in_class and info->>'type' = in_type
- and ((op = '-' and info->>'op' is null) or op = info->>'op')
+ cursor.executemany(
+ """ DELETE FROM word
+ WHERE type = 'S' and word = %s
+ and info->>'class' = %s and info->>'type' = %s
+ and %s = coalesce(info->>'op', '-')
""", to_delete)
return len(to_delete)
-
def add_country_names(self, country_code: str, names: Mapping[str, str]) -> None:
""" Add default names for the given country to the search index.
"""
self.sanitizer.process_names(info)[0],
internal=True)
-
def _add_country_full_names(self, country_code: str, names: Sequence[PlaceName],
internal: bool = False) -> None:
""" Add names for the given country from an already sanitized
gone_tokens.update(existing_tokens[False] & word_tokens)
if gone_tokens:
cur.execute("""DELETE FROM word
- USING unnest(%s) as token
+ USING unnest(%s::text[]) as token
WHERE type = 'C' and word = %s
and word_token = token""",
(list(gone_tokens), country_code))
if internal:
sql = """INSERT INTO word (word_token, type, word, info)
(SELECT token, 'C', %s, '{"internal": "yes"}'
- FROM unnest(%s) as token)
+ FROM unnest(%s::text[]) as token)
"""
else:
sql = """INSERT INTO word (word_token, type, word)
(SELECT token, 'C', %s
- FROM unnest(%s) as token)
+ FROM unnest(%s::text[]) as token)
"""
cur.execute(sql, (country_code, list(new_tokens)))
-
def process_place(self, place: PlaceInfo) -> Mapping[str, Any]:
""" Determine tokenizer information about the given place.
return token_info.to_dict()
-
def _process_place_address(self, token_info: '_TokenInfo',
address: Sequence[PlaceName]) -> None:
for item in address:
token_info.add_street(self._retrieve_full_tokens(item.name))
elif item.kind == 'place':
if not item.suffix:
- token_info.add_place(self._compute_partial_tokens(item.name))
- elif not item.kind.startswith('_') and not item.suffix and \
- item.kind not in ('country', 'full', 'inclusion'):
- token_info.add_address_term(item.kind, self._compute_partial_tokens(item.name))
-
+ token_info.add_place(itertools.chain(*self._compute_name_tokens([item])))
+ elif (not item.kind.startswith('_') and not item.suffix and
+ item.kind not in ('country', 'full', 'inclusion')):
+ token_info.add_address_term(item.kind,
+ itertools.chain(*self._compute_name_tokens([item])))
def _compute_housenumber_token(self, hnr: PlaceName) -> Tuple[Optional[int], Optional[str]]:
""" Normalize the housenumber and return the word token and the
if norm_name:
result = self._cache.housenumbers.get(norm_name, result)
if result[0] is None:
- with self.conn.cursor() as cur:
- hid = cur.scalar("SELECT getorcreate_hnr_id(%s)", (norm_name, ))
+ hid = execute_scalar(self.conn, "SELECT getorcreate_hnr_id(%s)", (norm_name, ))
- result = hid, norm_name
- self._cache.housenumbers[norm_name] = result
+ result = hid, norm_name
+ self._cache.housenumbers[norm_name] = result
else:
# Otherwise use the analyzer to determine the canonical name.
# Per convention we use the first variant as the 'lookup name', the
if result[0] is None:
variants = analyzer.compute_variants(word_id)
if variants:
- with self.conn.cursor() as cur:
- hid = cur.scalar("SELECT create_analyzed_hnr_id(%s, %s)",
+ hid = execute_scalar(self.conn, "SELECT create_analyzed_hnr_id(%s, %s)",
(word_id, list(variants)))
- result = hid, variants[0]
- self._cache.housenumbers[word_id] = result
+ result = hid, variants[0]
+ self._cache.housenumbers[word_id] = result
return result
-
- def _compute_partial_tokens(self, name: str) -> List[int]:
- """ Normalize the given term, split it into partial words and return
- then token list for them.
- """
- assert self.conn is not None
- norm_name = self._search_normalized(name)
-
- tokens = []
- need_lookup = []
- for partial in norm_name.split():
- token = self._cache.partials.get(partial)
- if token:
- tokens.append(token)
- else:
- need_lookup.append(partial)
-
- if need_lookup:
- with self.conn.cursor() as cur:
- cur.execute("""SELECT word, getorcreate_partial_word(word)
- FROM unnest(%s) word""",
- (need_lookup, ))
-
- for partial, token in cur:
- assert token is not None
- tokens.append(token)
- self._cache.partials[partial] = token
-
- return tokens
-
-
def _retrieve_full_tokens(self, name: str) -> List[int]:
""" Get the full name token for the given name, if it exists.
The name is only retrieved for the standard analyser.
return full
-
def _compute_name_tokens(self, names: Sequence[PlaceName]) -> Tuple[Set[int], Set[int]]:
""" Computes the full name and partial name tokens for the given
dictionary of names.
return full_tokens, partial_tokens
-
def _add_postcode(self, item: PlaceName) -> Optional[str]:
""" Make sure the normalized postcode is present in the word table.
"""
self.names: Optional[str] = None
self.housenumbers: Set[str] = set()
self.housenumber_tokens: Set[int] = set()
- self.street_tokens: Set[int] = set()
+ self.street_tokens: Optional[Set[int]] = None
self.place_tokens: Set[int] = set()
self.address_tokens: Dict[str, str] = {}
self.postcode: Optional[str] = None
-
def _mk_array(self, tokens: Iterable[Any]) -> str:
return f"{{{','.join((str(s) for s in tokens))}}}"
-
def to_dict(self) -> Dict[str, Any]:
""" Return the token information in database importable format.
"""
out['hnr'] = ';'.join(self.housenumbers)
out['hnr_tokens'] = self._mk_array(self.housenumber_tokens)
- if self.street_tokens:
+ if self.street_tokens is not None:
out['street'] = self._mk_array(self.street_tokens)
if self.place_tokens:
return out
-
def set_names(self, fulls: Iterable[int], partials: Iterable[int]) -> None:
""" Adds token information for the normalised names.
"""
self.names = self._mk_array(itertools.chain(fulls, partials))
-
def add_housenumber(self, token: Optional[int], hnr: Optional[str]) -> None:
""" Extract housenumber information from a list of normalised
housenumbers.
self.housenumbers.add(hnr)
self.housenumber_tokens.add(token)
-
def add_street(self, tokens: Iterable[int]) -> None:
""" Add addr:street match terms.
"""
+ if self.street_tokens is None:
+ self.street_tokens = set()
self.street_tokens.update(tokens)
-
def add_place(self, tokens: Iterable[int]) -> None:
""" Add addr:place search and match terms.
"""
self.place_tokens.update(tokens)
-
def add_address_term(self, key: str, partials: Iterable[int]) -> None:
""" Add additional address terms.
"""
- if partials:
- self.address_tokens[key] = self._mk_array(partials)
+ array = self._mk_array(partials)
+ if len(array) > 2:
+ self.address_tokens[key] = array
def set_postcode(self, postcode: Optional[str]) -> None:
""" Set the postcode to the given one.
-# SPDX-License-Identifier: GPL-2.0-only
+# SPDX-License-Identifier: GPL-3.0-or-later
#
# This file is part of Nominatim. (https://nominatim.org)
#
-# Copyright (C) 2022 by the Nominatim developer community.
+# Copyright (C) 2024 by the Nominatim developer community.
# For a full list of authors see the git log.
"""
Handler for cleaning name and address tags in place information before it
"""
from typing import Optional, List, Mapping, Sequence, Callable, Any, Tuple
-from nominatim.errors import UsageError
-from nominatim.config import Configuration
-from nominatim.tokenizer.sanitizers.config import SanitizerConfig
-from nominatim.tokenizer.sanitizers.base import SanitizerHandler, ProcessInfo
-from nominatim.data.place_name import PlaceName
-from nominatim.data.place_info import PlaceInfo
+from ..errors import UsageError
+from ..config import Configuration
+from .sanitizers.config import SanitizerConfig
+from .sanitizers.base import SanitizerHandler, ProcessInfo
+from ..data.place_name import PlaceName
+from ..data.place_info import PlaceInfo
class PlaceSanitizer:
raise UsageError("'step' attribute must be a simple string.")
module: SanitizerHandler = \
- config.load_plugin_module(func['step'], 'nominatim.tokenizer.sanitizers')
+ config.load_plugin_module(func['step'], 'nominatim_db.tokenizer.sanitizers')
self.handlers.append(module.create(SanitizerConfig(func)))
-
def process_names(self, place: PlaceInfo) -> Tuple[List[PlaceName], List[PlaceName]]:
""" Extract a sanitized list of names and address parts from the
given place. The function returns a tuple
-# SPDX-License-Identifier: GPL-2.0-only
+# SPDX-License-Identifier: GPL-3.0-or-later
#
# This file is part of Nominatim. (https://nominatim.org)
#
-# Copyright (C) 2022 by the Nominatim developer community.
+# Copyright (C) 2024 by the Nominatim developer community.
# For a full list of authors see the git log.
"""
Common data types and protocols for sanitizers.
"""
from typing import Optional, List, Mapping, Callable
-from nominatim.tokenizer.sanitizers.config import SanitizerConfig
-from nominatim.data.place_info import PlaceInfo
-from nominatim.data.place_name import PlaceName
-from nominatim.typing import Protocol, Final
+from ...typing import Protocol, Final
+from ...data.place_info import PlaceInfo
+from ...data.place_name import PlaceName
+from .config import SanitizerConfig
class ProcessInfo:
self.names = self._convert_name_dict(place.name)
self.address = self._convert_name_dict(place.address)
-
@staticmethod
def _convert_name_dict(names: Optional[Mapping[str, str]]) -> List[PlaceName]:
""" Convert a dictionary of names into a list of PlaceNames.
Return:
The result must be a callable that takes a place description
- and transforms name and address as reuqired.
+ and transforms name and address as required.
"""
-# SPDX-License-Identifier: GPL-2.0-only
+# SPDX-License-Identifier: GPL-3.0-or-later
#
# This file is part of Nominatim. (https://nominatim.org)
#
-# Copyright (C) 2022 by the Nominatim developer community.
+# Copyright (C) 2024 by the Nominatim developer community.
# For a full list of authors see the git log.
"""
Sanitizer that preprocesses address tags for house numbers. The sanitizer
expression that must match the full house number value.
"""
from typing import Callable, Iterator, List
-import re
-from nominatim.tokenizer.sanitizers.base import ProcessInfo
-from nominatim.data.place_name import PlaceName
-from nominatim.tokenizer.sanitizers.config import SanitizerConfig
+from ...data.place_name import PlaceName
+from .base import ProcessInfo
+from .config import SanitizerConfig
+
class _HousenumberSanitizer:
def __init__(self, config: SanitizerConfig) -> None:
- self.filter_kind = config.get_filter_kind('housenumber')
+ self.filter_kind = config.get_filter('filter-kind', ['housenumber'])
self.split_regexp = config.get_delimiter()
- nameregexps = config.get_string_list('convert-to-name', [])
- self.is_name_regexp = [re.compile(r) for r in nameregexps]
-
-
+ self.filter_name = config.get_filter('convert-to-name', 'FAIL_ALL')
def __call__(self, obj: ProcessInfo) -> None:
if not obj.address:
new_address: List[PlaceName] = []
for item in obj.address:
if self.filter_kind(item.kind):
- if self._treat_as_name(item.name):
+ if self.filter_name(item.name):
obj.names.append(item.clone(kind='housenumber'))
else:
new_address.extend(item.clone(kind='housenumber', name=n)
obj.address = new_address
-
def sanitize(self, value: str) -> Iterator[str]:
""" Extract housenumbers in a regularized format from an OSM value.
if hnr:
yield from self._regularize(hnr)
-
def _regularize(self, hnr: str) -> Iterator[str]:
yield hnr
- def _treat_as_name(self, housenumber: str) -> bool:
- return any(r.fullmatch(housenumber) is not None for r in self.is_name_regexp)
-
-
def create(config: SanitizerConfig) -> Callable[[ProcessInfo], None]:
""" Create a housenumber processing function.
"""
-# SPDX-License-Identifier: GPL-2.0-only
+# SPDX-License-Identifier: GPL-3.0-or-later
#
# This file is part of Nominatim. (https://nominatim.org)
#
-# Copyright (C) 2022 by the Nominatim developer community.
+# Copyright (C) 2024 by the Nominatim developer community.
# For a full list of authors see the git log.
"""
Sanitizer that filters postcodes by their officially allowed pattern.
"""
from typing import Callable, Optional, Tuple
-from nominatim.data.postcode_format import PostcodeFormatter
-from nominatim.tokenizer.sanitizers.base import ProcessInfo
-from nominatim.tokenizer.sanitizers.config import SanitizerConfig
+from ...data.postcode_format import PostcodeFormatter
+from .base import ProcessInfo
+from .config import SanitizerConfig
+
class _PostcodeSanitizer:
if default_pattern is not None and isinstance(default_pattern, str):
self.matcher.set_default_pattern(default_pattern)
-
def __call__(self, obj: ProcessInfo) -> None:
if not obj.address:
return
postcode.name = formatted[0]
postcode.set_attr('variant', formatted[1])
-
def scan(self, postcode: str, country: Optional[str]) -> Optional[Tuple[str, str]]:
""" Check the postcode for correct formatting and return the
normalized version. Returns None if the postcode does not
assert country is not None
- return self.matcher.normalize(country, match),\
- ' '.join(filter(lambda p: p is not None, match.groups()))
-
-
+ return self.matcher.normalize(country, match), \
+ ' '.join(filter(lambda p: p is not None, match.groups()))
def create(config: SanitizerConfig) -> Callable[[ProcessInfo], None]:
- """ Create a housenumber processing function.
+ """ Create a function that filters postcodes by their officially allowed pattern.
"""
return _PostcodeSanitizer(config)
--- /dev/null
+# SPDX-License-Identifier: GPL-3.0-or-later
+#
+# This file is part of Nominatim. (https://nominatim.org)
+#
+# Copyright (C) 2024 by the Nominatim developer community.
+# For a full list of authors see the git log.
+"""
+Sanitizer that preprocesses tags from the TIGER import.
+
+It makes the following changes:
+
+* remove state reference from tiger:county
+"""
+from typing import Callable
+import re
+
+from .base import ProcessInfo
+from .config import SanitizerConfig
+
+COUNTY_MATCH = re.compile('(.*), [A-Z][A-Z]')
+
+
+def _clean_tiger_county(obj: ProcessInfo) -> None:
+ """ Remove the state reference from tiger:county tags.
+
+ This transforms a name like 'Hamilton, AL' into 'Hamilton'.
+ If no state reference is detected at the end, the name is left as is.
+ """
+ if not obj.address:
+ return
+
+ for item in obj.address:
+ if item.kind == 'tiger' and item.suffix == 'county':
+ m = COUNTY_MATCH.fullmatch(item.name)
+ if m:
+ item.name = m[1]
+ # Switch kind and suffix, the split left them reversed.
+ item.kind = 'county'
+ item.suffix = 'tiger'
+
+ return
+
+
+def create(_: SanitizerConfig) -> Callable[[ProcessInfo], None]:
+ """ Create a function that preprocesses tags from the TIGER import.
+ """
+ return _clean_tiger_county
--- /dev/null
+# SPDX-License-Identifier: GPL-3.0-or-later
+#
+# This file is part of Nominatim. (https://nominatim.org)
+#
+# Copyright (C) 2024 by the Nominatim developer community.
+# For a full list of authors see the git log.
+"""
+Configuration for Sanitizers.
+"""
+from typing import Sequence, Union, Optional, Pattern, Callable, Any, TYPE_CHECKING
+from collections import UserDict
+import re
+
+from ...errors import UsageError
+
+# working around missing generics in Python < 3.8
+# See https://github.com/python/typing/issues/60#issuecomment-869757075
+if TYPE_CHECKING:
+ _BaseUserDict = UserDict[str, Any]
+else:
+ _BaseUserDict = UserDict
+
+
+class SanitizerConfig(_BaseUserDict):
+ """ The `SanitizerConfig` class is a read-only dictionary
+ with configuration options for the sanitizer.
+ In addition to the usual dictionary functions, the class provides
+ accessors to standard sanitizer options that are used by many of the
+ sanitizers.
+ """
+
+ def get_string_list(self, param: str, default: Sequence[str] = tuple()) -> Sequence[str]:
+ """ Extract a configuration parameter as a string list.
+
+ Arguments:
+ param: Name of the configuration parameter.
+ default: Takes a tuple or list of strings which will
+ be returned if the parameter is missing in the
+ sanitizer configuration.
+ Note that if this default parameter is not
+ provided then an empty list is returned.
+
+ Returns:
+ If the parameter value is a simple string, it is returned as a
+ one-item list. If the parameter value does not exist, the given
+ default is returned. If the parameter value is a list, it is
+ checked to contain only strings before being returned.
+ """
+ values = self.data.get(param, None)
+
+ if values is None:
+ return list(default)
+
+ if isinstance(values, str):
+ return [values] if values else []
+
+ if not isinstance(values, (list, tuple)):
+ raise UsageError(f"Parameter '{param}' must be string or list of strings.")
+
+ if any(not isinstance(value, str) for value in values):
+ raise UsageError(f"Parameter '{param}' must be string or list of strings.")
+
+ return values
+
+ def get_bool(self, param: str, default: Optional[bool] = None) -> bool:
+ """ Extract a configuration parameter as a boolean.
+
+ Arguments:
+ param: Name of the configuration parameter. The parameter must
+ contain one of the yaml boolean values or an
+ UsageError will be raised.
+ default: Value to return, when the parameter is missing.
+ When set to `None`, the parameter must be defined.
+
+ Returns:
+ Boolean value of the given parameter.
+ """
+ value = self.data.get(param, default)
+
+ if not isinstance(value, bool):
+ raise UsageError(f"Parameter '{param}' must be a boolean value ('yes' or 'no').")
+
+ return value
+
+ def get_delimiter(self, default: str = ',;') -> Pattern[str]:
+ """ Return the 'delimiters' parameter in the configuration as a
+ compiled regular expression that can be used to split strings on
+ these delimiters.
+
+ Arguments:
+ default: Delimiters to be used when 'delimiters' parameter
+ is not explicitly configured.
+
+ Returns:
+ A regular expression pattern which can be used to
+ split a string. The regular expression makes sure that the
+ resulting names are stripped and that repeated delimiters
+ are ignored. It may still create empty fields on occasion. The
+ code needs to filter those.
+ """
+ delimiter_set = set(self.data.get('delimiters', default))
+ if not delimiter_set:
+ raise UsageError("Empty 'delimiter' parameter not allowed for sanitizer.")
+
+ return re.compile('\\s*[{}]+\\s*'.format(''.join('\\' + d for d in delimiter_set)))
+
+ def get_filter(self, param: str, default: Union[str, Sequence[str]] = 'PASS_ALL'
+ ) -> Callable[[str], bool]:
+ """ Returns a filter function for the given parameter of the sanitizer
+ configuration.
+
+ The value provided for the parameter in sanitizer configuration
+ should be a string or list of strings, where each string is a regular
+ expression. These regular expressions will later be used by the
+ filter function to filter strings.
+
+ Arguments:
+ param: The parameter for which the filter function
+ will be created.
+ default: Defines the behaviour of filter function if
+ parameter is missing in the sanitizer configuration.
+ Takes a string(PASS_ALL or FAIL_ALL) or a list of strings.
+ Any other value of string or an empty list is not allowed,
+ and will raise a ValueError. If the value is PASS_ALL, the filter
+ function will let all strings pass, if the value is FAIL_ALL,
+ filter function will let no strings pass.
+ If value provided is a list of strings each string
+ is treated as a regular expression. In this case these regular
+ expressions will be used by the filter function.
+ By default the filter function lets all strings pass.
+
+ Returns:
+ A filter function that takes a target string as the argument and
+ returns True if it fully matches any of the regular expressions
+ otherwise returns False.
+ """
+ filters = self.get_string_list(param) or default
+
+ if filters == 'PASS_ALL':
+ return lambda _: True
+ if filters == 'FAIL_ALL':
+ return lambda _: False
+
+ if filters and isinstance(filters, (list, tuple)):
+ regexes = [re.compile(regex) for regex in filters]
+ return lambda target: any(regex.fullmatch(target) for regex in regexes)
+
+ raise ValueError("Default parameter must be a non-empty list or a string value \
+ ('PASS_ALL' or 'FAIL_ALL').")
--- /dev/null
+# SPDX-License-Identifier: GPL-3.0-or-later\r
+#\r
+# This file is part of Nominatim. (https://nominatim.org)\r
+#\r
+# Copyright (C) 2024 by the Nominatim developer community.\r
+# For a full list of authors see the git log.\r
+"""\r
+Sanitizer which prevents certain tags from getting into the search index.\r
+It removes tags which match all properties given below.\r
+\r
+\r
+Arguments:\r
+ type: Define which type of tags should be considered for removal.\r
+ There are two types of tags 'name' and 'address' tags.\r
+ Takes a string 'name' or 'address'. (default: 'name')\r
+\r
+ filter-kind: Define which 'kind' of tags should be removed.\r
+ Takes a string or list of strings where each\r
+ string is a regular expression. A tag is considered\r
+ to be a candidate for removal if its 'kind' property\r
+ fully matches any of the given regular expressions.\r
+ Note that by default all 'kind' of tags are considered.\r
+\r
+ suffix: Define the 'suffix' property of the tags which should be\r
+ removed. Takes a string or list of strings where each\r
+ string is a regular expression. A tag is considered to be a\r
+ candidate for removal if its 'suffix' property fully\r
+ matches any of the given regular expressions. Note that by\r
+ default tags with any suffix value are considered including\r
+ those which don't have a suffix at all.\r
+\r
+ name: Define the 'name' property corresponding to the 'kind' property\r
+ of the tag. Takes a string or list of strings where each string\r
+ is a regular expression. A tag is considered to be a candidate\r
+ for removal if its name fully matches any of the given regular\r
+ expressions. Note that by default tags with any 'name' are\r
+ considered.\r
+\r
+ country_code: Define the country code of places whose tags should be\r
+ considered for removal. Takes a string or list of strings\r
+ where each string is a two-letter lower-case country code.\r
+ Note that by default tags of places with any country code\r
+ are considered including those which don't have a country\r
+ code at all.\r
+\r
+ rank_address: Define the address rank of places whose tags should be\r
+ considered for removal. Takes a string or list of strings\r
+ where each string is a number or a range of numbers of the\r
+ form <from>-<to>.\r
+ Note that default is '0-30', which means that tags of all\r
+ places are considered.\r
+ See https://nominatim.org/release-docs/latest/customize/Ranking/#address-rank\r
+ to learn more about address rank.\r
+\r
+\r
+"""\r
+from typing import Callable, List, Tuple, Sequence\r
+\r
+from ...data.place_name import PlaceName\r
+from .base import ProcessInfo\r
+from .config import SanitizerConfig\r
+\r
+\r
+class _TagSanitizer:\r
+\r
+ def __init__(self, config: SanitizerConfig) -> None:\r
+ self.type = config.get('type', 'name')\r
+ self.filter_kind = config.get_filter('filter-kind')\r
+ self.country_codes = config.get_string_list('country_code', [])\r
+ self.filter_suffix = config.get_filter('suffix')\r
+ self.filter_name = config.get_filter('name')\r
+ self.allowed_ranks = self._set_allowed_ranks(\r
+ config.get_string_list("rank_address", ["0-30"])\r
+ )\r
+\r
+ self.has_country_code = config.get('country_code', None) is not None\r
+\r
+ def __call__(self, obj: ProcessInfo) -> None:\r
+ tags = obj.names if self.type == 'name' else obj.address\r
+\r
+ if not tags \\r
+ or not self.allowed_ranks[obj.place.rank_address] \\r
+ or self.has_country_code \\r
+ and obj.place.country_code not in self.country_codes:\r
+ return\r
+\r
+ filtered_tags: List[PlaceName] = []\r
+\r
+ for tag in tags:\r
+\r
+ if not self.filter_kind(tag.kind) \\r
+ or not self.filter_suffix(tag.suffix or '') \\r
+ or not self.filter_name(tag.name):\r
+ filtered_tags.append(tag)\r
+\r
+ if self.type == 'name':\r
+ obj.names = filtered_tags\r
+ else:\r
+ obj.address = filtered_tags\r
+\r
+ def _set_allowed_ranks(self, ranks: Sequence[str]) -> Tuple[bool, ...]:\r
+ """ Returns a tuple of 31 boolean values corresponding to the\r
+ address ranks 0-30. Value at index 'i' is True if rank 'i'\r
+ is present in the ranks or lies in the range of any of the\r
+ ranks provided in the sanitizer configuration, otherwise\r
+ the value is False.\r
+ """\r
+ allowed_ranks = [False] * 31\r
+\r
+ for rank in ranks:\r
+ intvl = [int(x) for x in rank.split('-')]\r
+\r
+ start, end = intvl[0], intvl[0] if len(intvl) == 1 else intvl[1]\r
+\r
+ for i in range(start, end + 1):\r
+ allowed_ranks[i] = True\r
+\r
+ return tuple(allowed_ranks)\r
+\r
+\r
+def create(config: SanitizerConfig) -> Callable[[ProcessInfo], None]:\r
+ """ Create a function to process removal of certain tags.\r
+ """\r
+\r
+ return _TagSanitizer(config)\r
-# SPDX-License-Identifier: GPL-2.0-only
+# SPDX-License-Identifier: GPL-3.0-or-later
#
# This file is part of Nominatim. (https://nominatim.org)
#
-# Copyright (C) 2022 by the Nominatim developer community.
+# Copyright (C) 2024 by the Nominatim developer community.
# For a full list of authors see the git log.
"""
Sanitizer that splits lists of names into their components.
"""
from typing import Callable
-from nominatim.tokenizer.sanitizers.base import ProcessInfo
-from nominatim.tokenizer.sanitizers.config import SanitizerConfig
+from .base import ProcessInfo
+from .config import SanitizerConfig
+
def create(config: SanitizerConfig) -> Callable[[ProcessInfo], None]:
""" Create a name processing function that splits name values with
-# SPDX-License-Identifier: GPL-2.0-only
+# SPDX-License-Identifier: GPL-3.0-or-later
#
# This file is part of Nominatim. (https://nominatim.org)
#
-# Copyright (C) 2022 by the Nominatim developer community.
+# Copyright (C) 2024 by the Nominatim developer community.
# For a full list of authors see the git log.
"""
This sanitizer creates additional name variants for names that have
"""
from typing import Callable
-from nominatim.tokenizer.sanitizers.base import ProcessInfo
-from nominatim.tokenizer.sanitizers.config import SanitizerConfig
+from .base import ProcessInfo
+from .config import SanitizerConfig
def create(_: SanitizerConfig) -> Callable[[ProcessInfo], None]:
-# SPDX-License-Identifier: GPL-2.0-only
+# SPDX-License-Identifier: GPL-3.0-or-later
#
# This file is part of Nominatim. (https://nominatim.org)
#
-# Copyright (C) 2022 by the Nominatim developer community.
+# Copyright (C) 2024 by the Nominatim developer community.
# For a full list of authors see the git log.
"""
This sanitizer sets the `analyzer` property depending on the
Arguments:
filter-kind: Restrict the names the sanitizer should be applied to
- to the given tags. The parameter expects a list of
+ the given tags. The parameter expects a list of
regular expressions which are matched against 'kind'.
Note that a match against the full string is expected.
whitelist: Restrict the set of languages that should be tagged.
"""
from typing import Callable, Dict, Optional, List
-from nominatim.data import country_info
-from nominatim.tokenizer.sanitizers.base import ProcessInfo
-from nominatim.tokenizer.sanitizers.config import SanitizerConfig
+from ...data import country_info
+from .base import ProcessInfo
+from .config import SanitizerConfig
+
class _AnalyzerByLanguage:
""" Processor for tagging the language of names in a place.
"""
def __init__(self, config: SanitizerConfig) -> None:
- self.filter_kind = config.get_filter_kind()
+ self.filter_kind = config.get_filter('filter-kind')
self.replace = config.get('mode', 'replace') != 'append'
self.whitelist = config.get('whitelist')
self._compute_default_languages(config.get('use-defaults', 'no'))
-
def _compute_default_languages(self, use_defaults: str) -> None:
self.deflangs: Dict[Optional[str], List[str]] = {}
for ccode, clangs in country_info.iterate('languages'):
if len(clangs) == 1 or use_defaults == 'all':
if self.whitelist:
- self.deflangs[ccode] = [l for l in clangs if l in self.whitelist]
+ self.deflangs[ccode] = [cl for cl in clangs if cl in self.whitelist]
else:
self.deflangs[ccode] = clangs
-
def _suffix_matches(self, suffix: str) -> bool:
if self.whitelist is None:
return len(suffix) in (2, 3) and suffix.islower()
return suffix in self.whitelist
-
def __call__(self, obj: ProcessInfo) -> None:
if not obj.names:
return
else:
langs = self.deflangs.get(obj.place.country_code)
-
if langs:
if self.replace:
name.set_attr('analyzer', langs[0])
else:
more_names.append(name.clone(attr={'analyzer': langs[0]}))
- more_names.extend(name.clone(attr={'analyzer': l}) for l in langs[1:])
+ more_names.extend(name.clone(attr={'analyzer': lg}) for lg in langs[1:])
obj.names.extend(more_names)
--- /dev/null
+# SPDX-License-Identifier: GPL-3.0-or-later
+#
+# This file is part of Nominatim. (https://nominatim.org)
+#
+# Copyright (C) 2024 by the Nominatim developer community.
+# For a full list of authors see the git log.
+"""
+This sanitizer maps OSM data to Japanese block addresses.
+It replaces blocknumber and housenumber with housenumber,
+and quarter and neighbourhood with place.
+"""
+
+
+from typing import Callable
+from typing import List, Optional
+
+from .base import ProcessInfo
+from .config import SanitizerConfig
+from ...data.place_name import PlaceName
+
+
+def create(_: SanitizerConfig) -> Callable[[ProcessInfo], None]:
+ """Set up the sanitizer
+ """
+ return tag_japanese
+
+
+def reconbine_housenumber(
+ new_address: List[PlaceName],
+ tmp_housenumber: Optional[str],
+ tmp_blocknumber: Optional[str]
+) -> List[PlaceName]:
+ """ Recombine the tag of housenumber by using housenumber and blocknumber
+ """
+ if tmp_blocknumber and tmp_housenumber:
+ new_address.append(
+ PlaceName(
+ kind='housenumber',
+ name=f'{tmp_blocknumber}-{tmp_housenumber}',
+ suffix=''
+ )
+ )
+ elif tmp_blocknumber:
+ new_address.append(
+ PlaceName(
+ kind='housenumber',
+ name=tmp_blocknumber,
+ suffix=''
+ )
+ )
+ elif tmp_housenumber:
+ new_address.append(
+ PlaceName(
+ kind='housenumber',
+ name=tmp_housenumber,
+ suffix=''
+ )
+ )
+ return new_address
+
+
+def reconbine_place(
+ new_address: List[PlaceName],
+ tmp_neighbourhood: Optional[str],
+ tmp_quarter: Optional[str]
+) -> List[PlaceName]:
+ """ Recombine the tag of place by using neighbourhood and quarter
+ """
+ if tmp_neighbourhood and tmp_quarter:
+ new_address.append(
+ PlaceName(
+ kind='place',
+ name=f'{tmp_quarter}{tmp_neighbourhood}',
+ suffix=''
+ )
+ )
+ elif tmp_neighbourhood:
+ new_address.append(
+ PlaceName(
+ kind='place',
+ name=tmp_neighbourhood,
+ suffix=''
+ )
+ )
+ elif tmp_quarter:
+ new_address.append(
+ PlaceName(
+ kind='place',
+ name=tmp_quarter,
+ suffix=''
+ )
+ )
+ return new_address
+
+
+def tag_japanese(obj: ProcessInfo) -> None:
+ """Recombine kind of address
+ """
+ if obj.place.country_code != 'jp':
+ return
+ tmp_housenumber = None
+ tmp_blocknumber = None
+ tmp_neighbourhood = None
+ tmp_quarter = None
+
+ new_address = []
+ for item in obj.address:
+ if item.kind == 'housenumber':
+ tmp_housenumber = item.name
+ elif item.kind == 'block_number':
+ tmp_blocknumber = item.name
+ elif item.kind == 'neighbourhood':
+ tmp_neighbourhood = item.name
+ elif item.kind == 'quarter':
+ tmp_quarter = item.name
+ else:
+ new_address.append(item)
+
+ new_address = reconbine_housenumber(new_address, tmp_housenumber, tmp_blocknumber)
+ new_address = reconbine_place(new_address, tmp_neighbourhood, tmp_quarter)
+
+ obj.address = [item for item in new_address if item.name is not None]
-# SPDX-License-Identifier: GPL-2.0-only
+# SPDX-License-Identifier: GPL-3.0-or-later
#
# This file is part of Nominatim. (https://nominatim.org)
#
-# Copyright (C) 2022 by the Nominatim developer community.
+# Copyright (C) 2024 by the Nominatim developer community.
# For a full list of authors see the git log.
"""
Common data types and protocols for analysers.
"""
from typing import Mapping, List, Any
-from nominatim.typing import Protocol
-from nominatim.data.place_name import PlaceName
+from ...typing import Protocol
+from ...data.place_name import PlaceName
+
class Analyzer(Protocol):
""" The `create()` function of an analysis module needs to return an
Returns:
ID string with a canonical form of the name. The string may
- be empty, when the analyzer cannot analyze the name at all,
- for example because the character set in use does not match.
+ be empty, when the analyzer cannot analyze the name at all,
+ for example because the character set in use does not match.
"""
def compute_variants(self, canonical_id: str) -> List[str]:
Returns:
A list of possible spelling variants. All strings must have
- been transformed with the global normalizer and
- transliterator ICU rules. Otherwise they cannot be matched
- against the input by the query frontend.
- The list may be empty, when there are no useful
- spelling variants. This may happen when an analyzer only
- usually outputs additional variants to the canonical spelling
- and there are no such variants.
+ been transformed with the global normalizer and
+ transliterator ICU rules. Otherwise they cannot be matched
+ against the input by the query frontend.
+ The list may be empty, when there are no useful
+ spelling variants. This may happen when an analyzer only
+ usually outputs additional variants to the canonical spelling
+ and there are no such variants.
"""
Returns:
A data object with configuration data. This will be handed
- as is into the `create()` function and may be
- used freely by the analysis module as needed.
+ as is into the `create()` function and may be
+ used freely by the analysis module as needed.
"""
def create(self, normalizer: Any, transliterator: Any, config: Any) -> Analyzer:
Returns:
A new analyzer instance. This must be an object that implements
- the Analyzer protocol.
+ the Analyzer protocol.
"""
-# SPDX-License-Identifier: GPL-2.0-only
+# SPDX-License-Identifier: GPL-3.0-or-later
#
# This file is part of Nominatim. (https://nominatim.org)
#
-# Copyright (C) 2022 by the Nominatim developer community.
+# Copyright (C) 2024 by the Nominatim developer community.
# For a full list of authors see the git log.
"""
Parser for configuration for variants.
import itertools
import re
-from nominatim.config import flatten_config_list
-from nominatim.errors import UsageError
+from ...config import flatten_config_list
+from ...errors import UsageError
+
class ICUVariant(NamedTuple):
""" A single replacement rule for variant creation.
def __init__(self, normalizer: Any) -> None:
self.norm = normalizer
-
def compute(self, rule: Any) -> Iterator[ICUVariant]:
""" Generator for all ICUVariant tuples from a single variant rule.
"""
for froms, tos in _create_variants(*src, repl, decompose):
yield ICUVariant(froms, tos)
-
def _parse_variant_word(self, name: str) -> Optional[Tuple[str, str, str]]:
name = name.strip()
match = re.fullmatch(r'([~^]?)([^~$^]*)([~$]?)', name)
-# SPDX-License-Identifier: GPL-2.0-only
+# SPDX-License-Identifier: GPL-3.0-or-later
#
# This file is part of Nominatim. (https://nominatim.org)
#
-# Copyright (C) 2022 by the Nominatim developer community.
+# Copyright (C) 2024 by the Nominatim developer community.
# For a full list of authors see the git log.
"""
Generic processor for names that creates abbreviation variants.
import datrie
-from nominatim.errors import UsageError
-from nominatim.data.place_name import PlaceName
-from nominatim.tokenizer.token_analysis.config_variants import get_variant_config
-from nominatim.tokenizer.token_analysis.generic_mutation import MutationVariantGenerator
+from ...errors import UsageError
+from ...data.place_name import PlaceName
+from .config_variants import get_variant_config
+from .generic_mutation import MutationVariantGenerator
+
+# Configuration section
-### Configuration section
def configure(rules: Mapping[str, Any], normalizer: Any, _: Any) -> Dict[str, Any]:
""" Extract and preprocess the configuration for this module.
return config
-### Analysis section
+# Analysis section
def create(normalizer: Any, transliterator: Any,
config: Mapping[str, Any]) -> 'GenericTokenAnalysis':
# set up mutation rules
self.mutations = [MutationVariantGenerator(*cfg) for cfg in config['mutations']]
-
def get_canonical_id(self, name: PlaceName) -> str:
""" Return the normalized form of the name. This is the standard form
from which possible variants for the name can be derived.
"""
return cast(str, self.norm.transliterate(name.name)).strip()
-
def compute_variants(self, norm_name: str) -> List[str]:
""" Compute the spelling variants for the given normalized name
and transliterate the result.
return [name for name in self._transliterate_unique_list(norm_name, variants) if name]
-
def _transliterate_unique_list(self, norm_name: str,
iterable: Iterable[str]) -> Iterator[Optional[str]]:
seen = set()
seen.add(variant)
yield self.to_ascii.transliterate(variant).strip()
-
def _generate_word_variants(self, norm_name: str) -> Iterable[str]:
baseform = '^ ' + norm_name + ' ^'
baselen = len(baseform)
-# SPDX-License-Identifier: GPL-2.0-only
+# SPDX-License-Identifier: GPL-3.0-or-later
#
# This file is part of Nominatim. (https://nominatim.org)
#
-# Copyright (C) 2022 by the Nominatim developer community.
+# Copyright (C) 2024 by the Nominatim developer community.
# For a full list of authors see the git log.
"""
Creator for mutation variants for the generic token analysis.
import logging
import re
-from nominatim.errors import UsageError
+from ...errors import UsageError
LOG = logging.getLogger()
+
def _zigzag(outer: Iterable[str], inner: Iterable[str]) -> Iterator[str]:
return itertools.chain.from_iterable(itertools.zip_longest(outer, inner, fillvalue=''))
"This is not allowed.", pattern)
raise UsageError("Bad mutation pattern in configuration.")
-
def generate(self, names: Iterable[str]) -> Iterator[str]:
""" Generator function for the name variants. 'names' is an iterable
over a set of names for which the variants are to be generated.
for seps in self._fillers(len(parts)):
yield ''.join(_zigzag(parts, seps))
-
def _fillers(self, num_parts: int) -> Iterator[Tuple[str, ...]]:
""" Returns a generator for strings to join the given number of string
parts in all possible combinations.
-# SPDX-License-Identifier: GPL-2.0-only
+# SPDX-License-Identifier: GPL-3.0-or-later
#
# This file is part of Nominatim. (https://nominatim.org)
#
-# Copyright (C) 2022 by the Nominatim developer community.
+# Copyright (C) 2024 by the Nominatim developer community.
# For a full list of authors see the git log.
"""
Specialized processor for housenumbers. Analyses common housenumber patterns
from typing import Any, List, cast
import re
-from nominatim.data.place_name import PlaceName
-from nominatim.tokenizer.token_analysis.generic_mutation import MutationVariantGenerator
+from ...data.place_name import PlaceName
+from .generic_mutation import MutationVariantGenerator
RE_NON_DIGIT = re.compile('[^0-9]')
RE_DIGIT_ALPHA = re.compile(r'(\d)\s*([^\d\s␣])')
RE_ALPHA_DIGIT = re.compile(r'([^\s\d␣])\s*(\d)')
RE_NAMED_PART = re.compile(r'[a-z]{4}')
-### Configuration section
+# Configuration section
+
def configure(*_: Any) -> None:
""" All behaviour is currently hard-coded.
"""
return None
-### Analysis section
+# Analysis section
+
-def create(normalizer: Any, transliterator: Any, config: None) -> 'HousenumberTokenAnalysis': # pylint: disable=W0613
+def create(normalizer: Any, transliterator: Any, config: None) -> 'HousenumberTokenAnalysis':
""" Create a new token analysis instance for this module.
"""
return HousenumberTokenAnalysis(normalizer, transliterator)
-# SPDX-License-Identifier: GPL-2.0-only
+# SPDX-License-Identifier: GPL-3.0-or-later
#
# This file is part of Nominatim. (https://nominatim.org)
#
-# Copyright (C) 2022 by the Nominatim developer community.
+# Copyright (C) 2024 by the Nominatim developer community.
# For a full list of authors see the git log.
"""
Specialized processor for postcodes. Supports a 'lookup' variant of the
"""
from typing import Any, List
-from nominatim.tokenizer.token_analysis.generic_mutation import MutationVariantGenerator
-from nominatim.data.place_name import PlaceName
+from ...data.place_name import PlaceName
+from .generic_mutation import MutationVariantGenerator
+
+# Configuration section
-### Configuration section
def configure(*_: Any) -> None:
""" All behaviour is currently hard-coded.
"""
return None
-### Analysis section
+# Analysis section
+
-def create(normalizer: Any, transliterator: Any, config: None) -> 'PostcodeTokenAnalysis': # pylint: disable=W0613
+def create(normalizer: Any, transliterator: Any, config: None) -> 'PostcodeTokenAnalysis':
""" Create a new token analysis instance for this module.
"""
return PostcodeTokenAnalysis(normalizer, transliterator)
self.mutator = MutationVariantGenerator(' ', (' ', ''))
-
def get_canonical_id(self, name: PlaceName) -> str:
""" Return the standard form of the postcode.
"""
return name.name.strip().upper()
-
def compute_variants(self, norm_name: str) -> List[str]:
""" Compute the spelling variants for the given normalized postcode.
-# SPDX-License-Identifier: GPL-2.0-only
+# SPDX-License-Identifier: GPL-3.0-or-later
#
# This file is part of Nominatim. (https://nominatim.org)
#
-# Copyright (C) 2022 by the Nominatim developer community.
+# Copyright (C) 2024 by the Nominatim developer community.
# For a full list of authors see the git log.
"""
Module with functions for importing, updating Nominatim databases
-# SPDX-License-Identifier: GPL-2.0-only
+# SPDX-License-Identifier: GPL-3.0-or-later
#
# This file is part of Nominatim. (https://nominatim.org)
#
-# Copyright (C) 2022 by the Nominatim developer community.
+# Copyright (C) 2024 by the Nominatim developer community.
# For a full list of authors see the git log.
"""
Function to add additional OSM data from a file or the API into the database.
import logging
import urllib
-from nominatim.tools.exec_utils import run_osm2pgsql, get_url
+from ..db.connection import connect
+from ..utils.url_utils import get_url
+from .exec_utils import run_osm2pgsql
LOG = logging.getLogger()
-def add_data_from_file(fname: str, options: MutableMapping[str, Any]) -> int:
+
+def _run_osm2pgsql(dsn: str, options: MutableMapping[str, Any]) -> None:
+ run_osm2pgsql(options)
+
+ # Handle deletions
+ with connect(dsn) as conn:
+ with conn.cursor() as cur:
+ cur.execute('SELECT flush_deleted_places()')
+ conn.commit()
+
+
+def add_data_from_file(dsn: str, fname: str, options: MutableMapping[str, Any]) -> int:
""" Adds data from a OSM file to the database. The file may be a normal
OSM file or a diff file in all formats supported by libosmium.
"""
options['import_file'] = Path(fname)
options['append'] = True
- run_osm2pgsql(options)
+ _run_osm2pgsql(dsn, options)
# No status update. We don't know where the file came from.
return 0
-def add_osm_object(osm_type: str, osm_id: int, use_main_api: bool,
+def add_osm_object(dsn: str, osm_type: str, osm_id: int, use_main_api: bool,
options: MutableMapping[str, Any]) -> int:
""" Add or update a single OSM object from the latest version of the
API.
options['append'] = True
options['import_data'] = get_url(base_url).encode('utf-8')
- run_osm2pgsql(options)
+ _run_osm2pgsql(dsn, options)
return 0
-# SPDX-License-Identifier: GPL-2.0-only
+# SPDX-License-Identifier: GPL-3.0-or-later
#
# This file is part of Nominatim. (https://nominatim.org)
#
-# Copyright (C) 2022 by the Nominatim developer community.
+# Copyright (C) 2024 by the Nominatim developer community.
# For a full list of authors see the git log.
"""
Functions for database analysis and maintenance.
from typing import Optional, Tuple, Any, cast
import logging
-from psycopg2.extras import Json, register_hstore
+import psycopg
+from psycopg.types.json import Json
-from nominatim.config import Configuration
-from nominatim.db.connection import connect, Cursor
-from nominatim.tokenizer import factory as tokenizer_factory
-from nominatim.errors import UsageError
-from nominatim.data.place_info import PlaceInfo
-from nominatim.typing import DictCursorResult
+from ..typing import DictCursorResult
+from ..config import Configuration
+from ..db.connection import connect, Cursor, register_hstore
+from ..errors import UsageError
+from ..tokenizer import factory as tokenizer_factory
+from ..data.place_info import PlaceInfo
LOG = logging.getLogger()
+
def _get_place_info(cursor: Cursor, osm_id: Optional[str],
place_id: Optional[int]) -> DictCursorResult:
sql = """SELECT place_id, extra.*
"""
with connect(config.get_libpq_dsn()) as conn:
register_hstore(conn)
- with conn.cursor() as cur:
+ with conn.cursor(row_factory=psycopg.rows.dict_row) as cur:
place = _get_place_info(cur, osm_id, place_id)
cur.execute("update placex set indexed_status = 2 where place_id = %s",
tokenizer = tokenizer_factory.get_tokenizer_for_db(config)
+ # Enable printing of messages.
+ conn.add_notice_handler(lambda diag: print(diag.message_primary))
+
with tokenizer.name_analyzer() as analyzer:
cur.execute("""UPDATE placex
SET indexed_status = 0, address = %s, token_info = %s,
# we do not want to keep the results
conn.rollback()
- for msg in conn.notices:
- print(msg)
+
+def clean_deleted_relations(config: Configuration, age: str) -> None:
+ """ Clean deleted relations older than a given age
+ """
+ with connect(config.get_libpq_dsn()) as conn:
+ with conn.cursor() as cur:
+ try:
+ cur.execute("""SELECT place_force_delete(p.place_id)
+ FROM import_polygon_delete d, placex p
+ WHERE p.osm_type = d.osm_type AND p.osm_id = d.osm_id
+ AND age(p.indexed_date) > %s::interval""",
+ (age, ))
+ except psycopg.DataError as exc:
+ raise UsageError('Invalid PostgreSQL time interval format') from exc
+ conn.commit()
-# SPDX-License-Identifier: GPL-2.0-only
+# SPDX-License-Identifier: GPL-3.0-or-later
#
# This file is part of Nominatim. (https://nominatim.org)
#
-# Copyright (C) 2022 by the Nominatim developer community.
+# Copyright (C) 2024 by the Nominatim developer community.
# For a full list of authors see the git log.
"""
Collection of functions that check if the database is complete and functional.
from enum import Enum
from textwrap import dedent
-from nominatim.config import Configuration
-from nominatim.db.connection import connect, Connection
-from nominatim.errors import UsageError
-from nominatim.tokenizer import factory as tokenizer_factory
+from ..config import Configuration
+from ..db.connection import connect, Connection, \
+ index_exists, table_exists, execute_scalar
+from ..db import properties
+from ..errors import UsageError
+from ..tokenizer import factory as tokenizer_factory
+from . import freeze
+from ..version import NOMINATIM_VERSION, parse_version
CHECKLIST = []
+
class CheckState(Enum):
""" Possible states of a check. FATAL stops check execution entirely.
"""
NOT_APPLICABLE = 3
WARN = 4
+
CheckResult = Union[CheckState, Tuple[CheckState, Mapping[str, Any]]]
CheckFunc = Callable[[Connection, Configuration], CheckResult]
+
def _check(hint: Optional[str] = None) -> Callable[[CheckFunc], CheckFunc]:
""" Decorator for checks. It adds the function to the list of
checks to execute and adds the code for printing progress messages.
return decorator
+
class _BadConnection:
def __init__(self, msg: str) -> None:
""" Dummy function to provide the implementation.
"""
+
def check_database(config: Configuration) -> int:
""" Run a number of checks on the database and return the status.
"""
try:
- conn = connect(config.get_libpq_dsn()).connection
+ conn = connect(config.get_libpq_dsn())
except UsageError as err:
- conn = _BadConnection(str(err)) # type: ignore[assignment]
+ conn = _BadConnection(str(err)) # type: ignore[assignment]
overall_result = 0
for check in CHECKLIST:
'idx_placex_rank_search',
'idx_placex_rank_address',
'idx_placex_parent_place_id',
+ 'idx_placex_geometry_reverse_lookupplacenode',
'idx_placex_geometry_reverse_lookuppolygon',
'idx_placex_geometry_placenode',
'idx_osmline_parent_place_id',
'idx_osmline_parent_osm_id',
'idx_postcode_id',
'idx_postcode_postcode'
- ]
- if conn.table_exists('search_name'):
+ ]
+
+ # These won't exist if --reverse-only import was used
+ if table_exists(conn, 'search_name'):
indexes.extend(('idx_search_name_nameaddress_vector',
'idx_search_name_name_vector',
- 'idx_search_name_centroid'))
- if conn.server_version_tuple() >= (11, 0, 0):
- indexes.extend(('idx_placex_housenumber',
- 'idx_osmline_parent_osm_id_with_hnr'))
- if conn.table_exists('place'):
+ 'idx_search_name_centroid',
+ 'idx_placex_housenumber',
+ 'idx_osmline_parent_osm_id_with_hnr'))
+
+ # These won't exist if --no-updates import was used
+ if table_exists(conn, 'place'):
indexes.extend(('idx_location_area_country_place_id',
'idx_place_osm_unique',
'idx_placex_rank_address_sector',
# CHECK FUNCTIONS
#
-# Functions are exectured in the order they appear here.
+# Functions are executed in the order they appear here.
@_check(hint="""\
{error}
return CheckState.OK
+
+@_check(hint="""\
+ Database version ({db_version}) doesn't match Nominatim version ({nom_version})
+
+ Hints:
+ * Are you connecting to the correct database?
+
+ {instruction}
+
+ Check the Migration chapter of the Administration Guide.
+
+ Project directory: {config.project_dir}
+ Current setting of NOMINATIM_DATABASE_DSN: {config.DATABASE_DSN}
+ """)
+def check_database_version(conn: Connection, config: Configuration) -> CheckResult:
+ """ Checking database_version matches Nominatim software version
+ """
+
+ if table_exists(conn, 'nominatim_properties'):
+ db_version_str = properties.get_property(conn, 'database_version')
+ else:
+ db_version_str = None
+
+ if db_version_str is not None:
+ db_version = parse_version(db_version_str)
+
+ if db_version == NOMINATIM_VERSION:
+ return CheckState.OK
+
+ instruction = (
+ 'Run migrations: nominatim admin --migrate'
+ if db_version < NOMINATIM_VERSION
+ else 'You need to upgrade the Nominatim software.'
+ )
+ else:
+ instruction = ''
+
+ return CheckState.FATAL, dict(db_version=db_version_str,
+ nom_version=NOMINATIM_VERSION,
+ instruction=instruction,
+ config=config)
+
+
@_check(hint="""\
placex table not found
Hints:
- * Are you connecting to the right database?
+ * Are you connecting to the correct database?
* Did the import process finish without errors?
Project directory: {config.project_dir}
def check_placex_table(conn: Connection, config: Configuration) -> CheckResult:
""" Checking for placex table
"""
- if conn.table_exists('placex'):
+ if table_exists(conn, 'placex'):
return CheckState.OK
return CheckState.FATAL, dict(config=config)
def check_placex_size(conn: Connection, _: Configuration) -> CheckResult:
""" Checking for placex content
"""
- with conn.cursor() as cur:
- cnt = cur.scalar('SELECT count(*) FROM (SELECT * FROM placex LIMIT 100) x')
+ cnt = execute_scalar(conn, 'SELECT count(*) FROM (SELECT * FROM placex LIMIT 100) x')
return CheckState.OK if cnt > 0 else CheckState.FATAL
def check_existance_wikipedia(conn: Connection, _: Configuration) -> CheckResult:
""" Checking for wikipedia/wikidata data
"""
- if not conn.table_exists('search_name') or not conn.table_exists('place'):
+ if not table_exists(conn, 'search_name') or not table_exists(conn, 'place'):
return CheckState.NOT_APPLICABLE
- with conn.cursor() as cur:
- cnt = cur.scalar('SELECT count(*) FROM wikipedia_article')
+ if table_exists(conn, 'wikimedia_importance'):
+ cnt = execute_scalar(conn, 'SELECT count(*) FROM wikimedia_importance')
+ else:
+ cnt = execute_scalar(conn, 'SELECT count(*) FROM wikipedia_article')
- return CheckState.WARN if cnt == 0 else CheckState.OK
+ return CheckState.WARN if cnt == 0 else CheckState.OK
@_check(hint="""\
def check_indexing(conn: Connection, _: Configuration) -> CheckResult:
""" Checking indexing status
"""
- with conn.cursor() as cur:
- cnt = cur.scalar('SELECT count(*) FROM placex WHERE indexed_status > 0')
+ cnt = execute_scalar(conn, 'SELECT count(*) FROM placex WHERE indexed_status > 0')
if cnt == 0:
return CheckState.OK
- if conn.index_exists('idx_placex_rank_search'):
+ if freeze.is_frozen(conn):
+ index_cmd = """\
+ Database is marked frozen, it cannot be updated.
+ Low counts of unindexed places are fine."""
+ return CheckState.WARN, dict(count=cnt, index_cmd=index_cmd)
+
+ if index_exists(conn, 'idx_placex_rank_search'):
# Likely just an interrupted update.
index_cmd = 'nominatim index'
else:
"""
missing = []
for index in _get_indexes(conn):
- if not conn.index_exists(index):
+ if not index_exists(conn, index):
missing.append(index)
if missing:
if not config.get_bool('USE_US_TIGER_DATA'):
return CheckState.NOT_APPLICABLE
- if not conn.table_exists('location_property_tiger'):
+ if not table_exists(conn, 'location_property_tiger'):
return CheckState.FAIL, dict(error='TIGER data table not found.')
- with conn.cursor() as cur:
- if cur.scalar('SELECT count(*) FROM location_property_tiger') == 0:
- return CheckState.FAIL, dict(error='TIGER data table is empty.')
+ if execute_scalar(conn, 'SELECT count(*) FROM location_property_tiger') == 0:
+ return CheckState.FAIL, dict(error='TIGER data table is empty.')
return CheckState.OK
-# SPDX-License-Identifier: GPL-2.0-only
+# SPDX-License-Identifier: GPL-3.0-or-later
#
# This file is part of Nominatim. (https://nominatim.org)
#
-# Copyright (C) 2022 by the Nominatim developer community.
+# Copyright (C) 2024 by the Nominatim developer community.
# For a full list of authors see the git log.
"""
Collection of host system information including software versions, memory,
import subprocess
import sys
from pathlib import Path
-from typing import List, Optional, Tuple, Union, cast
+from typing import List, Optional, Union
import psutil
-from psycopg2.extensions import make_dsn, parse_dsn
-from nominatim.config import Configuration
-from nominatim.db.connection import connect
-from nominatim.typing import DictCursorResults
-from nominatim.version import version_str
-
-
-def convert_version(ver_tup: Tuple[int, int]) -> str:
- """converts tuple version (ver_tup) to a string representation"""
- return ".".join(map(str, ver_tup))
+from ..config import Configuration
+from ..db.connection import connect, server_version_tuple, execute_scalar
+from ..version import NOMINATIM_VERSION
def friendly_memory_string(mem: float) -> str:
filename: str, start: str, sep: str, fieldnum: int = 1
) -> Optional[str]:
"""open filename, finds the line starting with the 'start' string.
- Splits the line using seperator and returns a "fieldnum" from the split."""
+ Splits the line using separator and returns a "fieldnum" from the split."""
with open(filename, encoding='utf8') as file:
result = ""
for line in file:
"""Generate a report about the host system including software versions, memory,
storage, and database configuration."""
- with connect(make_dsn(config.get_libpq_dsn(), dbname='postgres')) as conn:
- postgresql_ver: str = convert_version(conn.server_version_tuple())
+ with connect(config.get_libpq_dsn(), dbname='postgres') as conn:
+ postgresql_ver: str = '.'.join(map(str, server_version_tuple(conn)))
with conn.cursor() as cur:
- cur.execute(f"""
- SELECT datname FROM pg_catalog.pg_database
- WHERE datname='{parse_dsn(config.get_libpq_dsn())['dbname']}'""")
- nominatim_db_exists = cast(Optional[DictCursorResults], cur.fetchall())
- if nominatim_db_exists:
- with connect(config.get_libpq_dsn()) as conn:
- postgis_ver: str = convert_version(conn.postgis_version_tuple())
- else:
- postgis_ver = "Unable to connect to database"
+ cur.execute("SELECT datname FROM pg_catalog.pg_database WHERE datname=%s",
+ (config.get_database_params()['dbname'], ))
+ nominatim_db_exists = cur.rowcount > 0
+
+ if nominatim_db_exists:
+ with connect(config.get_libpq_dsn()) as conn:
+ postgis_ver: str = execute_scalar(conn, 'SELECT postgis_lib_version()')
+ else:
+ postgis_ver = "Unable to connect to database"
postgresql_config: str = get_postgresql_config(int(float(postgresql_ver)))
**Software Environment:**
- Python version: {sys.version}
- - Nominatim version: {version_str()}
- - PostgreSQL version: {postgresql_ver}
+ - Nominatim version: {NOMINATIM_VERSION!s}
+ - PostgreSQL version: {postgresql_ver}
- PostGIS version: {postgis_ver}
- OS: {os_name_info()}
-
-
+
+
**Hardware Configuration:**
- RAM: {friendly_memory_string(psutil.virtual_memory().total)}
- number of CPUs: {psutil.cpu_count(logical=False)}
- - bare metal/AWS/other cloud service (per systemd-detect-virt(1)): {run_command("systemd-detect-virt")}
+ - bare metal/AWS/other cloud service (per systemd-detect-virt(1)):
+ {run_command("systemd-detect-virt")}
- type and size of disks:
**`df -h` - df - report file system disk space usage: **
```
{run_command(["df", "-h"])}
```
-
+
**lsblk - list block devices: **
```
{run_command("lsblk")}
```
-
-
+
+
**Postgresql Configuration:**
```
{postgresql_config}
--- /dev/null
+# SPDX-License-Identifier: GPL-3.0-or-later
+#
+# This file is part of Nominatim. (https://nominatim.org)
+#
+# Copyright (C) 2024 by the Nominatim developer community.
+# For a full list of authors see the git log.
+"""
+Exporting a Nominatim database to SQLite.
+"""
+from typing import Set, Any, Optional, Union
+import datetime as dt
+import logging
+from pathlib import Path
+
+import sqlalchemy as sa
+
+import nominatim_api as napi
+from nominatim_api.search.query_analyzer_factory import make_query_analyzer
+from nominatim_api.typing import SaSelect, SaRow
+from nominatim_api.sql.sqlalchemy_types import Geometry, IntArray
+
+LOG = logging.getLogger()
+
+
+async def convert(project_dir: Optional[Union[str, Path]],
+ outfile: Path, options: Set[str]) -> None:
+    """ Export an existing database to SQLite. The resulting database
+ will be usable against the Python frontend of Nominatim.
+ """
+ api = napi.NominatimAPIAsync(project_dir)
+
+ try:
+ outapi = napi.NominatimAPIAsync(project_dir,
+ {'NOMINATIM_DATABASE_DSN': f"sqlite:dbname={outfile}",
+ 'NOMINATIM_DATABASE_RW': '1'})
+
+ try:
+ async with api.begin() as src, outapi.begin() as dest:
+ writer = SqliteWriter(src, dest, options)
+ await writer.write()
+ finally:
+ await outapi.close()
+ finally:
+ await api.close()
+
+
+class SqliteWriter:
+ """ Worker class which creates a new SQLite database.
+ """
+
+ def __init__(self, src: napi.SearchConnection,
+ dest: napi.SearchConnection, options: Set[str]) -> None:
+ self.src = src
+ self.dest = dest
+ self.options = options
+
+ async def write(self) -> None:
+ """ Create the database structure and copy the data from
+ the source database to the destination.
+ """
+ LOG.warning('Setting up spatialite')
+ await self.dest.execute(sa.select(sa.func.InitSpatialMetaData(True, 'WGS84')))
+
+ await self.create_tables()
+ await self.copy_data()
+ if 'search' in self.options:
+ await self.create_word_table()
+ await self.create_indexes()
+
+ async def create_tables(self) -> None:
+ """ Set up the database tables.
+ """
+ LOG.warning('Setting up tables')
+ if 'search' not in self.options:
+ self.dest.t.meta.remove(self.dest.t.search_name)
+ else:
+ await self.create_class_tables()
+
+ await self.dest.connection.run_sync(self.dest.t.meta.create_all)
+
+ # Convert all Geometry columns to Spatialite geometries
+ for table in self.dest.t.meta.sorted_tables:
+ for col in table.c:
+ if isinstance(col.type, Geometry):
+ await self.dest.execute(sa.select(
+ sa.func.RecoverGeometryColumn(table.name, col.name, 4326,
+ col.type.subtype.upper(), 'XY')))
+
+ async def create_class_tables(self) -> None:
+        """ Set up the tables that serve class/type-specific geometries.
+ """
+ sql = sa.text("""SELECT tablename FROM pg_tables
+ WHERE tablename LIKE 'place_classtype_%'""")
+ for res in await self.src.execute(sql):
+ for db in (self.src, self.dest):
+ sa.Table(res[0], db.t.meta,
+ sa.Column('place_id', sa.BigInteger),
+ sa.Column('centroid', Geometry))
+
+ async def create_word_table(self) -> None:
+ """ Create the word table.
+ This table needs the property information to determine the
+            correct format. Therefore it needs to be done after all other
+ data has been copied.
+ """
+ await make_query_analyzer(self.src)
+ await make_query_analyzer(self.dest)
+ src = self.src.t.meta.tables['word']
+ dest = self.dest.t.meta.tables['word']
+
+ await self.dest.connection.run_sync(dest.create)
+
+ LOG.warning("Copying word table")
+ async_result = await self.src.connection.stream(sa.select(src))
+
+ async for partition in async_result.partitions(10000):
+ data = [{k: getattr(r, k) for k in r._fields} for r in partition]
+ await self.dest.execute(dest.insert(), data)
+
+ await self.dest.connection.run_sync(sa.Index('idx_word_woken', dest.c.word_token).create)
+
+ async def copy_data(self) -> None:
+ """ Copy data for all registered tables.
+ """
+ def _getfield(row: SaRow, key: str) -> Any:
+ value = getattr(row, key)
+ if isinstance(value, dt.datetime):
+ if value.tzinfo is not None:
+ value = value.astimezone(dt.timezone.utc)
+ return value
+
+ for table in self.dest.t.meta.sorted_tables:
+ LOG.warning("Copying '%s'", table.name)
+ async_result = await self.src.connection.stream(self.select_from(table.name))
+
+ async for partition in async_result.partitions(10000):
+ data = [{('class_' if k == 'class' else k): _getfield(r, k)
+ for k in r._fields}
+ for r in partition]
+ await self.dest.execute(table.insert(), data)
+
+ # Set up a minimal copy of pg_tables used to look up the class tables later.
+ pg_tables = sa.Table('pg_tables', self.dest.t.meta,
+ sa.Column('schemaname', sa.Text, default='public'),
+ sa.Column('tablename', sa.Text))
+ await self.dest.connection.run_sync(pg_tables.create)
+ data = [{'tablename': t} for t in self.dest.t.meta.tables]
+ await self.dest.execute(pg_tables.insert().values(data))
+
+ async def create_indexes(self) -> None:
+ """ Add indexes necessary for the frontend.
+ """
+ # reverse place node lookup needs an extra table to simulate a
+ # partial index with adaptive buffering.
+ await self.dest.execute(sa.text(
+ """ CREATE TABLE placex_place_node_areas AS
+ SELECT place_id, ST_Expand(geometry,
+ 14.0 * exp(-0.2 * rank_search) - 0.03) as geometry
+ FROM placex
+ WHERE rank_address between 5 and 25
+ and osm_type = 'N'
+ and linked_place_id is NULL """))
+ await self.dest.execute(sa.select(
+ sa.func.RecoverGeometryColumn('placex_place_node_areas', 'geometry',
+ 4326, 'GEOMETRY', 'XY')))
+ await self.dest.execute(sa.select(sa.func.CreateSpatialIndex(
+ 'placex_place_node_areas', 'geometry')))
+
+ # Remaining indexes.
+ await self.create_spatial_index('country_grid', 'geometry')
+ await self.create_spatial_index('placex', 'geometry')
+ await self.create_spatial_index('osmline', 'linegeo')
+ await self.create_spatial_index('tiger', 'linegeo')
+ await self.create_index('placex', 'place_id')
+ await self.create_index('placex', 'parent_place_id')
+ await self.create_index('placex', 'rank_address')
+ await self.create_index('addressline', 'place_id')
+ await self.create_index('postcode', 'place_id')
+ await self.create_index('osmline', 'place_id')
+ await self.create_index('tiger', 'place_id')
+
+ if 'search' in self.options:
+ await self.create_spatial_index('postcode', 'geometry')
+ await self.create_spatial_index('search_name', 'centroid')
+ await self.create_index('search_name', 'place_id')
+ await self.create_index('osmline', 'parent_place_id')
+ await self.create_index('tiger', 'parent_place_id')
+ await self.create_search_index()
+
+ for t in self.dest.t.meta.tables:
+ if t.startswith('place_classtype_'):
+ await self.dest.execute(sa.select(
+ sa.func.CreateSpatialIndex(t, 'centroid')))
+
+ async def create_spatial_index(self, table: str, column: str) -> None:
+ """ Create a spatial index on the given table and column.
+ """
+ await self.dest.execute(sa.select(
+ sa.func.CreateSpatialIndex(getattr(self.dest.t, table).name, column)))
+
+ async def create_index(self, table_name: str, column: str) -> None:
+ """ Create a simple index on the given table and column.
+ """
+ table = getattr(self.dest.t, table_name)
+ await self.dest.connection.run_sync(
+ sa.Index(f"idx_{table}_{column}", getattr(table.c, column)).create)
+
+ async def create_search_index(self) -> None:
+ """ Create the tables and indexes needed for word lookup.
+ """
+ LOG.warning("Creating reverse search table")
+ rsn = sa.Table('reverse_search_name', self.dest.t.meta,
+ sa.Column('word', sa.Integer()),
+ sa.Column('column', sa.Text()),
+ sa.Column('places', IntArray))
+ await self.dest.connection.run_sync(rsn.create)
+
+ tsrc = self.src.t.search_name
+ for column in ('name_vector', 'nameaddress_vector'):
+ sql = sa.select(sa.func.unnest(getattr(tsrc.c, column)).label('word'),
+ sa.func.ArrayAgg(tsrc.c.place_id).label('places'))\
+ .group_by('word')
+
+ async_result = await self.src.connection.stream(sql)
+ async for partition in async_result.partitions(100):
+ data = []
+ for row in partition:
+ row.places.sort()
+ data.append({'word': row.word,
+ 'column': column,
+ 'places': row.places})
+ await self.dest.execute(rsn.insert(), data)
+
+ await self.dest.connection.run_sync(
+ sa.Index('idx_reverse_search_name_word', rsn.c.word).create)
+
+ def select_from(self, table: str) -> SaSelect:
+ """ Create the SQL statement to select the source columns and rows.
+ """
+ columns = self.src.t.meta.tables[table].c
+
+ if table == 'placex':
+ # SQLite struggles with Geometries that are larger than 5MB,
+ # so simplify those.
+ return sa.select(*(c for c in columns if not isinstance(c.type, Geometry)),
+ sa.func.ST_AsText(columns.centroid).label('centroid'),
+ sa.func.ST_AsText(
+ sa.case((sa.func.ST_MemSize(columns.geometry) < 5000000,
+ columns.geometry),
+ else_=sa.func.ST_SimplifyPreserveTopology(
+ columns.geometry, 0.0001)
+ )).label('geometry'))
+
+ sql = sa.select(*(sa.func.ST_AsText(c).label(c.name)
+ if isinstance(c.type, Geometry) else c for c in columns))
+
+ return sql
-# SPDX-License-Identifier: GPL-2.0-only
+# SPDX-License-Identifier: GPL-3.0-or-later
#
# This file is part of Nominatim. (https://nominatim.org)
#
-# Copyright (C) 2022 by the Nominatim developer community.
+# Copyright (C) 2024 by the Nominatim developer community.
# For a full list of authors see the git log.
"""
Functions for setting up and importing a new Nominatim database.
from typing import Tuple, Optional, Union, Sequence, MutableMapping, Any
import logging
import os
-import selectors
import subprocess
+import asyncio
from pathlib import Path
import psutil
-from psycopg2 import sql as pysql
-
-from nominatim.config import Configuration
-from nominatim.db.connection import connect, get_pg_env, Connection
-from nominatim.db.async_connection import DBConnection
-from nominatim.db.sql_preprocessor import SQLPreprocessor
-from nominatim.tools.exec_utils import run_osm2pgsql
-from nominatim.errors import UsageError
-from nominatim.version import POSTGRESQL_REQUIRED_VERSION, POSTGIS_REQUIRED_VERSION
+import psycopg
+from psycopg import sql as pysql
+
+from ..errors import UsageError
+from ..config import Configuration
+from ..db.connection import connect, get_pg_env, Connection, server_version_tuple, \
+ postgis_version_tuple, drop_tables, table_exists, execute_scalar
+from ..db.sql_preprocessor import SQLPreprocessor
+from ..db.query_pool import QueryPool
+from .exec_utils import run_osm2pgsql
+from ..version import POSTGRESQL_REQUIRED_VERSION, POSTGIS_REQUIRED_VERSION
LOG = logging.getLogger()
+
def _require_version(module: str, actual: Tuple[int, int], expected: Tuple[int, int]) -> None:
""" Compares the version for the given module and raises an exception
if the actual version is too old.
raise UsageError(f'{module} is too old.')
+def _require_loaded(extension_name: str, conn: Connection) -> None:
+ """ Check that the given extension is loaded. """
+ with conn.cursor() as cur:
+ cur.execute('SELECT * FROM pg_extension WHERE extname = %s', (extension_name, ))
+ if cur.rowcount <= 0:
+ LOG.fatal('Required module %s is not loaded.', extension_name)
+ raise UsageError(f'{extension_name} is not loaded.')
+
+
+def check_existing_database_plugins(dsn: str) -> None:
+ """ Check that the database has the required plugins installed."""
+ with connect(dsn) as conn:
+ _require_version('PostgreSQL server',
+ server_version_tuple(conn),
+ POSTGRESQL_REQUIRED_VERSION)
+ _require_version('PostGIS',
+ postgis_version_tuple(conn),
+ POSTGIS_REQUIRED_VERSION)
+ _require_loaded('hstore', conn)
+
+
def setup_database_skeleton(dsn: str, rouser: Optional[str] = None) -> None:
""" Create a new database for Nominatim and populate it with the
essential extensions.
with connect(dsn) as conn:
_require_version('PostgreSQL server',
- conn.server_version_tuple(),
+ server_version_tuple(conn),
POSTGRESQL_REQUIRED_VERSION)
if rouser is not None:
- with conn.cursor() as cur:
- cnt = cur.scalar('SELECT count(*) FROM pg_user where usename = %s',
+ cnt = execute_scalar(conn, 'SELECT count(*) FROM pg_user where usename = %s',
(rouser, ))
- if cnt == 0:
- LOG.fatal("Web user '%s' does not exist. Create it with:\n"
- "\n createuser %s", rouser, rouser)
- raise UsageError('Missing read-only user.')
+ if cnt == 0:
+ LOG.fatal("Web user '%s' does not exist. Create it with:\n"
+ "\n createuser %s", rouser, rouser)
+ raise UsageError('Missing read-only user.')
# Create extensions.
with conn.cursor() as cur:
cur.execute('CREATE EXTENSION IF NOT EXISTS hstore')
cur.execute('CREATE EXTENSION IF NOT EXISTS postgis')
-
- postgis_version = conn.postgis_version_tuple()
- if postgis_version[0] >= 3:
- cur.execute('CREATE EXTENSION IF NOT EXISTS postgis_raster')
+ cur.execute('CREATE EXTENSION IF NOT EXISTS postgis_raster')
conn.commit()
_require_version('PostGIS',
- conn.postgis_version_tuple(),
+ postgis_version_tuple(conn),
POSTGIS_REQUIRED_VERSION)
with connect(options['dsn']) as conn:
if not ignore_errors:
with conn.cursor() as cur:
- cur.execute('SELECT * FROM place LIMIT 1')
+ cur.execute('SELECT true FROM place LIMIT 1')
if cur.rowcount == 0:
raise UsageError('No data imported by osm2pgsql.')
if drop:
- conn.drop_table('planet_osm_nodes')
+ drop_tables(conn, 'planet_osm_nodes')
+ conn.commit()
if drop and options['flatnode_file']:
Path(options['flatnode_file']).unlink()
cur.execute('TRUNCATE location_property_tiger')
cur.execute('TRUNCATE location_property_osmline')
cur.execute('TRUNCATE location_postcode')
- if conn.table_exists('search_name'):
+ if table_exists(conn, 'search_name'):
cur.execute('TRUNCATE search_name')
cur.execute('DROP SEQUENCE IF EXISTS seq_place')
cur.execute('CREATE SEQUENCE seq_place start 100000')
'extratags', 'geometry')))
-def load_data(dsn: str, threads: int) -> None:
+async def load_data(dsn: str, threads: int) -> None:
""" Copy data into the word and placex table.
"""
- sel = selectors.DefaultSelector()
- # Then copy data from place to placex in <threads - 1> chunks.
- place_threads = max(1, threads - 1)
- for imod in range(place_threads):
- conn = DBConnection(dsn)
- conn.connect()
- conn.perform(
- pysql.SQL("""INSERT INTO placex ({columns})
- SELECT {columns} FROM place
- WHERE osm_id % {total} = {mod}
- AND NOT (class='place' and (type='houses' or type='postcode'))
- AND ST_IsValid(geometry)
- """).format(columns=_COPY_COLUMNS,
- total=pysql.Literal(place_threads),
- mod=pysql.Literal(imod)))
- sel.register(conn, selectors.EVENT_READ, conn)
-
- # Address interpolations go into another table.
- conn = DBConnection(dsn)
- conn.connect()
- conn.perform("""INSERT INTO location_property_osmline (osm_id, address, linegeo)
- SELECT osm_id, address, geometry FROM place
- WHERE class='place' and type='houses' and osm_type='W'
- and ST_GeometryType(geometry) = 'ST_LineString'
- """)
- sel.register(conn, selectors.EVENT_READ, conn)
-
- # Now wait for all of them to finish.
- todo = place_threads + 1
- while todo > 0:
- for key, _ in sel.select(1):
- conn = key.data
- sel.unregister(conn)
- conn.wait()
- conn.close()
- todo -= 1
+ placex_threads = max(1, threads - 1)
+
+ progress = asyncio.create_task(_progress_print())
+
+ async with QueryPool(dsn, placex_threads + 1) as pool:
+ # Copy data from place to placex in <threads - 1> chunks.
+ for imod in range(placex_threads):
+ await pool.put_query(
+ pysql.SQL("""INSERT INTO placex ({columns})
+ SELECT {columns} FROM place
+ WHERE osm_id % {total} = {mod}
+ AND NOT (class='place'
+ and (type='houses' or type='postcode'))
+ AND ST_IsValid(geometry)
+ """).format(columns=_COPY_COLUMNS,
+ total=pysql.Literal(placex_threads),
+ mod=pysql.Literal(imod)), None)
+
+        # Interpolations need to be copied separately
+ await pool.put_query("""
+ INSERT INTO location_property_osmline (osm_id, address, linegeo)
+ SELECT osm_id, address, geometry FROM place
+ WHERE class='place' and type='houses' and osm_type='W'
+ and ST_GeometryType(geometry) = 'ST_LineString' """, None)
+
+ progress.cancel()
+
+ async with await psycopg.AsyncConnection.connect(dsn) as aconn:
+ await aconn.execute('ANALYSE')
+
+
+async def _progress_print() -> None:
+ while True:
+ try:
+ await asyncio.sleep(1)
+ except asyncio.CancelledError:
+ print('', flush=True)
+ break
print('.', end='', flush=True)
- print('\n')
-
- with connect(dsn) as syn_conn:
- with syn_conn.cursor() as cur:
- cur.execute('ANALYSE')
-def create_search_indices(conn: Connection, config: Configuration,
- drop: bool = False, threads: int = 1) -> None:
+async def create_search_indices(conn: Connection, config: Configuration,
+ drop: bool = False, threads: int = 1) -> None:
""" Create tables that have explicit partitioning.
"""
sql = SQLPreprocessor(conn, config)
- sql.run_parallel_sql_file(config.get_libpq_dsn(),
- 'indices.sql', min(8, threads), drop=drop)
+ await sql.run_parallel_sql_file(config.get_libpq_dsn(),
+ 'indices.sql', min(8, threads), drop=drop)
--- /dev/null
+# SPDX-License-Identifier: GPL-3.0-or-later
+#
+# This file is part of Nominatim. (https://nominatim.org)
+#
+# Copyright (C) 2024 by the Nominatim developer community.
+# For a full list of authors see the git log.
+"""
+Helper functions for executing external programs.
+"""
+from typing import Any, Mapping, List, Optional
+import logging
+import os
+import re
+import subprocess
+import shutil
+
+from ..db.connection import get_pg_env
+from ..errors import UsageError
+from ..version import OSM2PGSQL_REQUIRED_VERSION
+
+LOG = logging.getLogger()
+
+
+def run_osm2pgsql(options: Mapping[str, Any]) -> None:
+ """ Run osm2pgsql with the given options.
+ """
+ _check_osm2pgsql_version(options['osm2pgsql'])
+
+ env = get_pg_env(options['dsn'])
+
+ cmd = [_find_osm2pgsql_cmd(options['osm2pgsql']),
+ '--append' if options['append'] else '--create',
+ '--slim',
+ '--log-progress', 'true',
+ '--number-processes', '1' if options['append'] else str(options['threads']),
+ '--cache', str(options['osm2pgsql_cache']),
+ '--style', str(options['osm2pgsql_style'])
+ ]
+
+ if str(options['osm2pgsql_style']).endswith('.lua'):
+ env['LUA_PATH'] = ';'.join((str(options['osm2pgsql_style_path'] / '?.lua'),
+ os.environ.get('LUA_PATH', ';')))
+ env['THEMEPARK_PATH'] = str(options['osm2pgsql_style_path'] / 'themes')
+ if 'THEMEPARK_PATH' in os.environ:
+ env['THEMEPARK_PATH'] += ':' + os.environ['THEMEPARK_PATH']
+ cmd.extend(('--output', 'flex'))
+
+ for flavour in ('data', 'index'):
+ if options['tablespaces'][f"main_{flavour}"]:
+ env[f"NOMINATIM_TABLESPACE_PLACE_{flavour.upper()}"] = \
+ options['tablespaces'][f"main_{flavour}"]
+ else:
+ cmd.extend(('--output', 'gazetteer', '--hstore', '--latlon'))
+ cmd.extend(_mk_tablespace_options('main', options))
+
+ if options['flatnode_file']:
+ cmd.extend(('--flat-nodes', options['flatnode_file']))
+
+ cmd.extend(_mk_tablespace_options('slim', options))
+
+ if options.get('disable_jit', False):
+ env['PGOPTIONS'] = '-c jit=off -c max_parallel_workers_per_gather=0'
+
+ if 'import_data' in options:
+ cmd.extend(('-r', 'xml', '-'))
+ elif isinstance(options['import_file'], list):
+ for fname in options['import_file']:
+ cmd.append(str(fname))
+ else:
+ cmd.append(str(options['import_file']))
+
+ subprocess.run(cmd, cwd=options.get('cwd', '.'),
+ input=options.get('import_data'),
+ env=env, check=True)
+
+
+def _mk_tablespace_options(ttype: str, options: Mapping[str, Any]) -> List[str]:
+ cmds: List[str] = []
+ for flavour in ('data', 'index'):
+ if options['tablespaces'][f"{ttype}_{flavour}"]:
+ cmds.extend((f"--tablespace-{ttype}-{flavour}",
+ options['tablespaces'][f"{ttype}_{flavour}"]))
+
+ return cmds
+
+
+def _find_osm2pgsql_cmd(cmdline: Optional[str]) -> str:
+ if cmdline is not None:
+ return cmdline
+
+ in_path = shutil.which('osm2pgsql')
+ if in_path is None:
+ raise UsageError('osm2pgsql executable not found. Please install osm2pgsql first.')
+
+ return str(in_path)
+
+
+def _check_osm2pgsql_version(cmdline: Optional[str]) -> None:
+ cmd = [_find_osm2pgsql_cmd(cmdline), '--version']
+
+ result = subprocess.run(cmd, capture_output=True, check=True)
+
+ if not result.stderr:
+ raise UsageError("osm2pgsql does not print version information.")
+
+ verinfo = result.stderr.decode('UTF-8')
+
+ match = re.search(r'osm2pgsql version (\d+)\.(\d+)', verinfo)
+ if match is None:
+ raise UsageError(f"No version information found in output: {verinfo}")
+
+ if (int(match[1]), int(match[2])) < OSM2PGSQL_REQUIRED_VERSION:
+ raise UsageError(f"osm2pgsql is too old. Found version {match[1]}.{match[2]}. "
+ f"Need at least version {'.'.join(map(str, OSM2PGSQL_REQUIRED_VERSION))}.")
-# SPDX-License-Identifier: GPL-2.0-only
+# SPDX-License-Identifier: GPL-3.0-or-later
#
# This file is part of Nominatim. (https://nominatim.org)
#
-# Copyright (C) 2022 by the Nominatim developer community.
+# Copyright (C) 2024 by the Nominatim developer community.
# For a full list of authors see the git log.
"""
Functions for removing unnecessary data from the database.
from typing import Optional
from pathlib import Path
-from psycopg2 import sql as pysql
+from psycopg import sql as pysql
-from nominatim.db.connection import Connection
+from ..db.connection import Connection, drop_tables, table_exists
UPDATE_TABLES = [
'address_levels',
'wikipedia_%'
]
+
def drop_update_tables(conn: Connection) -> None:
""" Drop all tables only necessary for updating the database from
OSM replication data.
+ pysql.SQL(' or ').join(parts))
tables = [r[0] for r in cur]
- for table in tables:
- cur.drop_table(table, cascade=True)
-
+ drop_tables(conn, *tables, cascade=True)
conn.commit()
"""
if fpath and fpath.exists():
fpath.unlink()
+
+
+def is_frozen(conn: Connection) -> bool:
+ """ Returns true if database is in a frozen state
+ """
+ return table_exists(conn, 'place') is False
--- /dev/null
+# SPDX-License-Identifier: GPL-3.0-or-later
+#
+# This file is part of Nominatim. (https://nominatim.org)
+#
+# Copyright (C) 2024 by the Nominatim developer community.
+# For a full list of authors see the git log.
+"""
+Functions for database migration to newer software versions.
+"""
+from typing import List, Tuple, Callable, Any
+import logging
+
+from ..errors import UsageError
+from ..config import Configuration
+from ..db import properties
+from ..db.connection import connect, Connection, \
+ table_exists, register_hstore
+from ..version import NominatimVersion, NOMINATIM_VERSION, parse_version
+from ..tokenizer import factory as tokenizer_factory
+from . import refresh
+
+LOG = logging.getLogger()
+
+_MIGRATION_FUNCTIONS: List[Tuple[NominatimVersion, Callable[..., None]]] = []
+
+
+def migrate(config: Configuration, paths: Any) -> int:
+ """ Check for the current database version and execute migrations,
+        if necessary.
+ """
+ with connect(config.get_libpq_dsn()) as conn:
+ register_hstore(conn)
+ if table_exists(conn, 'nominatim_properties'):
+ db_version_str = properties.get_property(conn, 'database_version')
+ else:
+ db_version_str = None
+
+ if db_version_str is not None:
+ db_version = parse_version(db_version_str)
+ else:
+ db_version = None
+
+ if db_version is None or db_version < (4, 3, 0, 0):
+ LOG.fatal('Your database version is older than 4.3. '
+ 'Direct migration is not possible.\n'
+ 'You should strongly consider a reimport. If that is not possible\n'
+ 'please upgrade to 4.3 first and then to the newest version.')
+ raise UsageError('Migration not possible.')
+
+ if db_version == NOMINATIM_VERSION:
+ LOG.warning("Database already at latest version (%s)", db_version_str)
+ return 0
+
+ LOG.info("Detected database version: %s", db_version_str)
+
+ for version, func in _MIGRATION_FUNCTIONS:
+ if db_version < version:
+ title = func.__doc__ or ''
+ LOG.warning("Running: %s (%s)", title.split('\n', 1)[0], version)
+ kwargs = dict(conn=conn, config=config, paths=paths)
+ func(**kwargs)
+ conn.commit()
+
+ LOG.warning('Updating SQL functions.')
+ refresh.create_functions(conn, config)
+ tokenizer = tokenizer_factory.get_tokenizer_for_db(config)
+ tokenizer.update_sql_functions(config)
+
+ properties.set_property(conn, 'database_version', str(NOMINATIM_VERSION))
+
+ conn.commit()
+
+ return 0
+
+
+def _migration(major: int, minor: int, patch: int = 0,
+ dbpatch: int = 0) -> Callable[[Callable[..., None]], Callable[..., None]]:
+ """ Decorator for a single migration step. The parameters describe the
+ version after which the migration is applicable, i.e before changing
+ from the given version to the next, the migration is required.
+
+ All migrations are run in the order in which they are defined in this
+ file. Do not run global SQL scripts for migrations as you cannot be sure
+ that these scripts do the same in later versions.
+
+ Functions will always be reimported in full at the end of the migration
+ process, so the migration functions may leave a temporary state behind
+ there.
+ """
+ def decorator(func: Callable[..., None]) -> Callable[..., None]:
+ version = NominatimVersion(major, minor, patch, dbpatch)
+ _MIGRATION_FUNCTIONS.append((version, func))
+ return func
+
+ return decorator
+
+
+@_migration(4, 4, 99, 0)
+def create_postcode_area_lookup_index(conn: Connection, **_: Any) -> None:
+ """ Create index needed for looking up postcode areas from postocde points.
+ """
+ with conn.cursor() as cur:
+ cur.execute("""CREATE INDEX IF NOT EXISTS idx_placex_postcode_areas
+ ON placex USING BTREE (country_code, postcode)
+ WHERE osm_type = 'R' AND class = 'boundary' AND type = 'postal_code'
+ """)
+
+
+@_migration(4, 4, 99, 1)
+def create_postcode_parent_index(conn: Connection, **_: Any) -> None:
+ """ Create index needed for updating postcodes when a parent changes.
+ """
+ if table_exists(conn, 'planet_osm_ways'):
+ with conn.cursor() as cur:
+ cur.execute("""CREATE INDEX IF NOT EXISTS
+ idx_location_postcode_parent_place_id
+ ON location_postcode USING BTREE (parent_place_id)""")
-# SPDX-License-Identifier: GPL-2.0-only
+# SPDX-License-Identifier: GPL-3.0-or-later
#
# This file is part of Nominatim. (https://nominatim.org)
#
-# Copyright (C) 2022 by the Nominatim developer community.
+# Copyright (C) 2024 by the Nominatim developer community.
# For a full list of authors see the git log.
"""
Functions for importing, updating and otherwise maintaining the table
import logging
from math import isfinite
-from psycopg2 import sql as pysql
+from psycopg import sql as pysql
-from nominatim.db.connection import connect, Connection
-from nominatim.utils.centroid import PointsCentroid
-from nominatim.data.postcode_format import PostcodeFormatter, CountryPostcodeMatcher
-from nominatim.tokenizer.base import AbstractAnalyzer, AbstractTokenizer
+from ..db.connection import connect, Connection, table_exists
+from ..utils.centroid import PointsCentroid
+from ..data.postcode_format import PostcodeFormatter, CountryPostcodeMatcher
+from ..tokenizer.base import AbstractAnalyzer, AbstractTokenizer
LOG = logging.getLogger()
+
def _to_float(numstr: str, max_value: float) -> float:
""" Convert the number in string into a float. The number is expected
to be in the range of [-max_value, max_value]. Otherwise rises a
return num
+
class _PostcodeCollector:
""" Collector for postcodes of a single country.
"""
self.collected: Dict[str, PointsCentroid] = defaultdict(PointsCentroid)
self.normalization_cache: Optional[Tuple[str, Optional[str]]] = None
-
def add(self, postcode: str, x: float, y: float) -> None:
""" Add the given postcode to the collection cache. If the postcode
already existed, it is overwritten with the new centroid.
if normalized:
self.collected[normalized] += (x, y)
-
def commit(self, conn: Connection, analyzer: AbstractAnalyzer, project_dir: Path) -> None:
""" Update postcodes for the country from the postcodes selected so far
as well as any externally supplied postcodes.
with conn.cursor() as cur:
if to_add:
- cur.execute_values(
+ cur.executemany(pysql.SQL(
"""INSERT INTO location_postcode
(place_id, indexed_status, country_code,
- postcode, geometry) VALUES %s""",
- to_add,
- template=pysql.SQL("""(nextval('seq_place'), 1, {},
- %s, 'SRID=4326;POINT(%s %s)')
- """).format(pysql.Literal(self.country)))
+ postcode, geometry)
+ VALUES (nextval('seq_place'), 1, {}, %s,
+ ST_SetSRID(ST_MakePoint(%s, %s), 4326))
+ """).format(pysql.Literal(self.country)),
+ to_add)
if to_delete:
cur.execute("""DELETE FROM location_postcode
WHERE country_code = %s and postcode = any(%s)
""", (self.country, to_delete))
if to_update:
- cur.execute_values(
+ cur.executemany(
pysql.SQL("""UPDATE location_postcode
SET indexed_status = 2,
- geometry = ST_SetSRID(ST_Point(v.x, v.y), 4326)
- FROM (VALUES %s) AS v (pc, x, y)
- WHERE country_code = {} and postcode = pc
- """).format(pysql.Literal(self.country)), to_update)
-
-
- def _compute_changes(self, conn: Connection) \
- -> Tuple[List[Tuple[str, float, float]], List[str], List[Tuple[str, float, float]]]:
+ geometry = ST_SetSRID(ST_Point(%s, %s), 4326)
+ WHERE country_code = {} and postcode = %s
+ """).format(pysql.Literal(self.country)),
+ to_update)
+
+ def _compute_changes(
+ self, conn: Connection
+ ) -> Tuple[List[Tuple[str, float, float]], List[str], List[Tuple[float, float, str]]]:
""" Compute which postcodes from the collected postcodes have to be
added or modified and which from the location_postcode table
have to be deleted.
if pcobj:
newx, newy = pcobj.centroid()
if (x - newx) > 0.0000001 or (y - newy) > 0.0000001:
- to_update.append((postcode, newx, newy))
+ to_update.append((newx, newy, postcode))
else:
to_delete.append(postcode)
return to_add, to_delete, to_update
-
def _update_from_external(self, analyzer: AbstractAnalyzer, project_dir: Path) -> None:
""" Look for an external postcode file for the active country in
the project directory and add missing postcodes when found.
finally:
csvfile.close()
-
def _open_external(self, project_dir: Path) -> Optional[TextIO]:
fname = project_dir / f'{self.country}_postcodes.csv'
analyzer.update_postcodes_from_db()
+
def can_compute(dsn: str) -> bool:
"""
Check that the place table exists so that
postcodes can be computed.
"""
with connect(dsn) as conn:
- return conn.table_exists('place')
+ return table_exists(conn, 'place')
-# SPDX-License-Identifier: GPL-2.0-only
+# SPDX-License-Identifier: GPL-3.0-or-later
#
# This file is part of Nominatim. (https://nominatim.org)
#
-# Copyright (C) 2022 by the Nominatim developer community.
+# Copyright (C) 2024 by the Nominatim developer community.
# For a full list of authors see the git log.
"""
Functions for bringing auxiliary data in the database up-to-date.
"""
from typing import MutableSequence, Tuple, Any, Type, Mapping, Sequence, List, cast
+import csv
+import gzip
import logging
-from textwrap import dedent
from pathlib import Path
-from psycopg2 import sql as pysql
+from psycopg import sql as pysql
-from nominatim.config import Configuration
-from nominatim.db.connection import Connection, connect
-from nominatim.db.utils import execute_file
-from nominatim.db.sql_preprocessor import SQLPreprocessor
-from nominatim.version import version_str
+from ..config import Configuration
+from ..db.connection import Connection, connect, drop_tables
+from ..db.utils import execute_file
+from ..db.sql_preprocessor import SQLPreprocessor
LOG = logging.getLogger()
OSM_TYPE = {'N': 'node', 'W': 'way', 'R': 'relation'}
+
def _add_address_level_rows_from_entry(rows: MutableSequence[Tuple[Any, ...]],
entry: Mapping[str, Any]) -> None:
""" Converts a single entry from the JSON format for address rank
The table has the following columns:
country, class, type, rank_search, rank_address
"""
- rows: List[Tuple[Any, ...]] = []
+ rows: List[Tuple[Any, ...]] = []
for entry in levels:
_add_address_level_rows_from_entry(rows, entry)
- with conn.cursor() as cur:
- cur.drop_table(table)
+ drop_tables(conn, table)
+ with conn.cursor() as cur:
cur.execute(pysql.SQL("""CREATE TABLE {} (
country_code varchar(2),
class TEXT,
rank_address SMALLINT)
""").format(pysql.Identifier(table)))
- cur.execute_values(pysql.SQL("INSERT INTO {} VALUES %s")
- .format(pysql.Identifier(table)), rows)
+ cur.executemany(pysql.SQL("INSERT INTO {} VALUES (%s, %s, %s, %s, %s)")
+ .format(pysql.Identifier(table)), rows)
cur.execute(pysql.SQL('CREATE UNIQUE INDEX ON {} (country_code, class, type)')
.format(pysql.Identifier(table)))
debug=enable_debug)
-
-WEBSITE_SCRIPTS = (
- 'deletable.php',
- 'details.php',
- 'lookup.php',
- 'polygons.php',
- 'reverse.php',
- 'search.php',
- 'status.php'
-)
-
-# constants needed by PHP scripts: PHP name, config name, type
-PHP_CONST_DEFS = (
- ('Database_DSN', 'DATABASE_DSN', str),
- ('Default_Language', 'DEFAULT_LANGUAGE', str),
- ('Log_DB', 'LOG_DB', bool),
- ('Log_File', 'LOG_FILE', Path),
- ('NoAccessControl', 'CORS_NOACCESSCONTROL', bool),
- ('Places_Max_ID_count', 'LOOKUP_MAX_COUNT', int),
- ('PolygonOutput_MaximumTypes', 'POLYGON_OUTPUT_MAX_TYPES', int),
- ('Search_BatchMode', 'SEARCH_BATCH_MODE', bool),
- ('Search_NameOnlySearchFrequencyThreshold', 'SEARCH_NAME_ONLY_THRESHOLD', str),
- ('Use_US_Tiger_Data', 'USE_US_TIGER_DATA', bool),
- ('MapIcon_URL', 'MAPICON_URL', str),
-)
-
-
def import_wikipedia_articles(dsn: str, data_path: Path, ignore_errors: bool = False) -> int:
""" Replaces the wikipedia importance tables with new data.
The import is run in a single transaction so that the new data
Returns 0 if all was well and 1 if the importance file could not
be found. Throws an exception if there was an error reading the file.
"""
- datafile = data_path / 'wikimedia-importance.sql.gz'
+ if import_importance_csv(dsn, data_path / 'wikimedia-importance.csv.gz') == 0 \
+ or import_importance_sql(dsn, data_path / 'wikimedia-importance.sql.gz',
+ ignore_errors) == 0:
+ return 0
- if not datafile.exists():
+ return 1
+
+
+def import_importance_csv(dsn: str, data_file: Path) -> int:
+ """ Replace wikipedia importance table with data from a
+ single CSV file.
+
+ The file must be a gzipped CSV and have the following columns:
+ language, title, importance, wikidata_id
+
+ Other columns may be present but will be ignored.
+ """
+ if not data_file.exists():
+ return 1
+
+ # Only import the first occurrence of a wikidata ID.
+ # This keeps indexes and table small.
+ wd_done = set()
+
+ with connect(dsn) as conn:
+ drop_tables(conn, 'wikipedia_article', 'wikipedia_redirect', 'wikimedia_importance')
+ with conn.cursor() as cur:
+ cur.execute("""CREATE TABLE wikimedia_importance (
+ language TEXT NOT NULL,
+ title TEXT NOT NULL,
+ importance double precision NOT NULL,
+ wikidata TEXT
+ ) """)
+
+ copy_cmd = """COPY wikimedia_importance(language, title, importance, wikidata)
+ FROM STDIN"""
+ with gzip.open(str(data_file), 'rt') as fd, cur.copy(copy_cmd) as copy:
+ for row in csv.DictReader(fd, delimiter='\t', quotechar='|'):
+ wd_id = int(row['wikidata_id'][1:])
+ copy.write_row((row['language'],
+ row['title'],
+ row['importance'],
+ None if wd_id in wd_done else row['wikidata_id']))
+ wd_done.add(wd_id)
+
+ cur.execute("""CREATE INDEX IF NOT EXISTS idx_wikimedia_importance_title
+ ON wikimedia_importance (title)""")
+ cur.execute("""CREATE INDEX IF NOT EXISTS idx_wikimedia_importance_wikidata
+ ON wikimedia_importance (wikidata)
+ WHERE wikidata is not null""")
+
+ conn.commit()
+
+ return 0
+
+
+def import_importance_sql(dsn: str, data_file: Path, ignore_errors: bool) -> int:
+ """ Replace wikipedia importance table with data from an SQL file.
+ """
+ if not data_file.exists():
return 1
pre_code = """BEGIN;
DROP TABLE IF EXISTS "wikipedia_article";
- DROP TABLE IF EXISTS "wikipedia_redirect"
+ DROP TABLE IF EXISTS "wikipedia_redirect";
+ DROP TABLE IF EXISTS "wikipedia_importance";
"""
post_code = "COMMIT"
- execute_file(dsn, datafile, ignore_errors=ignore_errors,
+ execute_file(dsn, data_file, ignore_errors=ignore_errors,
pre_code=pre_code, post_code=post_code)
return 0
+
def import_secondary_importance(dsn: str, data_path: Path, ignore_errors: bool = False) -> int:
""" Replaces the secondary importance raster data table with new data.
if not datafile.exists():
return 1
- with connect(dsn) as conn:
- postgis_version = conn.postgis_version_tuple()
- if postgis_version[0] < 3:
- LOG.error('PostGIS version is too old for using OSM raster data.')
- return 2
-
execute_file(dsn, datafile, ignore_errors=ignore_errors)
return 0
+
def recompute_importance(conn: Connection) -> None:
""" Recompute wikipedia links and importance for all entries in placex.
This is a long-running operations that must not be executed in
cur.execute("""
UPDATE placex SET (wikipedia, importance) =
(SELECT wikipedia, importance
- FROM compute_importance(extratags, country_code, osm_type, osm_id, centroid))
+ FROM compute_importance(extratags, country_code, rank_search, centroid))
""")
cur.execute("""
UPDATE placex s SET wikipedia = d.wikipedia, importance = d.importance
return f"'{quoted}'"
-def setup_website(basedir: Path, config: Configuration, conn: Connection) -> None:
- """ Create the website script stubs.
- """
- if not basedir.exists():
- LOG.info('Creating website directory.')
- basedir.mkdir()
-
- template = dedent(f"""\
- <?php
-
- @define('CONST_Debug', $_GET['debug'] ?? false);
- @define('CONST_LibDir', '{config.lib_dir.php}');
- @define('CONST_TokenizerDir', '{config.project_dir / 'tokenizer'}');
- @define('CONST_NominatimVersion', '{version_str()}');
-
- """)
-
- for php_name, conf_name, var_type in PHP_CONST_DEFS:
- varout = _quote_php_variable(var_type, config, conf_name)
-
- template += f"@define('CONST_{php_name}', {varout});\n"
-
- template += f"\nrequire_once('{config.lib_dir.php}/website/{{}}');\n"
-
- search_name_table_exists = bool(conn and conn.table_exists('search_name'))
-
- for script in WEBSITE_SCRIPTS:
- if not search_name_table_exists and script == 'search.php':
- (basedir / script).write_text(template.format('reverse-only-search.php'), 'utf-8')
- else:
- (basedir / script).write_text(template.format(script), 'utf-8')
-
-
def invalidate_osm_object(osm_type: str, osm_id: int, conn: Connection,
recursive: bool = True) -> None:
""" Mark the given OSM object for reindexing. When 'recursive' is set
-# SPDX-License-Identifier: GPL-2.0-only
+# SPDX-License-Identifier: GPL-3.0-or-later
#
# This file is part of Nominatim. (https://nominatim.org)
#
-# Copyright (C) 2022 by the Nominatim developer community.
+# Copyright (C) 2024 by the Nominatim developer community.
# For a full list of authors see the git log.
"""
Functions for updating a database from a replication source.
"""
-from typing import ContextManager, MutableMapping, Any, Generator, cast
+from typing import ContextManager, MutableMapping, Any, Generator, cast, Iterator
from contextlib import contextmanager
import datetime as dt
from enum import Enum
import logging
import time
+import types
+import urllib.request as urlrequest
-from nominatim.db import status
-from nominatim.db.connection import Connection
-from nominatim.tools.exec_utils import run_osm2pgsql
-from nominatim.errors import UsageError
+from ..errors import UsageError
+from ..db import status
+from ..db.connection import Connection, connect
+from .exec_utils import run_osm2pgsql
try:
from osmium.replication.server import ReplicationServer
from osmium import WriteHandler
-except ImportError as exc:
+ from osmium import version as pyo_version
+ import requests
+except ModuleNotFoundError as exc:
logging.getLogger().critical("pyosmium not installed. Replication functions not available.\n"
- "To install pyosmium via pip: pip3 install osmium")
+ "To install pyosmium via pip: pip install osmium")
raise UsageError("replication tools not available") from exc
LOG = logging.getLogger()
-def init_replication(conn: Connection, base_url: str) -> None:
+
+def init_replication(conn: Connection, base_url: str,
+ socket_timeout: int = 60) -> None:
""" Set up replication for the server at the given base URL.
"""
LOG.info("Using replication source: %s", base_url)
# margin of error to make sure we get all data
date -= dt.timedelta(hours=3)
- repl = ReplicationServer(base_url)
-
- seq = repl.timestamp_to_sequence(date)
+ with _make_replication_server(base_url, socket_timeout) as repl:
+ seq = repl.timestamp_to_sequence(date)
if seq is None:
LOG.fatal("Cannot reach the configured replication service '%s'.\n"
LOG.warning("Updates initialised at sequence %s (%s)", seq, date)
-def check_for_updates(conn: Connection, base_url: str) -> int:
+def check_for_updates(conn: Connection, base_url: str,
+ socket_timeout: int = 60) -> int:
""" Check if new data is available from the replication service at the
given base URL.
"""
"Please run 'nominatim replication --init' first.")
return 254
- state = ReplicationServer(base_url).get_state_info()
+ with _make_replication_server(base_url, socket_timeout) as repl:
+ state = repl.get_state_info()
if state is None:
LOG.error("Cannot get state for URL %s.", base_url)
LOG.warning("New data available (%i => %i).", seq, state.sequence)
return 0
+
class UpdateState(Enum):
""" Possible states after an update has run.
"""
NO_CHANGES = 3
-def update(conn: Connection, options: MutableMapping[str, Any]) -> UpdateState:
+def update(dsn: str, options: MutableMapping[str, Any],
+ socket_timeout: int = 60) -> UpdateState:
""" Update database from the next batch of data. Returns the state of
updates according to `UpdateState`.
"""
- startdate, startseq, indexed = status.get_status(conn)
+ with connect(dsn) as conn:
+ startdate, startseq, indexed = status.get_status(conn)
+ conn.commit()
if startseq is None:
LOG.error("Replication not set up. "
options['import_file'].unlink()
# Read updates into file.
- with _make_replication_server(options['base_url']) as repl:
+ with _make_replication_server(options['base_url'], socket_timeout) as repl:
outhandler = WriteHandler(str(options['import_file']))
endseq = repl.apply_diffs(outhandler, startseq + 1,
max_size=options['max_diff_size'] * 1024)
if endseq is None:
return UpdateState.NO_CHANGES
- # Consume updates with osm2pgsql.
- options['append'] = True
- options['disable_jit'] = conn.server_version_tuple() >= (11, 0)
- run_osm2pgsql(options)
+ with connect(dsn) as conn:
+ run_osm2pgsql_updates(conn, options)
- # Write the current status to the file
- endstate = repl.get_state_info(endseq)
- status.set_status(conn, endstate.timestamp if endstate else None,
- seq=endseq, indexed=False)
+ # Write the current status to the file
+ endstate = repl.get_state_info(endseq)
+ status.set_status(conn, endstate.timestamp if endstate else None,
+ seq=endseq, indexed=False)
+ conn.commit()
return UpdateState.UP_TO_DATE
-def _make_replication_server(url: str) -> ContextManager[ReplicationServer]:
+def run_osm2pgsql_updates(conn: Connection, options: MutableMapping[str, Any]) -> None:
+ """ Run osm2pgsql in append mode.
+ """
+ # Remove any stale deletion marks.
+ with conn.cursor() as cur:
+ cur.execute('TRUNCATE place_to_be_deleted')
+ conn.commit()
+
+ # Consume updates with osm2pgsql.
+ options['append'] = True
+ options['disable_jit'] = True
+ run_osm2pgsql(options)
+
+ # Handle deletions
+ with conn.cursor() as cur:
+ cur.execute('SELECT flush_deleted_places()')
+ conn.commit()
+
+
+def _make_replication_server(url: str, timeout: int) -> ContextManager[ReplicationServer]:
""" Returns a ReplicationServer in form of a context manager.
Creates a light wrapper around older versions of pyosmium that did
not support the context manager interface.
"""
if hasattr(ReplicationServer, '__enter__'):
- return cast(ContextManager[ReplicationServer], ReplicationServer(url))
+ # Patches the open_url function for pyosmium >= 3.2
+ # where the socket timeout is no longer respected.
+ def patched_open_url(self: ReplicationServer, url: urlrequest.Request) -> Any:
+ """ Download a resource from the given URL and return a byte sequence
+ of the content.
+ """
+ headers = {"User-Agent": f"Nominatim (pyosmium/{pyo_version.pyosmium_release})"}
+
+ if self.session is not None:
+ return self.session.get(url.get_full_url(),
+ headers=headers, timeout=timeout or None,
+ stream=True)
+
+ @contextmanager
+ def _get_url_with_session() -> Iterator[requests.Response]:
+ with requests.Session() as session:
+ request = session.get(url.get_full_url(),
+ headers=headers, timeout=timeout or None,
+ stream=True)
+ yield request
+
+ return _get_url_with_session()
+
+ repl = ReplicationServer(url)
+ setattr(repl, 'open_url', types.MethodType(patched_open_url, repl))
+
+ return cast(ContextManager[ReplicationServer], repl)
@contextmanager
def get_cm() -> Generator[ReplicationServer, None, None]:
-# SPDX-License-Identifier: GPL-2.0-only
+# SPDX-License-Identifier: GPL-3.0-or-later
#
# This file is part of Nominatim. (https://nominatim.org)
#
-# Copyright (C) 2022 by the Nominatim developer community.
+# Copyright (C) 2024 by the Nominatim developer community.
# For a full list of authors see the git log.
"""
Contains the class which handles statistics for the
import logging
LOG = logging.getLogger()
+
class SpecialPhrasesImporterStatistics():
"""
Class handling statistics of the import
-# SPDX-License-Identifier: GPL-2.0-only
+# SPDX-License-Identifier: GPL-3.0-or-later
#
# This file is part of Nominatim. (https://nominatim.org)
#
-# Copyright (C) 2022 by the Nominatim developer community.
+# Copyright (C) 2024 by the Nominatim developer community.
# For a full list of authors see the git log.
"""
Module containing the SPCsvLoader class.
from typing import Iterable
import csv
import os
-from nominatim.tools.special_phrases.special_phrase import SpecialPhrase
-from nominatim.errors import UsageError
+
+from ...errors import UsageError
+from .special_phrase import SpecialPhrase
+
class SPCsvLoader:
"""
def __init__(self, csv_path: str) -> None:
self.csv_path = csv_path
-
def generate_phrases(self) -> Iterable[SpecialPhrase]:
""" Open and parse the given csv file.
Create the corresponding SpecialPhrases.
for row in reader:
yield SpecialPhrase(row['phrase'], row['class'], row['type'], row['operator'])
-
def _check_csv_validity(self) -> None:
"""
Check that the csv file has the right extension.
-# SPDX-License-Identifier: GPL-2.0-only
+# SPDX-License-Identifier: GPL-3.0-or-later
#
# This file is part of Nominatim. (https://nominatim.org)
#
-# Copyright (C) 2022 by the Nominatim developer community.
+# Copyright (C) 2024 by the Nominatim developer community.
# For a full list of authors see the git log.
"""
Module containing the class handling the import
import logging
import re
-from psycopg2.sql import Identifier, SQL
+from psycopg.sql import Identifier, SQL
-from nominatim.config import Configuration
-from nominatim.db.connection import Connection
-from nominatim.tools.special_phrases.importer_statistics import SpecialPhrasesImporterStatistics
-from nominatim.tools.special_phrases.special_phrase import SpecialPhrase
-from nominatim.tokenizer.base import AbstractTokenizer
-from nominatim.typing import Protocol
+from ...typing import Protocol
+from ...config import Configuration
+from ...db.connection import Connection, drop_tables, index_exists
+from .importer_statistics import SpecialPhrasesImporterStatistics
+from .special_phrase import SpecialPhrase
+from ...tokenizer.base import AbstractTokenizer
LOG = logging.getLogger()
+
def _classtype_table(phrase_class: str, phrase_type: str) -> str:
""" Return the name of the table for the given class and type.
"""
class SPImporter():
- # pylint: disable-msg=too-many-instance-attributes
"""
Class handling the process of special phrases importation into the database.
LOG.warning('Import done.')
self.statistics_handler.notify_import_done()
-
def _fetch_existing_place_classtype_tables(self) -> None:
"""
Fetch existing place_classtype tables.
self.table_phrases_to_delete.add(row[0])
def _load_white_and_black_lists(self) \
- -> Tuple[Mapping[str, Sequence[str]], Mapping[str, Sequence[str]]]:
+ -> Tuple[Mapping[str, Sequence[str]], Mapping[str, Sequence[str]]]:
"""
Load white and black lists from phrases-settings.json.
"""
return (phrase.p_class, phrase.p_type)
-
def _create_classtype_table_and_indexes(self,
class_type_pairs: Iterable[Tuple[str, str]]) -> None:
"""
with self.db_connection.cursor() as db_cursor:
db_cursor.execute("DROP INDEX idx_placex_classtype")
-
def _create_place_classtype_table(self, sql_tablespace: str,
phrase_class: str, phrase_type: str) -> None:
"""
""").format(Identifier(table_name), SQL(sql_tablespace)),
(phrase_class, phrase_type))
-
def _create_place_classtype_indexes(self, sql_tablespace: str,
phrase_class: str, phrase_type: str) -> None:
"""
index_prefix = f'idx_place_classtype_{phrase_class}_{phrase_type}_'
base_table = _classtype_table(phrase_class, phrase_type)
# Index on centroid
- if not self.db_connection.index_exists(index_prefix + 'centroid'):
+ if not index_exists(self.db_connection, index_prefix + 'centroid'):
with self.db_connection.cursor() as db_cursor:
db_cursor.execute(SQL("CREATE INDEX {} ON {} USING GIST (centroid) {}")
.format(Identifier(index_prefix + 'centroid'),
SQL(sql_tablespace)))
# Index on place_id
- if not self.db_connection.index_exists(index_prefix + 'place_id'):
+ if not index_exists(self.db_connection, index_prefix + 'place_id'):
with self.db_connection.cursor() as db_cursor:
db_cursor.execute(SQL("CREATE INDEX {} ON {} USING btree(place_id) {}")
.format(Identifier(index_prefix + 'place_id'),
Identifier(base_table),
SQL(sql_tablespace)))
-
def _grant_access_to_webuser(self, phrase_class: str, phrase_type: str) -> None:
"""
Grant access on read to the table place_classtype for the webuser.
# Delete place_classtype tables corresponding to class/type which
# are not on the wiki anymore.
- with self.db_connection.cursor() as db_cursor:
- for table in self.table_phrases_to_delete:
- self.statistics_handler.notify_one_table_deleted()
- db_cursor.drop_table(table)
+ drop_tables(self.db_connection, *self.table_phrases_to_delete)
+ for _ in self.table_phrases_to_delete:
+ self.statistics_handler.notify_one_table_deleted()
-# SPDX-License-Identifier: GPL-2.0-only
+# SPDX-License-Identifier: GPL-3.0-or-later
#
# This file is part of Nominatim. (https://nominatim.org)
#
-# Copyright (C) 2022 by the Nominatim developer community.
+# Copyright (C) 2024 by the Nominatim developer community.
# For a full list of authors see the git log.
"""
Module containing the SPWikiLoader class.
import re
import logging
-from nominatim.config import Configuration
-from nominatim.tools.special_phrases.special_phrase import SpecialPhrase
-from nominatim.tools.exec_utils import get_url
+from ...config import Configuration
+from ...utils.url_utils import get_url
+from .special_phrase import SpecialPhrase
LOG = logging.getLogger()
+
def _get_wiki_content(lang: str) -> str:
"""
Request and return the wiki page's content
self.type_fix_pattern = re.compile(r'\"|"')
self.languages = self.config.get_str_list('LANGUAGES') or \
- ['af', 'ar', 'br', 'ca', 'cs', 'de', 'en', 'es',
- 'et', 'eu', 'fa', 'fi', 'fr', 'gl', 'hr', 'hu',
- 'ia', 'is', 'it', 'ja', 'mk', 'nl', 'no', 'pl',
- 'ps', 'pt', 'ru', 'sk', 'sl', 'sv', 'uk', 'vi']
-
+ ['af', 'ar', 'br', 'ca', 'cs', 'de', 'en', 'es',
+ 'et', 'eu', 'fa', 'fi', 'fr', 'gl', 'hr', 'hu',
+ 'ia', 'is', 'it', 'ja', 'mk', 'nl', 'no', 'pl',
+ 'ps', 'pt', 'ru', 'sk', 'sl', 'sv', 'uk', 'vi',
+ 'lv', 'tr']
def generate_phrases(self) -> Iterable[SpecialPhrase]:
""" Download the wiki pages for the configured languages
-# SPDX-License-Identifier: GPL-2.0-only
+# SPDX-License-Identifier: GPL-3.0-or-later
#
# This file is part of Nominatim. (https://nominatim.org)
#
-# Copyright (C) 2022 by the Nominatim developer community.
+# Copyright (C) 2024 by the Nominatim developer community.
# For a full list of authors see the git log.
"""
Module containing the class SpecialPhrase.
"""
from typing import Any
+
class SpecialPhrase:
"""
Model representing a special phrase.
return False
return self.p_label == other.p_label \
- and self.p_class == other.p_class \
- and self.p_type == other.p_type \
- and self.p_operator == other.p_operator
+ and self.p_class == other.p_class \
+ and self.p_type == other.p_type \
+ and self.p_operator == other.p_operator
def __hash__(self) -> int:
return hash((self.p_label, self.p_class, self.p_type, self.p_operator))
-# SPDX-License-Identifier: GPL-2.0-only
+# SPDX-License-Identifier: GPL-3.0-or-later
#
# This file is part of Nominatim. (https://nominatim.org)
#
-# Copyright (C) 2022 by the Nominatim developer community.
+# Copyright (C) 2024 by the Nominatim developer community.
# For a full list of authors see the git log.
"""
Functions for importing tiger data and handling tarball and directory files
"""
-from typing import Any, TextIO, List, Union, cast
+from typing import Any, TextIO, List, Union, cast, Iterator, Dict
import csv
import io
import logging
import os
import tarfile
-from psycopg2.extras import Json
+from psycopg.types.json import Json
-from nominatim.config import Configuration
-from nominatim.db.connection import connect
-from nominatim.db.async_connection import WorkerPool
-from nominatim.db.sql_preprocessor import SQLPreprocessor
-from nominatim.errors import UsageError
-from nominatim.data.place_info import PlaceInfo
-from nominatim.tokenizer.base import AbstractAnalyzer, AbstractTokenizer
+from ..config import Configuration
+from ..db.connection import connect
+from ..db.sql_preprocessor import SQLPreprocessor
+from ..errors import UsageError
+from ..db.query_pool import QueryPool
+from ..data.place_info import PlaceInfo
+from ..tokenizer.base import AbstractTokenizer
+from . import freeze
LOG = logging.getLogger()
+
class TigerInput:
""" Context manager that goes through Tiger input files which may
either be in a directory or gzipped together in a tar file.
if data_dir.endswith('.tar.gz'):
try:
- self.tar_handle = tarfile.open(data_dir) # pylint: disable=consider-using-with
+ self.tar_handle = tarfile.open(data_dir)
except tarfile.ReadError as err:
LOG.fatal("Cannot open '%s'. Is this a tar file?", data_dir)
raise UsageError("Cannot open Tiger data file.") from err
if not self.files:
LOG.warning("Tiger data import selected but no files found at %s", data_dir)
-
def __enter__(self) -> 'TigerInput':
return self
-
def __exit__(self, exc_type: Any, exc_val: Any, exc_tb: Any) -> None:
if self.tar_handle:
self.tar_handle.close()
self.tar_handle = None
+ def __bool__(self) -> bool:
+ return bool(self.files)
- def next_file(self) -> TextIO:
+ def get_file(self, fname: Union[str, tarfile.TarInfo]) -> TextIO:
""" Return a file handle to the next file to be processed.
Raises an IndexError if there is no file left.
"""
- fname = self.files.pop(0)
-
if self.tar_handle is not None:
extracted = self.tar_handle.extractfile(fname)
assert extracted is not None
return open(cast(str, fname), encoding='utf-8')
-
- def __len__(self) -> int:
- return len(self.files)
+ def __iter__(self) -> Iterator[Dict[str, Any]]:
+ """ Iterate over the lines in each file.
+ """
+ for fname in self.files:
+ fd = self.get_file(fname)
+ yield from csv.DictReader(fd, delimiter=';')
-def handle_threaded_sql_statements(pool: WorkerPool, fd: TextIO,
- analyzer: AbstractAnalyzer) -> None:
- """ Handles sql statement with multiplexing
- """
- lines = 0
- # Using pool of database connections to execute sql statements
-
- sql = "SELECT tiger_line_import(%s, %s, %s, %s, %s, %s)"
-
- for row in csv.DictReader(fd, delimiter=';'):
- try:
- address = dict(street=row['street'], postcode=row['postcode'])
- args = ('SRID=4326;' + row['geometry'],
- int(row['from']), int(row['to']), row['interpolation'],
- Json(analyzer.process_place(PlaceInfo({'address': address}))),
- analyzer.normalize_postcode(row['postcode']))
- except ValueError:
- continue
- pool.next_free_worker().perform(sql, args=args)
-
- lines += 1
- if lines == 1000:
- print('.', end='', flush=True)
- lines = 0
-
-
-def add_tiger_data(data_dir: str, config: Configuration, threads: int,
- tokenizer: AbstractTokenizer) -> int:
+async def add_tiger_data(data_dir: str, config: Configuration, threads: int,
+ tokenizer: AbstractTokenizer) -> int:
""" Import tiger data from directory or tar file `data dir`.
"""
dsn = config.get_libpq_dsn()
+ with connect(dsn) as conn:
+ if freeze.is_frozen(conn):
+ raise UsageError("Tiger cannot be imported when database frozen (Github issue #3048)")
+
with TigerInput(data_dir) as tar:
if not tar:
return 1
# sql_query in <threads - 1> chunks.
place_threads = max(1, threads - 1)
- with WorkerPool(dsn, place_threads, ignore_sql_errors=True) as pool:
+ async with QueryPool(dsn, place_threads, autocommit=True) as pool:
with tokenizer.name_analyzer() as analyzer:
- while tar:
- with tar.next_file() as fd:
- handle_threaded_sql_statements(pool, fd, analyzer)
-
- print('\n')
+ lines = 0
+ for row in tar:
+ try:
+ address = dict(street=row['street'], postcode=row['postcode'])
+ args = ('SRID=4326;' + row['geometry'],
+ int(row['from']), int(row['to']), row['interpolation'],
+ Json(analyzer.process_place(PlaceInfo({'address': address}))),
+ analyzer.normalize_postcode(row['postcode']))
+ except ValueError:
+ continue
+
+ await pool.put_query(
+ """SELECT tiger_line_import(%s::GEOMETRY, %s::INT,
+ %s::INT, %s::TEXT, %s::JSONB, %s::TEXT)""",
+ args)
+
+ lines += 1
+ if lines == 1000:
+ print('.', end='', flush=True)
+ lines = 0
+
+ print('', flush=True)
LOG.warning("Creating indexes on Tiger data")
with connect(dsn) as conn:
-# SPDX-License-Identifier: GPL-2.0-only
+# SPDX-License-Identifier: GPL-3.0-or-later
#
# This file is part of Nominatim. (https://nominatim.org)
#
-# Copyright (C) 2022 by the Nominatim developer community.
+# Copyright (C) 2024 by the Nominatim developer community.
# For a full list of authors see the git log.
"""
Type definitions for typing annotations.
"""
from typing import Any, Union, Mapping, TypeVar, Sequence, TYPE_CHECKING
-# Generics variable names do not confirm to naming styles, ignore globally here.
-# pylint: disable=invalid-name,abstract-method,multiple-statements
-# pylint: disable=missing-class-docstring,useless-import-alias
if TYPE_CHECKING:
- import psycopg2.sql
- import psycopg2.extensions
- import psycopg2.extras
import os
StrPath = Union[str, 'os.PathLike[str]']
SysEnv = Mapping[str, str]
-# psycopg2-related types
-
-Query = Union[str, bytes, 'psycopg2.sql.Composable']
+# psycopg-related types
T_ResultKey = TypeVar('T_ResultKey', int, str)
+
class DictCursorResult(Mapping[str, Any]):
def __getitem__(self, x: Union[int, str]) -> Any: ...
-DictCursorResults = Sequence[DictCursorResult]
-T_cursor = TypeVar('T_cursor', bound='psycopg2.extensions.cursor')
+DictCursorResults = Sequence[DictCursorResult]
# The following typing features require typing_extensions to work
# on all supported Python versions.
-# SPDX-License-Identifier: GPL-2.0-only
+# SPDX-License-Identifier: GPL-3.0-or-later
#
# This file is part of Nominatim. (https://nominatim.org)
#
-# Copyright (C) 2022 by the Nominatim developer community.
+# Copyright (C) 2024 by the Nominatim developer community.
# For a full list of authors see the git log.
"""
Functions for computation of centroids.
from typing import Tuple, Any
from collections.abc import Collection
+
class PointsCentroid:
""" Centroid computation from single points using an online algorithm.
More points may be added at any time.
return (float(self.sum_x/self.count)/10000000,
float(self.sum_y/self.count)/10000000)
-
def __len__(self) -> int:
return self.count
-
def __iadd__(self, other: Any) -> 'PointsCentroid':
if isinstance(other, Collection) and len(other) == 2:
if all(isinstance(p, (float, int)) for p in other):
--- /dev/null
+# SPDX-License-Identifier: GPL-3.0-or-later
+#
+# This file is part of Nominatim. (https://nominatim.org)
+#
+# Copyright (C) 2024 by the Nominatim developer community.
+# For a full list of authors see the git log.
+"""
+Helper functions for accessing URL.
+"""
+from typing import IO # noqa
+import logging
+import urllib.request as urlrequest
+
+from ..version import NOMINATIM_VERSION
+
+LOG = logging.getLogger()
+
+
+def get_url(url: str) -> str:
+ """ Get the contents from the given URL and return it as a UTF-8 string.
+
+ This version makes sure that an appropriate user agent is sent.
+ """
+ headers = {"User-Agent": f"Nominatim/{NOMINATIM_VERSION!s}"}
+
+ try:
+ request = urlrequest.Request(url, headers=headers)
+ with urlrequest.urlopen(request) as response: # type: IO[bytes]
+ return response.read().decode('utf-8')
+ except Exception:
+ LOG.fatal('Failed to load URL: %s', url)
+ raise
--- /dev/null
+# SPDX-License-Identifier: GPL-3.0-or-later
+#
+# This file is part of Nominatim. (https://nominatim.org)
+#
+# Copyright (C) 2024 by the Nominatim developer community.
+# For a full list of authors see the git log.
+"""
+Version information for Nominatim.
+"""
+from typing import NamedTuple, Optional
+
+
+class NominatimVersion(NamedTuple):
+ """ Version information for Nominatim. We follow semantic versioning.
+
+ Major, minor and patch_level refer to the last released version.
+ The database patch level tracks important changes between releases
+ and must always be increased when there is a change to the database or code
+ that requires a migration.
+
+ When adding a migration on the development branch, raise the patch level
+ to 99 to make sure that the migration is applied when updating from a
+ patch release to the next minor version. Patch releases usually shouldn't
+ have migrations in them. When they are needed, then make sure that the
+ migration can be reapplied and set the migration version to the appropriate
+ patch level when cherry-picking the commit with the migration.
+ """
+
+ major: int
+ minor: int
+ patch_level: int
+ db_patch_level: int
+
+ def __str__(self) -> str:
+ if self.db_patch_level is None:
+ return f"{self.major}.{self.minor}.{self.patch_level}"
+
+ return f"{self.major}.{self.minor}.{self.patch_level}-{self.db_patch_level}"
+
+ def release_version(self) -> str:
+ """ Return the release version in semantic versioning format.
+
+ The release version does not include the database patch version.
+ """
+ return f"{self.major}.{self.minor}.{self.patch_level}"
+
+
+def parse_version(version: str) -> NominatimVersion:
+ """ Parse a version string into a version consisting of a tuple of
+ four ints: major, minor, patch level, database patch level
+
+ This is the reverse operation of formatting the version with str().
+ """
+ parts = version.split('.')
+ return NominatimVersion(*[int(x) for x in parts[:2] + parts[2].split('-')])
+
+
+NOMINATIM_VERSION = parse_version('5.0.0-0')
+
+POSTGRESQL_REQUIRED_VERSION = (12, 0)
+POSTGIS_REQUIRED_VERSION = (3, 0)
+OSM2PGSQL_REQUIRED_VERSION = (1, 8)
+
+# Cmake sets a variable @GIT_HASH@ by executing 'git log'. It is not run
+# on every execution of 'make'.
+# cmake/tool-installed.tmpl is used to build the binary 'nominatim'. Inside
+# there is a call to set the variable value below.
+GIT_COMMIT_HASH: Optional[str] = None
-all: bdd php
-no-test-db: bdd-no-test-db php
+all: bdd python
bdd:
cd bdd && behave -DREMOVE_TEMPLATE=1
-icu:
- cd bdd && behave -DREMOVE_TEMPLATE=1 -DTOKENIZER=icu
-
-php:
- cd php && phpunit ./
-
python:
pytest python
-.PHONY: bdd php no-test-db
+.PHONY: bdd python
+@SQLITE
@APIDB
Feature: Localization of search results
Feature: Object details
Testing different parameter options for details API.
+ @SQLITE
Scenario: JSON Details
When sending json details query for W297699560
Then the result is valid json
And result has attributes geometry
And result has not attributes keywords,address,linked_places,parentof
+ And results contain in field geometry
+ | type |
+ | Point |
+ @SQLITE
Scenario: JSON Details with pretty printing
When sending json details query for W297699560
| pretty |
And result has attributes geometry
And result has not attributes keywords,address,linked_places,parentof
+ @SQLITE
Scenario: JSON Details with addressdetails
When sending json details query for W297699560
| addressdetails |
Then the result is valid json
And result has attributes address
+ @SQLITE
Scenario: JSON Details with linkedplaces
When sending json details query for R123924
| linkedplaces |
Then the result is valid json
And result has attributes linked_places
+ @SQLITE
Scenario: JSON Details with hierarchy
When sending json details query for W297699560
| hierarchy |
Then the result is valid json
And result has attributes hierarchy
+ @SQLITE
Scenario: JSON Details with grouped hierarchy
When sending json details query for W297699560
| hierarchy | group_hierarchy |
| keywords |
| 1 |
Then the result is valid json
+ And result has attributes keywords
+ @SQLITE
Scenario Outline: JSON details with full geometry
When sending json details query for <osmid>
| polygon_geojson |
| 1 |
Then the result is valid json
And result has attributes geometry
+ And results contain in field geometry
+ | type |
+ | <geometry> |
Examples:
- | osmid |
- | W297699560 |
- | W243055645 |
- | W243055716 |
- | W43327921 |
+ | osmid | geometry |
+ | W297699560 | LineString |
+ | W243055645 | Polygon |
+ | W243055716 | Polygon |
+ | W43327921 | LineString |
+@SQLITE
@APIDB
Feature: Object details
Check details page for correctness
- Scenario: Details by place ID
- When sending details query for 107077
- Then the result is valid json
- And results contain
- | place_id |
- | 107077 |
-
Scenario Outline: Details via OSM id
When sending details query for <type><id>
Then the result is valid json
| W | 43327921 |
| R | 123924 |
- Scenario: Details for interpolation way just return the dependent street
- When sending details query for W1
- Then the result is valid json
- And results contain
- | category |
- | highway |
Scenario Outline: Details for different class types for the same OSM id
When sending details query for N300209696:<class>
Examples:
| class |
| tourism |
- | natural |
| mountain_pass |
+
Scenario Outline: Details via unknown OSM id
When sending details query for <object>
Then a HTTP 404 is returned
| N300209696:highway |
+ Scenario: Details for interpolation way return the interpolation
+ When sending details query for W1
+ Then the result is valid json
+ And results contain
+ | category | type | osm_type | osm_id | admin_level |
+ | place | houses | W | 1 | 15 |
+
+
+ @Fail
+ Scenario: Details for interpolation way by place id return the interpolation
+ When sending details query for 112871
+ Then the result is valid json
+ And results contain
+ | category | type | admin_level |
+ | place | houses | 15 |
+ And result has not attributes osm_type,osm_id
+
+
+ @Fail
+ Scenario: Details for a postcode point return the postcode
+ When sending details query for 112820
+ Then the result is valid json
+ And results contain
+ | category | type | admin_level |
+ | place | postcode | 15 |
+ And result has not attributes osm_type,osm_id
+
+
+ Scenario Outline: Details debug output returns no errors
+ When sending debug details query for <feature>
+ Then the result is valid html
+
+ Examples:
+ | feature |
+ | N5484325405 |
+ | W1 |
+ | 112820 |
+ | 112871 |
+@SQLITE
@APIDB
Feature: Places by osm_type and osm_id Tests
Simple tests for errors in various response formats.
+@SQLITE
@APIDB
Feature: Places by osm_type and osm_id Tests
Simple tests for response format.
| jsonv2 |
| geojson |
| xml |
+
+
+ Scenario: Lookup of a linked place
+ When sending geocodejson lookup query for N1932181216
+ Then exactly 1 result is returned
+ And results contain
+ | name |
+ | Vaduz |
+++ /dev/null
-@APIDB
-Feature: Parameters for Reverse API
- Testing correctness of geocodejson output.
-
- Scenario: City housenumber-level address with street
- When sending geocodejson reverse coordinates 47.1068011,9.52810091
- Then results contain
- | housenumber | street | postcode | city | country |
- | 8 | Im Winkel | 9495 | Triesen | Liechtenstein |
-
- Scenario: Town street-level address with street
- When sending geocodejson reverse coordinates 47.066,9.504
- | zoom |
- | 16 |
- Then results contain
- | name | city | postcode | country |
- | Gnetsch | Balzers | 9496 | Liechtenstein |
-
- Scenario: Poi street-level address with footway
- When sending geocodejson reverse coordinates 47.0653,9.5007
- Then results contain
- | street | city | postcode | country |
- | Burgweg | Balzers | 9496 | Liechtenstein |
-
- Scenario: City address with suburb
- When sending geocodejson reverse coordinates 47.146861,9.511771
- Then results contain
- | housenumber | street | district | city | postcode | country |
- | 5 | Lochgass | Ebenholz | Vaduz | 9490 | Liechtenstein |
--- /dev/null
+@SQLITE
+@APIDB
+Feature: Geometries for reverse geocoding
+ Tests for returning geometries with reverse
+
+
+ Scenario: Polygons are returned fully by default
+ When sending v1/reverse at 47.13803,9.52264
+ | polygon_text |
+ | 1 |
+ Then results contain
+ | geotext |
+ | ^POLYGON\(\(9.5225302 47.138066, ?9.5225348 47.1379282, ?9.5226142 47.1379294, ?9.5226143 47.1379257, ?9.522615 47.137917, ?9.5226225 47.1379098, ?9.5226334 47.1379052, ?9.5226461 47.1379037, ?9.5226588 47.1379056, ?9.5226693 47.1379107, ?9.5226762 47.1379181, ?9.5226762 47.1379268, ?9.5226761 47.1379308, ?9.5227366 47.1379317, ?9.5227352 47.1379753, ?9.5227608 47.1379757, ?9.5227595 47.1380148, ?9.5227355 47.1380145, ?9.5227337 47.1380692, ?9.5225302 47.138066\)\) |
+
+
+ Scenario: Polygons can be slightly simplified
+ When sending v1/reverse at 47.13803,9.52264
+ | polygon_text | polygon_threshold |
+ | 1 | 0.00001 |
+ Then results contain
+ | geotext |
+ | ^POLYGON\(\(9.5225302 47.138066, ?9.5225348 47.1379282, ?9.5226142 47.1379294, ?9.5226225 47.1379098, ?9.5226588 47.1379056, ?9.5226761 47.1379308, ?9.5227366 47.1379317, ?9.5227352 47.1379753, ?9.5227608 47.1379757, ?9.5227595 47.1380148, ?9.5227355 47.1380145, ?9.5227337 47.1380692, ?9.5225302 47.138066\)\) |
+
+
+ Scenario: Polygons can be much simplified
+ When sending v1/reverse at 47.13803,9.52264
+ | polygon_text | polygon_threshold |
+ | 1 | 0.9 |
+ Then results contain
+ | geotext |
+ | ^POLYGON\(\([0-9. ]+, ?[0-9. ]+, ?[0-9. ]+, ?[0-9. ]+(, ?[0-9. ]+)?\)\) |
+
+
+ Scenario: For polygons return the centroid as center point
+ When sending v1/reverse at 47.13836,9.52304
+ Then results contain
+ | centroid |
+ | 9.52271080 47.13818045 |
+
+
+ Scenario: For streets return the closest point as center point
+ When sending v1/reverse at 47.13368,9.52942
+ Then results contain
+ | centroid |
+ | 9.529431527 47.13368172 |
+@SQLITE
@APIDB
Feature: Localization of reverse search results
Scenario: default language
- When sending json reverse coordinates 47.14,9.55
+ When sending v1/reverse at 47.14,9.55
Then result addresses contain
| ID | country |
| 0 | Liechtenstein |
Scenario: accept-language parameter
- When sending json reverse coordinates 47.14,9.55
+ When sending v1/reverse at 47.14,9.55
| accept-language |
| ja,en |
Then result addresses contain
Given the HTTP header
| accept-language |
| fo-ca,fo;q=0.8,en-ca;q=0.5,en;q=0.3 |
- When sending json reverse coordinates 47.14,9.55
+ When sending v1/reverse at 47.14,9.55
Then result addresses contain
| ID | country |
| 0 | Liktinstein |
Given the HTTP header
| accept-language |
| fo-ca,fo;q=0.8,en-ca;q=0.5,en;q=0.3 |
- When sending json reverse coordinates 47.14,9.55
+ When sending v1/reverse at 47.14,9.55
| accept-language |
| en |
Then result addresses contain
--- /dev/null
+@SQLITE
+@APIDB
+Feature: Layer parameter in reverse geocoding
+ Testing correct function of layer selection while reverse geocoding
+
+ Scenario: POIs are selected by default
+ When sending v1/reverse at 47.14077,9.52414
+ Then results contain
+ | category | type |
+ | tourism | viewpoint |
+
+
+ Scenario Outline: Same address level POI with different layers
+ When sending v1/reverse at 47.14077,9.52414
+ | layer |
+ | <layer> |
+ Then results contain
+ | category |
+ | <category> |
+
+
+ Examples:
+ | layer | category |
+ | address | highway |
+ | poi,address | tourism |
+ | address,poi | tourism |
+ | natural | waterway |
+ | address,natural | highway |
+ | natural,poi | tourism |
+
+
+ Scenario Outline: POIs are not selected without housenumber for address layer
+ When sending v1/reverse at 47.13816,9.52168
+ | layer |
+ | <layer> |
+ Then results contain
+ | category | type |
+ | <category> | <type> |
+
+ Examples:
+ | layer | category | type |
+ | address,poi | highway | bus_stop |
+ | address | amenity | parking |
+
+
+ Scenario: Between natural and low-zoom address prefer natural
+ When sending v1/reverse at 47.13636,9.52094
+ | layer | zoom |
+ | natural,address | 15 |
+ Then results contain
+ | category |
+ | waterway |
+
+
+ Scenario Outline: Search for mountain peaks begins at level 12
+ When sending v1/reverse at 47.08293,9.57109
+ | layer | zoom |
+ | natural | <zoom> |
+ Then results contain
+ | category | type |
+ | <category> | <type> |
+
+ Examples:
+ | zoom | category | type |
+ | 12 | natural | peak |
+ | 13 | waterway | river |
+
+
+ Scenario Outline: Reverse search with manmade layers
+ When sending v1/reverse at 32.46904,-86.44439
+ | layer |
+ | <layer> |
+ Then results contain
+ | category | type |
+ | <category> | <type> |
+
+ Examples:
+ | layer | category | type |
+ | manmade | leisure | park |
+ | address | highway | residential |
+ | poi | leisure | pitch |
+ | natural | waterway | river |
+ | natural,manmade | leisure | park |
+++ /dev/null
-@APIDB
-Feature: Parameters for Reverse API
- Testing different parameter options for reverse API.
-
- Scenario Outline: Reverse-geocoding without address
- When sending <format> reverse coordinates 47.13,9.56
- | addressdetails |
- | 0 |
- Then exactly 1 result is returned
- And result has not attributes address
-
- Examples:
- | format |
- | json |
- | jsonv2 |
- | geojson |
- | xml |
-
- Scenario Outline: Coordinates must be floating-point numbers
- When sending reverse coordinates <coords>
- Then a HTTP 400 is returned
-
- Examples:
- | coords |
- | -45.3,; |
- | gkjd,50 |
-
- Scenario Outline: Zoom levels between 4 and 18 are allowed
- When sending reverse coordinates 47.14122383,9.52169581334
- | zoom |
- | <zoom> |
- Then exactly 1 result is returned
- And result addresses contain
- | country_code |
- | li |
-
- Examples:
- | zoom |
- | 4 |
- | 5 |
- | 6 |
- | 7 |
- | 8 |
- | 9 |
- | 10 |
- | 11 |
- | 12 |
- | 13 |
- | 14 |
- | 15 |
- | 16 |
- | 17 |
- | 18 |
-
- Scenario: Non-numerical zoom levels return an error
- When sending reverse coordinates 47.14122383,9.52169581334
- | zoom |
- | adfe |
- Then a HTTP 400 is returned
-
- Scenario Outline: Reverse Geocoding with extratags
- When sending <format> reverse coordinates 47.1395013150811,9.522098077031046
- | extratags |
- | 1 |
- Then result 0 has attributes extratags
-
- Examples:
- | format |
- | xml |
- | json |
- | jsonv2 |
- | geojson |
-
- Scenario Outline: Reverse Geocoding with namedetails
- When sending <format> reverse coordinates 47.1395013150811,9.522098077031046
- | namedetails |
- | 1 |
- Then result 0 has attributes namedetails
-
- Examples:
- | format |
- | xml |
- | json |
- | jsonv2 |
- | geojson |
-
- Scenario Outline: Reverse Geocoding contains TEXT geometry
- When sending <format> reverse coordinates 47.165989816710066,9.515774846076965
- | polygon_text |
- | 1 |
- Then result 0 has attributes <response_attribute>
-
- Examples:
- | format | response_attribute |
- | xml | geotext |
- | json | geotext |
- | jsonv2 | geotext |
-
- Scenario Outline: Reverse Geocoding contains SVG geometry
- When sending <format> reverse coordinates 47.165989816710066,9.515774846076965
- | polygon_svg |
- | 1 |
- Then result 0 has attributes <response_attribute>
-
- Examples:
- | format | response_attribute |
- | xml | geosvg |
- | json | svg |
- | jsonv2 | svg |
-
- Scenario Outline: Reverse Geocoding contains KML geometry
- When sending <format> reverse coordinates 47.165989816710066,9.515774846076965
- | polygon_kml |
- | 1 |
- Then result 0 has attributes <response_attribute>
-
- Examples:
- | format | response_attribute |
- | xml | geokml |
- | json | geokml |
- | jsonv2 | geokml |
-
- Scenario Outline: Reverse Geocoding contains GEOJSON geometry
- When sending <format> reverse coordinates 47.165989816710066,9.515774846076965
- | polygon_geojson |
- | 1 |
- Then result 0 has attributes <response_attribute>
-
- Examples:
- | format | response_attribute |
- | xml | geojson |
- | json | geojson |
- | jsonv2 | geojson |
- | geojson | geojson |
-
- Scenario Outline: Reverse Geocoding in geojson format contains no non-geojson geometry
- When sending geojson reverse coordinates 47.165989816710066,9.515774846076965
- | polygon_text | polygon_svg | polygon_geokml |
- | 1 | 1 | 1 |
- Then result 0 has not attributes <response_attribute>
-
- Examples:
- | response_attribute |
- | geotext |
- | polygonpoints |
- | svg |
- | geokml |
+@SQLITE
@APIDB
Feature: Reverse geocoding
Testing the reverse function
+ Scenario Outline: Simple reverse-geocoding with no results
+ When sending v1/reverse at <lat>,<lon>
+ Then exactly 0 results are returned
+
+ Examples:
+ | lat | lon |
+ | 0.0 | 0.0 |
+ | 91.3 | 0.4 |
+ | -700 | 0.4 |
+ | 0.2 | 324.44 |
+ | 0.2 | -180.4 |
+
+
+ Scenario: Unknown countries fall back to default country grid
+ When sending v1/reverse at 45.174,-103.072
+ Then results contain
+ | category | type | display_name |
+ | place | country | United States |
+
+
@Tiger
Scenario: TIGER house number
- When sending jsonv2 reverse coordinates 32.4752389363,-86.4810198619
+ When sending v1/reverse at 32.4752389363,-86.4810198619
Then results contain
- | osm_type | category | type |
- | way | place | house |
+ | category | type |
+ | place | house |
And result addresses contain
| house_number | road | postcode | country_code |
| 707 | Upper Kingston Road | 36067 | us |
@Tiger
Scenario: No TIGER house number for zoom < 18
- When sending jsonv2 reverse coordinates 32.4752389363,-86.4810198619
+ When sending v1/reverse at 32.4752389363,-86.4810198619
| zoom |
| 17 |
Then results contain
| way | highway |
And result addresses contain
| road | postcode | country_code |
- | Upper Kingston Road | 30607 | us |
+ | Upper Kingston Road | 36067 | us |
Scenario: Interpolated house number
- When sending jsonv2 reverse coordinates 47.118533,9.57056562
+ When sending v1/reverse at 47.118533,9.57056562
Then results contain
| osm_type | category | type |
| way | place | house |
| 1019 | Grosssteg |
Scenario: Address with non-numerical house number
- When sending jsonv2 reverse coordinates 47.107465,9.52838521614
+ When sending v1/reverse at 47.107465,9.52838521614
Then result addresses contain
| house_number | road |
| 39A/B | Dorfstrasse |
Scenario: Address with numerical house number
- When sending jsonv2 reverse coordinates 47.168440329479594,9.511551699184338
+ When sending v1/reverse at 47.168440329479594,9.511551699184338
Then result addresses contain
| house_number | road |
| 6 | Schmedgässle |
Scenario Outline: Zoom levels below 5 result in country
- When sending jsonv2 reverse coordinates 47.16,9.51
+ When sending v1/reverse at 47.16,9.51
| zoom |
| <zoom> |
Then results contain
| 4 |
Scenario: When on a street, the closest interpolation is shown
- When sending jsonv2 reverse coordinates 47.118457166193245,9.570678289621355
+ When sending v1/reverse at 47.118457166193245,9.570678289621355
| zoom |
| 18 |
Then results contain
# github 2214
Scenario: Interpolations do not override house numbers when they are closer
- When sending jsonv2 reverse coordinates 47.11778,9.57255
+ When sending v1/reverse at 47.11778,9.57255
| zoom |
| 18 |
Then results contain
| 5, Grosssteg, Steg, Triesenberg, Oberland, 9497, Liechtenstein |
Scenario: Interpolations do not override house numbers when they are closer (2)
- When sending jsonv2 reverse coordinates 47.11834,9.57167
+ When sending v1/reverse at 47.11834,9.57167
| zoom |
| 18 |
Then results contain
| 3, Grosssteg, Sücka, Triesenberg, Oberland, 9497, Liechtenstein |
Scenario: When on a street with zoom 18, the closest housenumber is returned
- When sending jsonv2 reverse coordinates 47.11755503977281,9.572722250405036
+ When sending v1/reverse at 47.11755503977281,9.572722250405036
| zoom |
| 18 |
Then result addresses contain
+++ /dev/null
-@APIDB
-Feature: Simple Reverse Tests
- Simple tests for internal server errors and response format.
-
- Scenario Outline: Simple reverse-geocoding
- When sending reverse coordinates <lat>,<lon>
- Then the result is valid xml
- When sending xml reverse coordinates <lat>,<lon>
- Then the result is valid xml
- When sending json reverse coordinates <lat>,<lon>
- Then the result is valid json
- When sending jsonv2 reverse coordinates <lat>,<lon>
- Then the result is valid json
- When sending geojson reverse coordinates <lat>,<lon>
- Then the result is valid geojson
-
- Examples:
- | lat | lon |
- | 0.0 | 0.0 |
- | -34.830 | -56.105 |
- | 45.174 | -103.072 |
- | 21.156 | -12.2744 |
-
- Scenario Outline: Testing different parameters
- When sending reverse coordinates 53.603,10.041
- | param | value |
- | <parameter> | <value> |
- Then the result is valid xml
- When sending xml reverse coordinates 53.603,10.041
- | param | value |
- | <parameter> | <value> |
- Then the result is valid xml
- When sending json reverse coordinates 53.603,10.041
- | param | value |
- | <parameter> | <value> |
- Then the result is valid json
- When sending jsonv2 reverse coordinates 53.603,10.041
- | param | value |
- | <parameter> | <value> |
- Then the result is valid json
- When sending geojson reverse coordinates 53.603,10.041
- | param | value |
- | <parameter> | <value> |
- Then the result is valid geojson
- When sending geocodejson reverse coordinates 53.603,10.041
- | param | value |
- | <parameter> | <value> |
- Then the result is valid geocodejson
-
- Examples:
- | parameter | value |
- | polygon_text | 1 |
- | polygon_text | 0 |
- | polygon_kml | 1 |
- | polygon_kml | 0 |
- | polygon_geojson | 1 |
- | polygon_geojson | 0 |
- | polygon_svg | 1 |
- | polygon_svg | 0 |
-
- Scenario Outline: Wrapping of legal jsonp requests
- When sending <format> reverse coordinates 67.3245,0.456
- | json_callback |
- | foo |
- Then the result is valid <outformat>
-
- Examples:
- | format | outformat |
- | json | json |
- | jsonv2 | json |
- | geojson | geojson |
-
- Scenario Outline: Boundingbox is returned
- When sending <format> reverse coordinates 47.11,9.57
- | zoom |
- | 8 |
- Then result has bounding box in 47,48,9,10
-
- Examples:
- | format |
- | json |
- | jsonv2 |
- | geojson |
- | xml |
-
- Scenario Outline: Reverse-geocoding with zoom
- When sending <format> reverse coordinates 47.11,9.57
- | zoom |
- | 10 |
- Then exactly 1 result is returned
-
- Examples:
- | format |
- | json |
- | jsonv2 |
- | geojson |
- | xml |
-
- Scenario: Missing lon parameter
- When sending reverse coordinates 52.52,
- Then a HTTP 400 is returned
-
- Scenario: Missing lat parameter
- When sending reverse coordinates ,52.52
- Then a HTTP 400 is returned
-
- Scenario: Missing osm_id parameter
- When sending reverse coordinates ,
- | osm_type |
- | N |
- Then a HTTP 400 is returned
-
- Scenario: Missing osm_type parameter
- When sending reverse coordinates ,
- | osm_id |
- | 3498564 |
- Then a HTTP 400 is returned
-
- Scenario Outline: Bad format for lat or lon
- When sending reverse coordinates ,
- | lat | lon |
- | <lat> | <lon> |
- Then a HTTP 400 is returned
-
- Examples:
- | lat | lon |
- | 48.9660 | 8,4482 |
- | 48,9660 | 8.4482 |
- | 48,9660 | 8,4482 |
- | 48.966.0 | 8.4482 |
- | 48.966 | 8.448.2 |
- | Nan | 8.448 |
- | 48.966 | Nan |
-
- Scenario: Reverse Debug output returns no errors
- When sending debug reverse coordinates 47.11,9.57
- Then a HTTP 200 is returned
--- /dev/null
+@SQLITE
+@APIDB
+Feature: Geocodejson for Reverse API
+ Testing correctness of geocodejson output (API version v1).
+
+ Scenario Outline: Simple OSM result
+ When sending v1/reverse at 47.066,9.504 with format geocodejson
+ | addressdetails |
+ | <has_address> |
+ Then result has attributes place_id, accuracy
+ And result has <attributes> country,postcode,county,city,district,street,housenumber, admin
+ Then results contain
+ | osm_type | osm_id | osm_key | osm_value | type |
+ | node | 6522627624 | shop | bakery | house |
+ And results contain
+ | name | label |
+ | Dorfbäckerei Herrmann | Dorfbäckerei Herrmann, 29, Gnetsch, Mäls, Balzers, Oberland, 9496, Liechtenstein |
+ And results contain in field geojson
+ | type | coordinates |
+ | Point | [9.5036065, 47.0660892] |
+ And results contain in field __geocoding
+ | version | licence | attribution |
+ | 0.1.0 | ODbL | ^Data © OpenStreetMap contributors, ODbL 1.0. https?://osm.org/copyright$ |
+
+ Examples:
+ | has_address | attributes |
+ | 1 | attributes |
+ | 0 | not attributes |
+
+
+ Scenario: City housenumber-level address with street
+ When sending v1/reverse at 47.1068011,9.52810091 with format geocodejson
+ Then results contain
+ | housenumber | street | postcode | city | country |
+ | 8 | Im Winkel | 9495 | Triesen | Liechtenstein |
+ And results contain in field admin
+ | level6 | level8 |
+ | Oberland | Triesen |
+
+
+ Scenario: Town street-level address with street
+ When sending v1/reverse at 47.066,9.504 with format geocodejson
+ | zoom |
+ | 16 |
+ Then results contain
+ | name | city | postcode | country |
+ | Gnetsch | Balzers | 9496 | Liechtenstein |
+
+
+ Scenario: Poi street-level address with footway
+ When sending v1/reverse at 47.06515,9.50083 with format geocodejson
+ Then results contain
+ | street | city | postcode | country |
+ | Burgweg | Balzers | 9496 | Liechtenstein |
+
+
+ Scenario: City address with suburb
+ When sending v1/reverse at 47.146861,9.511771 with format geocodejson
+ Then results contain
+ | housenumber | street | district | city | postcode | country |
+ | 5 | Lochgass | Ebenholz | Vaduz | 9490 | Liechtenstein |
+
+
+ @Tiger
+ Scenario: Tiger address
+ When sending v1/reverse at 32.4752389363,-86.4810198619 with format geocodejson
+ Then results contain
+ | osm_type | osm_id | osm_key | osm_value | type |
+ | way | 396009653 | place | house | house |
+ And results contain
+ | housenumber | street | city | county | postcode | country |
+ | 707 | Upper Kingston Road | Prattville | Autauga County | 36067 | United States |
+
+
+ Scenario: Interpolation address
+ When sending v1/reverse at 47.118533,9.57056562 with format geocodejson
+ Then results contain
+ | osm_type | osm_id | osm_key | osm_value | type |
+ | way | 1 | place | house | house |
+ And results contain
+ | label |
+ | 1019, Grosssteg, Sücka, Triesenberg, Oberland, 9497, Liechtenstein |
+ And result has not attributes name
+
+
+ Scenario: Line geometry output is supported
+ When sending v1/reverse at 47.06597,9.50467 with format geocodejson
+ | param | value |
+ | polygon_geojson | 1 |
+ Then results contain in field geojson
+ | type |
+ | LineString |
+
+
+ Scenario Outline: Only geojson polygons are supported
+ When sending v1/reverse at 47.06597,9.50467 with format geocodejson
+ | param | value |
+ | <param> | 1 |
+ Then results contain in field geojson
+ | type |
+ | Point |
+
+ Examples:
+ | param |
+ | polygon_text |
+ | polygon_svg |
+ | polygon_kml |
--- /dev/null
+@SQLITE
+@APIDB
+Feature: Geojson for Reverse API
+ Testing correctness of geojson output (API version v1).
+
+ Scenario Outline: Simple OSM result
+ When sending v1/reverse at 47.066,9.504 with format geojson
+ | addressdetails |
+ | <has_address> |
+ Then result has attributes place_id, importance, __licence
+ And result has <attributes> address
+ And results contain
+ | osm_type | osm_id | place_rank | category | type | addresstype |
+ | node | 6522627624 | 30 | shop | bakery | shop |
+ And results contain
+ | name | display_name |
+ | Dorfbäckerei Herrmann | Dorfbäckerei Herrmann, 29, Gnetsch, Mäls, Balzers, Oberland, 9496, Liechtenstein |
+ And results contain
+ | boundingbox |
+ | [47.0660392, 47.0661392, 9.5035565, 9.5036565] |
+ And results contain in field geojson
+ | type | coordinates |
+ | Point | [9.5036065, 47.0660892] |
+
+ Examples:
+ | has_address | attributes |
+ | 1 | attributes |
+ | 0 | not attributes |
+
+
+ @Tiger
+ Scenario: Tiger address
+ When sending v1/reverse at 32.4752389363,-86.4810198619 with format geojson
+ Then results contain
+ | osm_type | osm_id | category | type | addresstype | place_rank |
+ | way | 396009653 | place | house | place | 30 |
+
+
+ Scenario: Interpolation address
+ When sending v1/reverse at 47.118533,9.57056562 with format geojson
+ Then results contain
+ | osm_type | osm_id | place_rank | category | type | addresstype |
+ | way | 1 | 30 | place | house | place |
+ And results contain
+ | boundingbox |
+ | ^\[47.118495\d*, 47.118595\d*, 9.570496\d*, 9.570596\d*\] |
+ And results contain
+ | display_name |
+ | 1019, Grosssteg, Sücka, Triesenberg, Oberland, 9497, Liechtenstein |
+
+
+ Scenario: Line geometry output is supported
+ When sending v1/reverse at 47.06597,9.50467 with format geojson
+ | param | value |
+ | polygon_geojson | 1 |
+ Then results contain in field geojson
+ | type |
+ | LineString |
+
+
+ Scenario Outline: Only geojson polygons are supported
+ When sending v1/reverse at 47.06597,9.50467 with format geojson
+ | param | value |
+ | <param> | 1 |
+ Then results contain in field geojson
+ | type |
+ | Point |
+
+ Examples:
+ | param |
+ | polygon_text |
+ | polygon_svg |
+ | polygon_kml |
--- /dev/null
+@SQLITE
+@APIDB
+Feature: Json output for Reverse API
+ Testing correctness of json and jsonv2 output (API version v1).
+
+ Scenario Outline: OSM result with and without addresses
+ When sending v1/reverse at 47.066,9.504 with format json
+ | addressdetails |
+ | <has_address> |
+ Then result has <attributes> address
+ When sending v1/reverse at 47.066,9.504 with format jsonv2
+ | addressdetails |
+ | <has_address> |
+ Then result has <attributes> address
+
+ Examples:
+ | has_address | attributes |
+ | 1 | attributes |
+ | 0 | not attributes |
+
+ Scenario Outline: Simple OSM result
+ When sending v1/reverse at 47.066,9.504 with format <format>
+ Then result has attributes place_id
+ And results contain
+ | licence |
+ | ^Data © OpenStreetMap contributors, ODbL 1.0. https?://osm.org/copyright$ |
+ And results contain
+ | osm_type | osm_id |
+ | node | 6522627624 |
+ And results contain
+ | centroid | boundingbox |
+ | 9.5036065 47.0660892 | ['47.0660392', '47.0661392', '9.5035565', '9.5036565'] |
+ And results contain
+ | display_name |
+ | Dorfbäckerei Herrmann, 29, Gnetsch, Mäls, Balzers, Oberland, 9496, Liechtenstein |
+ And result has not attributes namedetails,extratags
+
+ Examples:
+ | format |
+ | json |
+ | jsonv2 |
+
+ Scenario: Extra attributes of jsonv2 result
+ When sending v1/reverse at 47.066,9.504 with format jsonv2
+ Then result has attributes importance
+ Then results contain
+ | category | type | name | place_rank | addresstype |
+ | shop | bakery | Dorfbäckerei Herrmann | 30 | shop |
+
+
+ @Tiger
+ Scenario: Tiger address
+ When sending v1/reverse at 32.4752389363,-86.4810198619 with format jsonv2
+ Then results contain
+ | osm_type | osm_id | category | type | addresstype |
+ | way | 396009653 | place | house | place |
+
+
+ Scenario Outline: Interpolation address
+ When sending v1/reverse at 47.118533,9.57056562 with format <format>
+ Then results contain
+ | osm_type | osm_id |
+ | way | 1 |
+ And results contain
+ | centroid | boundingbox |
+ | 9.57054676 47.118545392 | ^\['47.118495\d*', '47.118595\d*', '9.570496\d*', '9.570596\d*'\] |
+ And results contain
+ | display_name |
+ | 1019, Grosssteg, Sücka, Triesenberg, Oberland, 9497, Liechtenstein |
+
+ Examples:
+ | format |
+ | json |
+ | jsonv2 |
+
+
+ Scenario Outline: Output of geojson
+ When sending v1/reverse at 47.06597,9.50467 with format <format>
+ | param | value |
+ | polygon_geojson | 1 |
+ Then results contain in field geojson
+ | type | coordinates |
+ | LineString | [[9.5039353, 47.0657546], [9.5040437, 47.0657781], [9.5040808, 47.065787], [9.5054298, 47.0661407]] |
+
+ Examples:
+ | format |
+ | json |
+ | jsonv2 |
+
+
+ Scenario Outline: Output of WKT
+ When sending v1/reverse at 47.06597,9.50467 with format <format>
+ | param | value |
+ | polygon_text | 1 |
+ Then results contain
+ | geotext |
+ | ^LINESTRING\(9.5039353 47.0657546, ?9.5040437 47.0657781, ?9.5040808 47.065787, ?9.5054298 47.0661407\) |
+
+ Examples:
+ | format |
+ | json |
+ | jsonv2 |
+
+
+ Scenario Outline: Output of SVG
+ When sending v1/reverse at 47.06597,9.50467 with format <format>
+ | param | value |
+ | polygon_svg | 1 |
+ Then results contain
+ | svg |
+ | M 9.5039353 -47.0657546 L 9.5040437 -47.0657781 9.5040808 -47.065787 9.5054298 -47.0661407 |
+
+ Examples:
+ | format |
+ | json |
+ | jsonv2 |
+
+
+ Scenario Outline: Output of KML
+ When sending v1/reverse at 47.06597,9.50467 with format <format>
+ | param | value |
+ | polygon_kml | 1 |
+ Then results contain
+ | geokml |
+ | ^<LineString><coordinates>9.5039\d*,47.0657\d* 9.5040\d*,47.0657\d* 9.5040\d*,47.065\d* 9.5054\d*,47.0661\d*</coordinates></LineString> |
+
+ Examples:
+ | format |
+ | json |
+ | jsonv2 |
--- /dev/null
+@SQLITE
+@APIDB
+Feature: v1/reverse Parameter Tests
+ Tests for parameter inputs for the v1 reverse endpoint.
+ This file contains mostly bad parameter input. Valid parameters
+ are tested in the format tests.
+
+ Scenario: Bad format
+ When sending v1/reverse at 47.14122383,9.52169581334 with format sdf
+ Then a HTTP 400 is returned
+
+ Scenario: Missing lon parameter
+ When sending v1/reverse at 52.52,
+ Then a HTTP 400 is returned
+
+
+ Scenario: Missing lat parameter
+ When sending v1/reverse at ,52.52
+ Then a HTTP 400 is returned
+
+
+ Scenario Outline: Bad format for lat or lon
+ When sending v1/reverse at ,
+ | lat | lon |
+ | <lat> | <lon> |
+ Then a HTTP 400 is returned
+
+ Examples:
+ | lat | lon |
+ | 48.9660 | 8,4482 |
+ | 48,9660 | 8.4482 |
+ | 48,9660 | 8,4482 |
+ | 48.966.0 | 8.4482 |
+ | 48.966 | 8.448.2 |
+ | Nan | 8.448 |
+ | 48.966 | Nan |
+ | Inf | 5.6 |
+ | 5.6 | -Inf |
+ | <script></script> | 3.4 |
+ | 3.4 | <script></script> |
+ | -45.3 | ; |
+ | gkjd | 50 |
+
+
+ Scenario: Non-numerical zoom levels return an error
+ When sending v1/reverse at 47.14122383,9.52169581334
+ | zoom |
+ | adfe |
+ Then a HTTP 400 is returned
+
+
+ Scenario Outline: Truthy values for boolean parameters
+ When sending v1/reverse at 47.14122383,9.52169581334
+ | addressdetails |
+ | <value> |
+ Then exactly 1 result is returned
+ And result has attributes address
+
+ When sending v1/reverse at 47.14122383,9.52169581334
+ | extratags |
+ | <value> |
+ Then exactly 1 result is returned
+ And result has attributes extratags
+
+ When sending v1/reverse at 47.14122383,9.52169581334
+ | namedetails |
+ | <value> |
+ Then exactly 1 result is returned
+ And result has attributes namedetails
+
+ When sending v1/reverse at 47.14122383,9.52169581334
+ | polygon_geojson |
+ | <value> |
+ Then exactly 1 result is returned
+ And result has attributes geojson
+
+ When sending v1/reverse at 47.14122383,9.52169581334
+ | polygon_kml |
+ | <value> |
+ Then exactly 1 result is returned
+ And result has attributes geokml
+
+ When sending v1/reverse at 47.14122383,9.52169581334
+ | polygon_svg |
+ | <value> |
+ Then exactly 1 result is returned
+ And result has attributes svg
+
+ When sending v1/reverse at 47.14122383,9.52169581334
+ | polygon_text |
+ | <value> |
+ Then exactly 1 result is returned
+ And result has attributes geotext
+
+ Examples:
+ | value |
+ | yes |
+ | no |
+ | -1 |
+ | 100 |
+ | false |
+ | 00 |
+
+
+ Scenario: Only one geometry can be requested
+ When sending v1/reverse at 47.165989816710066,9.515774846076965
+ | polygon_text | polygon_svg |
+ | 1 | 1 |
+ Then a HTTP 400 is returned
+
+
+ Scenario Outline: Wrapping of legal jsonp requests
+ When sending v1/reverse at 67.3245,0.456 with format <format>
+ | json_callback |
+ | foo |
+ Then the result is valid <outformat>
+
+ Examples:
+ | format | outformat |
+ | json | json |
+ | jsonv2 | json |
+ | geojson | geojson |
+ | geocodejson | geocodejson |
+
+
+ Scenario Outline: Illegal jsonp are not allowed
+ When sending v1/reverse at 47.165989816710066,9.515774846076965
+ | param | value |
+ | json_callback | <data> |
+ Then a HTTP 400 is returned
+
+ Examples:
+ | data |
+ | 1asd |
+ | bar(foo) |
+ | XXX['bad'] |
+ | foo; evil |
+
+
+ Scenario Outline: Reverse debug mode produces valid HTML
+ When sending v1/reverse at , with format debug
+ | lat | lon |
+ | <lat> | <lon> |
+ Then the result is valid html
+
+ Examples:
+ | lat | lon |
+ | 0.0 | 0.0 |
+ | 47.06645 | 9.56601 |
+ | 47.14081 | 9.52267 |
+
+
+ Scenario Outline: Full address display for city housenumber-level address with street
+ When sending v1/reverse at 47.1068011,9.52810091 with format <format>
+ Then address of result 0 is
+ | type | value |
+ | house_number | 8 |
+ | road | Im Winkel |
+ | neighbourhood | Oberdorf |
+ | village | Triesen |
+ | ISO3166-2-lvl8 | LI-09 |
+ | county | Oberland |
+ | postcode | 9495 |
+ | country | Liechtenstein |
+ | country_code | li |
+
+ Examples:
+ | format |
+ | json |
+ | jsonv2 |
+ | geojson |
+ | xml |
+
+
+ Scenario Outline: Results with name details
+ When sending v1/reverse at 47.14052,9.52202 with format <format>
+ | zoom | namedetails |
+ | 14 | 1 |
+ Then results contain in field namedetails
+ | name |
+ | Ebenholz |
+
+ Examples:
+ | format |
+ | json |
+ | jsonv2 |
+ | xml |
+ | geojson |
+
+
+ Scenario Outline: Results with extratags
+ When sending v1/reverse at 47.14052,9.52202 with format <format>
+ | zoom | extratags |
+ | 14 | 1 |
+ Then results contain in field extratags
+ | wikidata |
+ | Q4529531 |
+
+ Examples:
+ | format |
+ | json |
+ | jsonv2 |
+ | xml |
+ | geojson |
+
+
--- /dev/null
+@SQLITE
+@APIDB
+Feature: XML output for Reverse API
+ Testing correctness of xml output (API version v1).
+
+ Scenario Outline: OSM result with and without addresses
+ When sending v1/reverse at 47.066,9.504 with format xml
+ | addressdetails |
+ | <has_address> |
+ Then result has attributes place_id
+ Then result has <attributes> address
+ And results contain
+ | osm_type | osm_id | place_rank | address_rank |
+ | node | 6522627624 | 30 | 30 |
+ And results contain
+ | centroid | boundingbox |
+ | 9.5036065 47.0660892 | 47.0660392,47.0661392,9.5035565,9.5036565 |
+ And results contain
+ | ref | display_name |
+ | Dorfbäckerei Herrmann | Dorfbäckerei Herrmann, 29, Gnetsch, Mäls, Balzers, Oberland, 9496, Liechtenstein |
+
+ Examples:
+ | has_address | attributes |
+ | 1 | attributes |
+ | 0 | not attributes |
+
+
+ @Tiger
+ Scenario: Tiger address
+ When sending v1/reverse at 32.4752389363,-86.4810198619 with format xml
+ Then results contain
+ | osm_type | osm_id | place_rank | address_rank |
+ | way | 396009653 | 30 | 30 |
+ And results contain
+ | centroid | boundingbox |
+ | -86.4808553 32.4753580 | ^32.4753080\d*,32.4754080\d*,-86.4809053\d*,-86.4808053\d* |
+ And results contain
+ | display_name |
+ | 707, Upper Kingston Road, Upper Kingston, Prattville, Autauga County, 36067, United States |
+
+
+ Scenario: Interpolation address
+ When sending v1/reverse at 47.118533,9.57056562 with format xml
+ Then results contain
+ | osm_type | osm_id | place_rank | address_rank |
+ | way | 1 | 30 | 30 |
+ And results contain
+ | centroid | boundingbox |
+ | 9.57054676 47.118545392 | ^47.118495\d*,47.118595\d*,9.570496\d*,9.570596\d* |
+ And results contain
+ | display_name |
+ | 1019, Grosssteg, Sücka, Triesenberg, Oberland, 9497, Liechtenstein |
+
+
+ Scenario: Output of geojson
+ When sending v1/reverse at 47.06597,9.50467 with format xml
+ | param | value |
+ | polygon_geojson | 1 |
+ Then results contain
+ | geojson |
+ | {"type":"LineString","coordinates":[[9.5039353,47.0657546],[9.5040437,47.0657781],[9.5040808,47.065787],[9.5054298,47.0661407]]} |
+
+
+ Scenario: Output of WKT
+ When sending v1/reverse at 47.06597,9.50467 with format xml
+ | param | value |
+ | polygon_text | 1 |
+ Then results contain
+ | geotext |
+ | ^LINESTRING\(9.5039353 47.0657546, ?9.5040437 47.0657781, ?9.5040808 47.065787, ?9.5054298 47.0661407\) |
+
+
+ Scenario: Output of SVG
+ When sending v1/reverse at 47.06597,9.50467 with format xml
+ | param | value |
+ | polygon_svg | 1 |
+ Then results contain
+ | geosvg |
+ | M 9.5039353 -47.0657546 L 9.5040437 -47.0657781 9.5040808 -47.065787 9.5054298 -47.0661407 |
+
+
+ Scenario: Output of KML
+ When sending v1/reverse at 47.06597,9.50467 with format xml
+ | param | value |
+ | polygon_kml | 1 |
+ Then results contain
+ | geokml |
+ | ^<geokml><LineString><coordinates>9.5039\d*,47.0657\d* 9.5040\d*,47.0657\d* 9.5040\d*,47.065\d* 9.5054\d*,47.0661\d*</coordinates></LineString></geokml> |
+@SQLITE
@APIDB
Feature: Parameters for Search API
Testing correctness of geocodejson output.
+@SQLITE
@APIDB
Feature: Localization of search results
+@SQLITE
@APIDB
Feature: Search queries
Testing different queries and parameters
| 0 |
Then there are duplicates
- @fail-legacy
Scenario: Search with bounded viewbox in right area
When sending json search query "post" with address
| bounded | viewbox |
Then a HTTP 400 is returned
Scenario: Restrict to feature type country
- When sending xml search query "fürstentum"
- Then results contain
- | ID | class |
- | 1 | building |
When sending xml search query "fürstentum"
| featureType |
| country |
Scenario: Restrict to feature type state
When sending xml search query "Wangerberg"
- Then more than 1 result is returned
+ Then at least 1 result is returned
When sending xml search query "Wangerberg"
| featureType |
| state |
Scenario: Restrict to feature type city
When sending xml search query "vaduz"
- Then results contain
- | ID | place_rank |
- | 1 | 30 |
+ Then at least 1 result is returned
When sending xml search query "vaduz"
| featureType |
| city |
| svg |
| geokml |
- Scenario: Search along a route
- When sending json search query "rathaus" with address
- Then result addresses contain
- | ID | town |
- | 0 | Schaan |
- When sending json search query "rathaus" with address
- | bounded | routewidth | route |
- | 1 | 0.1 | 9.54353,47.11772,9.54314,47.11894 |
- Then result addresses contain
- | town |
- | Triesenberg |
-
Scenario: Array parameters are ignored
When sending json search query "Vaduz" with address
+@SQLITE
@APIDB
Feature: Searches with postcodes
Various searches involving postcodes
+ @Fail
Scenario: US 5+4 ZIP codes are shortened to 5 ZIP codes if not found
When sending json search query "36067 1111, us" with address
Then result addresses contain
| postcode |
| 36067 |
+ And results contain
+ | type |
+ | postcode |
Scenario: Postcode search with address
When sending json search query "9486, mauren"
+@SQLITE
@APIDB
Feature: Search queries
Generic search result correctness
+ Scenario: Search for natural object
+ When sending json search query "Samina"
+ | accept-language |
+ | en |
+ Then results contain
+ | ID | class | type | display_name |
+ | 0 | waterway | river | Samina, Austria |
+
Scenario: House number search for non-street address
When sending json search query "6 Silum, Liechtenstein" with address
| accept-language |
| class | type |
| club | scout |
- Scenario: With multiple amenity search only the first is used
- When sending json search query "[club=scout] [church] vaduz"
- Then results contain
- | class | type |
- | club | scout |
- When sending json search query "[amenity=place_of_worship] [club=scout] vaduz"
- Then results contain
- | class | type |
- | amenity | place_of_worship |
-
Scenario: POI search near given coordinate
When sending json search query "restaurant near 47.16712,9.51100"
Then results contain
| class | type |
| leisure | firepit |
- Scenario: Arbitrary key/value search near given coordinate and named place
- When sending json search query "[leisure=firepit] ebenholz 47° 9′ 26″ N 9° 36′ 45″ E"
+
+ Scenario: POI search in a bounded viewbox
+ When sending json search query "restaurants"
+ | viewbox | bounded |
+ | 9.50830,47.15253,9.52043,47.14866 | 1 |
Then results contain
- | class | type |
- | leisure | firepit |
+ | class | type |
+ | amenity | restaurant |
Scenario Outline: Key/value search near given coordinate can be restricted to country
When sending json search query "[natural=peak] 47.06512,9.53965" with address
Then result addresses contain
| ID | house_number |
| 0 | 11 |
- | 1 | 11 a |
Scenario Outline: Coordinate searches with white spaces
When sending json search query "<data>"
Then exactly 1 result is returned
And results contain
| class |
- | natural |
+ | water |
Examples:
| data |
+@SQLITE
@APIDB
Feature: Simple Tests
Simple tests for internal server errors and response format.
| querystring | pub |
| viewbox | 12,33,77,45.13 |
- Scenario: Empty XML search with exluded place ids
+ Scenario: Empty XML search with excluded place ids
When sending xml search query "jghrleoxsbwjer"
| exclude_place_ids |
| 123,76,342565 |
| attr | value |
| exclude_place_ids | 123,76,342565 |
- Scenario: Empty XML search with bad exluded place ids
+ Scenario: Empty XML search with bad excluded place ids
When sending xml search query "jghrleoxsbwjer"
| exclude_place_ids |
| , |
| foo | foo |
| FOO | FOO |
| __world | __world |
- | $me | \$me |
- | m1[4] | m1\[4\] |
- | d_r[$d] | d_r\[\$d\] |
Scenario Outline: Wrapping of illegal jsonp search requests
When sending json search query "Tokyo"
+@SQLITE
@APIDB
Feature: Structured search queries
Testing correctness of results with
| Liechtenstein |
And results contain
| class | type |
- | amenity | ^(pub)\|(bar) |
+ | amenity | ^(pub)\|(bar)\|(restaurant) |
#176
Scenario: Structured search restricts rank
+@SQLITE
@APIDB
Feature: Status queries
Testing status query
| N1 | R1 | True |
| N1 | R2 | True |
- Scenario: with boundaries of same rank the one with the closer centroid is prefered
+ Scenario: with boundaries of same rank the one with the closer centroid is preferred
Given the grid
| 1 | | | 3 | | 5 |
| | 9 | | | | |
| object | address |
| W1 | R2 |
+ Scenario: Full name is preferred for unlisted addr:place tags
+ Given the grid
+ | | 1 | 2 | |
+ | 8 | | | 9 |
+ And the places
+ | osm | class | type | name | geometry |
+ | W10 | place | city | Away | (8,1,2,9,8) |
+ And the places
+ | osm | class | type | name | addr+city | geometry |
+ | W1 | highway | residential | Royal Terrace | Gardens | 8,9 |
+ And the places
+ | osm | class | type | housenr | addr+place | geometry | extra+foo |
+ | N1 | place | house | 1 | Royal Terrace Gardens | 1 | bar |
+ And the places
+ | osm | class | type | housenr | addr+street | geometry |
+ | N2 | place | house | 2 | Royal Terrace | 2 |
+ When importing
+ When sending search query "1, Royal Terrace Gardens"
+ Then results contain
+ | ID | osm |
+ | 0 | N1 |
Then results contain
| osm | display_name |
| N1 | Wenig, Loudou |
- Scenario: OSM country relations outside expected boundaries are ignored
+
+ Scenario: OSM country relations outside expected boundaries are ignored for naming
Given the grid
| 1 | | 2 |
| 4 | | 3 |
Then results contain
| osm | display_name |
| N1 | Wenig, Germany |
+
Scenario: Pre-defined country names are used
Given the grid with origin CH
| 1 |
Then results contain
| osm | display_name |
| N1 | Ingb, Switzerland |
+
+ Scenario: For overlapping countries, pre-defined countries are tie-breakers
+ Given the grid with origin US
+ | 1 | | 2 | | 5 |
+ | | 9 | | 8 | |
+ | 4 | | 3 | | 6 |
+ Given the named places
+ | osm | class | type | admin | country | geometry |
+ | R1 | boundary | administrative | 2 | de | (1,5,6,4,1) |
+ | R2 | boundary | administrative | 2 | us | (1,2,3,4,1) |
+ And the named places
+ | osm | class | type | geometry |
+ | N1 | place | town | 9 |
+ | N2 | place | town | 8 |
+ When importing
+ Then placex contains
+ | object | country_code |
+ | N1 | us |
+ | N2 | de |
+
+ Scenario: For overlapping countries outside pre-defined countries prefer smaller partition
+ Given the grid with origin US
+ | 1 | | 2 | | 5 |
+ | | 9 | | 8 | |
+ | 4 | | 3 | | 6 |
+ Given the named places
+ | osm | class | type | admin | country | geometry |
+ | R1 | boundary | administrative | 2 | ch | (1,5,6,4,1) |
+ | R2 | boundary | administrative | 2 | de | (1,2,3,4,1) |
+ And the named places
+ | osm | class | type | geometry |
+ | N1 | place | town | 9 |
+ | N2 | place | town | 8 |
+ When importing
+ Then placex contains
+ | object | country_code |
+ | N1 | de |
+ | N2 | ch |
Feature: Import of address interpolations
Tests that interpolated addresses are added correctly
+ Scenario: Simple even interpolation line with two points and no street nearby
+ Given the grid with origin 1,1
+ | 1 | | 9 | | 2 |
+ Given the places
+ | osm | class | type | housenr |
+ | N1 | place | house | 2 |
+ | N2 | place | house | 6 |
+ And the places
+ | osm | class | type | addr+interpolation | geometry |
+ | W1 | place | houses | even | 1,2 |
+ And the ways
+ | id | nodes |
+ | 1 | 1,2 |
+ When importing
+ Then W1 expands to no interpolation
+
Scenario: Simple even interpolation line with two points
Given the grid with origin 1,1
| 1 | | 9 | | 2 |
+ | 4 | | | | 5 |
Given the places
| osm | class | type | housenr |
| N1 | place | house | 2 |
And the places
| osm | class | type | addr+interpolation | geometry |
| W1 | place | houses | even | 1,2 |
+ And the named places
+ | osm | class | type | geometry |
+ | W10 | highway | residential | 4,5 |
And the ways
| id | nodes |
| 1 | 1,2 |
Scenario: Backwards even two point interpolation line
Given the grid with origin 1,1
| 1 | 8 | 9 | 2 |
+ | 4 | | | 5 |
Given the places
| osm | class | type | housenr |
| N1 | place | house | 2 |
| N2 | place | house | 8 |
And the places
| osm | class | type | addr+interpolation | geometry |
- | W1 | place | houses | even | 1,2 |
+ | W1 | place | houses | even | 2,1 |
+ And the named places
+ | osm | class | type | geometry |
+ | W10 | highway | residential | 4,5 |
And the ways
| id | nodes |
| 1 | 2,1 |
When importing
Then W1 expands to interpolation
| start | end | geometry |
- | 4 | 6 | 8,9 |
+ | 4 | 6 | 9,8 |
Scenario: Simple odd two point interpolation
Given the grid with origin 1,1
| 1 | 8 | | | 9 | 2 |
+ | 4 | | | | 5 | |
Given the places
| osm | class | type | housenr |
| N1 | place | house | 1 |
And the places
| osm | class | type | addr+interpolation | geometry |
| W1 | place | houses | odd | 1,2 |
+ And the named places
+ | osm | class | type | geometry |
+ | W10 | highway | residential | 4,5 |
And the ways
| id | nodes |
| 1 | 1,2 |
Scenario: Simple all two point interpolation
Given the grid with origin 1,1
| 1 | 8 | 9 | 2 |
+ | 4 | | | 5 |
Given the places
| osm | class | type | housenr |
| N1 | place | house | 1 |
And the places
| osm | class | type | addr+interpolation | geometry |
| W1 | place | houses | all | 1,2 |
+ And the named places
+ | osm | class | type | geometry |
+ | W10 | highway | residential | 4,5 |
And the ways
| id | nodes |
| 1 | 1,2 |
Scenario: Even two point interpolation line with intermediate empty node
Given the grid
| 1 | 8 | | 3 | 9 | 2 |
+ | 4 | | | | 5 | |
Given the places
| osm | class | type | housenr |
| N1 | place | house | 2 |
And the places
| osm | class | type | addr+interpolation | geometry |
| W1 | place | houses | even | 1,3,2 |
+ And the named places
+ | osm | class | type | geometry |
+ | W10 | highway | residential | 4,5 |
And the ways
| id | nodes |
| 1 | 1,3,2 |
Scenario: Even two point interpolation line with intermediate duplicated empty node
Given the grid
+ | 4 | | | | 5 |
| 1 | 8 | 3 | 9 | 2 |
Given the places
| osm | class | type | housenr |
And the places
| osm | class | type | addr+interpolation | geometry |
| W1 | place | houses | even | 1,3,2 |
+ And the named places
+ | osm | class | type | geometry |
+ | W10 | highway | residential | 4,5 |
And the ways
| id | nodes |
| 1 | 1,3,3,2 |
Scenario: Simple even three point interpolation line
Given the grid
+ | 4 | | | | | | 5 |
| 1 | 8 | | 9 | 3 | 7 | 2 |
Given the places
| osm | class | type | housenr |
And the places
| osm | class | type | addr+interpolation | geometry |
| W1 | place | houses | even | 1,3,2 |
+ And the named places
+ | osm | class | type | geometry |
+ | W10 | highway | residential | 4,5 |
And the ways
| id | nodes |
| 1 | 1,3,2 |
And the places
| osm | class | type | addr+interpolation | geometry |
| W1 | place | houses | even | 1,3,2,4 |
+ And the named places
+ | osm | class | type | geometry |
+ | W10 | highway | residential | 1,3,2,4 |
And the ways
| id | nodes |
| 1 | 1,3,2,4 |
Scenario: Reverse simple even three point interpolation line
Given the grid
| 1 | 8 | | 9 | 3 | 7 | 2 |
+ | 4 | | | | | | 5 |
Given the places
| osm | class | type | housenr |
| N1 | place | house | 2 |
And the places
| osm | class | type | addr+interpolation | geometry |
| W1 | place | houses | even | 2,3,1 |
+ And the named places
+ | osm | class | type | geometry |
+ | W10 | highway | residential | 4,5 |
And the ways
| id | nodes |
| 1 | 2,3,1 |
Scenario: Even three point interpolation line with odd center point
Given the grid
| 1 | | 10 | | | 11 | 3 | 2 |
+ | 4 | | | | | | | 5 |
Given the places
| osm | class | type | housenr |
| N1 | place | house | 2 |
And the places
| osm | class | type | addr+interpolation | geometry |
| W1 | place | houses | even | 1,3,2 |
+ And the named places
+ | osm | class | type | geometry |
+ | W10 | highway | residential | 4,5 |
And the ways
| id | nodes |
| 1 | 1,3,2 |
And the places
| osm | class | type | addr+interpolation | geometry |
| W1 | place | houses | even | 1,2,3,2 |
+ And the named places
+ | osm | class | type | geometry |
+ | W10 | highway | residential | 1,2,3 |
And the ways
| id | nodes |
| 1 | 1,2,3,2 |
And the places
| osm | class | type | addr+interpolation | geometry |
| W1 | place | houses | even | 1,2,3,2 |
+ And the named places
+ | osm | class | type | geometry |
+ | W10 | highway | residential | 1,2,3 |
And the ways
| id | nodes |
| 1 | 1,2,3,2 |
| W3 | 14 | 14 |
When sending search query "16 Cloud Street"
Then results contain
- | ID | osm_type | osm_id |
- | 0 | N | 4 |
+ | ID | osm |
+ | 0 | N4 |
When sending search query "14 Cloud Street"
Then results contain
- | ID | osm_type | osm_id |
- | 0 | W | 11 |
+ | ID | osm |
+ | 0 | W11 |
Scenario: addr:street on housenumber way
Given the grid
| W3 | 14 | 14 |
When sending search query "16 Cloud Street"
Then results contain
- | ID | osm_type | osm_id |
- | 0 | N | 4 |
+ | ID | osm |
+ | 0 | N4 |
When sending search query "14 Cloud Street"
Then results contain
- | ID | osm_type | osm_id |
- | 0 | W | 11 |
+ | ID | osm |
+ | 0 | W11 |
Scenario: Geometry of points and way don't match (github #253)
Given the places
And the places
| osm | class | type | addr+interpolation | geometry |
| W1 | place | houses | even | 144.9632341 -37.76163,144.9630541 -37.7628172,144.9629794 -37.7630755 |
+ And the named places
+ | osm | class | type | geometry |
+ | W10 | highway | residential | 144.9632341 -37.76163,144.9629794 -37.7630755 |
And the ways
| id | nodes |
| 1 | 1,2,3 |
Then W1 expands to interpolation
| start | end | geometry |
| 4 | 4 | 144.963016 -37.762946 |
- | 8 | 8 | 144.963144 -37.7622237 |
+ | 8 | 8 | 144.96314407 -37.762223692 |
Scenario: Place with missing address information
Given the grid
| 1 | | 2 | | | 3 |
+ | 4 | | | | | 5 |
And the places
| osm | class | type | housenr |
| N1 | place | house | 23 |
And the places
| osm | class | type | addr+interpolation | geometry |
| W1 | place | houses | odd | 1,2,3 |
+ And the named places
+ | osm | class | type | geometry |
+ | W10 | highway | residential | 4,5 |
And the ways
| id | nodes |
| 1 | 1,2,3 |
Given the places
| osm | class | type | housenr | geometry |
| W1 | place | houses | even | 1 1, 1 1.001 |
+ And the named places
+ | osm | class | type | geometry |
+ | W10 | highway | residential | 1 1, 1 1.001 |
When importing
Then W1 expands to no interpolation
Scenario: Ways with nodes without housenumbers are ignored
Given the grid
- | 1 | | 2 |
+ | 1 | | 2 |
+ | 4 | | 5 |
Given the places
| osm | class | type |
| N1 | place | house |
Given the places
| osm | class | type | housenr | geometry |
| W1 | place | houses | even | 1,2 |
+ And the named places
+ | osm | class | type | geometry |
+ | W10 | highway | residential | 4,5 |
When importing
Then W1 expands to no interpolation
Scenario: Two point interpolation starting at 0
Given the grid with origin 1,1
| 1 | 10 | | | 11 | 2 |
+ | 4 | | | | | 5 |
Given the places
| osm | class | type | housenr |
| N1 | place | house | 0 |
And the places
| osm | class | type | addr+interpolation | geometry |
| W1 | place | houses | even | 1,2 |
+ And the places
+ | osm | class | type | name | geometry |
+ | W10 | highway | residential | London Road | 4,5 |
And the ways
| id | nodes |
| 1 | 1,2 |
Then W1 expands to interpolation
| start | end | geometry |
| 2 | 8 | 10,11 |
- When sending jsonv2 reverse coordinates 1,1
+ When sending v1/reverse at 1,1
Then results contain
- | ID | osm_type | osm_id | type | display_name |
- | 0 | node | 1 | house | 0 |
+ | ID | osm | type | display_name |
+ | 0 | N1 | house | 0, London Road |
Scenario: Parenting of interpolation with additional tags
Given the grid
And W10 expands to interpolation
| start | end | parent_place_id |
| 12 | 14 | W2 |
+
+
+ Scenario Outline: Bad interpolation values are ignored
+ Given the grid with origin 1,1
+ | 1 | | 9 | | 2 |
+ | 4 | | | | 5 |
+ Given the places
+ | osm | class | type | housenr |
+ | N1 | place | house | 2 |
+ | N2 | place | house | 6 |
+ And the places
+ | osm | class | type | addr+interpolation | geometry |
+ | W1 | place | houses | <value> | 1,2 |
+ And the named places
+ | osm | class | type | geometry |
+ | W10 | highway | residential | 4,5 |
+ And the ways
+ | id | nodes |
+ | 1 | 1,2 |
+ When importing
+ Then W1 expands to no interpolation
+
+ Examples:
+ | value |
+ | foo |
+ | x |
+ | 12-2 |
+
+
+ Scenario: Interpolation line where points have been moved (Github #3022)
+ Given the 0.00001 grid
+ | 1 | | | | | | | | 2 | 3 | 9 | | | | | | | | 4 |
+ Given the places
+ | osm | class | type | housenr | geometry |
+ | N1 | place | house | 2 | 1 |
+ | N2 | place | house | 18 | 3 |
+ | N3 | place | house | 24 | 9 |
+ | N4 | place | house | 42 | 4 |
+ And the places
+ | osm | class | type | addr+interpolation | geometry |
+ | W1 | place | houses | even | 1,2,3,4 |
+ And the named places
+ | osm | class | type | geometry |
+ | W10 | highway | residential | 1,4 |
+ And the ways
+ | id | nodes |
+ | 1 | 1,2,3,4 |
+ When importing
+ Then W1 expands to interpolation
+ | start | end |
+ | 4 | 16 |
+ | 20 | 22 |
+ | 26 | 40 |
+
+
+ Scenario: Interpolation line with duplicated points
+ Given the grid
+ | 7 | 10 | 8 | 11 | 9 |
+ | 4 | | | | 5 |
+ Given the places
+ | osm | class | type | housenr | geometry |
+ | N1 | place | house | 2 | 7 |
+ | N2 | place | house | 6 | 8 |
+ | N3 | place | house | 10 | 8 |
+ | N4 | place | house | 14 | 9 |
+ And the places
+ | osm | class | type | addr+interpolation | geometry |
+ | W1 | place | houses | even | 7,8,8,9 |
+ And the named places
+ | osm | class | type | geometry |
+ | W10 | highway | residential | 4,5 |
+ And the ways
+ | id | nodes |
+ | 1 | 1,2,3,4 |
+ When importing
+ Then W1 expands to interpolation
+ | start | end | geometry |
+ | 4 | 4 | 10 |
+ | 12 | 12 | 11 |
+
+
+ Scenario: Interpolation line with broken way geometry (Github #2986)
+ Given the grid
+ | 1 | 8 | 10 | 11 | 9 | 2 | 3 | 4 |
+ Given the places
+ | osm | class | type | housenr |
+ | N1 | place | house | 2 |
+ | N2 | place | house | 8 |
+ | N3 | place | house | 12 |
+ | N4 | place | house | 14 |
+ And the places
+ | osm | class | type | addr+interpolation | geometry |
+ | W1 | place | houses | even | 8,9 |
+ And the named places
+ | osm | class | type | geometry |
+ | W10 | highway | residential | 1,4 |
+ And the ways
+ | id | nodes |
+ | 1 | 1,8,9,2,3,4 |
+ When importing
+ Then W1 expands to interpolation
+ | start | end | geometry |
+ | 4 | 6 | 10,11 |
| R23 | - |
When sending search query "rhein"
Then results contain
- | osm_type |
- | R |
+ | osm |
+ | R13 |
Scenario: Relations are not linked when in waterway relations
Given the grid
| R2 | - |
When sending search query "rhein"
Then results contain
- | ID | osm_type |
- | 0 | R |
- | 1 | W |
+ | ID | osm |
+ | 0 | R1 |
+ | 1 | W2 |
Scenario: Empty waterway relations are handled correctly
| W2 | R1 |
When sending search query "rhein2"
Then results contain
- | osm_type |
- | W |
+ | osm |
+ | W1 |
# github #573
Scenario: Boundaries should only be linked to places
| city |
| Berlin |
Then results contain
- | ID | osm_type | osm_id |
- | 0 | R | 13 |
+ | ID | osm |
+ | 0 | R13 |
When sending search query ""
| state |
| Berlin |
Then results contain
- | ID | osm_type | osm_id |
- | 0 | R | 13 |
+ | ID | osm |
+ | 0 | R13 |
Scenario: Boundaries without place tags only link against same admin level
| state |
| Berlin |
Then results contain
- | ID | osm_type | osm_id |
- | 0 | R | 13 |
+ | ID | osm |
+ | 0 | R13 |
When sending search query ""
| city |
| Berlin |
Then results contain
- | ID | osm_type | osm_id |
- | 0 | N | 2 |
+ | ID | osm |
+ | 0 | N2 |
# github #1352
Scenario: Do not use linked centroid when it is outside the area
| R1 | 'linked_place' : 'city', 'wikidata': 'Q1234' |
| R2 | 'wikidata': 'Q1234' |
+
+ Scenario: Boundaries without names inherit names from linked places
+ Given the 0.05 grid
+ | 1 | | 2 |
+ | | 9 | |
+ | 4 | | 3 |
+ Given the places
+ | osm | class | type | extra+wikidata | admin | geometry |
+ | R1 | boundary | administrative | 34 | 8 | (1,2,3,4,1) |
+ And the places
+ | osm | class | type | name+name |
+ | N9 | place | city | LabelPlace |
+ And the relations
+ | id | members |
+ | 1 | N9:label |
+ When importing
+ Then placex contains
+ | object | name+_place_name |
+ | R1 | LabelPlace |
+
| N2 | W1 |
When sending search query "4 galoo"
Then results contain
- | ID | osm_type | osm_id | display_name |
- | 0 | N | 1 | 4, galoo, 12345, Deutschland |
+ | ID | osm | display_name |
+ | 0 | N1 | 4, galoo, 12345, Deutschland |
When sending search query "5 galoo"
Then results contain
- | ID | osm_type | osm_id | display_name |
- | 0 | N | 2 | 5, galoo, 99999, Deutschland |
+ | ID | osm | display_name |
+ | 0 | N2 | 5, galoo, 99999, Deutschland |
Scenario: Address without tags, closest street
Given the grid
| N3 | W2 |
| N4 | W1 |
- @fail-legacy
Scenario: addr:street tag parents to appropriately named street, locale names
Given the grid
| 10 | | | | | 11 |
| object | parent_place_id |
| N9 | R14 |
+
+ Scenario: Choose closest street in associatedStreet relation
+ Given the grid
+ | 1 | | | | 3 |
+ | 10 | | 11 | | 12 |
+ And the places
+ | osm | class | type | housenr | geometry |
+ | N1 | place | house | 1 | 1 |
+ | N3 | place | house | 3 | 3 |
+ And the named places
+ | osm | class | type | geometry |
+ | W100 | highway | residential | 10,11 |
+ | W101 | highway | residential | 11,12 |
+ And the relations
+ | id | members | tags+type |
+ | 1 | N1:house,N3:house,W100:street,W101:street | associatedStreet |
+ When importing
+ Then placex contains
+ | object | parent_place_id |
+ | N1 | W100 |
+ | N3 | W101 |
+
+
Scenario: POIs in building inherit address
Given the grid
| 10 | | | | | | 11 |
| object | postcode |
| W93 | 45023 |
+ Scenario: Road areas get postcodes from nearby named buildings without other info
+ Given the grid with origin US
+ | 10 | | | | 11 |
+ | 13 | | | | 12 |
+ | | 1 | 2 | | |
+ | | 4 | 3 | | |
+ And the named places
+ | osm | class | type | geometry |
+ | W93 | highway | pedestriant | (10,11,12,13,10) |
+ And the named places
+ | osm | class | type | addr+postcode | geometry |
+ | W22 | building | yes | 45023 | (1,2,3,4,1) |
+ When importing
+ Then placex contains
+ | object | postcode |
+ | W93 | 45023 |
+
Scenario: Roads get postcodes from nearby unnamed buildings without other info
Given the grid with origin US
| 10 | | | | 11 |
| E45 2 | gb | 23 | 5 |
| Y45 | gb | 21 | 5 |
- @fail-legacy
Scenario: Postcodes outside all countries are not added to the postcode and word table
Given the places
| osm | class | type | addr+postcode | addr+housenumber | addr+place | geometry |
| W1 | R10 | True | 18 |
| W1 | R2 | True | 16 |
| W1 | N9 | False | 18 |
+
+
+ Scenario: POI nodes with place tags
+ Given the places
+ | osm | class | type | name | extratags |
+ | N23 | amenity | playground | AB | "place": "city" |
+ | N23 | place | city | AB | "amenity": "playground" |
+ When importing
+ Then placex contains exactly
+ | object | rank_search | rank_address |
+ | N23:amenity | 30 | 30 |
+ | N23:place | 16 | 16 |
+
+ Scenario: Address rank 25 is only used for addr:place
+ Given the grid
+ | 10 | 33 | 34 | 11 |
+ Given the places
+ | osm | class | type | name |
+ | N10 | place | village | vil |
+ | N11 | place | farm | farm |
+ And the places
+ | osm | class | type | name | geometry |
+ | W1 | highway | residential | RD | 33,11 |
+ And the places
+ | osm | class | type | name | addr+farm | geometry |
+ | W2 | highway | residential | RD2 | farm | 34,11 |
+ And the places
+ | osm | class | type | housenr |
+ | N33 | place | house | 23 |
+ And the places
+ | osm | class | type | housenr | addr+place |
+ | N34 | place | house | 23 | farm |
+ When importing
+ Then placex contains
+ | object | parent_place_id |
+ | N11 | N10 |
+ | N33 | W1 |
+ | N34 | N11 |
+ And place_addressline contains
+ | object | address |
+ | W1 | N10 |
+ | W2 | N10 |
+ | W2 | N11 |
Feature: Creation of search terms
Tests that search_name table is filled correctly
- Scenario Outline: Comma- and semicolon separated names appear as full names
+ Scenario: Semicolon-separated names appear as separate full names
Given the places
| osm | class | type | name+alt_name |
- | N1 | place | city | New York<sep>Big Apple |
+ | N1 | place | city | New York; Big Apple |
When importing
Then search_name contains
| object | name_vector |
| N1 | #New York, #Big Apple |
- Examples:
- | sep |
- | , |
- | ; |
+ Scenario: Comma-separated names appear as a single full name
+ Given the places
+ | osm | class | type | name+alt_name |
+ | N1 | place | city | New York, Big Apple |
+ When importing
+ Then search_name contains
+ | object | name_vector |
+ | N1 | #New York Big Apple |
- Scenario Outline: Name parts before brackets appear as full names
+ Scenario: Name parts before brackets appear as full names
Given the places
| osm | class | type | name+name |
| N1 | place | city | Halle (Saale) |
| N1 |
- @fail-legacy
Scenario Outline: Numeral housenumbers in any script are found
Given the places
| osm | class | type | housenr | geometry |
| 2, 4, 12 |
- @fail-legacy
Scenario Outline: Housenumber - letter combinations are found
Given the places
| osm | class | type | housenr | geometry |
| 34/10 |
- @fail-legacy
Scenario Outline: a bis housenumber is found
Given the places
| osm | class | type | housenr | geometry |
| 45 bis |
- @fail-legacy
Scenario Outline: a ter housenumber is found
Given the places
| osm | class | type | housenr | geometry |
| 45 TER |
- @fail-legacy
Scenario Outline: a number - letter - number combination housenumber is found
Given the places
| osm | class | type | housenr | geometry |
| 501h1 |
- @fail-legacy
Scenario Outline: Russian housenumbers are found
Given the places
| osm | class | type | housenr | geometry |
| id | nodes |
| 1 | 1,3 |
When importing
- When sending jsonv2 reverse point 2
+ When sending v1/reverse N2
Then results contain
| ID | display_name |
| 0 | 3, Nickway |
And the places
| osm | class | type | housenr | geometry |
| N1 | place | house | 2 | 1 |
- | N3 | place | house | 16 | 3 |
+ | N3 | place | house | 18 | 3 |
And the ways
| id | nodes |
| 1 | 1,3 |
When importing
- When sending jsonv2 reverse point 2
+ When sending v1/reverse N2
Then results contain
| ID | display_name | centroid |
| 0 | 10, Nickway | 2 |
--- /dev/null
+@DB
+Feature: Searches in Japan
+ Test specifically for searches of Japanese addresses and in Japanese language.
+ Scenario: A block house-number is parented to the neighbourhood
+ Given the grid with origin JP
+ | 1 | | | | 2 |
+ | | 3 | | | |
+ | | | 9 | | |
+ | | | | 6 | |
+ And the places
+ | osm | class | type | name | geometry |
+ | W1 | highway | residential | 雉子橋通り | 1,2 |
+ And the places
+ | osm | class | type | housenr | addr+block_number | addr+neighbourhood | geometry |
+ | N3 | amenity | restaurant | 2 | 6 | 2丁目 | 3 |
+ And the places
+ | osm | class | type | name | geometry |
+ | N9 | place | neighbourhood | 2丁目 | 9 |
+ And the places
+ | osm | class | type | name | geometry |
+ | N6 | place | quarter | 加瀬 | 6 |
+ When importing
+ Then placex contains
+ | object | parent_place_id |
+ | N3 | N9 |
+ When sending search query "2丁目 6-2"
+ Then results contain
+ | osm |
+ | N3 |
| | 2 | |
| 1 | | 3 |
Given the places
- | osm | class | type | postcode | geometry |
- | R1 | boundary | postal_code | 12345 | (1,2,3,1) |
+ | osm | class | type | postcode | geometry |
+ | R1 | boundary | postal_code | 123-45 | (1,2,3,1) |
When importing
- When sending search query "12345"
+ When sending search query "123-45"
Then results contain
| ID | osm |
| 0 | R1 |
When importing
And sending search query "Main St <nr>"
Then results contain
- | osm | display_name |
- | N1 | <nr-list>, Main St |
+ | ID | osm | display_name |
+ | 0 | N1 | <nr-list>, Main St |
Examples:
| nr-list | nr |
When sending search query "399174"
Then results contain
| ID | type | display_name |
- | 0 | postcode | 399174 |
+ | 0 | postcode | 399174, Singapore |
- @fail-legacy
Scenario Outline: Postcodes in the Netherlands (mixed postcode with spaces)
Given the grid with origin NL
| 10 | | | | 11 |
When sending search query "3993 DX"
Then results contain
| ID | type | display_name |
- | 0 | postcode | 3993 DX |
+ | 0 | postcode | 3993 DX, Nederland |
When sending search query "3993dx"
Then results contain
| ID | type | display_name |
- | 0 | postcode | 3993 DX |
+ | 0 | postcode | 3993 DX, Nederland |
Examples:
| postcode |
| 3993 dx |
- @fail-legacy
Scenario: Postcodes in Singapore (6-digit postcode)
Given the grid with origin SG
| 10 | | | | 11 |
When sending search query "399174"
Then results contain
| ID | type | display_name |
- | 0 | postcode | 399174 |
+ | 0 | postcode | 399174, Singapore |
- @fail-legacy
Scenario Outline: Postcodes in Andorra (with country code)
Given the grid with origin AD
| 10 | | | | 11 |
When sending search query "675"
Then results contain
| ID | type | display_name |
- | 0 | postcode | AD675 |
+ | 0 | postcode | AD675, Andorra |
When sending search query "AD675"
Then results contain
| ID | type | display_name |
- | 0 | postcode | AD675 |
+ | 0 | postcode | AD675, Andorra |
Examples:
| postcode |
When sending search query "EH4 7EA"
Then results contain
| type | display_name |
- | postcode | EH4 7EA |
+ | postcode | EH4 7EA, United Kingdom |
When sending search query "E4 7EA"
Then results contain
| type | display_name |
- | postcode | E4 7EA |
+ | postcode | E4 7EA, United Kingdom |
+
+ Scenario: Postcode areas are preferred over postcode points
+ Given the grid with origin DE
+ | 1 | 2 |
+ | 4 | 3 |
+ Given the places
+ | osm | class | type | postcode | geometry |
+ | R23 | boundary | postal_code | 12345 | (1,2,3,4,1) |
+ When importing
+ Then location_postcode contains exactly
+ | country | postcode |
+ | de | 12345 |
+ When sending search query "12345, de"
+ Then results contain
+ | osm |
+ | R23 |
--- /dev/null
+@DB
+Feature: Reverse searches
+ Test results of reverse queries
+
+ Scenario: POI in POI area
+ Given the 0.0001 grid with origin 1,1
+ | 1 | | | | | | | | 2 |
+ | | 9 | | | | | | | |
+ | 4 | | | | | | | | 3 |
+ And the places
+ | osm | class | type | geometry |
+ | W1 | aeroway | terminal | (1,2,3,4,1) |
+ | N1 | amenity | restaurant | 9 |
+ When importing
+ And sending v1/reverse at 1.0001,1.0001
+ Then results contain
+ | osm |
+ | N1 |
+ When sending v1/reverse at 1.0003,1.0001
+ Then results contain
+ | osm |
+ | W1 |
| | 10 | |
| 4 | | 3 |
- @fail-legacy
Scenario: When country names are changed old ones are no longer searchable
Given the places
| osm | class | type | admin | name+name:xy | country | geometry |
When sending search query "Wenig, Loudou"
Then exactly 0 results are returned
- @fail-legacy
Scenario: When country names are deleted they are no longer searchable
Given the places
| osm | class | type | admin | name+name:xy | country | geometry |
| N10 | Wenig, Lilly |
- @fail-legacy
Scenario: When a localised name is deleted, the standard name takes over
Given the places
| osm | class | type | admin | name+name:de | country | geometry |
| parent_place_id | start | end |
| W1 | 4 | 6 |
+ Scenario: Legal interpolation type changed to illegal one
+ Given the grid
+ | 1 | | 2 |
+ | 3 | | 4 |
+ And the places
+ | osm | class | type | name | geometry |
+ | W1 | highway | unclassified | Cloud Street | 1, 2 |
+ And the ways
+ | id | nodes |
+ | 2 | 3,4 |
+ And the places
+ | osm | class | type | addr+interpolation | geometry |
+ | W2 | place | houses | even | 3,4 |
+ And the places
+ | osm | class | type | housenr |
+ | N3 | place | house | 2 |
+ | N4 | place | house | 6 |
+ When importing
+ Then W2 expands to interpolation
+ | parent_place_id | start | end |
+ | W1 | 4 | 4 |
+ When updating places
+ | osm | class | type | addr+interpolation | geometry |
+ | W2 | place | houses | 12-2 | 3,4 |
+ Then W2 expands to no interpolation
+
| dups |
| 1 |
Then results contain
- | osm_type |
- | R |
+ | osm |
+ | R1 |
When updating places
| osm | class | type | name | admin | geometry |
| R1 | boundary | administrative | foobar | 8 | (10,11,12,13,10) |
| dups |
| 1 |
Then results contain
- | osm_type |
- | N |
+ | osm |
+ | N1 |
Scenario: Add linked place when linking relation is removed
Given the 0.1 grid
| dups |
| 1 |
Then results contain
- | osm_type |
- | R |
+ | osm |
+ | R1 |
When marking for delete R1
Then placex contains
| object | linked_place_id |
| dups |
| 1 |
Then results contain
- | osm_type |
- | N |
+ | osm |
+ | N1 |
Scenario: Remove linked place when linking relation is added
Given the 0.1 grid
| dups |
| 1 |
Then results contain
- | osm_type |
- | N |
+ | osm |
+ | N1 |
When updating places
| osm | class | type | name | admin | geometry |
| R1 | boundary | administrative | foo | 8 | (10,11,12,13,10) |
| dups |
| 1 |
Then results contain
- | osm_type |
- | R |
+ | osm |
+ | R1 |
Scenario: Remove linked place when linking relation is renamed
Given the 0.1 grid
| dups |
| 1 |
Then results contain
- | osm_type |
- | N |
+ | osm |
+ | N1 |
When updating places
| osm | class | type | name | admin | geometry |
| R1 | boundary | administrative | foo | 8 | (10,11,12,13,10) |
| dups |
| 1 |
Then results contain
- | osm_type |
- | R |
+ | osm |
+ | R1 |
Scenario: Update linking relation when linkee name is updated
Given the 0.1 grid
| R1 | boundary | administrative | rel | 8 | (10,11,12,13,10) |
And the places
| osm | class | type | name+name:de |
- | N3 | place | city | pnt |
+ | N3 | place | city | greeny |
And the relations
| id | members |
| 1 | N3:label |
When importing
Then placex contains
| object | linked_place_id | name+_place_name:de |
- | R1 | - | pnt |
+ | R1 | - | greeny |
And placex contains
| object | linked_place_id | name+name:de |
- | N3 | R1 | pnt |
+ | N3 | R1 | greeny |
When updating places
| osm | class | type | name+name:de |
| N3 | place | city | newname |
| R1 | boundary | administrative | rel | 8 | (10,11,12,13,10) |
And the places
| osm | class | type | name |
- | N3 | place | city | pnt |
+ | N3 | place | city | greeny |
And the relations
| id | members |
| 1 | N3:label |
When importing
Then placex contains
| object | linked_place_id | name+_place_name | name+name |
- | R1 | - | pnt | rel |
+ | R1 | - | greeny | rel |
And placex contains
| object | linked_place_id | name+name |
- | N3 | R1 | pnt |
- When sending search query "pnt"
+ | N3 | R1 | greeny |
+ When sending search query "greeny"
Then results contain
| osm |
| R1 |
And placex contains
| object | linked_place_id | name+_place_name:de | name+name |
| R1 | - | depnt | rel |
- When sending search query "pnt"
+ When sending search query "greeny"
Then exactly 0 results are returned
Scenario: Updating linkee extratags keeps linker's extratags
When marking for delete N1
Then placex contains
| object | extratags |
- | R1 | |
+ | R1 | - |
Scenario: Update linked_place info when linkee type changes
Given the 0.1 grid
| 4 | 3 |
Given the places
| osm | class | type | postcode | geometry |
- | R1 | boundary | postal_code | 12345 | (1,2,3,4,1) |
+ | R1 | boundary | postal_code | 123-45 | (1,2,3,4,1) |
When importing
- And sending search query "12345"
+ And sending search query "123-45"
Then results contain
| ID | osm |
| 0 | R1 |
| country | postcode | geometry |
| de | 01982 | country:de |
And there are word tokens for postcodes 01982
+
+ Scenario: When a parent is deleted, the postcode gets a new parent
+ Given the grid with origin DE
+ | 1 | | 3 | 4 |
+ | | 9 | | |
+ | 2 | | 5 | 6 |
+ Given the places
+ | osm | class | type | name | admin | geometry |
+ | R1 | boundary | administrative | Big | 6 | (1,4,6,2,1) |
+ | R2 | boundary | administrative | Small | 6 | (1,3,5,2,1) |
+ Given the named places
+ | osm | class | type | addr+postcode | geometry |
+ | N9 | place | postcode | 12345 | 9 |
+ When importing
+ And updating postcodes
+ Then location_postcode contains exactly
+ | country | postcode | geometry | parent_place_id |
+ | de | 12345 | 9 | R2 |
+ When marking for delete R2
+ Then location_postcode contains exactly
+ | country | postcode | geometry | parent_place_id |
+ | de | 12345 | 9 | R1 |
-# SPDX-License-Identifier: GPL-2.0-only
+# SPDX-License-Identifier: GPL-3.0-or-later
#
# This file is part of Nominatim. (https://nominatim.org)
#
-# Copyright (C) 2022 by the Nominatim developer community.
+# Copyright (C) 2024 by the Nominatim developer community.
# For a full list of authors see the git log.
from pathlib import Path
+import sys
from behave import *
+sys.path.insert(1, str(Path(__file__, '..', '..', '..', 'src').resolve()))
+
from steps.geometry_factory import GeometryFactory
from steps.nominatim_environment import NominatimEnvironment
-TEST_BASE_DIR = Path(__file__) / '..' / '..'
+TEST_BASE_DIR = Path(__file__, '..', '..').resolve()
userconfig = {
- 'BUILDDIR' : (TEST_BASE_DIR / '..' / 'build').resolve(),
'REMOVE_TEMPLATE' : False,
'KEEP_TEST_DB' : False,
'DB_HOST' : None,
'TEMPLATE_DB' : 'test_template_nominatim',
'TEST_DB' : 'test_nominatim',
'API_TEST_DB' : 'test_api_nominatim',
- 'API_TEST_FILE' : (TEST_BASE_DIR / 'testdb' / 'apidb-test-data.pbf').resolve(),
- 'SERVER_MODULE_PATH' : None,
+ 'API_TEST_FILE' : TEST_BASE_DIR / 'testdb' / 'apidb-test-data.pbf',
'TOKENIZER' : None, # Test with a custom tokenizer
- 'PHPCOV' : False, # set to output directory to enable code coverage
+ 'STYLE' : 'extratags',
+ 'API_ENGINE': 'falcon'
}
use_step_matcher("re")
def before_scenario(context, scenario):
- if 'DB' in context.tags:
+ if not 'SQLITE' in context.tags \
+ and context.config.userdata['API_TEST_DB'].startswith('sqlite:'):
+ context.scenario.skip("Not usable with Sqlite database.")
+ elif 'DB' in context.tags:
context.nominatim.setup_db(context)
elif 'APIDB' in context.tags:
context.nominatim.setup_api_db()
def after_scenario(context, scenario):
if 'DB' in context.tags:
context.nominatim.teardown_db(context)
-
-
-def before_tag(context, tag):
- if tag == 'fail-legacy':
- if context.config.userdata['TOKENIZER'] == 'legacy':
- context.scenario.skip("Not implemented in legacy tokenizer")
--- /dev/null
+@DB
+Feature: Import with custom styles by osm2pgsql
+ Tests for the example customizations given in the documentation.
+
+ Scenario: Custom main tags (set new ones)
+ Given the lua style file
+ """
+ local flex = require('import-full')
+
+ flex.set_main_tags{
+ boundary = {administrative = 'named'},
+ highway = {'always', street_lamp = 'named'},
+ landuse = 'fallback'
+ }
+ """
+ When loading osm data
+ """
+ n10 Tboundary=administrative x0 y0
+ n11 Tboundary=administrative,name=Foo x0 y0
+ n12 Tboundary=electoral x0 y0
+ n13 Thighway=primary x0 y0
+ n14 Thighway=street_lamp x0 y0
+ n15 Thighway=primary,landuse=street x0 y0
+ """
+ Then place contains exactly
+ | object | class | type |
+ | N11 | boundary | administrative |
+ | N13 | highway | primary |
+ | N15 | highway | primary |
+
+ Scenario: Custom main tags (modify existing)
+ Given the lua style file
+ """
+ local flex = require('import-full')
+
+ flex.modify_main_tags{
+ amenity = {prison = 'delete'},
+ highway = {stop = 'named'},
+ aeroway = 'named'
+ }
+ """
+ When loading osm data
+ """
+ n10 Tamenity=hotel x0 y0
+ n11 Tamenity=prison x0 y0
+ n12 Thighway=stop x0 y0
+ n13 Thighway=stop,name=BigStop x0 y0
+ n14 Thighway=give_way x0 y0
+ n15 Thighway=bus_stop x0 y0
+ n16 Taeroway=no,name=foo x0 y0
+ n17 Taeroway=taxiway,name=D15 x0 y0
+ """
+ Then place contains exactly
+ | object | class | type |
+ | N10 | amenity | hotel |
+ | N13 | highway | stop |
+ | N15 | highway | bus_stop |
+ | N17 | aeroway | taxiway |
+
+ Scenario: Prefiltering tags
+ Given the lua style file
+ """
+ local flex = require('import-full')
+
+ flex.set_prefilters{
+ delete_keys = {'source', 'source:*'},
+ extra_tags = {amenity = {'yes', 'no'}}
+ }
+ flex.set_main_tags{
+ amenity = 'always',
+ tourism = 'always'
+ }
+ """
+ When loading osm data
+ """
+ n1 Tamenity=yes x0 y6
+ n2 Tamenity=hospital,source=survey x3 y6
+ n3 Ttourism=hotel,amenity=yes x0 y0
+ n4 Ttourism=hotel,amenity=telephone x0 y0
+ """
+ Then place contains exactly
+ | object | extratags |
+ | N2:amenity | - |
+ | N3:tourism | 'amenity': 'yes' |
+ | N4:tourism | - |
+ | N4:amenity | - |
+
+ Scenario: Ignore some tags
+ Given the lua style file
+ """
+ local flex = require('import-extratags')
+
+ flex.ignore_keys{'ref:*', 'surface'}
+ """
+ When loading osm data
+ """
+ n100 Thighway=residential,ref=34,ref:bodo=34,surface=gray,extra=1 x0 y0
+ """
+ Then place contains exactly
+ | object | name | extratags |
+ | N100 | 'ref' : '34' | 'extra': '1' |
+
+
+ Scenario: Add for extratags
+ Given the lua style file
+ """
+ local flex = require('import-full')
+
+ flex.add_for_extratags{'ref:*', 'surface'}
+ """
+ When loading osm data
+ """
+ n100 Thighway=residential,ref=34,ref:bodo=34,surface=gray,extra=1 x0 y0
+ """
+ Then place contains exactly
+ | object | name | extratags |
+ | N100 | 'ref' : '34' | 'ref:bodo': '34', 'surface': 'gray' |
+
+
+ Scenario: Name tags
+ Given the lua style file
+ """
+ local flex = require('flex-base')
+
+ flex.set_main_tags{highway = {traffic_light = 'named'}}
+ flex.set_name_tags{main = {'name', 'name:*'},
+ extra = {'ref'}
+ }
+ """
+ When loading osm data
+ """
+ n1 Thighway=stop,name=Something x0 y0
+ n2 Thighway=traffic_light,ref=453-4 x0 y0
+ n3 Thighway=traffic_light,name=Greens x0 y0
+ n4 Thighway=traffic_light,name=Red,ref=45 x0 y0
+ """
+ Then place contains exactly
+ | object | name |
+ | N3:highway | 'name': 'Greens' |
+ | N4:highway | 'name': 'Red', 'ref': '45' |
+
+ Scenario: Modify name tags
+ Given the lua style file
+ """
+ local flex = require('import-full')
+
+ flex.modify_name_tags{house = {}, extra = {'o'}}
+ """
+ When loading osm data
+ """
+ n1 Ttourism=hotel,ref=45,o=good
+ n2 Taddr:housename=Old,addr:street=Away
+ """
+ Then place contains exactly
+ | object | name |
+ | N1:tourism | 'o': 'good' |
+
+ Scenario: Address tags
+ Given the lua style file
+ """
+ local flex = require('import-full')
+
+ flex.set_address_tags{
+ main = {'addr:housenumber'},
+ extra = {'addr:*'},
+ postcode = {'postal_code', 'postcode', 'addr:postcode'},
+ country = {'country-code', 'ISO3166-1'}
+ }
+ """
+ When loading osm data
+ """
+ n1 Ttourism=hotel,addr:street=Foo x0 y0
+ n2 Taddr:housenumber=23,addr:street=Budd,postal_code=5567 x0 y0
+ n3 Taddr:street=None,addr:city=Where x0 y0
+ """
+ Then place contains exactly
+ | object | type | address |
+ | N1:tourism | hotel | 'street': 'Foo' |
+ | N2:place | house | 'housenumber': '23', 'street': 'Budd', 'postcode': '5567' |
+
+ Scenario: Modify address tags
+ Given the lua style file
+ """
+ local flex = require('import-full')
+
+ flex.set_address_tags{
+ extra = {'addr:*'},
+ }
+ """
+ When loading osm data
+ """
+ n2 Taddr:housenumber=23,addr:street=Budd,is_in:city=Faraway,postal_code=5567 x0 y0
+ """
+ Then place contains exactly
+ | object | type | address |
+ | N2:place | house | 'housenumber': '23', 'street': 'Budd', 'postcode': '5567' |
+
+ Scenario: Unused handling (delete)
+ Given the lua style file
+ """
+ local flex = require('import-full')
+
+ flex.set_address_tags{
+ main = {'addr:housenumber'},
+ extra = {'addr:*', 'tiger:county'}
+ }
+ flex.set_unused_handling{delete_keys = {'tiger:*'}}
+ """
+ When loading osm data
+ """
+ n1 Ttourism=hotel,tiger:county=Fargo x0 y0
+ n2 Ttourism=hotel,tiger:xxd=56,else=other x0 y0
+ """
+ Then place contains exactly
+ | object | type | address | extratags |
+ | N1:tourism | hotel | 'tiger:county': 'Fargo' | - |
+ | N2:tourism | hotel | - | 'else': 'other' |
+
+ Scenario: Unused handling (extra)
+ Given the lua style file
+ """
+ local flex = require('flex-base')
+ flex.set_main_tags{highway = 'always',
+ wikipedia = 'extra'}
+ flex.add_for_extratags{'wikipedia:*', 'wikidata'}
+ flex.set_unused_handling{extra_keys = {'surface'}}
+ """
+ When loading osm data
+ """
+ n100 Thighway=path,foo=bar,wikipedia=en:Path x0 y0
+ n234 Thighway=path,surface=rough x0 y0
+ n445 Thighway=path,name=something x0 y0
+ n446 Thighway=path,wikipedia:en=Path,wikidata=Q23 x0 y0
+ n567 Thighway=path,surface=dirt,wikipedia:en=Path x0 y0
+ """
+ Then place contains exactly
+ | object | type | extratags |
+ | N100:highway | path | 'wikipedia': 'en:Path' |
+ | N234:highway | path | 'surface': 'rough' |
+ | N445:highway | path | - |
+ | N446:highway | path | 'wikipedia:en': 'Path', 'wikidata': 'Q23' |
+ | N567:highway | path | 'surface': 'dirt', 'wikipedia:en': 'Path' |
+
+ Scenario: Additional relation types
+ Given the lua style file
+ """
+ local flex = require('import-full')
+
+ flex.RELATION_TYPES['site'] = flex.relation_as_multipolygon
+ """
+ And the grid
+ | 1 | 2 |
+ | 4 | 3 |
+ When loading osm data
+ """
+ n1
+ n2
+ n3
+ n4
+ w1 Nn1,n2,n3,n4,n1
+ r1 Ttype=multipolygon,amenity=school Mw1@
+ r2 Ttype=site,amenity=school Mw1@
+ """
+ Then place contains exactly
+ | object | type |
+ | R1:amenity | school |
+ | R2:amenity | school |
+
+ Scenario: Exclude country relations
+ Given the lua style file
+ """
+ local flex = require('import-full')
+
+ function osm2pgsql.process_relation(object)
+ if object.tags.boundary ~= 'administrative' or object.tags.admin_level ~= '2' then
+ flex.process_relation(object)
+ end
+ end
+ """
+ And the grid
+ | 1 | 2 |
+ | 4 | 3 |
+ When loading osm data
+ """
+ n1
+ n2
+ n3
+ n4
+ w1 Nn1,n2,n3,n4,n1
+ r1 Ttype=multipolygon,boundary=administrative,admin_level=4,name=Small Mw1@
+ r2 Ttype=multipolygon,boundary=administrative,admin_level=2,name=Big Mw1@
+ """
+ Then place contains exactly
+ | object | type |
+ | R1:boundary | administrative |
+
+ Scenario: Customize processing functions
+ Given the lua style file
+ """
+ local flex = require('import-full')
+
+ local original_process_tags = flex.process_tags
+
+ function flex.process_tags(o)
+ if o.object.tags.highway ~= nil and o.object.tags.access == 'no' then
+ return
+ end
+
+ original_process_tags(o)
+ end
+ """
+ When loading osm data
+ """
+ n1 Thighway=residential x0 y0
+ n2 Thighway=residential,access=no x0 y0
+ """
+ Then place contains exactly
+ | object | type |
+ | N1:highway | residential |
--- /dev/null
+@DB
+Feature: Tag evaluation
+ Tests if tags are correctly imported into the place table
+
+ Scenario: Main tags as fallback
+ When loading osm data
+ """
+ n100 Tjunction=yes,highway=bus_stop
+ n101 Tjunction=yes,name=Bar
+ n200 Tbuilding=yes,amenity=cafe
+ n201 Tbuilding=yes,name=Intersting
+ n202 Tbuilding=yes
+ """
+ Then place contains exactly
+ | object | class | type |
+ | N100 | highway | bus_stop |
+ | N101 | junction | yes |
+ | N200 | amenity | cafe |
+ | N201 | building | yes |
+
+
 +    Scenario: Name and ref tags
+ When loading osm data
+ """
+ n2001 Thighway=road,name=Foo,alt_name:de=Bar,ref=45
+ n2002 Thighway=road,name:prefix=Pre,name:suffix=Post,ref:de=55
+ n2003 Thighway=yes,name:%20%de=Foo,name=real1
+ n2004 Thighway=yes,name:%a%de=Foo,name=real2
+ n2005 Thighway=yes,name:%9%de=Foo,name:\\=real3
+ n2006 Thighway=yes,name:%9%de=Foo,name=rea\l3
+ """
+ Then place contains exactly
+ | object | class | type | name |
+ | N2001 | highway | road | 'name': 'Foo', 'alt_name:de': 'Bar', 'ref': '45' |
+ | N2002 | highway | road | - |
+ | N2003 | highway | yes | 'name: de': 'Foo', 'name': 'real1' |
+ | N2004 | highway | yes | 'name:\nde': 'Foo', 'name': 'real2' |
+ | N2005 | highway | yes | 'name:\tde': 'Foo', 'name:\\\\': 'real3' |
+ | N2006 | highway | yes | 'name:\tde': 'Foo', 'name': 'rea\\l3' |
+
+ And place contains
+ | object | extratags |
+ | N2002 | 'name:prefix': 'Pre', 'name:suffix': 'Post', 'ref:de': '55' |
+
+
+ Scenario: Name when using with_name flag
+ When loading osm data
+ """
+ n3001 Tbridge=yes,bridge:name=GoldenGate
+ n3002 Tbridge=yes,bridge:name:en=Rainbow
+ """
+ Then place contains exactly
+ | object | class | type | name |
+ | N3001 | bridge | yes | 'name': 'GoldenGate' |
+ | N3002 | bridge | yes | 'name:en': 'Rainbow' |
+
+
+ Scenario: Address tags
+ When loading osm data
+ """
+ n4001 Taddr:housenumber=34,addr:city=Esmarald,addr:county=Land
+ n4002 Taddr:streetnumber=10,is_in:city=Rootoo,is_in=Gold
+ """
+ Then place contains exactly
+ | object | class | address |
+ | N4001 | place | 'housenumber': '34', 'city': 'Esmarald', 'county': 'Land' |
+ | N4002 | place | 'streetnumber': '10', 'city': 'Rootoo' |
+
+
+ Scenario: Country codes
+ When loading osm data
+ """
+ n5001 Tshop=yes,country_code=DE
+ n5002 Tshop=yes,country_code=toolong
+ n5003 Tshop=yes,country_code=x
+ n5004 Tshop=yes,addr:country=us
+ n5005 Tshop=yes,country=be
+ n5006 Tshop=yes,addr:country=France
+ """
+ Then place contains exactly
+ | object | class | address |
+ | N5001 | shop | 'country': 'DE' |
+ | N5002 | shop | - |
+ | N5003 | shop | - |
+ | N5004 | shop | 'country': 'us' |
+ | N5005 | shop | - |
+ | N5006 | shop | - |
+
+
+ Scenario: Postcodes
+ When loading osm data
+ """
+ n6001 Tshop=bank,addr:postcode=12345
+ n6002 Tshop=bank,tiger:zip_left=34343
+ n6003 Tshop=bank,is_in:postcode=9009
+ """
+ Then place contains exactly
+ | object | class | address |
+ | N6001 | shop | 'postcode': '12345' |
+ | N6002 | shop | 'postcode': '34343' |
+ | N6003 | shop | - |
+
+
+ Scenario: Postcode areas
+ When loading osm data
+ """
+ n1 x12.36853 y51.50618
+ n2 x12.36853 y51.42362
+ n3 x12.63666 y51.42362
+ n4 x12.63666 y51.50618
+ w1 Tboundary=postal_code,ref=3456 Nn1,n2,n3,n4,n1
+ """
+ Then place contains exactly
+ | object | class | type | name |
+ | W1 | boundary | postal_code | 'ref': '3456' |
+
+ Scenario: Main with extra
+ When loading osm data
+ """
+ n7001 Thighway=primary,bridge=yes,name=1
+ n7002 Thighway=primary,bridge=yes,bridge:name=1
+ """
+ Then place contains exactly
+ | object | class | type | name | extratags+bridge:name |
+ | N7001 | highway | primary | 'name': '1' | - |
+ | N7002:highway | highway | primary | - | 1 |
+ | N7002:bridge | bridge | yes | 'name': '1' | 1 |
+
+
+ Scenario: Global fallback and skipping
+ When loading osm data
+ """
+ n8001 Tshop=shoes,note:de=Nein,xx=yy
+ n8002 Tshop=shoes,natural=no,ele=234
+ n8003 Tshop=shoes,name:source=survey
+ """
+ Then place contains exactly
+ | object | class | name | extratags |
+ | N8001 | shop | - | 'xx': 'yy' |
+ | N8002 | shop | - | 'ele': '234' |
+ | N8003 | shop | - | - |
+
+
+ Scenario: Admin levels
+ When loading osm data
+ """
+ n9001 Tplace=city
+ n9002 Tplace=city,admin_level=16
+ n9003 Tplace=city,admin_level=x
+ n9004 Tplace=city,admin_level=1
+ n9005 Tplace=city,admin_level=0
+ n9006 Tplace=city,admin_level=2.5
+ """
+ Then place contains exactly
+ | object | class | admin_level |
+ | N9001 | place | 15 |
+ | N9002 | place | 15 |
+ | N9003 | place | 15 |
+ | N9004 | place | 1 |
+ | N9005 | place | 15 |
+ | N9006 | place | 15 |
+
+
+ Scenario: Administrative boundaries with place tags
+ When loading osm data
+ """
+ n10001 Tboundary=administrative,place=city,name=A
+ n10002 Tboundary=natural,place=city,name=B
+ n10003 Tboundary=administrative,place=island,name=C
+ """
+ Then place contains
+ | object | class | type | extratags |
+ | N10001 | boundary | administrative | 'place': 'city' |
+ And place contains
+ | object | class | type |
+ | N10002:boundary | boundary | natural |
+ | N10002:place | place | city |
+ | N10003:boundary | boundary | administrative |
+ | N10003:place | place | island |
+
+
+ Scenario: Building fallbacks
+ When loading osm data
+ """
+ n12001 Ttourism=hotel,building=yes
+ n12002 Tbuilding=house
+ n12003 Tbuilding=shed,addr:housenumber=1
+ n12004 Tbuilding=yes,name=Das-Haus
+ n12005 Tbuilding=yes,addr:postcode=12345
+ """
+ Then place contains exactly
+ | object | class | type |
+ | N12001 | tourism | hotel |
+ | N12003 | building | shed |
+ | N12004 | building | yes |
+ | N12005 | place | postcode |
+
+
+ Scenario: Address interpolations
+ When loading osm data
+ """
+ n13001 Taddr:interpolation=odd
+ n13002 Taddr:interpolation=even,place=city
+ """
+ Then place contains exactly
+ | object | class | type | address |
+ | N13001 | place | houses | 'interpolation': 'odd' |
+ | N13002 | place | houses | 'interpolation': 'even' |
+
+
+ Scenario: Footways
+ When loading osm data
+ """
+ n1 x0.0 y0.0
+ n2 x0 y0.0001
+ w1 Thighway=footway Nn1,n2
+ w2 Thighway=footway,name=Road Nn1,n2
+ w3 Thighway=footway,name=Road,footway=sidewalk Nn1,n2
+ w4 Thighway=footway,name=Road,footway=crossing Nn1,n2
+ w5 Thighway=footway,name=Road,footway=residential Nn1,n2
+ """
+ Then place contains exactly
+ | object | name+name |
+ | W2 | Road |
+ | W5 | Road |
+
+
+ Scenario: Tourism information
+ When loading osm data
+ """
+ n100 Ttourism=information
+ n101 Ttourism=information,name=Generic
+ n102 Ttourism=information,information=guidepost
+ n103 Thighway=information,information=house
+ n104 Ttourism=information,information=yes,name=Something
+ n105 Ttourism=information,information=route_marker,name=3
+ """
+ Then place contains exactly
+ | object | type |
+ | N100:tourism | information |
+ | N101:tourism | information |
+ | N102:information | guidepost |
+ | N103:highway | information |
+ | N104:tourism | information |
+
+
+ Scenario: Water features
+ When loading osm data
+ """
+ n20 Tnatural=water
+ n21 Tnatural=water,name=SomePond
+ n22 Tnatural=water,water=pond
+ n23 Tnatural=water,water=pond,name=Pond
+ n24 Tnatural=water,water=river,name=BigRiver
+ n25 Tnatural=water,water=yes
+ n26 Tnatural=water,water=yes,name=Random
+ """
+ Then place contains exactly
+ | object | type |
+ | N21:natural | water |
+ | N23:water | pond |
+ | N26:natural | water |
+
+ Scenario: Drop name for address fallback
+ When loading osm data
+ """
+ n1 Taddr:housenumber=23,name=Foo
+ n2 Taddr:housenumber=23,addr:housename=Foo
+ n3 Taddr:housenumber=23
+ """
+ Then place contains exactly
+ | object | type | address | name |
+ | N1:place | house | 'housenumber': '23' | - |
+ | N2:place | house | 'housenumber': '23' | 'addr:housename': 'Foo' |
+ | N3:place | house | 'housenumber': '23' | - |
+
+
+ Scenario: Waterway locks
+ When loading osm data
+ """
+ n1 Twaterway=river,lock=yes
+ n2 Twaterway=river,lock=yes,lock_name=LeLock
+ n3 Twaterway=river,lock=yes,name=LeWater
+ n4 Tamenity=parking,lock=yes,lock_name=Gold
+ """
+ Then place contains exactly
+ | object | type | name |
+ | N2:lock | yes | 'name': 'LeLock' |
+ | N3:waterway | river | 'name': 'LeWater' |
+ | N4:amenity | parking | - |
--- /dev/null
+@DB
+Feature: Updates of address interpolation objects
+ Test that changes to address interpolation objects are correctly
+ propagated.
+
+ Background:
+ Given the grid
+ | 1 | 2 |
+
+
+ Scenario: Adding a new interpolation
+ When loading osm data
+ """
+ n1 Taddr:housenumber=3
+ n2 Taddr:housenumber=17
+ w33 Thighway=residential,name=Tao Nn1,n2
+ """
+ Then place contains
+ | object | type |
+ | N1:place | house |
+ | N2:place | house |
+
+ When updating osm data
+ """
+ w99 Taddr:interpolation=odd Nn1,n2
+ """
+ Then place contains
+ | object | type |
+ | N1:place | house |
+ | N2:place | house |
+ | W99:place | houses |
+ When indexing
+ Then placex contains exactly
+ | object | type |
+ | N1:place | house |
+ | N2:place | house |
+ | W33:highway | residential |
+ Then location_property_osmline contains exactly
+ | object |
+ | 99:5 |
+
+
+ Scenario: Delete an existing interpolation
+ When loading osm data
+ """
+ n1 Taddr:housenumber=2
+ n2 Taddr:housenumber=7
+ w99 Taddr:interpolation=odd Nn1,n2
+ """
+ Then place contains
+ | object | type |
+ | N1:place | house |
+ | N2:place | house |
+ | W99:place | houses |
+
+ When updating osm data
+ """
+ w99 v2 dD
+ """
+ Then place contains
+ | object | type |
+ | N1:place | house |
+ | N2:place | house |
+ When indexing
+ Then placex contains exactly
+ | object | type |
+ | N1:place | house |
+ | N2:place | house |
+ Then location_property_osmline contains exactly
+ | object | indexed_status |
+
+
+ Scenario: Changing an object to an interpolation
+ When loading osm data
+ """
+ n1 Taddr:housenumber=3
+ n2 Taddr:housenumber=17
+ w33 Thighway=residential Nn1,n2
+ w99 Thighway=residential Nn1,n2
+ """
+ Then place contains
+ | object | type |
+ | N1:place | house |
+ | N2:place | house |
+ | W99:highway | residential |
+
+ When updating osm data
+ """
+ w99 Taddr:interpolation=odd Nn1,n2
+ """
+ Then place contains
+ | object | type |
+ | N1:place | house |
+ | N2:place | house |
+ | W99:place | houses |
+ When indexing
+ Then placex contains exactly
+ | object | type |
+ | N1:place | house |
+ | N2:place | house |
+ | W33:highway | residential |
+ And location_property_osmline contains exactly
+ | object |
+ | 99:5 |
+
+
+ Scenario: Changing an interpolation to something else
+ When loading osm data
+ """
+ n1 Taddr:housenumber=3
+ n2 Taddr:housenumber=17
+ w99 Taddr:interpolation=odd Nn1,n2
+ """
+ Then place contains
+ | object | type |
+ | N1:place | house |
+ | N2:place | house |
+ | W99:place | houses |
+
+ When updating osm data
+ """
+ w99 Thighway=residential Nn1,n2
+ """
+ Then place contains
+ | object | type |
+ | N1:place | house |
+ | N2:place | house |
+ | W99:highway | residential |
+ When indexing
+ Then placex contains exactly
+ | object | type |
+ | N1:place | house |
+ | N2:place | house |
+ | W99:highway | residential |
+ And location_property_osmline contains exactly
+ | object |
+
--- /dev/null
+@DB
+Feature: Update of postcode only objects
+ Tests that changes to objects containing only a postcode are
+ propagated correctly.
+
+
+ Scenario: Adding a postcode-only node
+ When loading osm data
+ """
+ """
+ Then place contains exactly
+ | object |
+
+ When updating osm data
+ """
+ n34 Tpostcode=4456
+ """
+ Then place contains exactly
+ | object | type |
+ | N34:place | postcode |
+ When indexing
+ Then placex contains exactly
+ | object |
+
+
+ Scenario: Deleting a postcode-only node
+ When loading osm data
+ """
+ n34 Tpostcode=4456
+ """
+ Then place contains exactly
+ | object | type |
+ | N34:place | postcode |
+
+ When updating osm data
+ """
+ n34 v2 dD
+ """
+ Then place contains exactly
+ | object |
+ When indexing
+ Then placex contains exactly
+ | object |
+
+
+ Scenario Outline: Converting a regular object into a postcode-only node
+ When loading osm data
+ """
+ n34 T<class>=<type>
+ """
+ Then place contains exactly
+ | object | type |
+ | N34:<class> | <type> |
+
+ When updating osm data
+ """
+ n34 Tpostcode=4456
+ """
+ Then place contains exactly
+ | object | type |
+ | N34:place | postcode |
+ When indexing
+ Then placex contains exactly
+ | object |
+
+ Examples:
+ | class | type |
+ | amenity | restaurant |
+ | place | hamlet |
+
+
+ Scenario Outline: Converting a postcode-only node into a regular object
+ When loading osm data
+ """
+ n34 Tpostcode=4456
+ """
+ Then place contains exactly
+ | object | type |
+ | N34:place | postcode |
+
+ When updating osm data
+ """
+ n34 T<class>=<type>
+ """
+ Then place contains exactly
+ | object | type |
+ | N34:<class> | <type> |
+ When indexing
+ Then placex contains exactly
+ | object | type |
+ | N34:<class> | <type> |
+
+ Examples:
+ | class | type |
+ | amenity | restaurant |
+ | place | hamlet |
+
+
 +    Scenario: Converting an interpolation into a postcode-only node
+ Given the grid
+ | 1 | 2 |
+ When loading osm data
+ """
+ n1 Taddr:housenumber=3
+ n2 Taddr:housenumber=17
+ w34 Taddr:interpolation=odd Nn1,n2
+ """
+ Then place contains exactly
+ | object | type |
+ | N1:place | house |
+ | N2:place | house |
+ | W34:place | houses |
+
+ When updating osm data
+ """
+ w34 Tpostcode=4456 Nn1,n2
+ """
+ Then place contains exactly
+ | object | type |
+ | N1:place | house |
+ | N2:place | house |
+ | W34:place | postcode |
+ When indexing
+ Then location_property_osmline contains exactly
+ | object |
+ And placex contains exactly
+ | object | type |
+ | N1:place | house |
+ | N2:place | house |
+
+
+ Scenario: Converting a postcode-only node into an interpolation
+ Given the grid
+ | 1 | 2 |
+ When loading osm data
+ """
+ n1 Taddr:housenumber=3
+ n2 Taddr:housenumber=17
+ w33 Thighway=residential Nn1,n2
+ w34 Tpostcode=4456 Nn1,n2
+ """
+ Then place contains exactly
+ | object | type |
+ | N1:place | house |
+ | N2:place | house |
+ | W33:highway | residential |
+ | W34:place | postcode |
+
+ When updating osm data
+ """
+ w34 Taddr:interpolation=odd Nn1,n2
+ """
+ Then place contains exactly
+ | object | type |
+ | N1:place | house |
+ | N2:place | house |
+ | W33:highway | residential |
+ | W34:place | houses |
+ When indexing
+ Then location_property_osmline contains exactly
+ | object |
+ | 34:5 |
+ And placex contains exactly
+ | object | type |
+ | N1:place | house |
+ | N2:place | house |
+ | W33:highway | residential |
Feature: Update of simple objects by osm2pgsql
Testing basic update functions of osm2pgsql.
- Scenario: Import object with two main tags
+ Scenario: Adding a new object
When loading osm data
"""
- n1 Ttourism=hotel,amenity=restaurant,name=foo
- n2 Tplace=locality,name=spotty
+ n1 Tplace=town,name=Middletown
"""
- Then place contains
- | object | type | name+name |
- | N1:tourism | hotel | foo |
- | N1:amenity | restaurant | foo |
- | N2:place | locality | spotty |
- When updating osm data
- """
- n1 dV Ttourism=hotel,name=foo
- n2 dD
- """
- Then place has no entry for N1:amenity
- And place has no entry for N2
- And place contains
- | object | class | type | name |
- | N1:tourism | tourism | hotel | 'name' : 'foo' |
+ Then place contains exactly
+ | object | type | name+name |
+ | N1:place | town | Middletown |
+
+ When updating osm data
+ """
+ n2 Tamenity=hotel,name=Posthotel
+ """
+ Then place contains exactly
+ | object | type | name+name |
+ | N1:place | town | Middletown |
+ | N2:amenity | hotel | Posthotel |
+ And placex contains exactly
+ | object | type | name+name | indexed_status |
+ | N1:place | town | Middletown | 0 |
+ | N2:amenity | hotel | Posthotel | 1 |
- Scenario: Downgrading a highway to one that is dropped without name
- When loading osm data
- """
- n100 x0 y0
- n101 x0.0001 y0.0001
- w1 Thighway=residential Nn100,n101
- """
- Then place contains
- | object |
- | W1:highway |
- When updating osm data
- """
- w1 Thighway=service Nn100,n101
- """
- Then place has no entry for W1
- Scenario: Downgrading a highway when a second tag is present
+ Scenario: Deleting an existing object
When loading osm data
"""
- n100 x0 y0
- n101 x0.0001 y0.0001
- w1 Thighway=residential,tourism=hotel Nn100,n101
- """
- Then place contains
- | object |
- | W1:highway |
- | W1:tourism |
- When updating osm data
+ n1 Tplace=town,name=Middletown
+ n2 Tamenity=hotel,name=Posthotel
"""
- w1 Thighway=service,tourism=hotel Nn100,n101
- """
- Then place has no entry for W1:highway
- And place contains
- | object |
- | W1:tourism |
+ Then place contains exactly
+ | object | type | name+name |
+ | N1:place | town | Middletown |
+ | N2:amenity | hotel | Posthotel |
+
+ When updating osm data
+ """
+ n2 dD
+ """
+ Then place contains exactly
+ | object | type | name+name |
+ | N1:place | town | Middletown |
+ And placex contains exactly
+ | object | type | name+name | indexed_status |
+ | N1:place | town | Middletown | 0 |
+ | N2:amenity | hotel | Posthotel | 100 |
--- /dev/null
+@DB
+Feature: Tag evaluation
+ Tests if tags are correctly updated in the place table
+
+ Background:
+ Given the grid
+ | 1 | 2 | 3 |
+ | 10 | 11 | |
+ | 45 | 46 | |
+
+ Scenario: Main tag deleted
+ When loading osm data
+ """
+ n1 Tamenity=restaurant
+ n2 Thighway=bus_stop,railway=stop,name=X
+ n3 Tamenity=prison
+ """
+ Then place contains exactly
+ | object | class | type |
+ | N1 | amenity | restaurant |
+ | N2:highway | highway | bus_stop |
+ | N2:railway | railway | stop |
+ | N3 | amenity | prison |
+
+ When updating osm data
+ """
+ n1 Tnot_a=restaurant
+ n2 Thighway=bus_stop,name=X
+ """
+ Then place contains exactly
+ | object | class | type |
+ | N2:highway | highway | bus_stop |
+ | N3 | amenity | prison |
+ And placex contains
+ | object | indexed_status |
+ | N3:amenity | 0 |
+ When indexing
+ Then placex contains exactly
+ | object | type | name |
+ | N2:highway | bus_stop | 'name': 'X' |
+ | N3:amenity | prison | - |
+
+
+ Scenario: Main tag added
+ When loading osm data
+ """
+ n1 Tatity=restaurant
+ n2 Thighway=bus_stop,name=X
+ """
+ Then place contains exactly
+ | object | class | type |
+ | N2:highway | highway | bus_stop |
+
+ When updating osm data
+ """
+ n1 Tamenity=restaurant
+ n2 Thighway=bus_stop,railway=stop,name=X
+ """
+ Then place contains exactly
+ | object | class | type |
+ | N1 | amenity | restaurant |
+ | N2:highway | highway | bus_stop |
+ | N2:railway | railway | stop |
+ When indexing
+ Then placex contains exactly
+ | object | type | name |
+ | N1:amenity | restaurant | - |
+ | N2:highway | bus_stop | 'name': 'X' |
+ | N2:railway | stop | 'name': 'X' |
+
+
+ Scenario: Main tag modified
+ When loading osm data
+ """
+ n10 Thighway=footway,name=X
+ n11 Tamenity=atm
+ """
+ Then place contains exactly
+ | object | class | type |
+ | N10 | highway | footway |
+ | N11 | amenity | atm |
+
+ When updating osm data
+ """
+ n10 Thighway=path,name=X
+ n11 Thighway=primary
+ """
+ Then place contains exactly
+ | object | class | type |
+ | N10 | highway | path |
+ | N11 | highway | primary |
+ When indexing
+ Then placex contains exactly
+ | object | type | name |
+ | N10:highway | path | 'name': 'X' |
+ | N11:highway | primary | - |
+
+
+ Scenario: Main tags with name, name added
+ When loading osm data
+ """
+ n45 Tlanduse=cemetry
+ n46 Tbuilding=yes
+ """
+ Then place contains exactly
+ | object | class | type |
+
+ When updating osm data
+ """
+ n45 Tlanduse=cemetry,name=TODO
+ n46 Tbuilding=yes,addr:housenumber=1
+ """
+ Then place contains exactly
+ | object | class | type |
+ | N45 | landuse | cemetry |
+ | N46 | building| yes |
+ When indexing
+ Then placex contains exactly
+ | object | type | name | address |
+ | N45:landuse | cemetry | 'name': 'TODO' | - |
+ | N46:building| yes | - | 'housenumber': '1' |
+
+
+ Scenario: Main tags with name, name removed
+ When loading osm data
+ """
+ n45 Tlanduse=cemetry,name=TODO
+ n46 Tbuilding=yes,addr:housenumber=1
+ """
+ Then place contains exactly
+ | object | class | type |
+ | N45 | landuse | cemetry |
+ | N46 | building| yes |
+
+ When updating osm data
+ """
+ n45 Tlanduse=cemetry
+ n46 Tbuilding=yes
+ """
+ Then place contains exactly
+ | object | class | type |
+ When indexing
+ Then placex contains exactly
+ | object |
+
+ Scenario: Main tags with name, name modified
+ When loading osm data
+ """
+ n45 Tlanduse=cemetry,name=TODO
+ n46 Tbuilding=yes,addr:housenumber=1
+ """
+ Then place contains exactly
+ | object | class | type | name | address |
+ | N45 | landuse | cemetry | 'name' : 'TODO' | - |
+ | N46 | building| yes | - | 'housenumber': '1'|
+
+ When updating osm data
+ """
+ n45 Tlanduse=cemetry,name=DONE
+ n46 Tbuilding=yes,addr:housenumber=10
+ """
+ Then place contains exactly
+ | object | class | type | name | address |
+ | N45 | landuse | cemetry | 'name' : 'DONE' | - |
+ | N46 | building| yes | - | 'housenumber': '10'|
+ When indexing
+ Then placex contains exactly
+ | object | class | type | name | address |
+ | N45 | landuse | cemetry | 'name' : 'DONE' | - |
+ | N46 | building| yes | - | 'housenumber': '10'|
+
+
+ Scenario: Main tag added to address only node
+ When loading osm data
+ """
+ n1 Taddr:housenumber=345
+ """
+ Then place contains exactly
+ | object | class | type | address |
+ | N1 | place | house | 'housenumber': '345'|
+
+ When updating osm data
+ """
+ n1 Taddr:housenumber=345,building=yes
+ """
+ Then place contains exactly
+ | object | class | type | address |
+ | N1 | building | yes | 'housenumber': '345'|
+ When indexing
+ Then placex contains exactly
+ | object | class | type | address |
+ | N1 | building | yes | 'housenumber': '345'|
+
+
+ Scenario: Main tag removed from address only node
+ When loading osm data
+ """
+ n1 Taddr:housenumber=345,building=yes
+ """
+ Then place contains exactly
+ | object | class | type | address |
+ | N1 | building | yes | 'housenumber': '345'|
+
+ When updating osm data
+ """
+ n1 Taddr:housenumber=345
+ """
+ Then place contains exactly
+ | object | class | type | address |
+ | N1 | place | house | 'housenumber': '345'|
+ When indexing
+ Then placex contains exactly
+ | object | class | type | address |
+ | N1 | place | house | 'housenumber': '345'|
+
+
+ Scenario: Main tags with name key, adding key name
+ When loading osm data
+ """
+ n2 Tbridge=yes
+ """
+ Then place contains exactly
+ | object | class | type |
+
+ When updating osm data
+ """
+ n2 Tbridge=yes,bridge:name=high
+ """
+ Then place contains exactly
+ | object | class | type | name |
+ | N2 | bridge | yes | 'name': 'high' |
+ When indexing
+ Then placex contains exactly
+ | object | class | type | name |
+ | N2 | bridge | yes | 'name': 'high' |
+
+
+ Scenario: Main tags with name key, deleting key name
+ When loading osm data
+ """
+ n2 Tbridge=yes,bridge:name=high
+ """
+ Then place contains exactly
+ | object | class | type | name |
+ | N2 | bridge | yes | 'name': 'high' |
+
+ When updating osm data
+ """
+ n2 Tbridge=yes
+ """
+ Then place contains exactly
+ | object |
+ When indexing
+ Then placex contains exactly
+ | object |
+
+
+ Scenario: Main tags with name key, changing key name
+ When loading osm data
+ """
+ n2 Tbridge=yes,bridge:name=high
+ """
+ Then place contains exactly
+ | object | class | type | name |
+ | N2 | bridge | yes | 'name': 'high' |
+
+ When updating osm data
+ """
+ n2 Tbridge=yes,bridge:name:en=high
+ """
+ Then place contains exactly
+ | object | class | type | name |
+ | N2 | bridge | yes | 'name:en': 'high' |
+ When indexing
+ Then placex contains exactly
+ | object | class | type | name |
+ | N2 | bridge | yes | 'name:en': 'high' |
+
+
+ Scenario: Downgrading a highway to one that is dropped without name
+ When loading osm data
+ """
+ n100 x0 y0
+ n101 x0.0001 y0.0001
+ w1 Thighway=residential Nn100,n101
+ """
+ Then place contains exactly
+ | object |
+ | W1:highway |
+
+ When updating osm data
+ """
+ w1 Thighway=service Nn100,n101
+ """
+ Then place contains exactly
+ | object |
+ When indexing
+ Then placex contains exactly
+ | object |
+
+
+ Scenario: Upgrading a highway to one that is not dropped without name
+ When loading osm data
+ """
+ n100 x0 y0
+ n101 x0.0001 y0.0001
+ w1 Thighway=service Nn100,n101
+ """
+ Then place contains exactly
+ | object |
+
+ When updating osm data
+ """
+ w1 Thighway=unclassified Nn100,n101
+ """
+ Then place contains exactly
+ | object |
+ | W1:highway |
+ When indexing
+ Then placex contains exactly
+ | object |
+ | W1:highway |
+
+
+ Scenario: Downgrading a highway when a second tag is present
+ When loading osm data
+ """
+ n100 x0 y0
+ n101 x0.0001 y0.0001
+ w1 Thighway=residential,tourism=hotel Nn100,n101
+ """
+ Then place contains exactly
+ | object | type |
+ | W1:highway | residential |
+ | W1:tourism | hotel |
+
+ When updating osm data
+ """
+ w1 Thighway=service,tourism=hotel Nn100,n101
+ """
+ Then place contains exactly
+ | object | type |
+ | W1:tourism | hotel |
+ When indexing
+ Then placex contains exactly
+ | object | type |
+ | W1:tourism | hotel |
+
+
+ Scenario: Upgrading a highway when a second tag is present
+ When loading osm data
+ """
+ n100 x0 y0
+ n101 x0.0001 y0.0001
+ w1 Thighway=service,tourism=hotel Nn100,n101
+ """
+ Then place contains exactly
+ | object | type |
+ | W1:tourism | hotel |
+
+ When updating osm data
+ """
+ w1 Thighway=residential,tourism=hotel Nn100,n101
+ """
+ Then place contains exactly
+ | object | type |
+ | W1:highway | residential |
+ | W1:tourism | hotel |
+ When indexing
+ Then placex contains exactly
+ | object | type |
+ | W1:highway | residential |
+ | W1:tourism | hotel |
+
+
+ Scenario: Replay on administrative boundary
+ When loading osm data
+ """
+ n10 x34.0 y-4.23
+ n11 x34.1 y-4.23
+ n12 x34.2 y-4.13
+ w10 Tboundary=administrative,waterway=river,name=Border,admin_level=2 Nn12,n11,n10
+ """
+ Then place contains exactly
+ | object | type | admin_level | name |
+ | W10:waterway | river | 2 | 'name': 'Border' |
+ | W10:boundary | administrative | 2 | 'name': 'Border' |
+
+ When updating osm data
+ """
+ w10 Tboundary=administrative,waterway=river,name=Border,admin_level=2 Nn12,n11,n10
+ """
+ Then place contains exactly
+ | object | type | admin_level | name |
+ | W10:waterway | river | 2 | 'name': 'Border' |
+ | W10:boundary | administrative | 2 | 'name': 'Border' |
+ When indexing
+ Then placex contains exactly
+ | object | type | admin_level | name |
+ | W10:waterway | river | 2 | 'name': 'Border' |
+
+
+ Scenario: Change admin_level on administrative boundary
+ Given the grid
+ | 10 | 11 |
+ | 13 | 12 |
+ When loading osm data
+ """
+ n10
+ n11
+ n12
+ n13
+ w10 Nn10,n11,n12,n13,n10
+ r10 Ttype=multipolygon,boundary=administrative,name=Border,admin_level=2 Mw10@
+ """
+ Then place contains exactly
+ | object | admin_level |
+ | R10:boundary | 2 |
+
+ When updating osm data
+ """
+ r10 Ttype=multipolygon,boundary=administrative,name=Border,admin_level=4 Mw10@
+ """
+ Then place contains exactly
+ | object | type | admin_level |
+ | R10:boundary | administrative | 4 |
+ When indexing
+ Then placex contains exactly
+ | object | type | admin_level |
+ | R10:boundary | administrative | 4 |
+
+
+ Scenario: Change boundary to administrative
+ Given the grid
+ | 10 | 11 |
+ | 13 | 12 |
+ When loading osm data
+ """
+ n10
+ n11
+ n12
+ n13
+ w10 Nn10,n11,n12,n13,n10
+ r10 Ttype=multipolygon,boundary=informal,name=Border,admin_level=4 Mw10@
+ """
+ Then place contains exactly
+ | object | type | admin_level |
+ | R10:boundary | informal | 4 |
+
+ When updating osm data
+ """
+ r10 Ttype=multipolygon,boundary=administrative,name=Border,admin_level=4 Mw10@
+ """
+ Then place contains exactly
+ | object | type | admin_level |
+ | R10:boundary | administrative | 4 |
+ When indexing
+ Then placex contains exactly
+ | object | type | admin_level |
+ | R10:boundary | administrative | 4 |
+
+
+ Scenario: Change boundary away from administrative
+ Given the grid
+ | 10 | 11 |
+ | 13 | 12 |
+ When loading osm data
+ """
+ n10
+ n11
+ n12
+ n13
+ w10 Nn10,n11,n12,n13,n10
+ r10 Ttype=multipolygon,boundary=administrative,name=Border,admin_level=4 Mw10@
+ """
+ Then place contains exactly
+ | object | type | admin_level |
+ | R10:boundary | administrative | 4 |
+
+ When updating osm data
+ """
+ r10 Ttype=multipolygon,boundary=informal,name=Border,admin_level=4 Mw10@
+ """
+ Then place contains exactly
+ | object | type | admin_level |
+ | R10:boundary | informal | 4 |
+ When indexing
+ Then placex contains exactly
+ | object | type | admin_level |
+ | R10:boundary | informal | 4 |
+
+
+ Scenario: Main tag and geometry is changed
+ When loading osm data
+ """
+ n1 x40 y40
+ n2 x40.0001 y40
+ n3 x40.0001 y40.0001
+ n4 x40 y40.0001
+ w5 Tbuilding=house,name=Foo Nn1,n2,n3,n4,n1
+ """
+ Then place contains exactly
+ | object | type |
+ | W5:building | house |
+
+ When updating osm data
+ """
+ n1 x39.999 y40
+ w5 Tbuilding=terrace,name=Bar Nn1,n2,n3,n4,n1
+ """
+ Then place contains exactly
+ | object | type |
+ | W5:building | terrace |
+++ /dev/null
-<?php
-/**
- * SPDX-License-Identifier: GPL-2.0-only
- *
- * This file is part of Nominatim. (https://nominatim.org)
- *
- * Copyright (C) 2022 by the Nominatim developer community.
- * For a full list of authors see the git log.
- */
-require_once 'SebastianBergmann/CodeCoverage/autoload.php';
-
-
-function coverage_shutdown($oCoverage)
-{
- $oCoverage->stop();
- $writer = new \SebastianBergmann\CodeCoverage\Report\PHP;
- $writer->process($oCoverage, $_SERVER['PHP_CODE_COVERAGE_FILE']);
-}
-
-$covfilter = new SebastianBergmann\CodeCoverage\Filter();
-if (method_exists($covfilter, 'addDirectoryToWhitelist')) {
- // pre PHPUnit 9
- $covfilter->addDirectoryToWhitelist($_SERVER['COV_PHP_DIR'].'/lib-php');
- $covfilter->addDirectoryToWhitelist($_SERVER['COV_PHP_DIR'].'/website');
- $coverage = new SebastianBergmann\CodeCoverage\CodeCoverage(null, $covfilter);
-} else {
- // since PHP Uit 9
- $covfilter->includeDirectory($_SERVER['COV_PHP_DIR'].'/lib-php');
- $covfilter->includeDirectory($_SERVER['COV_PHP_DIR'].'/website');
- $coverage = new SebastianBergmann\CodeCoverage\CodeCoverage(
- (new SebastianBergmann\CodeCoverage\Driver\Selector)->forLineCoverage($covfilter),
- $covfilter
- );
-}
-
-$coverage->start($_SERVER['COV_TEST_NAME']);
-
-register_shutdown_function('coverage_shutdown', $coverage);
-
-include $_SERVER['COV_SCRIPT_FILENAME'];
#
# This file is part of Nominatim. (https://nominatim.org)
#
-# Copyright (C) 2022 by the Nominatim developer community.
+# Copyright (C) 2023 by the Nominatim developer community.
# For a full list of authors see the git log.
"""
Collection of assertion functions used for the steps.
"""
+import json
+import math
+import re
class Almost:
""" Compares a float value with a certain jitter.
def __eq__(self, other):
return abs(other - self.value) < self.offset
+
+OSM_TYPE = {'N' : 'node', 'W' : 'way', 'R' : 'relation',
+ 'n' : 'node', 'w' : 'way', 'r' : 'relation',
+ 'node' : 'n', 'way' : 'w', 'relation' : 'r'}
+
+
+class OsmType:
+ """ Compares an OSM type, accepting both N/R/W and node/way/relation.
+ """
+
+ def __init__(self, value):
+ self.value = value
+
+
+ def __eq__(self, other):
+ return other == self.value or other == OSM_TYPE[self.value]
+
+
+ def __str__(self):
+ return f"{self.value} or {OSM_TYPE[self.value]}"
+
+
+class Field:
+ """ Generic comparator for fields, which looks at the type of the
+ value compared.
+ """
+ def __init__(self, value, **extra_args):
+ self.value = value
+ self.extra_args = extra_args
+
+ def __eq__(self, other):
+ if isinstance(self.value, float):
+ return math.isclose(self.value, float(other), **self.extra_args)
+
+ if self.value.startswith('^'):
+ return re.fullmatch(self.value, str(other))
+
+ if isinstance(other, dict):
+ return other == eval('{' + self.value + '}')
+
+ return str(self.value) == str(other)
+
+ def __str__(self):
+ return str(self.value)
+
+
class Bbox:
""" Comparator for bounding boxes.
"""
def __str__(self):
return str(self.coord)
+
+
+
+def check_for_attributes(obj, attrs, presence='present'):
+ """ Check that the object has the given attributes. 'attrs' is a
+ string with a comma-separated list of attributes. If 'presence'
+ is set to 'absent' then the function checks that the attributes do
+ not exist for the object.
+ """
+ def _dump_json():
+ return json.dumps(obj, sort_keys=True, indent=2, ensure_ascii=False)
+
+ for attr in attrs.split(','):
+ attr = attr.strip()
+ if presence == 'absent':
+ assert attr not in obj, \
+ f"Unexpected attribute {attr}. Full response:\n{_dump_json()}"
+ else:
+ assert attr in obj, \
+ f"No attribute '{attr}'. Full response:\n{_dump_json()}"
+
The function understands the following formats:
country:<country code>
- Point geoemtry guaranteed to be in the given country
+ Point geometry guaranteed to be in the given country
<P>
Point geometry
<P>,...,<P>
def mk_wkt_point(self, point):
""" Parse a point description.
- The point may either consist of 'x y' cooordinates or a number
+ The point may either consist of 'x y' coordinates or a number
that refers to a grid setup.
"""
geom = point.strip()
#
# This file is part of Nominatim. (https://nominatim.org)
#
-# Copyright (C) 2022 by the Nominatim developer community.
+# Copyright (C) 2023 by the Nominatim developer community.
# For a full list of authors see the git log.
"""
Classes wrapping HTTP responses from the Nominatim API.
"""
-from collections import OrderedDict
import re
import json
import xml.etree.ElementTree as ET
-from check_functions import Almost
-
-OSM_TYPE = {'N' : 'node', 'W' : 'way', 'R' : 'relation',
- 'n' : 'node', 'w' : 'way', 'r' : 'relation',
- 'node' : 'n', 'way' : 'w', 'relation' : 'r'}
-
-def _geojson_result_to_json_result(geojson_result):
- result = geojson_result['properties']
- result['geojson'] = geojson_result['geometry']
- if 'bbox' in geojson_result:
- # bbox is minlon, minlat, maxlon, maxlat
- # boundingbox is minlat, maxlat, minlon, maxlon
- result['boundingbox'] = [geojson_result['bbox'][1],
- geojson_result['bbox'][3],
- geojson_result['bbox'][0],
- geojson_result['bbox'][2]]
- return result
-
-class BadRowValueAssert:
- """ Lazily formatted message for failures to find a field content.
- """
-
- def __init__(self, response, idx, field, value):
- self.idx = idx
- self.field = field
- self.value = value
- self.row = response.result[idx]
-
- def __str__(self):
- return "\nBad value for row {} field '{}'. Expected: {}, got: {}.\nFull row: {}"""\
- .format(self.idx, self.field, self.value,
- self.row[self.field], json.dumps(self.row, indent=4))
+from check_functions import Almost, OsmType, Field, check_for_attributes
class GenericResponse:
else:
code = m.group(2)
self.header['json_func'] = m.group(1)
- self.result = json.JSONDecoder(object_pairs_hook=OrderedDict).decode(code)
- if isinstance(self.result, OrderedDict):
+ self.result = json.JSONDecoder().decode(code)
+ if isinstance(self.result, dict):
if 'error' in self.result:
self.result = []
else:
self.result = [self.result]
+
def _parse_geojson(self):
self._parse_json()
if self.result:
- self.result = list(map(_geojson_result_to_json_result, self.result[0]['features']))
+ geojson = self.result[0]
+ # check for valid geojson
+ check_for_attributes(geojson, 'type,features')
+ assert geojson['type'] == 'FeatureCollection'
+ assert isinstance(geojson['features'], list)
+
+ self.result = []
+ for result in geojson['features']:
+ check_for_attributes(result, 'type,properties,geometry')
+ assert result['type'] == 'Feature'
+ new = result['properties']
+ check_for_attributes(new, 'geojson', 'absent')
+ new['geojson'] = result['geometry']
+ if 'bbox' in result:
+ check_for_attributes(new, 'boundingbox', 'absent')
+ # bbox is minlon, minlat, maxlon, maxlat
+ # boundingbox is minlat, maxlat, minlon, maxlon
+ new['boundingbox'] = [result['bbox'][1],
+ result['bbox'][3],
+ result['bbox'][0],
+ result['bbox'][2]]
+ for k, v in geojson.items():
+ if k not in ('type', 'features'):
+ check_for_attributes(new, '__' + k, 'absent')
+ new['__' + k] = v
+ self.result.append(new)
+
def _parse_geocodejson(self):
self._parse_geojson()
- if self.result is not None:
- self.result = [r['geocoding'] for r in self.result]
+ if self.result:
+ for r in self.result:
+ assert set(r.keys()) == {'geocoding', 'geojson', '__geocoding'}, \
+ f"Unexpected keys in result: {r.keys()}"
+ check_for_attributes(r['geocoding'], 'geojson', 'absent')
+ inner = r.pop('geocoding')
+ r.update(inner)
- def assert_field(self, idx, field, value):
- """ Check that result row `idx` has a field `field` with value `value`.
- Float numbers are matched approximately. When the expected value
- starts with a carat, regular expression matching is used.
- """
- assert field in self.result[idx], \
- "Result row {} has no field '{}'.\nFull row: {}"\
- .format(idx, field, json.dumps(self.result[idx], indent=4))
-
- if isinstance(value, float):
- assert Almost(value) == float(self.result[idx][field]), \
- BadRowValueAssert(self, idx, field, value)
- elif value.startswith("^"):
- assert re.fullmatch(value, self.result[idx][field]), \
- BadRowValueAssert(self, idx, field, value)
- elif isinstance(self.result[idx][field], OrderedDict):
- assert self.result[idx][field] == eval('{' + value + '}'), \
- BadRowValueAssert(self, idx, field, value)
- else:
- assert str(self.result[idx][field]) == str(value), \
- BadRowValueAssert(self, idx, field, value)
def assert_address_field(self, idx, field, value):
""" Check that result rows`idx` has a field `field` with value `value`
todo = [int(idx)]
for idx in todo:
- assert 'address' in self.result[idx], \
- "Result row {} has no field 'address'.\nFull row: {}"\
- .format(idx, json.dumps(self.result[idx], indent=4))
+ self.check_row(idx, 'address' in self.result[idx], "No field 'address'")
address = self.result[idx]['address']
- assert field in address, \
- "Result row {} has no field '{}' in address.\nFull address: {}"\
- .format(idx, field, json.dumps(address, indent=4))
+ self.check_row_field(idx, field, value, base=address)
- assert address[field] == value, \
- "\nBad value for row {} field '{}' in address. Expected: {}, got: {}.\nFull address: {}"""\
- .format(idx, field, value, address[field], json.dumps(address, indent=4))
- def match_row(self, row, context=None):
+ def match_row(self, row, context=None, field=None):
""" Match the result fields against the given behave table row.
"""
if 'ID' in row.headings:
todo = range(len(self.result))
for i in todo:
+ subdict = self.result[i]
+ if field is not None:
+ for key in field.split('.'):
+ self.check_row(i, key in subdict, f"Missing subfield {key}")
+ subdict = subdict[key]
+ self.check_row(i, isinstance(subdict, dict),
+ f"Subfield {key} not a dict")
+
for name, value in zip(row.headings, row.cells):
if name == 'ID':
pass
elif name == 'osm':
- assert 'osm_type' in self.result[i], \
- "Result row {} has no field 'osm_type'.\nFull row: {}"\
- .format(i, json.dumps(self.result[i], indent=4))
- assert self.result[i]['osm_type'] in (OSM_TYPE[value[0]], value[0]), \
- BadRowValueAssert(self, i, 'osm_type', value)
- self.assert_field(i, 'osm_id', value[1:])
- elif name == 'osm_type':
- assert self.result[i]['osm_type'] in (OSM_TYPE[value[0]], value[0]), \
- BadRowValueAssert(self, i, 'osm_type', value)
+ self.check_row_field(i, 'osm_type', OsmType(value[0]), base=subdict)
+ self.check_row_field(i, 'osm_id', Field(value[1:]), base=subdict)
elif name == 'centroid':
if ' ' in value:
lon, lat = value.split(' ')
lon, lat = context.osm.grid_node(int(value))
else:
raise RuntimeError("Context needed when using grid coordinates")
- self.assert_field(i, 'lat', float(lat))
- self.assert_field(i, 'lon', float(lon))
+ self.check_row_field(i, 'lat', Field(float(lat), abs_tol=1e-07), base=subdict)
+ self.check_row_field(i, 'lon', Field(float(lon), abs_tol=1e-07), base=subdict)
else:
- self.assert_field(i, name, value)
+ self.check_row_field(i, name, Field(value), base=subdict)
+
+
+ def check_row(self, idx, check, msg):
+ """ Assert for the condition 'check' and print 'msg' on fail together
+ with the contents of the failing result.
+ """
+ class _RowError:
+ def __init__(self, row):
+ self.row = row
+
+ def __str__(self):
+ return f"{msg}. Full row {idx}:\n" \
+ + json.dumps(self.row, indent=4, ensure_ascii=False)
+
+ assert check, _RowError(self.result[idx])
+
+
+ def check_row_field(self, idx, field, expected, base=None):
+ """ Check field 'field' of result 'idx' for the expected value
+ and print a meaningful error if the condition fails.
+ When 'base' is set to a dictionary, then the field is checked
+ in that base. The error message will still report the contents
+ of the full result.
+ """
+ if base is None:
+ base = self.result[idx]
+
+ self.check_row(idx, field in base, f"No field '{field}'")
+ value = base[field]
+
+ self.check_row(idx, expected == value,
+ f"\nBad value for field '{field}'. Expected: {expected}, got: {value}")
- def property_list(self, prop):
- return [x[prop] for x in self.result]
class SearchResponse(GenericResponse):
if child.tag == 'result':
assert not self.result, "More than one result in reverse result"
self.result.append(dict(child.attrib))
+ check_for_attributes(self.result[0], 'display_name', 'absent')
+ self.result[0]['display_name'] = child.text
elif child.tag == 'addressparts':
+ assert 'address' not in self.result[0], "More than one address in result"
address = {}
for sub in child:
+ assert len(sub) == 0, f"Address element '{sub.tag}' has subelements"
address[sub.tag] = sub.text
self.result[0]['address'] = address
elif child.tag == 'extratags':
+ assert 'extratags' not in self.result[0], "More than one extratags in result"
self.result[0]['extratags'] = {}
for tag in child:
+ assert len(tag) == 0, f"Extratags element '{tag.attrib['key']}' has subelements"
self.result[0]['extratags'][tag.attrib['key']] = tag.attrib['value']
elif child.tag == 'namedetails':
+ assert 'namedetails' not in self.result[0], "More than one namedetails in result"
self.result[0]['namedetails'] = {}
for tag in child:
+ assert len(tag) == 0, f"Namedetails element '{tag.attrib['desc']}' has subelements"
self.result[0]['namedetails'][tag.attrib['desc']] = tag.text
elif child.tag == 'geokml':
- self.result[0][child.tag] = True
+ assert 'geokml' not in self.result[0], "More than one geokml in result"
+ self.result[0]['geokml'] = ET.tostring(child, encoding='unicode')
else:
assert child.tag == 'error', \
- "Unknown XML tag {} on page: {}".format(child.tag, self.page)
+ f"Unknown XML tag {child.tag} on page: {self.page}"
class StatusResponse(GenericResponse):
-# SPDX-License-Identifier: GPL-2.0-only
+# SPDX-License-Identifier: GPL-3.0-or-later
#
# This file is part of Nominatim. (https://nominatim.org)
#
-# Copyright (C) 2022 by the Nominatim developer community.
+# Copyright (C) 2024 by the Nominatim developer community.
# For a full list of authors see the git log.
from pathlib import Path
-import sys
+import importlib
import tempfile
-import psycopg2
-import psycopg2.extras
+import psycopg
+from psycopg import sql as pysql
-sys.path.insert(1, str((Path(__file__) / '..' / '..' / '..' / '..').resolve()))
-
-from nominatim import cli
-from nominatim.config import Configuration
-from nominatim.db.connection import Connection
-from nominatim.tools import refresh
-from nominatim.tokenizer import factory as tokenizer_factory
+from nominatim_db import cli
+from nominatim_db.config import Configuration
+from nominatim_db.db.connection import Connection, register_hstore, execute_scalar
+from nominatim_db.tools import refresh
+from nominatim_db.tokenizer import factory as tokenizer_factory
from steps.utils import run_script
class NominatimEnvironment:
"""
def __init__(self, config):
- self.build_dir = Path(config['BUILDDIR']).resolve()
self.src_dir = (Path(__file__) / '..' / '..' / '..' / '..').resolve()
self.db_host = config['DB_HOST']
self.db_port = config['DB_PORT']
self.api_test_db = config['API_TEST_DB']
self.api_test_file = config['API_TEST_FILE']
self.tokenizer = config['TOKENIZER']
- self.server_module_path = config['SERVER_MODULE_PATH']
+ self.import_style = config['STYLE']
self.reuse_template = not config['REMOVE_TEMPLATE']
self.keep_scenario_db = config['KEEP_TEST_DB']
- self.code_coverage_path = config['PHPCOV']
- self.code_coverage_id = 1
- self.default_config = Configuration(None, self.src_dir / 'settings').get_os_env()
+ self.default_config = Configuration(None).get_os_env()
self.test_env = None
self.template_db_done = False
self.api_db_done = False
self.website_dir = None
+ if not hasattr(self, f"create_api_request_func_{config['API_ENGINE']}"):
+ raise RuntimeError(f"Unknown API engine '{config['API_ENGINE']}'")
+ self.api_engine = getattr(self, f"create_api_request_func_{config['API_ENGINE']}")()
+
def connect_database(self, dbname):
""" Return a connection to the database with the given name.
Uses configured host, user and port.
"""
- dbargs = {'database': dbname}
+ dbargs = {'dbname': dbname, 'row_factory': psycopg.rows.dict_row}
if self.db_host:
dbargs['host'] = self.db_host
if self.db_port:
dbargs['user'] = self.db_user
if self.db_pass:
dbargs['password'] = self.db_pass
- conn = psycopg2.connect(connection_factory=Connection, **dbargs)
- return conn
-
- def next_code_coverage_file(self):
- """ Generate the next name for a coverage file.
- """
- fn = Path(self.code_coverage_path) / "{:06d}.cov".format(self.code_coverage_id)
- self.code_coverage_id += 1
+ return psycopg.connect(**dbargs)
- return fn.resolve()
def write_nominatim_config(self, dbname):
""" Set up a custom test configuration that connects to the given
be picked up by dotenv and creates a project directory with the
appropriate website scripts.
"""
- dsn = 'pgsql:dbname={}'.format(dbname)
+ if dbname.startswith('sqlite:'):
+ dsn = 'sqlite:dbname={}'.format(dbname[7:])
+ else:
+ dsn = 'pgsql:dbname={}'.format(dbname)
if self.db_host:
dsn += ';host=' + self.db_host
if self.db_port:
if self.db_pass:
dsn += ';password=' + self.db_pass
- if self.website_dir is not None \
- and self.test_env is not None \
- and dsn == self.test_env['NOMINATIM_DATABASE_DSN']:
- return # environment already set uo
-
self.test_env = dict(self.default_config)
self.test_env['NOMINATIM_DATABASE_DSN'] = dsn
self.test_env['NOMINATIM_LANGUAGES'] = 'en,de,fr,ja'
self.test_env['NOMINATIM_DATADIR'] = str((self.src_dir / 'data').resolve())
self.test_env['NOMINATIM_SQLDIR'] = str((self.src_dir / 'lib-sql').resolve())
self.test_env['NOMINATIM_CONFIGDIR'] = str((self.src_dir / 'settings').resolve())
- self.test_env['NOMINATIM_DATABASE_MODULE_SRC_PATH'] = str((self.build_dir / 'module').resolve())
- self.test_env['NOMINATIM_OSM2PGSQL_BINARY'] = str((self.build_dir / 'osm2pgsql' / 'osm2pgsql').resolve())
- self.test_env['NOMINATIM_NOMINATIM_TOOL'] = str((self.build_dir / 'nominatim').resolve())
if self.tokenizer is not None:
self.test_env['NOMINATIM_TOKENIZER'] = self.tokenizer
-
- if self.server_module_path:
- self.test_env['NOMINATIM_DATABASE_MODULE_PATH'] = self.server_module_path
- else:
- # avoid module being copied into the temporary environment
- self.test_env['NOMINATIM_DATABASE_MODULE_PATH'] = str((self.build_dir / 'module').resolve())
+ if self.import_style is not None:
+ self.test_env['NOMINATIM_IMPORT_STYLE'] = self.import_style
if self.website_dir is not None:
self.website_dir.cleanup()
self.website_dir = tempfile.TemporaryDirectory()
- try:
- conn = self.connect_database(dbname)
- except:
- conn = False
- refresh.setup_website(Path(self.website_dir.name) / 'website',
- self.get_test_config(), conn)
-
def get_test_config(self):
- cfg = Configuration(Path(self.website_dir.name), self.src_dir / 'settings',
- environ=self.test_env)
- cfg.set_libdirs(module=self.build_dir / 'module',
- osm2pgsql=self.build_dir / 'osm2pgsql' / 'osm2pgsql',
- php=self.src_dir / 'lib-php',
- sql=self.src_dir / 'lib-sql',
- data=self.src_dir / 'data')
+ cfg = Configuration(Path(self.website_dir.name), environ=self.test_env)
return cfg
def get_libpq_dsn(self):
def db_drop_database(self, name):
""" Drop the database with the given name.
"""
- conn = self.connect_database('postgres')
- conn.set_isolation_level(0)
- cur = conn.cursor()
- cur.execute('DROP DATABASE IF EXISTS {}'.format(name))
- conn.close()
+ with self.connect_database('postgres') as conn:
+ conn.autocommit = True
+ conn.execute(pysql.SQL('DROP DATABASE IF EXISTS')
+ + pysql.Identifier(name))
def setup_template_db(self):
""" Setup a template database that already contains common test data.
"""
self.write_nominatim_config(self.api_test_db)
+ if self.api_test_db.startswith('sqlite:'):
+ return
+
if not self.api_db_done:
self.api_db_done = True
self.run_nominatim('add-data', '--tiger-data', str(testdata / 'tiger'))
self.run_nominatim('freeze')
- if self.tokenizer == 'legacy':
- phrase_file = str(testdata / 'specialphrases_testdb.sql')
- run_script(['psql', '-d', self.api_test_db, '-f', phrase_file])
- else:
- csv_path = str(testdata / 'full_en_phrases_test.csv')
- self.run_nominatim('special-phrases', '--import-from-csv', csv_path)
+ csv_path = str(testdata / 'full_en_phrases_test.csv')
+ self.run_nominatim('special-phrases', '--import-from-csv', csv_path)
except:
self.db_drop_database(self.api_test_db)
raise
""" Setup a test against a fresh, empty test database.
"""
self.setup_template_db()
- conn = self.connect_database(self.template_db)
- conn.set_isolation_level(0)
- cur = conn.cursor()
- cur.execute('DROP DATABASE IF EXISTS {}'.format(self.test_db))
- cur.execute('CREATE DATABASE {} TEMPLATE = {}'.format(self.test_db, self.template_db))
- conn.close()
+ with self.connect_database(self.template_db) as conn:
+ conn.autocommit = True
+ conn.execute(pysql.SQL('DROP DATABASE IF EXISTS')
+ + pysql.Identifier(self.test_db))
+ conn.execute(pysql.SQL('CREATE DATABASE {} TEMPLATE = {}').format(
+ pysql.Identifier(self.test_db),
+ pysql.Identifier(self.template_db)))
+
self.write_nominatim_config(self.test_db)
context.db = self.connect_database(self.test_db)
context.db.autocommit = True
- psycopg2.extras.register_hstore(context.db, globally=False)
+ register_hstore(context.db)
def teardown_db(self, context, force_drop=False):
""" Remove the test database, if it exists.
self.db_drop_database(self.test_db)
def _reuse_or_drop_db(self, name):
- """ Check for the existance of the given DB. If reuse is enabled,
- then the function checks for existance and returns True if the
+ """ Check for the existence of the given DB. If reuse is enabled,
+ then the function checks for existence and returns True if the
database is already there. Otherwise an existing database is
dropped and always false returned.
"""
if self.reuse_template:
- conn = self.connect_database('postgres')
- with conn.cursor() as cur:
- cur.execute('select count(*) from pg_database where datname = %s',
- (name,))
- if cur.fetchone()[0] == 1:
+ with self.connect_database('postgres') as conn:
+ num = execute_scalar(conn,
+ 'select count(*) from pg_database where datname = %s',
+ (name,))
+ if num == 1:
return True
- conn.close()
else:
self.db_drop_database(name)
return False
+
def reindex_placex(self, db):
""" Run the indexing step until all data in the placex has
been processed. Indexing during updates can produce more data
to index under some circumstances. That is why indexing may have
to be run multiple times.
"""
- with db.cursor() as cur:
- while True:
- self.run_nominatim('index')
+ self.run_nominatim('index')
- cur.execute("SELECT 'a' FROM placex WHERE indexed_status != 0 LIMIT 1")
- if cur.rowcount == 0:
- return
def run_nominatim(self, *cmdline):
""" Run the nominatim command-line tool via the library.
if self.website_dir is not None:
cmdline = list(cmdline) + ['--project-dir', self.website_dir.name]
- cli.nominatim(module_dir='',
- osm2pgsql_path=str(self.build_dir / 'osm2pgsql' / 'osm2pgsql'),
- phplib_dir=str(self.src_dir / 'lib-php'),
- sqllib_dir=str(self.src_dir / 'lib-sql'),
- data_dir=str(self.src_dir / 'data'),
- config_dir=str(self.src_dir / 'settings'),
+ cli.nominatim(osm2pgsql_path=None,
cli_args=cmdline,
- phpcgi_path='',
environ=self.test_env)
WHERE class='place' and type='houses'
and osm_type='W'
and ST_GeometryType(geometry) = 'ST_LineString'""")
+
+
+ def create_api_request_func_starlette(self):
+ import nominatim_api.server.starlette.server
+ from asgi_lifespan import LifespanManager
+ import httpx
+
+ async def _request(endpoint, params, project_dir, environ, http_headers):
+ app = nominatim_api.server.starlette.server.get_application(project_dir, environ)
+
+ async with LifespanManager(app):
+ async with httpx.AsyncClient(app=app, base_url="http://nominatim.test") as client:
+ response = await client.get(f"/{endpoint}", params=params,
+ headers=http_headers)
+
+ return response.text, response.status_code
+
+ return _request
+
+
+ def create_api_request_func_falcon(self):
+ import nominatim_api.server.falcon.server
+ import falcon.testing
+
+ async def _request(endpoint, params, project_dir, environ, http_headers):
+ app = nominatim_api.server.falcon.server.get_application(project_dir, environ)
+
+ async with falcon.testing.ASGIConductor(app) as conductor:
+ response = await conductor.get(f"/{endpoint}", params=params,
+ headers=http_headers)
+
+ return response.text, response.status_code
+
+ return _request
+
+
+
else:
self.columns[column] = {key: value}
+ def db_delete(self, cursor):
+ """ Issue a delete for the given OSM object.
+ """
+ cursor.execute('DELETE FROM place WHERE osm_type = %s and osm_id = %s',
+ (self.columns['osm_type'] , self.columns['osm_id']))
+
def db_insert(self, cursor):
""" Insert the collected data into the database.
"""
-# SPDX-License-Identifier: GPL-2.0-only
+# SPDX-License-Identifier: GPL-3.0-or-later
#
# This file is part of Nominatim. (https://nominatim.org)
#
-# Copyright (C) 2022 by the Nominatim developer community.
+# Copyright (C) 2024 by the Nominatim developer community.
# For a full list of authors see the git log.
""" Steps that run queries against the API.
-
- Queries may either be run directly via PHP using the query script
- or via the HTTP interface using php-cgi.
"""
+from pathlib import Path
import json
import os
import re
import logging
+import asyncio
+import xml.etree.ElementTree as ET
from urllib.parse import urlencode
from utils import run_script
from http_responses import GenericResponse, SearchResponse, ReverseResponse, StatusResponse
-from check_functions import Bbox
+from check_functions import Bbox, check_for_attributes
from table_compare import NominatimID
LOG = logging.getLogger(__name__)
-BASE_SERVER_ENV = {
- 'HTTP_HOST' : 'localhost',
- 'HTTP_USER_AGENT' : 'Mozilla/5.0 (X11; Linux x86_64; rv:51.0) Gecko/20100101 Firefox/51.0',
- 'HTTP_ACCEPT' : 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
- 'HTTP_ACCEPT_ENCODING' : 'gzip, deflate',
- 'HTTP_CONNECTION' : 'keep-alive',
- 'SERVER_SIGNATURE' : '<address>Nominatim BDD Tests</address>',
- 'SERVER_SOFTWARE' : 'Nominatim test',
- 'SERVER_NAME' : 'localhost',
- 'SERVER_ADDR' : '127.0.1.1',
- 'SERVER_PORT' : '80',
- 'REMOTE_ADDR' : '127.0.0.1',
- 'DOCUMENT_ROOT' : '/var/www',
- 'REQUEST_SCHEME' : 'http',
- 'CONTEXT_PREFIX' : '/',
- 'SERVER_ADMIN' : 'webmaster@localhost',
- 'REMOTE_PORT' : '49319',
- 'GATEWAY_INTERFACE' : 'CGI/1.1',
- 'SERVER_PROTOCOL' : 'HTTP/1.1',
- 'REQUEST_METHOD' : 'GET',
- 'REDIRECT_STATUS' : 'CGI'
-}
+
+def make_todo_list(context, result_id):
+ if result_id is None:
+ context.execute_steps("then at least 1 result is returned")
+ return range(len(context.response.result))
+
+ context.execute_steps(f"then more than {result_id}results are returned")
+ return (int(result_id.strip()), )
def compare(operator, op1, op2):
elif operator == 'at most':
return op1 <= op2
else:
- raise Exception("unknown operator '%s'" % operator)
+ raise ValueError(f"Unknown operator '{operator}'")
def send_api_query(endpoint, params, fmt, context):
- if fmt is not None and fmt.strip() != 'debug':
- params['format'] = fmt.strip()
+ if fmt is not None:
+ if fmt.strip() == 'debug':
+ params['debug'] = '1'
+ else:
+ params['format'] = fmt.strip()
+
if context.table:
if context.table.headings[0] == 'param':
for line in context.table:
for h in context.table.headings:
params[h] = context.table[0][h]
- env = dict(BASE_SERVER_ENV)
- env['QUERY_STRING'] = urlencode(params)
-
- env['SCRIPT_NAME'] = '/%s.php' % endpoint
- env['REQUEST_URI'] = '%s?%s' % (env['SCRIPT_NAME'], env['QUERY_STRING'])
- env['CONTEXT_DOCUMENT_ROOT'] = os.path.join(context.nominatim.website_dir.name, 'website')
- env['SCRIPT_FILENAME'] = os.path.join(env['CONTEXT_DOCUMENT_ROOT'],
- '%s.php' % endpoint)
-
- LOG.debug("Environment:" + json.dumps(env, sort_keys=True, indent=2))
-
- if hasattr(context, 'http_headers'):
- env.update(context.http_headers)
-
- cmd = ['/usr/bin/env', 'php-cgi', '-f']
- if context.nominatim.code_coverage_path:
- env['XDEBUG_MODE'] = 'coverage'
- env['COV_SCRIPT_FILENAME'] = env['SCRIPT_FILENAME']
- env['COV_PHP_DIR'] = context.nominatim.src_dir
- env['COV_TEST_NAME'] = '%s:%s' % (context.scenario.filename, context.scenario.line)
- env['SCRIPT_FILENAME'] = \
- os.path.join(os.path.split(__file__)[0], 'cgi-with-coverage.php')
- cmd.append(env['SCRIPT_FILENAME'])
- env['PHP_CODE_COVERAGE_FILE'] = context.nominatim.next_code_coverage_file()
- else:
- cmd.append(env['SCRIPT_FILENAME'])
-
- for k,v in params.items():
- cmd.append("%s=%s" % (k, v))
-
- outp, err = run_script(cmd, cwd=context.nominatim.website_dir.name, env=env)
-
- assert len(err) == 0, "Unexpected PHP error: %s" % (err)
-
- if outp.startswith('Status: '):
- status = int(outp[8:11])
- else:
- status = 200
-
- content_start = outp.find('\r\n\r\n')
+ return asyncio.run(context.nominatim.api_engine(endpoint, params,
+ Path(context.nominatim.website_dir.name),
+ context.nominatim.test_env,
+ getattr(context, 'http_headers', {})))
- return outp[content_start + 4:], status
@given(u'the HTTP header')
def add_http_header(context):
context.http_headers = {}
for h in context.table.headings:
- envvar = 'HTTP_' + h.upper().replace('-', '_')
- context.http_headers[envvar] = context.table[0][h]
+ context.http_headers[h] = context.table[0][h]
@when(u'sending (?P<fmt>\S+ )?search query "(?P<query>.*)"(?P<addr> with address)?')
params['q'] = query
if addr is not None:
params['addressdetails'] = '1'
- if fmt and fmt.strip() == 'debug':
- params['debug'] = '1'
outp, status = send_api_query('search', params, fmt, context)
context.response = SearchResponse(outp, fmt or 'json', status)
-@when(u'sending (?P<fmt>\S+ )?reverse coordinates (?P<lat>.+)?,(?P<lon>.+)?')
-def website_reverse_request(context, fmt, lat, lon):
+
+@when('sending v1/reverse at (?P<lat>[\d.-]*),(?P<lon>[\d.-]*)(?: with format (?P<fmt>.+))?')
+def api_endpoint_v1_reverse(context, lat, lon, fmt):
params = {}
if lat is not None:
params['lat'] = lat
if lon is not None:
params['lon'] = lon
- if fmt and fmt.strip() == 'debug':
- params['debug'] = '1'
+ if fmt is None:
+ fmt = 'jsonv2'
+ elif fmt == "''":
+ fmt = None
outp, status = send_api_query('reverse', params, fmt, context)
-
context.response = ReverseResponse(outp, fmt or 'xml', status)
-@when(u'sending (?P<fmt>\S+ )?reverse point (?P<nodeid>.+)')
-def website_reverse_request(context, fmt, nodeid):
+
+@when('sending v1/reverse N(?P<nodeid>\d+)(?: with format (?P<fmt>.+))?')
+def api_endpoint_v1_reverse_from_node(context, nodeid, fmt):
params = {}
- if fmt and fmt.strip() == 'debug':
- params['debug'] = '1'
params['lon'], params['lat'] = (f'{c:f}' for c in context.osm.grid_node(int(nodeid)))
-
outp, status = send_api_query('reverse', params, fmt, context)
-
context.response = ReverseResponse(outp, fmt or 'xml', status)
-
@when(u'sending (?P<fmt>\S+ )?details query for (?P<query>.*)')
def website_details_request(context, fmt, query):
params = {}
@step(u'(?P<operator>less than|more than|exactly|at least|at most) (?P<number>\d+) results? (?:is|are) returned')
def validate_result_number(context, operator, number):
- assert context.response.errorcode == 200
+ context.execute_steps("Then a HTTP 200 is returned")
numres = len(context.response.result)
assert compare(operator, numres, int(number)), \
- "Bad number of results: expected {} {}, got {}.".format(operator, number, numres)
+ f"Bad number of results: expected {operator} {number}, got {numres}."
@then(u'a HTTP (?P<status>\d+) is returned')
def check_http_return_status(context, status):
assert context.response.errorcode == int(status), \
- "Return HTTP status is {}.".format(context.response.errorcode)
+ f"Return HTTP status is {context.response.errorcode}."\
+ f" Full response:\n{context.response.page}"
@then(u'the page contents equals "(?P<text>.+)"')
def check_page_content_equals(context, text):
@then(u'the result is valid (?P<fmt>\w+)')
def step_impl(context, fmt):
context.execute_steps("Then a HTTP 200 is returned")
- assert context.response.format == fmt
+ if fmt.strip() == 'html':
+ try:
+ tree = ET.fromstring(context.response.page)
+ except Exception as ex:
+ assert False, f"Could not parse page: {ex}\n{context.response.page}"
+
+ assert tree.tag == 'html'
+ body = tree.find('./body')
+ assert body is not None
+ assert body.find('.//script') is None
+ else:
+ assert context.response.format == fmt
+
@then(u'a (?P<fmt>\w+) user error is returned')
def check_page_error(context, fmt):
@then(u'result header contains')
def check_header_attr(context):
+ context.execute_steps("Then a HTTP 200 is returned")
for line in context.table:
- assert re.fullmatch(line['value'], context.response.header[line['attr']]) is not None, \
- "attribute '%s': expected: '%s', got '%s'" % (
- line['attr'], line['value'],
- context.response.header[line['attr']])
+ assert line['attr'] in context.response.header, \
+ f"Field '{line['attr']}' missing in header. Full header:\n{context.response.header}"
+ value = context.response.header[line['attr']]
+ assert re.fullmatch(line['value'], value) is not None, \
+ f"Attribute '{line['attr']}': expected: '{line['value']}', got '{value}'"
+
@then(u'result header has (?P<neg>not )?attributes (?P<attrs>.*)')
def check_header_no_attr(context, neg, attrs):
- for attr in attrs.split(','):
- if neg:
- assert attr not in context.response.header, \
- "Unexpected attribute {}. Full response:\n{}".format(
- attr, json.dumps(context.response.header, sort_keys=True, indent=2))
- else:
- assert attr in context.response.header, \
- "No attribute {}. Full response:\n{}".format(
- attr, json.dumps(context.response.header, sort_keys=True, indent=2))
+ check_for_attributes(context.response.header, attrs,
+ 'absent' if neg else 'present')
-@then(u'results contain')
-def step_impl(context):
+
+@then(u'results contain(?: in field (?P<field>.*))?')
+def step_impl(context, field):
context.execute_steps("then at least 1 result is returned")
for line in context.table:
- context.response.match_row(line, context=context)
+ context.response.match_row(line, context=context, field=field)
+
@then(u'result (?P<lid>\d+ )?has (?P<neg>not )?attributes (?P<attrs>.*)')
def validate_attributes(context, lid, neg, attrs):
- if lid is None:
- idx = range(len(context.response.result))
- context.execute_steps("then at least 1 result is returned")
- else:
- idx = [int(lid.strip())]
- context.execute_steps("then more than %sresults are returned" % lid)
-
- for i in idx:
- for attr in attrs.split(','):
- if neg:
- assert attr not in context.response.result[i],\
- "Unexpected attribute {}. Full response:\n{}".format(
- attr, json.dumps(context.response.result[i], sort_keys=True, indent=2))
- else:
- assert attr in context.response.result[i], \
- "No attribute {}. Full response:\n{}".format(
- attr, json.dumps(context.response.result[i], sort_keys=True, indent=2))
+ for i in make_todo_list(context, lid):
+ check_for_attributes(context.response.result[i], attrs,
+ 'absent' if neg else 'present')
+
@then(u'result addresses contain')
def step_impl(context):
@then(u'address of result (?P<lid>\d+) has(?P<neg> no)? types (?P<attrs>.*)')
def check_address(context, lid, neg, attrs):
- context.execute_steps("then more than %s results are returned" % lid)
+ context.execute_steps(f"then more than {lid} results are returned")
addr_parts = context.response.result[int(lid)]['address']
@then(u'address of result (?P<lid>\d+) (?P<complete>is|contains)')
def check_address(context, lid, complete):
- context.execute_steps("then more than %s results are returned" % lid)
+ context.execute_steps(f"then more than {lid} results are returned")
lid = int(lid)
addr_parts = dict(context.response.result[lid]['address'])
del addr_parts[line['type']]
if complete == 'is':
- assert len(addr_parts) == 0, "Additional address parts found: %s" % str(addr_parts)
+ assert len(addr_parts) == 0, f"Additional address parts found: {addr_parts!s}"
-@then(u'result (?P<lid>\d+ )?has bounding box in (?P<coords>[\d,.-]+)')
-def step_impl(context, lid, coords):
- if lid is None:
- context.execute_steps("then at least 1 result is returned")
- bboxes = context.response.property_list('boundingbox')
- else:
- context.execute_steps("then more than {}results are returned".format(lid))
- bboxes = [context.response.result[int(lid)]['boundingbox']]
+@then(u'result (?P<lid>\d+ )?has bounding box in (?P<coords>[\d,.-]+)')
+def check_bounding_box_in_area(context, lid, coords):
expected = Bbox(coords)
- for bbox in bboxes:
- assert bbox in expected, "Bbox {} is not contained in {}.".format(bbox, expected)
+ for idx in make_todo_list(context, lid):
+ res = context.response.result[idx]
+ check_for_attributes(res, 'boundingbox')
+ context.response.check_row(idx, res['boundingbox'] in expected,
+ f"Bbox is not contained in {expected}")
-@then(u'result (?P<lid>\d+ )?has centroid in (?P<coords>[\d,.-]+)')
-def step_impl(context, lid, coords):
- if lid is None:
- context.execute_steps("then at least 1 result is returned")
- centroids = zip(context.response.property_list('lon'),
- context.response.property_list('lat'))
- else:
- context.execute_steps("then more than %sresults are returned".format(lid))
- res = context.response.result[int(lid)]
- centroids = [(res['lon'], res['lat'])]
+@then(u'result (?P<lid>\d+ )?has centroid in (?P<coords>[\d,.-]+)')
+def check_centroid_in_area(context, lid, coords):
expected = Bbox(coords)
- for centroid in centroids:
- assert centroid in expected,\
- "Centroid {} is not inside {}.".format(centroid, expected)
+ for idx in make_todo_list(context, lid):
+ res = context.response.result[idx]
+ check_for_attributes(res, 'lat,lon')
+ context.response.check_row(idx, (res['lon'], res['lat']) in expected,
+ f"Centroid is not inside {expected}")
+
@then(u'there are(?P<neg> no)? duplicates')
def check_for_duplicates(context, neg):
resarr.add(dup)
if neg:
- assert not has_dupe, "Found duplicate for %s" % (dup, )
+ assert not has_dupe, f"Found duplicate for {dup}"
else:
assert has_dupe, "No duplicates found"
+
-# SPDX-License-Identifier: GPL-2.0-only
+# SPDX-License-Identifier: GPL-3.0-or-later
#
# This file is part of Nominatim. (https://nominatim.org)
#
-# Copyright (C) 2022 by the Nominatim developer community.
+# Copyright (C) 2024 by the Nominatim developer community.
# For a full list of authors see the git log.
import logging
from itertools import chain
-import psycopg2.extras
+import psycopg
+from psycopg import sql as pysql
from place_inserter import PlaceColumn
from table_compare import NominatimID, DBRow
-from nominatim.indexer import indexer
-from nominatim.tokenizer import factory as tokenizer_factory
+from nominatim_db.indexer import indexer
+from nominatim_db.tokenizer import factory as tokenizer_factory
def check_database_integrity(context):
""" Check some generic constraints on the tables.
"""
- with context.db.cursor() as cur:
+ with context.db.cursor(row_factory=psycopg.rows.tuple_row) as cur:
# place_addressline should not have duplicate (place_id, address_place_id)
cur.execute("""SELECT count(*) FROM
(SELECT place_id, address_place_id, count(*) as c
assert cur.fetchone()[0] == 0, "Duplicates found in place_addressline"
# word table must not have empty word_tokens
- if context.nominatim.tokenizer != 'legacy':
- cur.execute("SELECT count(*) FROM word WHERE word_token = ''")
- assert cur.fetchone()[0] == 0, "Empty word tokens found in word table"
+ cur.execute("SELECT count(*) FROM word WHERE word_token = ''")
+ assert cur.fetchone()[0] == 0, "Empty word tokens found in word table"
for tests on data that looks up members.
"""
with context.db.cursor() as cur:
- for r in context.table:
- last_node = 0
- last_way = 0
- parts = []
- if r['members']:
- members = []
- for m in r['members'].split(','):
- mid = NominatimID(m)
- if mid.typ == 'N':
- parts.insert(last_node, int(mid.oid))
- last_node += 1
- last_way += 1
- elif mid.typ == 'W':
- parts.insert(last_way, int(mid.oid))
- last_way += 1
- else:
- parts.append(int(mid.oid))
-
- members.extend((mid.typ.lower() + mid.oid, mid.cls or ''))
- else:
- members = None
-
- tags = chain.from_iterable([(h[5:], r[h]) for h in r.headings if h.startswith("tags+")])
-
- cur.execute("""INSERT INTO planet_osm_rels (id, way_off, rel_off, parts, members, tags)
- VALUES (%s, %s, %s, %s, %s, %s)""",
- (r['id'], last_node, last_way, parts, members, list(tags)))
+ cur.execute("SELECT value FROM osm2pgsql_properties WHERE property = 'db_format'")
+ row = cur.fetchone()
+ if row is None or row['value'] == '1':
+ for r in context.table:
+ last_node = 0
+ last_way = 0
+ parts = []
+ if r['members']:
+ members = []
+ for m in r['members'].split(','):
+ mid = NominatimID(m)
+ if mid.typ == 'N':
+ parts.insert(last_node, int(mid.oid))
+ last_node += 1
+ last_way += 1
+ elif mid.typ == 'W':
+ parts.insert(last_way, int(mid.oid))
+ last_way += 1
+ else:
+ parts.append(int(mid.oid))
+
+ members.extend((mid.typ.lower() + mid.oid, mid.cls or ''))
+ else:
+ members = None
+
+ tags = chain.from_iterable([(h[5:], r[h]) for h in r.headings if h.startswith("tags+")])
+
+ cur.execute("""INSERT INTO planet_osm_rels (id, way_off, rel_off, parts, members, tags)
+ VALUES (%s, %s, %s, %s, %s, %s)""",
+ (r['id'], last_node, last_way, parts, members, list(tags)))
+ else:
+ for r in context.table:
+ if r['members']:
+ members = []
+ for m in r['members'].split(','):
+ mid = NominatimID(m)
+ members.append({'ref': mid.oid, 'role': mid.cls or '', 'type': mid.typ})
+ else:
+ members = []
+
+ tags = {h[5:]: r[h] for h in r.headings if h.startswith("tags+")}
+
+ cur.execute("""INSERT INTO planet_osm_rels (id, tags, members)
+ VALUES (%s, %s, %s)""",
+ (r['id'], psycopg.types.json.Json(tags),
+ psycopg.types.json.Json(members)))
@given("the ways")
def add_data_to_planet_ways(context):
    tests on data that looks up node ids in this table.
"""
with context.db.cursor() as cur:
+ cur.execute("SELECT value FROM osm2pgsql_properties WHERE property = 'db_format'")
+ row = cur.fetchone()
+ json_tags = row is not None and row['value'] != '1'
for r in context.table:
- tags = chain.from_iterable([(h[5:], r[h]) for h in r.headings if h.startswith("tags+")])
+ if json_tags:
+ tags = psycopg.types.json.Json({h[5:]: r[h] for h in r.headings if h.startswith("tags+")})
+ else:
+ tags = list(chain.from_iterable([(h[5:], r[h])
+ for h in r.headings if h.startswith("tags+")]))
nodes = [ int(x.strip()) for x in r['nodes'].split(',') ]
cur.execute("INSERT INTO planet_osm_ways (id, nodes, tags) VALUES (%s, %s, %s)",
- (r['id'], nodes, list(tags)))
+ (r['id'], nodes, tags))
################################ WHEN ##################################
context.nominatim.run_nominatim('refresh', '--functions')
with context.db.cursor() as cur:
for row in context.table:
- PlaceColumn(context).add_row(row, False).db_insert(cur)
+ col = PlaceColumn(context).add_row(row, False)
+ col.db_delete(cur)
+ col.db_insert(cur)
+ cur.execute('SELECT flush_deleted_places()')
context.nominatim.reindex_placex(context.db)
check_database_integrity(context)
"""
context.nominatim.run_nominatim('refresh', '--functions')
with context.db.cursor() as cur:
+ cur.execute('TRUNCATE place_to_be_deleted')
for oid in oids.split(','):
NominatimID(oid).query_osm_id(cur, 'DELETE FROM place WHERE {}')
+ cur.execute('SELECT flush_deleted_places()')
context.nominatim.reindex_placex(context.db)
def check_place_contents(context, table, exact):
""" Check contents of place/placex tables. Each row represents a table row
and all data must match. Data not present in the expected table, may
- be arbitry. The rows are identified via the 'object' column which must
+ be arbitrary. The rows are identified via the 'object' column which must
have an identifier of the form '<NRW><osm id>[:<class>]'. When multiple
rows match (for example because 'class' was left out and there are
multiple entries for the given OSM object) then all must match. All
expected rows are expected to be present with at least one database row.
When 'exactly' is given, there must not be additional rows in the database.
"""
- with context.db.cursor(cursor_factory=psycopg2.extras.DictCursor) as cur:
+ with context.db.cursor() as cur:
expected_content = set()
for row in context.table:
nid = NominatimID(row['object'])
DBRow(nid, res, context).assert_row(row, ['object'])
if exact:
- cur.execute('SELECT osm_type, osm_id, class from {}'.format(table))
- assert expected_content == set([(r[0], r[1], r[2]) for r in cur])
+ cur.execute(pysql.SQL('SELECT osm_type, osm_id, class from')
+ + pysql.Identifier(table))
+ actual = set([(r['osm_type'], r['osm_id'], r['class']) for r in cur])
+ assert expected_content == actual, \
+ f"Missing entries: {expected_content - actual}\n" \
+ f"Not expected in table: {actual - expected_content}"
@then("(?P<table>placex|place) has no entry for (?P<oid>.*)")
""" Ensure that no database row for the given object exists. The ID
must be of the form '<NRW><osm id>[:<class>]'.
"""
- with context.db.cursor(cursor_factory=psycopg2.extras.DictCursor) as cur:
+ with context.db.cursor() as cur:
NominatimID(oid).query_osm_id(cur, "SELECT * FROM %s where {}" % table)
assert cur.rowcount == 0, \
"Found {} entries for ID {}".format(cur.rowcount, oid)
def check_search_name_contents(context, exclude):
""" Check contents of place/placex tables. Each row represents a table row
and all data must match. Data not present in the expected table, may
- be arbitry. The rows are identified via the 'object' column which must
+ be arbitrary. The rows are identified via the 'object' column which must
have an identifier of the form '<NRW><osm id>[:<class>]'. All
expected rows are expected to be present with at least one database row.
"""
tokenizer = tokenizer_factory.get_tokenizer_for_db(context.nominatim.get_test_config())
with tokenizer.name_analyzer() as analyzer:
- with context.db.cursor(cursor_factory=psycopg2.extras.DictCursor) as cur:
+ with context.db.cursor() as cur:
for row in context.table:
nid = NominatimID(row['object'])
nid.row_by_place_id(cur, 'search_name',
    """ Check that there is no entry in the search_name table for the given
objects. IDs are in format '<NRW><osm id>[:<class>]'.
"""
- with context.db.cursor(cursor_factory=psycopg2.extras.DictCursor) as cur:
+ with context.db.cursor() as cur:
NominatimID(oid).row_by_place_id(cur, 'search_name')
assert cur.rowcount == 0, \
def check_location_postcode(context):
""" Check full contents for location_postcode table. Each row represents a table row
and all data must match. Data not present in the expected table, may
- be arbitry. The rows are identified via 'country' and 'postcode' columns.
+ be arbitrary. The rows are identified via 'country' and 'postcode' columns.
All rows must be present as excepted and there must not be additional
rows.
"""
- with context.db.cursor(cursor_factory=psycopg2.extras.DictCursor) as cur:
+ with context.db.cursor() as cur:
cur.execute("SELECT *, ST_AsText(geometry) as geomtxt FROM location_postcode")
assert cur.rowcount == len(list(context.table)), \
"Postcode table has {} rows, expected {}.".format(cur.rowcount, len(list(context.table)))
plist.sort()
- with context.db.cursor(cursor_factory=psycopg2.extras.DictCursor) as cur:
- if nctx.tokenizer != 'legacy':
- cur.execute("SELECT word FROM word WHERE type = 'P' and word = any(%s)",
- (plist,))
- else:
- cur.execute("""SELECT word FROM word WHERE word = any(%s)
- and class = 'place' and type = 'postcode'""",
- (plist,))
+ with context.db.cursor() as cur:
+ cur.execute("SELECT word FROM word WHERE type = 'P' and word = any(%s)",
+ (plist,))
- found = [row[0] for row in cur]
+ found = [row['word'] for row in cur]
assert len(found) == len(set(found)), f"Duplicate rows for postcodes: {found}"
if exclude:
def check_place_addressline(context):
""" Check the contents of the place_addressline table. Each row represents
a table row and all data must match. Data not present in the expected
- table, may be arbitry. The rows are identified via the 'object' column,
+ table, may be arbitrary. The rows are identified via the 'object' column,
representing the addressee and the 'address' column, representing the
address item.
"""
- with context.db.cursor(cursor_factory=psycopg2.extras.DictCursor) as cur:
+ with context.db.cursor() as cur:
for row in context.table:
nid = NominatimID(row['object'])
pid = nid.get_place_id(cur)
""" Check that the place_addressline doesn't contain any entries for the
given addressee/address item pairs.
"""
- with context.db.cursor(cursor_factory=psycopg2.extras.DictCursor) as cur:
+ with context.db.cursor() as cur:
for row in context.table:
pid = NominatimID(row['object']).get_place_id(cur)
apid = NominatimID(row['address']).get_place_id(cur, allow_empty=True)
def check_location_property_osmline(context, oid, neg):
""" Check that the given way is present in the interpolation table.
"""
- with context.db.cursor(cursor_factory=psycopg2.extras.DictCursor) as cur:
+ with context.db.cursor() as cur:
cur.execute("""SELECT *, ST_AsText(linegeo) as geomtxt
FROM location_property_osmline
WHERE osm_id = %s AND startnumber IS NOT NULL""",
assert not todo, f"Unmatched lines in table: {list(context.table[i] for i in todo)}"
+@then("location_property_osmline contains(?P<exact> exactly)?")
+def check_place_contents(context, exact):
+ """ Check contents of the interpolation table. Each row represents a table row
+ and all data must match. Data not present in the expected table, may
+ be arbitrary. The rows are identified via the 'object' column which must
+ have an identifier of the form '<osm id>[:<startnumber>]'. When multiple
+ rows match (for example because 'startnumber' was left out and there are
+ multiple entries for the given OSM object) then all must match. All
+ expected rows are expected to be present with at least one database row.
+ When 'exactly' is given, there must not be additional rows in the database.
+ """
+ with context.db.cursor() as cur:
+ expected_content = set()
+ for row in context.table:
+ if ':' in row['object']:
+ nid, start = row['object'].split(':', 2)
+ start = int(start)
+ else:
+ nid, start = row['object'], None
+
+ query = """SELECT *, ST_AsText(linegeo) as geomtxt,
+ ST_GeometryType(linegeo) as geometrytype
+ FROM location_property_osmline WHERE osm_id=%s"""
+
+ if ':' in row['object']:
+ query += ' and startnumber = %s'
+ params = [int(val) for val in row['object'].split(':', 2)]
+ else:
+ params = (int(row['object']), )
+
+ cur.execute(query, params)
+ assert cur.rowcount > 0, "No rows found for " + row['object']
+
+ for res in cur:
+ if exact:
+ expected_content.add((res['osm_id'], res['startnumber']))
+
+ DBRow(nid, res, context).assert_row(row, ['object'])
+
+ if exact:
+ cur.execute('SELECT osm_id, startnumber from location_property_osmline')
+ actual = set([(r['osm_id'], r['startnumber']) for r in cur])
+ assert expected_content == actual, \
+ f"Missing entries: {expected_content - actual}\n" \
+ f"Not expected in table: {actual - expected_content}"
-# SPDX-License-Identifier: GPL-2.0-only
+# SPDX-License-Identifier: GPL-3.0-or-later
#
# This file is part of Nominatim. (https://nominatim.org)
#
-# Copyright (C) 2022 by the Nominatim developer community.
+# Copyright (C) 2024 by the Nominatim developer community.
# For a full list of authors see the git log.
import tempfile
import random
import os
from pathlib import Path
-from nominatim.tools.exec_utils import run_osm2pgsql
+from nominatim_db.tools.exec_utils import run_osm2pgsql
+from nominatim_db.tools.replication import run_osm2pgsql_updates
from geometry_alias import ALIASES
def get_osm2pgsql_options(nominatim_env, fname, append):
return dict(import_file=fname,
- osm2pgsql=str(nominatim_env.build_dir / 'osm2pgsql' / 'osm2pgsql'),
+ osm2pgsql='osm2pgsql',
osm2pgsql_cache=50,
- osm2pgsql_style=str(nominatim_env.src_dir / 'settings' / 'import-extratags.style'),
+ osm2pgsql_style=str(nominatim_env.get_test_config().get_import_style_file()),
+ osm2pgsql_style_path=nominatim_env.get_test_config().lib_dir.lua,
threads=1,
dsn=nominatim_env.get_libpq_dsn(),
flatnode_file='',
return fd.name
+@given('the lua style file')
+def lua_style_file(context):
+ """ Define a custom style file to use for the import.
+ """
+ style = Path(context.nominatim.website_dir.name) / 'custom.lua'
+ style.write_text(context.text)
+ context.nominatim.test_env['NOMINATIM_IMPORT_STYLE'] = str(style)
+
+
@given(u'the ([0-9.]+ )?grid(?: with origin (?P<origin>.*))?')
def define_node_grid(context, grid_step, origin):
"""
# TODO coordinate
coords = origin.split(',')
if len(coords) != 2:
- raise RuntimeError('Grid origin expects orgin with x,y coordinates.')
+ raise RuntimeError('Grid origin expects origin with x,y coordinates.')
origin = (float(coords[0]), float(coords[1]))
elif origin in ALIASES:
origin = ALIASES[origin]
# create an OSM file and import it
fname = write_opl_file(context.text, context.osm)
try:
- run_osm2pgsql(get_osm2pgsql_options(context.nominatim, fname, append=True))
+ run_osm2pgsql_updates(context.db,
+ get_osm2pgsql_options(context.nominatim, fname, append=True))
finally:
os.remove(fname)
+
+@when('indexing')
+def index_database(context):
+ """
+ Run the Nominatim indexing step. This will process data previously
+ loaded with 'updating osm data'
+ """
+ context.nominatim.run_nominatim('index')
import re
import json
+import psycopg
+from psycopg import sql as pysql
+
from steps.check_functions import Almost
ID_REGEX = re.compile(r"(?P<typ>[NRW])(?P<oid>\d+)(:(?P<cls>\w+))?")
assert cur.rowcount == 1, \
"Place ID {!s} not unique. Found {} entries.".format(self, cur.rowcount)
- return cur.fetchone()[0]
+ return cur.fetchone()['place_id']
class DBRow:
def _has_centroid(self, expected):
if expected == 'in geometry':
- with self.context.db.cursor() as cur:
- cur.execute("""SELECT ST_Within(ST_SetSRID(ST_Point({cx}, {cy}), 4326),
- ST_SetSRID('{geomtxt}'::geometry, 4326))""".format(**self.db_row))
+ with self.context.db.cursor(row_factory=psycopg.rows.tuple_row) as cur:
+ cur.execute("""SELECT ST_Within(ST_SetSRID(ST_Point(%(cx)s, %(cy)s), 4326),
+ ST_SetSRID(%(geomtxt)s::geometry, 4326))""",
+ (self.db_row))
return cur.fetchone()[0]
if ' ' in expected:
def _has_geometry(self, expected):
geom = self.context.osm.parse_geometry(expected)
- with self.context.db.cursor() as cur:
- cur.execute("""SELECT ST_Equals(ST_SnapToGrid({}, 0.00001, 0.00001),
- ST_SnapToGrid(ST_SetSRID('{}'::geometry, 4326), 0.00001, 0.00001))""".format(
- geom, self.db_row['geomtxt']))
+ with self.context.db.cursor(row_factory=psycopg.rows.tuple_row) as cur:
+ cur.execute(pysql.SQL("""SELECT ST_Equals(ST_SnapToGrid({}, 0.00001, 0.00001),
+ ST_SnapToGrid(ST_SetSRID({}::geometry, 4326), 0.00001, 0.00001))""")
+ .format(pysql.SQL(geom),
+ pysql.Literal(self.db_row['geomtxt'])))
return cur.fetchone()[0]
def assert_msg(self, name, value):
if actual == 0:
return "place ID 0"
- with self.context.db.cursor() as cur:
+ with self.context.db.cursor(row_factory=psycopg.rows.tuple_row) as cur:
cur.execute("""SELECT osm_type, osm_id, class
FROM placex WHERE place_id = %s""",
(actual, ))
+++ /dev/null
-<?php
-/**
- * SPDX-License-Identifier: GPL-2.0-only
- *
- * This file is part of Nominatim. (https://nominatim.org)
- *
- * Copyright (C) 2022 by the Nominatim developer community.
- * For a full list of authors see the git log.
- */
-
-namespace Nominatim;
-
-require_once(CONST_LibDir.'/init-website.php');
-require_once(CONST_LibDir.'/AddressDetails.php');
-
-
-class AddressDetailsTest extends \PHPUnit\Framework\TestCase
-{
-
- protected function setUp(): void
- {
- // How the fixture got created
- //
- // 1) search for '10 downing street'
- // https://nominatim.openstreetmap.org/details.php?osmtype=R&osmid=1879842
- //
- // 2) find place_id in the local database
- // SELECT place_id, name FROM placex WHERE osm_type='R' AND osm_id=1879842;
- //
- // 3) set postgresql to non-align output, e.g. psql -A or \a in the CLI
- //
- // 4) query
- // SELECT row_to_json(row,true) FROM (
- // SELECT *, get_name_by_language(name, ARRAY['name:en']) as localname
- // FROM get_addressdata(194663412,10)
- // ORDER BY rank_address DESC, isaddress DESC
- // ) AS row;
- //
- // 5) copy&paste into file. Add commas between records
- //
- $json = file_get_contents(CONST_DataDir.'/test/php/fixtures/address_details_10_downing_street.json');
- $data = json_decode($json, true);
-
- $this->oDbStub = $this->getMockBuilder(\DB::class)
- ->setMethods(array('getAll'))
- ->getMock();
- $this->oDbStub->method('getAll')
- ->willReturn($data);
- }
-
- public function testGetLocaleAddress()
- {
- $oAD = new AddressDetails($this->oDbStub, 194663412, 10, 'en');
- $expected = join(', ', array(
- '10 Downing Street',
- '10',
- 'Downing Street',
- 'St. James\'s',
- 'Covent Garden',
- 'Westminster',
- 'London',
- 'Greater London',
- 'England',
- 'SW1A 2AA',
- 'United Kingdom'
- ));
- $this->assertEquals($expected, $oAD->getLocaleAddress());
- }
-
- public function testGetAddressDetails()
- {
- $oAD = new AddressDetails($this->oDbStub, 194663412, 10, 'en');
- $this->assertEquals(18, count($oAD->getAddressDetails(true)));
- $this->assertEquals(12, count($oAD->getAddressDetails(false)));
- }
-
- public function testGetAddressNames()
- {
- $oAD = new AddressDetails($this->oDbStub, 194663412, 10, 'en');
- $expected = array(
- 'tourism' => '10 Downing Street',
- 'house_number' => '10',
- 'road' => 'Downing Street',
- 'neighbourhood' => 'St. James\'s',
- 'suburb' => 'Covent Garden',
- 'city' => 'London',
- 'state_district' => 'Greater London',
- 'state' => 'England',
- 'ISO3166-2-lvl4' => 'GB-ENG',
- 'ISO3166-2-lvl6' => 'GB-LND',
- 'postcode' => 'SW1A 2AA',
- 'country' => 'United Kingdom',
- 'country_code' => 'gb'
- );
-
- $this->assertEquals($expected, $oAD->getAddressNames());
- }
-
- public function testGetAdminLevels()
- {
- $oAD = new AddressDetails($this->oDbStub, 194663412, 10, 'en');
- $expected = array(
- 'level8' => 'Westminster',
- 'level6' => 'London',
- 'level5' => 'Greater London',
- 'level4' => 'England',
- 'level2' => 'United Kingdom'
- );
- $this->assertEquals($expected, $oAD->getAdminLevels());
- }
-
- public function testDebugInfo()
- {
- $oAD = new AddressDetails($this->oDbStub, 194663412, 10, 'en');
- $this->assertTrue(is_array($oAD->debugInfo()));
- $this->assertEquals(18, count($oAD->debugInfo()));
- }
-}
+++ /dev/null
-<?php
-/**
- * SPDX-License-Identifier: GPL-2.0-only
- *
- * This file is part of Nominatim. (https://nominatim.org)
- *
- * Copyright (C) 2022 by the Nominatim developer community.
- * For a full list of authors see the git log.
- */
-
-namespace Nominatim;
-
-require_once(CONST_LibDir.'/ClassTypes.php');
-
-class ClassTypesTest extends \PHPUnit\Framework\TestCase
-{
- public function testGetLabelTag()
- {
- $aPlace = array('class' => 'boundary', 'type' => 'administrative',
- 'rank_address' => '4', 'place_type' => 'city');
- $this->assertEquals('city', ClassTypes\getLabelTag($aPlace));
-
- $aPlace = array('class' => 'boundary', 'type' => 'administrative',
- 'rank_address' => '10');
- $this->assertEquals('state_district', ClassTypes\getLabelTag($aPlace));
-
- $aPlace = array('class' => 'boundary', 'type' => 'administrative');
- $this->assertEquals('administrative', ClassTypes\getLabelTag($aPlace));
-
- $aPlace = array('class' => 'place', 'type' => 'hamlet', 'rank_address' => '20');
- $this->assertEquals('hamlet', ClassTypes\getLabelTag($aPlace));
-
- $aPlace = array('class' => 'highway', 'type' => 'residential',
- 'rank_address' => '26');
- $this->assertEquals('road', ClassTypes\getLabelTag($aPlace));
-
- $aPlace = array('class' => 'place', 'type' => 'house_number',
- 'rank_address' => '30');
- $this->assertEquals('house_number', ClassTypes\getLabelTag($aPlace));
-
- $aPlace = array('class' => 'amenity', 'type' => 'prison',
- 'rank_address' => '30');
- $this->assertEquals('amenity', ClassTypes\getLabelTag($aPlace));
- }
-
- public function testGetLabel()
- {
- $aPlace = array('class' => 'boundary', 'type' => 'administrative',
- 'rank_address' => '4', 'place_type' => 'city');
- $this->assertEquals('City', ClassTypes\getLabel($aPlace));
-
- $aPlace = array('class' => 'boundary', 'type' => 'administrative',
- 'rank_address' => '10');
- $this->assertEquals('State District', ClassTypes\getLabel($aPlace));
-
- $aPlace = array('class' => 'boundary', 'type' => 'administrative');
- $this->assertEquals('Administrative', ClassTypes\getLabel($aPlace));
-
- $aPlace = array('class' => 'amenity', 'type' => 'prison');
- $this->assertEquals('Prison', ClassTypes\getLabel($aPlace));
-
- $aPlace = array('class' => 'amenity', 'type' => 'foobar');
- $this->assertNull(ClassTypes\getLabel($aPlace));
- }
-
- public function testGetBoundaryLabel()
- {
- $this->assertEquals('City', ClassTypes\getBoundaryLabel(8, null));
- $this->assertEquals('Administrative', ClassTypes\getBoundaryLabel(18, null));
- $this->assertEquals('None', ClassTypes\getBoundaryLabel(18, null, 'None'));
- $this->assertEquals('State', ClassTypes\getBoundaryLabel(4, 'de', 'None'));
- $this->assertEquals('County', ClassTypes\getBoundaryLabel(4, 'se', 'None'));
- $this->assertEquals('Municipality', ClassTypes\getBoundaryLabel(7, 'se', 'None'));
- }
-
- public function testGetDefRadius()
- {
- $aResult = array('class' => '', 'type' => '');
- $this->assertEquals(0.00005, ClassTypes\getDefRadius($aResult));
-
- $aResult = array('class' => 'place', 'type' => 'country');
- $this->assertEquals(7, ClassTypes\getDefRadius($aResult));
- }
-
- public function testGetIcon()
- {
- $aResult = array('class' => '', 'type' => '');
- $this->assertNull(ClassTypes\getIcon($aResult));
-
- $aResult = array('class' => 'place', 'type' => 'airport');
- $this->assertEquals('transport_airport2', ClassTypes\getIcon($aResult));
- }
-
- public function testGetImportance()
- {
- $aResult = array('class' => '', 'type' => '');
- $this->assertNull(ClassTypes\getImportance($aResult));
-
- $aResult = array('class' => 'place', 'type' => 'airport');
- $this->assertGreaterThan(0, ClassTypes\getImportance($aResult));
- }
-}
+++ /dev/null
-<?php
-/**
- * SPDX-License-Identifier: GPL-2.0-only
- *
- * This file is part of Nominatim. (https://nominatim.org)
- *
- * Copyright (C) 2022 by the Nominatim developer community.
- * For a full list of authors see the git log.
- */
-
-namespace Nominatim;
-
-require_once(CONST_LibDir.'/lib.php');
-require_once(CONST_LibDir.'/DB.php');
-
-// subclassing so we can set the protected connection variable
-class NominatimSubClassedDB extends \Nominatim\DB
-{
- public function setConnection($oConnection)
- {
- $this->connection = $oConnection;
- }
-}
-
-// phpcs:ignore PSR1.Classes.ClassDeclaration.MultipleClasses
-class DBTest extends \PHPUnit\Framework\TestCase
-{
- public function testReusingConnection()
- {
- $oDB = new NominatimSubClassedDB('');
- $oDB->setConnection('anything');
- $this->assertTrue($oDB->connect());
- }
-
- public function testCheckConnection()
- {
- $oDB = new \Nominatim\DB('');
- $this->assertFalse($oDB->checkConnection());
- }
-
- public function testErrorHandling()
- {
- $this->expectException(DatabaseError::class);
- $this->expectExceptionMessage('Failed to establish database connection');
-
- $oDB = new \Nominatim\DB('pgsql:dbname=abc');
- $oDB->connect();
- }
-
- public function testErrorHandling2()
- {
- $this->expectException(DatabaseError::class);
- $this->expectExceptionMessage('Database query failed');
-
- $oPDOStub = $this->getMockBuilder(PDO::class)
- ->setMethods(array('query', 'quote'))
- ->getMock();
-
- $oPDOStub->method('query')
- ->will($this->returnCallback(function ($sVal) {
- return "'$sVal'";
- }));
-
- $oPDOStub->method('query')
- ->will($this->returnCallback(function () {
- throw new \PDOException('ERROR: syntax error at or near "FROM"');
- }));
-
- $oDB = new NominatimSubClassedDB('');
- $oDB->setConnection($oPDOStub);
- $oDB->getOne('SELECT name FROM');
- }
-
- public function testGetPostgresVersion()
- {
- $oDBStub = $this->getMockBuilder(\Nominatim\DB::class)
- ->disableOriginalConstructor()
- ->setMethods(array('getOne'))
- ->getMock();
-
- $oDBStub->method('getOne')
- ->willReturn('100006');
-
- $this->assertEquals(10, $oDBStub->getPostgresVersion());
- }
-
- public function testGetPostgisVersion()
- {
- $oDBStub = $this->getMockBuilder(\Nominatim\DB::class)
- ->disableOriginalConstructor()
- ->setMethods(array('getOne'))
- ->getMock();
-
- $oDBStub->method('getOne')
- ->willReturn('2.4.4');
-
- $this->assertEquals(2.4, $oDBStub->getPostgisVersion());
- }
-
- public function testParseDSN()
- {
- $this->assertEquals(
- array(),
- \Nominatim\DB::parseDSN('')
- );
- $this->assertEquals(
- array(
- 'database' => 'db1',
- 'hostspec' => 'machine1'
- ),
- \Nominatim\DB::parseDSN('pgsql:dbname=db1;host=machine1')
- );
- $this->assertEquals(
- array(
- 'database' => 'db1',
- 'hostspec' => 'machine1',
- 'port' => '1234',
- 'username' => 'john',
- 'password' => 'secret'
- ),
- \Nominatim\DB::parseDSN('pgsql:dbname=db1;host=machine1;port=1234;user=john;password=secret')
- );
- }
-
- public function testGenerateDSN()
- {
- $this->assertEquals(
- 'pgsql:',
- \Nominatim\DB::generateDSN(array())
- );
- $this->assertEquals(
- 'pgsql:host=machine1;dbname=db1',
- \Nominatim\DB::generateDSN(\Nominatim\DB::parseDSN('pgsql:host=machine1;dbname=db1'))
- );
- }
-
- public function testAgainstDatabase()
- {
- $unit_test_dsn = getenv('UNIT_TEST_DSN') != false ?
- getenv('UNIT_TEST_DSN') :
- 'pgsql:dbname=nominatim_unit_tests';
-
- ## Create the database.
- {
- $aDSNParsed = \Nominatim\DB::parseDSN($unit_test_dsn);
- $sDbname = $aDSNParsed['database'];
- $aDSNParsed['database'] = 'postgres';
-
- $oDB = new \Nominatim\DB(\Nominatim\DB::generateDSN($aDSNParsed));
- $oDB->connect();
- $oDB->exec('DROP DATABASE IF EXISTS ' . $sDbname);
- $oDB->exec('CREATE DATABASE ' . $sDbname);
- }
-
- $oDB = new \Nominatim\DB($unit_test_dsn);
- $oDB->connect();
-
- $this->assertTrue(
- $oDB->checkConnection($sDbname)
- );
-
- # Tables, Indices
- {
- $oDB->exec('CREATE TABLE table1 (id integer, city varchar, country varchar)');
-
- $this->assertTrue($oDB->tableExists('table1'));
- $this->assertFalse($oDB->tableExists('table99'));
- $this->assertFalse($oDB->tableExists(null));
- }
-
- # select queries
- {
- $oDB->exec(
- "INSERT INTO table1 VALUES (1, 'Berlin', 'Germany'), (2, 'Paris', 'France')"
- );
-
- $this->assertEquals(
- array(
- array('city' => 'Berlin'),
- array('city' => 'Paris')
- ),
- $oDB->getAll('SELECT city FROM table1')
- );
- $this->assertEquals(
- array(),
- $oDB->getAll('SELECT city FROM table1 WHERE id=999')
- );
-
-
- $this->assertEquals(
- array('id' => 1, 'city' => 'Berlin', 'country' => 'Germany'),
- $oDB->getRow('SELECT * FROM table1 WHERE id=1')
- );
- $this->assertEquals(
- false,
- $oDB->getRow('SELECT * FROM table1 WHERE id=999')
- );
-
-
- $this->assertEquals(
- array('Berlin', 'Paris'),
- $oDB->getCol('SELECT city FROM table1')
- );
- $this->assertEquals(
- array(),
- $oDB->getCol('SELECT city FROM table1 WHERE id=999')
- );
-
- $this->assertEquals(
- 'Berlin',
- $oDB->getOne('SELECT city FROM table1 WHERE id=1')
- );
- $this->assertEquals(
- null,
- $oDB->getOne('SELECT city FROM table1 WHERE id=999')
- );
-
- $this->assertEquals(
- array('Berlin' => 'Germany', 'Paris' => 'France'),
- $oDB->getAssoc('SELECT city, country FROM table1')
- );
- $this->assertEquals(
- array(),
- $oDB->getAssoc('SELECT city, country FROM table1 WHERE id=999')
- );
- }
- }
-}
+++ /dev/null
-<?php
-/**
- * SPDX-License-Identifier: GPL-2.0-only
- *
- * This file is part of Nominatim. (https://nominatim.org)
- *
- * Copyright (C) 2022 by the Nominatim developer community.
- * For a full list of authors see the git log.
- */
-
-namespace Nominatim;
-
-require_once(CONST_LibDir.'/init-website.php');
-require_once(CONST_LibDir.'/DatabaseError.php');
-
-class DatabaseErrorTest extends \PHPUnit\Framework\TestCase
-{
-
- public function testSqlMessage()
- {
- $oSqlStub = $this->getMockBuilder(PDOException::class)
- ->setMethods(array('getMessage'))
- ->getMock();
-
- $oSqlStub->method('getMessage')
- ->willReturn('Unknown table.');
-
- $oErr = new DatabaseError('Sql error', 123, null, $oSqlStub);
- $this->assertEquals('Sql error', $oErr->getMessage());
- $this->assertEquals(123, $oErr->getCode());
- $this->assertEquals('Unknown table.', $oErr->getSqlError());
- }
-
- public function testSqlObjectDump()
- {
- $oErr = new DatabaseError('Sql error', 123, null, array('one' => 'two'));
- $this->assertStringContainsString('two', $oErr->getSqlDebugDump());
- }
-}
+++ /dev/null
-<?php
-/**
- * SPDX-License-Identifier: GPL-2.0-only
- *
- * This file is part of Nominatim. (https://nominatim.org)
- *
- * Copyright (C) 2022 by the Nominatim developer community.
- * For a full list of authors see the git log.
- */
-
-namespace Nominatim;
-
-require_once(CONST_LibDir.'/DebugHtml.php');
-
-class DebugTest extends \PHPUnit\Framework\TestCase
-{
-
- protected function setUp(): void
- {
- $this->oWithDebuginfo = $this->getMockBuilder(\GeococdeMock::class)
- ->setMethods(array('debugInfo'))
- ->getMock();
- $this->oWithDebuginfo->method('debugInfo')
- ->willReturn(array('key1' => 'val1', 'key2' => 'val2', 'key3' => 'val3'));
-
-
- $this->oWithToString = $this->getMockBuilder(\SomeMock::class)
- ->setMethods(array('__toString'))
- ->getMock();
- $this->oWithToString->method('__toString')->willReturn('me as string');
- }
-
- public function testPrintVar()
- {
- $this->expectOutputString(<<<EOT
-<pre><b>Var0:</b> </pre>
-<pre><b>Var1:</b> <i>True</i></pre>
-<pre><b>Var2:</b> <i>False</i></pre>
-<pre><b>Var3:</b> 0</pre>
-<pre><b>Var4:</b> 'String'</pre>
-<pre><b>Var5:</b> 0 => 'one'
- 1 => 'two'
- 2 => 'three'</pre>
-<pre><b>Var6:</b> 'key' => 'value'
- 'key2' => 'value2'</pre>
-<pre><b>Var7:</b> me as string</pre>
-<pre><b>Var8:</b> 'value', 'value2'</pre>
-
-EOT
- );
-
- Debug::printVar('Var0', null);
- Debug::printVar('Var1', true);
- Debug::printVar('Var2', false);
- Debug::printVar('Var3', 0);
- Debug::printVar('Var4', 'String');
- Debug::printVar('Var5', array('one', 'two', 'three'));
- Debug::printVar('Var6', array('key' => 'value', 'key2' => 'value2'));
- Debug::printVar('Var7', $this->oWithToString);
- Debug::printVar('Var8', Debug::fmtArrayVals(array('key' => 'value', 'key2' => 'value2')));
- }
-
-
- public function testDebugArray()
- {
- $this->expectOutputString(<<<EOT
-<pre><b>Arr0:</b> 'null'</pre>
-<pre><b>Arr1:</b> 'key1' => 'val1'
- 'key2' => 'val2'
- 'key3' => 'val3'</pre>
-
-EOT
- );
-
- Debug::printDebugArray('Arr0', null);
- Debug::printDebugArray('Arr1', $this->oWithDebuginfo);
- }
-
-
- public function testPrintDebugTable()
- {
- $this->expectOutputString(<<<EOT
-<b>Table1:</b>
-<table border='1'>
-</table>
-<b>Table2:</b>
-<table border='1'>
-</table>
-<b>Table3:</b>
-<table border='1'>
- <tr>
- <th><small>0</small></th>
- <th><small>1</small></th>
- </tr>
- <tr>
- <td><pre>'one'</pre></td>
- <td><pre>'two'</pre></td>
- </tr>
- <tr>
- <td><pre>'three'</pre></td>
- <td><pre>'four'</pre></td>
- </tr>
-</table>
-<b>Table4:</b>
-<table border='1'>
- <tr>
- <th><small>key1</small></th>
- <th><small>key2</small></th>
- <th><small>key3</small></th>
- </tr>
- <tr>
- <td><pre>'val1'</pre></td>
- <td><pre>'val2'</pre></td>
- <td><pre>'val3'</pre></td>
- </tr>
-</table>
-
-EOT
- );
-
- Debug::printDebugTable('Table1', null);
-
- Debug::printDebugTable('Table2', array());
-
- // Numeric headers
- Debug::printDebugTable('Table3', array(array('one', 'two'), array('three', 'four')));
-
- // Associate array
- Debug::printDebugTable('Table4', array($this->oWithDebuginfo));
- }
-
- public function testPrintGroupTable()
- {
- $this->expectOutputString(<<<EOT
-<b>Table1:</b>
-<table border='1'>
-</table>
-<b>Table2:</b>
-<table border='1'>
-</table>
-<b>Table3:</b>
-<table border='1'>
- <tr>
- <th><small>Group</small></th>
- <th><small>key1</small></th>
- <th><small>key2</small></th>
- </tr>
- <tr>
- <td><pre>group1</pre></td>
- <td><pre>'val1'</pre></td>
- <td><pre>'val2'</pre></td>
- </tr>
- <tr>
- <td><pre>group1</pre></td>
- <td><pre>'one'</pre></td>
- <td><pre>'two'</pre></td>
- </tr>
- <tr>
- <td><pre>group2</pre></td>
- <td><pre>'val1'</pre></td>
- <td><pre>'val2'</pre></td>
- </tr>
-</table>
-<b>Table4:</b>
-<table border='1'>
- <tr>
- <th><small>Group</small></th>
- <th><small>key1</small></th>
- <th><small>key2</small></th>
- <th><small>key3</small></th>
- </tr>
- <tr>
- <td><pre>group1</pre></td>
- <td><pre>'val1'</pre></td>
- <td><pre>'val2'</pre></td>
- <td><pre>'val3'</pre></td>
- </tr>
- <tr>
- <td><pre>group1</pre></td>
- <td><pre>'val1'</pre></td>
- <td><pre>'val2'</pre></td>
- <td><pre>'val3'</pre></td>
- </tr>
-</table>
-
-EOT
- );
-
- Debug::printGroupTable('Table1', null);
- Debug::printGroupTable('Table2', array());
-
- // header are taken from first group item, thus no key3 gets printed
- $aGroups = array(
- 'group1' => array(
- array('key1' => 'val1', 'key2' => 'val2'),
- array('key1' => 'one', 'key2' => 'two', 'unknown' => 1),
- ),
- 'group2' => array(
- array('key1' => 'val1', 'key2' => 'val2', 'key3' => 'val3'),
- )
- );
- Debug::printGroupTable('Table3', $aGroups);
-
- $aGroups = array(
- 'group1' => array($this->oWithDebuginfo, $this->oWithDebuginfo),
- );
- Debug::printGroupTable('Table4', $aGroups);
- }
-}
+++ /dev/null
-<?php
-/**
- * SPDX-License-Identifier: GPL-2.0-only
- *
- * This file is part of Nominatim. (https://nominatim.org)
- *
- * Copyright (C) 2022 by the Nominatim developer community.
- * For a full list of authors see the git log.
- */
-
-namespace Nominatim;
-
-require_once(CONST_LibDir.'/lib.php');
-require_once(CONST_LibDir.'/ClassTypes.php');
-
-class LibTest extends \PHPUnit\Framework\TestCase
-{
-
- public function testAddQuotes()
- {
- // FIXME: not quoting existing quote signs is probably a bug
- $this->assertSame("'St. John's'", addQuotes("St. John's"));
- $this->assertSame("''", addQuotes(''));
- }
-
- public function testParseLatLon()
- {
- // no coordinates expected
- $this->assertFalse(parseLatLon(''));
- $this->assertFalse(parseLatLon('abc'));
- $this->assertFalse(parseLatLon('12 34'));
-
- // coordinates expected
- $this->assertNotNull(parseLatLon('0.0 -0.0'));
-
- $aRes = parseLatLon(' abc 12.456 -78.90 def ');
- $this->assertEquals($aRes[1], 12.456);
- $this->assertEquals($aRes[2], -78.90);
- $this->assertEquals($aRes[0], ' 12.456 -78.90 ');
-
- $aRes = parseLatLon(' [12.456,-78.90] ');
- $this->assertEquals($aRes[1], 12.456);
- $this->assertEquals($aRes[2], -78.90);
- $this->assertEquals($aRes[0], ' [12.456,-78.90] ');
-
- $aRes = parseLatLon(' -12.456,-78.90 ');
- $this->assertEquals($aRes[1], -12.456);
- $this->assertEquals($aRes[2], -78.90);
- $this->assertEquals($aRes[0], ' -12.456,-78.90 ');
-
- // http://en.wikipedia.org/wiki/Geographic_coordinate_conversion
- // these all represent the same location
- $aQueries = array(
- '40 26.767 N 79 58.933 W',
- '40° 26.767′ N 79° 58.933′ W',
- "40° 26.767' N 79° 58.933' W",
- "40° 26.767'
- N 79° 58.933' W",
- 'N 40 26.767, W 79 58.933',
- 'N 40°26.767′, W 79°58.933′',
- ' N 40°26.767′, W 79°58.933′',
- "N 40°26.767', W 79°58.933'",
-
- '40 26 46 N 79 58 56 W',
- '40° 26′ 46″ N 79° 58′ 56″ W',
- '40° 26′ 46.00″ N 79° 58′ 56.00″ W',
- '40°26′46″N 79°58′56″W',
- 'N 40 26 46 W 79 58 56',
- 'N 40° 26′ 46″, W 79° 58′ 56″',
- 'N 40° 26\' 46", W 79° 58\' 56"',
- 'N 40° 26\' 46", W 79° 58\' 56"',
-
- '40.446 -79.982',
- '40.446,-79.982',
- '40.446° N 79.982° W',
- 'N 40.446° W 79.982°',
-
- '[40.446 -79.982]',
- '[40.446,\v-79.982]',
- ' 40.446 , -79.982 ',
- ' 40.446 , -79.982 ',
- ' 40.446 , -79.982 ',
- ' 40.446\v, -79.982 ',
- );
-
-
- foreach ($aQueries as $sQuery) {
- $aRes = parseLatLon($sQuery);
- $this->assertEqualsWithDelta(40.446, $aRes[1], 0.01, 'degrees decimal ' . $sQuery);
- $this->assertEqualsWithDelta(-79.982, $aRes[2], 0.01, 'degrees decimal ' . $sQuery);
- $this->assertEquals($sQuery, $aRes[0]);
- }
- }
-}
+++ /dev/null
-<?php
-/**
- * SPDX-License-Identifier: GPL-2.0-only
- *
- * This file is part of Nominatim. (https://nominatim.org)
- *
- * Copyright (C) 2022 by the Nominatim developer community.
- * For a full list of authors see the git log.
- */
-
-namespace Nominatim;
-
-require_once(CONST_LibDir.'/ParameterParser.php');
-
-
-function userError($sError)
-{
- throw new \Exception($sError);
-}
-
-class ParameterParserTest extends \PHPUnit\Framework\TestCase
-{
-
-
- public function testGetBool()
- {
- $oParams = new ParameterParser(array(
- 'bool1' => '1',
- 'bool2' => '0',
- 'bool3' => 'true',
- 'bool4' => 'false',
- 'bool5' => ''
- ));
-
- $this->assertSame(false, $oParams->getBool('non-exists'));
- $this->assertSame(true, $oParams->getBool('non-exists', true));
- $this->assertSame(true, $oParams->getBool('bool1'));
- $this->assertSame(false, $oParams->getBool('bool2'));
- $this->assertSame(true, $oParams->getBool('bool3'));
- $this->assertSame(true, $oParams->getBool('bool4'));
- $this->assertSame(false, $oParams->getBool('bool5'));
- }
-
-
- public function testGetInt()
- {
- $oParams = new ParameterParser(array(
- 'int1' => '5',
- 'int2' => '-1',
- 'int3' => 0
- ));
-
- $this->assertSame(false, $oParams->getInt('non-exists'));
- $this->assertSame(999, $oParams->getInt('non-exists', 999));
- $this->assertSame(5, $oParams->getInt('int1'));
-
- $this->assertSame(-1, $oParams->getInt('int2'));
- $this->assertSame(0, $oParams->getInt('int3'));
- }
-
-
- public function testGetIntWithNonNumber()
- {
- $this->expectException(\Exception::class);
- $this->expectExceptionMessage("Integer number expected for parameter 'int4'");
-
- (new ParameterParser(array('int4' => 'a')))->getInt('int4');
- }
-
-
- public function testGetIntWithEmpytString()
- {
- $this->expectException(\Exception::class);
- $this->expectExceptionMessage("Integer number expected for parameter 'int5'");
-
- (new ParameterParser(array('int5' => '')))->getInt('int5');
- }
-
-
- public function testGetFloat()
- {
-
- $oParams = new ParameterParser(array(
- 'float1' => '1.0',
- 'float2' => '-5',
- 'float3' => 0
- ));
-
- $this->assertSame(false, $oParams->getFloat('non-exists'));
- $this->assertSame(999, $oParams->getFloat('non-exists', 999));
- $this->assertSame(1.0, $oParams->getFloat('float1'));
- $this->assertSame(-5.0, $oParams->getFloat('float2'));
- $this->assertSame(0.0, $oParams->getFloat('float3'));
- }
-
- public function testGetFloatWithEmptyString()
- {
- $this->expectException(\Exception::class);
- $this->expectExceptionMessage("Floating-point number expected for parameter 'float4'");
-
- (new ParameterParser(array('float4' => '')))->getFloat('float4');
- }
-
- public function testGetFloatWithTextString()
- {
- $this->expectException(\Exception::class);
- $this->expectExceptionMessage("Floating-point number expected for parameter 'float5'");
-
- (new ParameterParser(array('float5' => 'a')))->getFloat('float5');
- }
-
-
- public function testGetFloatWithInvalidNumber()
- {
- $this->expectException(\Exception::class);
- $this->expectExceptionMessage("Floating-point number expected for parameter 'float6'");
-
- (new ParameterParser(array('float6' => '-55.')))->getFloat('float6');
- }
-
-
- public function testGetString()
- {
- $oParams = new ParameterParser(array(
- 'str1' => 'abc',
- 'str2' => '',
- 'str3' => '0'
- ));
-
- $this->assertSame(false, $oParams->getString('non-exists'));
- $this->assertSame('default', $oParams->getString('non-exists', 'default'));
- $this->assertSame('abc', $oParams->getString('str1'));
- $this->assertSame(false, $oParams->getStringList('str2'));
- $this->assertSame(false, $oParams->getStringList('str3')); // sadly PHP magic treats 0 as false when returned
- }
-
-
- public function testGetSet()
- {
- $oParams = new ParameterParser(array(
- 'val1' => 'foo',
- 'val2' => '',
- 'val3' => 0
- ));
-
- $this->assertSame(false, $oParams->getSet('non-exists', array('foo', 'bar')));
- $this->assertSame('default', $oParams->getSet('non-exists', array('foo', 'bar'), 'default'));
- $this->assertSame('foo', $oParams->getSet('val1', array('foo', 'bar')));
-
- $this->assertSame(false, $oParams->getSet('val2', array('foo', 'bar')));
- $this->assertSame(false, $oParams->getSet('val3', array('foo', 'bar')));
- }
-
-
- public function testGetSetWithValueNotInSet()
- {
- $this->expectException(\Exception::class);
- $this->expectExceptionMessage("Parameter 'val4' must be one of: foo, bar");
-
- (new ParameterParser(array('val4' => 'faz')))->getSet('val4', array('foo', 'bar'));
- }
-
-
- public function testGetStringList()
- {
- $oParams = new ParameterParser(array(
- 'list1' => ',a,b,c,,c,d',
- 'list2' => 'a',
- 'list3' => '',
- 'list4' => '0'
- ));
-
- $this->assertSame(false, $oParams->getStringList('non-exists'));
- $this->assertSame(array('a', 'b'), $oParams->getStringList('non-exists', array('a', 'b')));
- $this->assertSame(array('a', 'b', 'c', 'c', 'd'), $oParams->getStringList('list1'));
- $this->assertSame(array('a'), $oParams->getStringList('list2'));
- $this->assertSame(false, $oParams->getStringList('list3'));
- $this->assertSame(false, $oParams->getStringList('list4'));
- }
-
-
- public function testGetPreferredLanguages()
- {
- $oParams = new ParameterParser(array('accept-language' => ''));
- $this->assertSame(array(
- 'name:default' => 'name:default',
- '_place_name:default' => '_place_name:default',
- 'name' => 'name',
- '_place_name' => '_place_name'
- ), array_slice($oParams->getPreferredLanguages('default'), 0, 4));
-
- $oParams = new ParameterParser(array('accept-language' => 'de,en'));
- $this->assertSame(array(
- 'name:de' => 'name:de',
- '_place_name:de' => '_place_name:de',
- 'name:en' => 'name:en',
- '_place_name:en' => '_place_name:en',
- 'name' => 'name',
- '_place_name' => '_place_name'
- ), array_slice($oParams->getPreferredLanguages('default'), 0, 6));
-
- $oParams = new ParameterParser(array('accept-language' => 'fr-ca,fr;q=0.8,en-ca;q=0.5,en;q=0.3'));
- $this->assertSame(array(
- 'name:fr-ca' => 'name:fr-ca',
- '_place_name:fr-ca' => '_place_name:fr-ca',
- 'name:fr' => 'name:fr',
- '_place_name:fr' => '_place_name:fr',
- 'name:en-ca' => 'name:en-ca',
- '_place_name:en-ca' => '_place_name:en-ca',
- 'name:en' => 'name:en',
- '_place_name:en' => '_place_name:en',
- 'name' => 'name',
- '_place_name' => '_place_name'
- ), array_slice($oParams->getPreferredLanguages('default'), 0, 10));
-
- $oParams = new ParameterParser(array('accept-language' => 'ja_rm,zh_pinyin'));
- $this->assertSame(array(
- 'name:ja_rm' => 'name:ja_rm',
- '_place_name:ja_rm' => '_place_name:ja_rm',
- 'name:zh_pinyin' => 'name:zh_pinyin',
- '_place_name:zh_pinyin' => '_place_name:zh_pinyin',
- 'name:ja' => 'name:ja',
- '_place_name:ja' => '_place_name:ja',
- 'name:zh' => 'name:zh',
- '_place_name:zh' => '_place_name:zh',
- 'name' => 'name',
- '_place_name' => '_place_name'
- ), array_slice($oParams->getPreferredLanguages('default'), 0, 10));
- }
-
- public function testHasSetAny()
- {
- $oParams = new ParameterParser(array(
- 'one' => '',
- 'two' => 0,
- 'three' => '0',
- 'four' => '1',
- 'five' => 'anystring'
- ));
- $this->assertFalse($oParams->hasSetAny(array()));
- $this->assertFalse($oParams->hasSetAny(array('')));
- $this->assertFalse($oParams->hasSetAny(array('unknown')));
- $this->assertFalse($oParams->hasSetAny(array('one', 'two', 'three')));
- $this->assertTrue($oParams->hasSetAny(array('one', 'four')));
- $this->assertTrue($oParams->hasSetAny(array('four')));
- $this->assertTrue($oParams->hasSetAny(array('five')));
- }
-}
+++ /dev/null
-<?php
-/**
- * SPDX-License-Identifier: GPL-2.0-only
- *
- * This file is part of Nominatim. (https://nominatim.org)
- *
- * Copyright (C) 2022 by the Nominatim developer community.
- * For a full list of authors see the git log.
- */
-
-namespace Nominatim;
-
-require_once(CONST_LibDir.'/Result.php');
-
-function mkRankedResult($iId, $iResultRank)
-{
- $oResult = new Result($iId);
- $oResult->iResultRank = $iResultRank;
-
- return $oResult;
-}
-
-
-class ResultTest extends \PHPUnit\Framework\TestCase
-{
- public function testSplitResults()
- {
- $aSplitResults = Result::splitResults(array(
- mkRankedResult(1, 2),
- mkRankedResult(2, 0),
- mkRankedResult(3, 0),
- mkRankedResult(4, 2),
- mkRankedResult(5, 1)
- ));
-
-
- $aHead = array_keys($aSplitResults['head']);
- $aTail = array_keys($aSplitResults['tail']);
-
- $this->assertEquals($aHead, array(2, 3));
- $this->assertEquals($aTail, array(1, 4, 5));
- }
-}
+++ /dev/null
-<?php
-/**
- * SPDX-License-Identifier: GPL-2.0-only
- *
- * This file is part of Nominatim. (https://nominatim.org)
- *
- * Copyright (C) 2022 by the Nominatim developer community.
- * For a full list of authors see the git log.
- */
-
-namespace Nominatim;
-
-require_once(CONST_LibDir.'/SearchContext.php');
-
-class SearchContextTest extends \PHPUnit\Framework\TestCase
-{
- private $oCtx;
-
-
- protected function setUp(): void
- {
- $this->oCtx = new SearchContext();
- }
-
- public function testHasNearPoint()
- {
- $this->assertFalse($this->oCtx->hasNearPoint());
- $this->oCtx->setNearPoint(0, 0);
- $this->assertTrue($this->oCtx->hasNearPoint());
- }
-
- public function testNearRadius()
- {
- $this->oCtx->setNearPoint(1, 1);
- $this->assertEquals(0.1, $this->oCtx->nearRadius());
- $this->oCtx->setNearPoint(1, 1, 0.338);
- $this->assertEquals(0.338, $this->oCtx->nearRadius());
- }
-
- public function testWithinSQL()
- {
- $this->oCtx->setNearPoint(0.1, 23, 1);
-
- $this->assertEquals(
- 'ST_DWithin(foo, ST_SetSRID(ST_Point(23,0.1),4326), 1.000000)',
- $this->oCtx->withinSQL('foo')
- );
- }
-
- public function testDistanceSQL()
- {
- $this->oCtx->setNearPoint(0.1, 23, 1);
-
- $this->assertEquals(
- 'ST_Distance(ST_SetSRID(ST_Point(23,0.1),4326), foo)',
- $this->oCtx->distanceSQL('foo')
- );
- }
-
- public function testSetViewboxFromBox()
- {
- $viewbox = array(30, 20, 40, 50);
- $this->oCtx->setViewboxFromBox($viewbox, true);
- $this->assertEquals(
- 'ST_SetSRID(ST_MakeBox2D(ST_Point(30.000000,20.000000),ST_Point(40.000000,50.000000)),4326)',
- $this->oCtx->sqlViewboxSmall
- );
- // height: 10
- // width: 30
- $this->assertEquals(
- 'ST_SetSRID(ST_MakeBox2D(ST_Point(50.000000,80.000000),ST_Point(20.000000,-10.000000)),4326)',
- $this->oCtx->sqlViewboxLarge
- );
-
-
- $viewbox = array(-1.5, -2, 1.5, 2);
- $this->oCtx->setViewboxFromBox($viewbox, true);
- $this->assertEquals(
- 'ST_SetSRID(ST_MakeBox2D(ST_Point(-1.500000,-2.000000),ST_Point(1.500000,2.000000)),4326)',
- $this->oCtx->sqlViewboxSmall
- );
- // height: 3
- // width: 4
- $this->assertEquals(
- 'ST_SetSRID(ST_MakeBox2D(ST_Point(4.500000,6.000000),ST_Point(-4.500000,-6.000000)),4326)',
- $this->oCtx->sqlViewboxLarge
- );
- }
-}
+++ /dev/null
-<?php
-/**
- * SPDX-License-Identifier: GPL-2.0-only
- *
- * This file is part of Nominatim. (https://nominatim.org)
- *
- * Copyright (C) 2022 by the Nominatim developer community.
- * For a full list of authors see the git log.
- */
-
-namespace Nominatim;
-
-require_once(CONST_LibDir.'/Shell.php');
-
-class ShellTest extends \PHPUnit\Framework\TestCase
-{
- public function testNew()
- {
- $this->expectException('ArgumentCountError');
- $this->expectExceptionMessage('Too few arguments to function');
- $oCmd = new \Nominatim\Shell();
-
-
- $oCmd = new \Nominatim\Shell('wc', '-l', 'file.txt');
- $this->assertSame(
- "wc -l 'file.txt'",
- $oCmd->escapedCmd()
- );
- }
-
- public function testaddParams()
- {
- $oCmd = new \Nominatim\Shell('grep');
- $oCmd->addParams('-a', 'abc')
- ->addParams(10);
-
- $this->assertSame(
- 'grep -a abc 10',
- $oCmd->escapedCmd(),
- 'no escaping needed, chained'
- );
-
- $oCmd = new \Nominatim\Shell('grep');
- $oCmd->addParams();
- $oCmd->addParams(null);
- $oCmd->addParams('');
-
- $this->assertEmpty($oCmd->aParams);
- $this->assertSame('grep', $oCmd->escapedCmd(), 'empty params');
-
- $oCmd = new \Nominatim\Shell('echo', '-n', 0);
- $this->assertSame(
- 'echo -n 0',
- $oCmd->escapedCmd(),
- 'zero param'
- );
-
- $oCmd = new \Nominatim\Shell('/path with space/do.php');
- $oCmd->addParams('-a', ' b ');
- $oCmd->addParams('--flag');
- $oCmd->addParams('two words');
- $oCmd->addParams('v=1');
-
- $this->assertSame(
- "'/path with space/do.php' -a ' b ' --flag 'two words' 'v=1'",
- $oCmd->escapedCmd(),
- 'escape whitespace'
- );
-
- $oCmd = new \Nominatim\Shell('grep');
- $oCmd->addParams(';', '|more&', '2>&1');
-
- $this->assertSame(
- "grep ';' '|more&' '2>&1'",
- $oCmd->escapedCmd(),
- 'escape shell characters'
- );
- }
-
- public function testaddEnvPair()
- {
- $oCmd = new \Nominatim\Shell('date');
-
- $oCmd->addEnvPair('one', 'two words')
- ->addEnvPair('null', null)
- ->addEnvPair(null, 'null')
- ->addEnvPair('empty', '')
- ->addEnvPair('', 'empty');
-
- $this->assertEquals(
- array('one' => 'two words', 'empty' => ''),
- $oCmd->aEnv
- );
-
- $oCmd->addEnvPair('one', 'overwrite');
- $this->assertEquals(
- array('one' => 'overwrite', 'empty' => ''),
- $oCmd->aEnv
- );
- }
-
- public function testClone()
- {
- $oCmd = new \Nominatim\Shell('wc', '-l', 'file.txt');
- $oCmd2 = clone $oCmd;
- $oCmd->addParams('--flag');
- $oCmd2->addParams('--flag2');
-
- $this->assertSame(
- "wc -l 'file.txt' --flag",
- $oCmd->escapedCmd()
- );
-
- $this->assertSame(
- "wc -l 'file.txt' --flag2",
- $oCmd2->escapedCmd()
- );
- }
-
- public function testRun()
- {
- $oCmd = new \Nominatim\Shell('echo');
-
- $this->assertSame(0, $oCmd->run());
-
- // var_dump($sStdout);
- }
-}
+++ /dev/null
-<?php
-/**
- * SPDX-License-Identifier: GPL-2.0-only
- *
- * This file is part of Nominatim. (https://nominatim.org)
- *
- * Copyright (C) 2022 by the Nominatim developer community.
- * For a full list of authors see the git log.
- */
-
-namespace Nominatim;
-
-require_once(CONST_LibDir.'/SimpleWordList.php');
-
-class TokensFullSet
-{
- public function containsAny($sTerm)
- {
- return true;
- }
-}
-
-// phpcs:ignore PSR1.Classes.ClassDeclaration.MultipleClasses
-class TokensPartialSet
-{
- public function __construct($aTokens)
- {
- $this->aTokens = array_flip($aTokens);
- }
-
- public function containsAny($sTerm)
- {
- return isset($this->aTokens[$sTerm]);
- }
-}
-
-// phpcs:ignore PSR1.Classes.ClassDeclaration.MultipleClasses
-class SimpleWordListTest extends \PHPUnit\Framework\TestCase
-{
-
-
- private function serializeSets($aSets)
- {
- $aParts = array();
- foreach ($aSets as $aSet) {
- $aParts[] = '(' . join('|', $aSet) . ')';
- }
- return join(',', $aParts);
- }
-
-
- public function testEmptyPhrase()
- {
- $oList = new SimpleWordList('');
- $this->assertNull($oList->getWordSets(new TokensFullSet()));
- }
-
-
- public function testSingleWordPhrase()
- {
- $oList = new SimpleWordList('a');
-
- $this->assertEquals(
- '(a)',
- $this->serializeSets($oList->getWordSets(new TokensFullSet()))
- );
- }
-
-
- public function testMultiWordPhrase()
- {
- $oList = new SimpleWordList('a b');
- $this->assertEquals(
- '(a b),(a|b)',
- $this->serializeSets($oList->getWordSets(new TokensFullSet()))
- );
-
- $oList = new SimpleWordList('a b c');
- $this->assertEquals(
- '(a b c),(a b|c),(a|b c),(a|b|c)',
- $this->serializeSets($oList->getWordSets(new TokensFullSet()))
- );
-
- $oList = new SimpleWordList('a b c d');
- $this->assertEquals(
- '(a b c d),(a b c|d),(a b|c d),(a|b c d),(a b|c|d),(a|b c|d),(a|b|c d),(a|b|c|d)',
- $this->serializeSets($oList->getWordSets(new TokensFullSet()))
- );
- }
-
- public function testCmpByArraylen()
- {
- // Array elements are phrases, we want to sort so longest phrases are first
- $aList1 = array('hackney', 'bridge', 'london', 'england');
- $aList2 = array('hackney', 'london', 'bridge');
- $aList3 = array('bridge', 'hackney', 'london', 'england');
-
- $this->assertEquals(0, \Nominatim\SimpleWordList::cmpByArraylen($aList1, $aList1));
-
- // list2 "wins". Less array elements
- $this->assertEquals(1, \Nominatim\SimpleWordList::cmpByArraylen($aList1, $aList2));
- $this->assertEquals(-1, \Nominatim\SimpleWordList::cmpByArraylen($aList2, $aList3));
-
- // list1 "wins". Same number of array elements but longer first element
- $this->assertEquals(-1, \Nominatim\SimpleWordList::cmpByArraylen($aList1, $aList3));
- }
-
- public function testMaxWordSets()
- {
- $aWords = array_fill(0, 4, 'a');
- $oList = new SimpleWordList(join(' ', $aWords));
- $this->assertEquals(8, count($oList->getWordSets(new TokensFullSet())));
-
- $aWords = array_fill(0, 18, 'a');
- $oList = new SimpleWordList(join(' ', $aWords));
- $this->assertEquals(100, count($oList->getWordSets(new TokensFullSet())));
- }
-
-
- public function testPartialTokensShortTerm()
- {
- $oList = new SimpleWordList('a b c d');
- $this->assertEquals(
- '(a|b c d),(a|b c|d)',
- $this->serializeSets($oList->getWordSets(new TokensPartialSet(array('a', 'b', 'd', 'b c', 'b c d'))))
- );
- }
-
-
- public function testPartialTokensLongTerm()
- {
- $aWords = array_fill(0, 18, 'a');
- $oList = new SimpleWordList(join(' ', $aWords));
- $this->assertEquals(80, count($oList->getWordSets(new TokensPartialSet(array('a', 'a a a a a')))));
- }
-}
+++ /dev/null
-<?php
-/**
- * SPDX-License-Identifier: GPL-2.0-only
- *
- * This file is part of Nominatim. (https://nominatim.org)
- *
- * Copyright (C) 2022 by the Nominatim developer community.
- * For a full list of authors see the git log.
- */
-
-namespace Nominatim;
-
-@define('CONST_TokenizerDir', dirname(__FILE__));
-
-require_once(CONST_LibDir.'/DB.php');
-require_once(CONST_LibDir.'/Status.php');
-
-
-class StatusTest extends \PHPUnit\Framework\TestCase
-{
-
- public function testNoDatabaseGiven()
- {
- $this->expectException(\Exception::class);
- $this->expectExceptionMessage('No database');
- $this->expectExceptionCode(700);
-
- $oDB = null;
- $oStatus = new Status($oDB);
- $this->assertEquals('No database', $oStatus->status());
- }
-
- public function testNoDatabaseConnectionFail()
- {
- $this->expectException(\Exception::class);
- $this->expectExceptionMessage('Database connection failed');
- $this->expectExceptionCode(700);
-
- $oDbStub = $this->getMockBuilder(Nominatim\DB::class)
- ->setMethods(array('connect'))
- ->getMock();
-
- $oDbStub->method('connect')
- ->will($this->returnCallback(function () {
- throw new \Nominatim\DatabaseError('psql connection problem', 500, null, 'unknown database');
- }));
-
-
- $oStatus = new Status($oDbStub);
- $this->assertEquals('No database', $oStatus->status());
- }
-
- public function testOK()
- {
- $oDbStub = $this->getMockBuilder(Nominatim\DB::class)
- ->setMethods(array('connect', 'getOne'))
- ->getMock();
-
- $oDbStub->method('getOne')
- ->will($this->returnCallback(function ($sql) {
- if (preg_match("/make_standard_name\('(\w+)'\)/", $sql, $aMatch)) return $aMatch[1];
- if (preg_match('/SELECT word_id, word_token/', $sql)) return 1234;
- }));
-
- $oStatus = new Status($oDbStub);
- $this->assertNull($oStatus->status());
- }
-
- public function testDataDate()
- {
- $oDbStub = $this->getMockBuilder(Nominatim\DB::class)
- ->setMethods(array('getOne'))
- ->getMock();
-
- $oDbStub->method('getOne')
- ->willReturn(1519430221);
-
- $oStatus = new Status($oDbStub);
- $this->assertEquals(1519430221, $oStatus->dataDate());
- }
-}
+++ /dev/null
-<?php
-/**
- * SPDX-License-Identifier: GPL-2.0-only
- *
- * This file is part of Nominatim. (https://nominatim.org)
- *
- * Copyright (C) 2022 by the Nominatim developer community.
- * For a full list of authors see the git log.
- */
-
-namespace Nominatim;
-
-require_once(CONST_LibDir.'/TokenList.php');
-
-
-class TokenListTest extends \PHPUnit\Framework\TestCase
-{
- protected function setUp(): void
- {
- $this->oNormalizer = $this->getMockBuilder(\MockNormalizer::class)
- ->setMethods(array('transliterate'))
- ->getMock();
- $this->oNormalizer->method('transliterate')
- ->will($this->returnCallback(function ($text) {
- return strtolower($text);
- }));
- }
-
- private function wordResult($aFields)
- {
- $aRow = array(
- 'word_id' => null,
- 'word_token' => null,
- 'word' => null,
- 'class' => null,
- 'type' => null,
- 'country_code' => null,
- 'count' => 0
- );
- return array_merge($aRow, $aFields);
- }
-
- public function testList()
- {
- $TL = new TokenList;
-
- $this->assertEquals(0, $TL->count());
-
- $TL->addToken('word1', 'token1');
- $TL->addToken('word1', 'token2');
-
- $this->assertEquals(1, $TL->count());
-
- $this->assertTrue($TL->contains('word1'));
- $this->assertEquals(array('token1', 'token2'), $TL->get('word1'));
-
- $this->assertFalse($TL->contains('unknownword'));
- $this->assertEquals(array(), $TL->get('unknownword'));
- }
-}
+++ /dev/null
-<?php
-/**
- * SPDX-License-Identifier: GPL-2.0-only
- *
- * This file is part of Nominatim. (https://nominatim.org)
- *
- * Copyright (C) 2022 by the Nominatim developer community.
- * For a full list of authors see the git log.
- */
-
-namespace Nominatim;
-
-class Tokenizer
-{
- private $oDB;
-
- public function __construct(&$oDB)
- {
- $this->oDB =& $oDB;
- }
-
- public function checkStatus()
- {
- }
-}
+++ /dev/null
-<?php
-/**
- * SPDX-License-Identifier: GPL-2.0-only
- *
- * This file is part of Nominatim. (https://nominatim.org)
- *
- * Copyright (C) 2022 by the Nominatim developer community.
- * For a full list of authors see the git log.
- */
- @define('CONST_LibDir', '../../lib-php');
- @define('CONST_DataDir', '../..');
-
- @define('CONST_Debug', true);
- @define('CONST_NoAccessControl', false);
+++ /dev/null
-[{"place_id":194663412,
- "osm_type":null,
- "osm_id":null,
- "name":{"name": "10 Downing Street", "name:en": "10 Downing Street", "name:es": "10 de Downing Street", "name:he": "דאונינג 10", "name:ko": "다우닝 가 10번지", "name:zh": "唐寧街10號"},
- "class":"tourism",
- "type":"attraction",
- "admin_level":null,
- "fromarea":true,
- "isaddress":true,
- "rank_address":29,
- "distance":0,
- "localname":"10 Downing Street"},
-{"place_id":194663412,
- "osm_type":null,
- "osm_id":null,
- "name":{"ref": "10"},
- "class":"place",
- "type":"house_number",
- "admin_level":null,
- "fromarea":true,
- "isaddress":true,
- "rank_address":28,
- "distance":0,
- "localname":"10"},
-{"place_id":68310941,
- "osm_type":"W",
- "osm_id":4244999,
- "name":{"name": "Downing Street"},
- "class":"highway",
- "type":"residential",
- "admin_level":15,
- "fromarea":true,
- "isaddress":true,
- "rank_address":26,
- "distance":0,
- "localname":"Downing Street"},
-{"place_id":16037318,
- "osm_type":"N",
- "osm_id":1653239257,
- "name":{"name": "St. James's"},
- "class":"place",
- "type":"neighbourhood",
- "admin_level":15,
- "fromarea":true,
- "isaddress":true,
- "rank_address":22,
- "distance":0.00982435489434447,
- "localname":"St. James's"},
-{"place_id":51691981,
- "osm_type":"N",
- "osm_id":3937587633,
- "name":{"name": "St Clement Danes"},
- "class":"place",
- "type":"neighbourhood",
- "admin_level":15,
- "fromarea":true,
- "isaddress":false,
- "rank_address":22,
- "distance":0.0128768181947227,
- "localname":"St Clement Danes"},
-{"place_id":22208313,
- "osm_type":"N",
- "osm_id":2290086954,
- "name":{"name": "Covent Garden"},
- "class":"place",
- "type":"suburb",
- "admin_level":15,
- "fromarea":true,
- "isaddress":true,
- "rank_address":20,
- "distance":0.00935748249317067,
- "localname":"Covent Garden"},
-{"place_id":21742712,
- "osm_type":"N",
- "osm_id":2288030397,
- "name":{"name": "Millbank"},
- "class":"place",
- "type":"suburb",
- "admin_level":15,
- "fromarea":true,
- "isaddress":false,
- "rank_address":20,
- "distance":0.0106525181285902,
- "localname":"Millbank"},
-{"place_id":122775,
- "osm_type":"N",
- "osm_id":26745371,
- "name":{"name": "St Giles"},
- "class":"place",
- "type":"suburb",
- "admin_level":15,
- "fromarea":true,
- "isaddress":false,
- "rank_address":20,
- "distance":0.0136188357358441,
- "localname":"St Giles"},
-{"place_id":134882,
- "osm_type":"N",
- "osm_id":27553719,
- "name":{"name": "Lambeth"},
- "class":"place",
- "type":"suburb",
- "admin_level":15,
- "fromarea":true,
- "isaddress":false,
- "rank_address":20,
- "distance":0.0093308163978298,
- "localname":"Lambeth"},
-{"place_id":194276676,
- "osm_type":"R",
- "osm_id":51781,
- "name":{"name": "City of Westminster", "name:be": "Вэстмінстэр", "name:cy": "San Steffan", "name:en": "Westminster", "name:he": "וסטמינסטר", "name:ru": "Вестминстер"},
- "class":"place",
- "type":"city",
- "admin_level":8,
- "fromarea":true,
- "isaddress":true,
- "rank_address":16,
- "distance":0.0340909562148044,
- "localname":"Westminster"},
-{"place_id":195398522,
- "osm_type":"N",
- "osm_id":107775,
- "name":{"name": "London", "name:ab": "Лондан", "name:af": "Londen", "name:am": "ለንደን", "name:an": "Londres", "name:ar": "لندن", "name:ba": "Лондон", "name:be": "Лондан", "name:bg": "Лондон", "name:bn": "লন্ডন", "name:bo": "ལོན་ཊོན།", "name:br": "Londrez", "name:ca": "Londres", "name:co": "Londra", "name:cs": "Londýn", "name:cu": "Лондонъ", "name:cv": "Лондон", "name:cy": "Llundain", "name:de": "London", "name:el": "Λονδίνο", "name:en": "London", "name:eo": "Londono", "name:es": "Londres", "name:eu": "Londres", "name:fa": "لندن", "name:fi": "Lontoo", "name:fr": "Londres", "name:fy": "Londen", "name:ga": "Londain", "name:gd": "Lunnainn", "name:gl": "Londres - London", "name:gn": "Londye", "name:gu": "લંડન", "name:gv": "Lunnin", "name:he": "לונדון", "name:hi": "लंदन", "name:ht": "Lonn", "name:hu": "London", "name:hy": "Լոնդոն", "name:is": "Lundúnir", "name:it": "Londra", "name:ja": "ロンドン", "name:ka": "ლონდონი", "name:kk": "Лондон", "name:kn": "ಲಂಡನ್", "name:ko": "런던", "name:ku": "London", "name:kv": "Лондон", "name:kw": "Loundres", "name:ky": "Лондон", "name:la": "Londinium", "name:li": "Londe", "name:ln": "Londoni", "name:lo": "ລອນດອນ", "name:lt": "Londonas", "name:lv": "Londona", "name:mi": "Rānana", "name:mk": "Лондон", "name:ml": "ലണ്ടൻ", "name:mn": "Лондон", "name:mr": "लंडन", "name:mt": "Londra", "name:my": "လန်ဒန်မြို့", "name:ne": "लण्डन", "name:nl": "Londen", "name:no": "London", "name:oc": "Londres", "name:or": "ଲଣ୍ଡନ", "name:os": "Лондон", "name:pl": "Londyn", "name:ps": "لندن", "name:pt": "Londres", "name:rm": "Londra", "name:ro": "Londra", "name:ru": "Лондон", "name:sa": "लन्डन्", "name:sc": "Londra", "name:si": "ලන්ඩන්", "name:sk": "Londýn", "name:sq": "Londra", "name:sr": "Лондон", "name:sv": "London", "name:ta": "இலண்டன்", "name:te": "లండన్", "name:tg": "Лондон", "name:th": "ลอนดอน", "name:tl": "Londres", "name:tr": "Londra", "name:tt": "Лондон", "name:uk": "Лондон", "name:ur": "لندن", "name:vi": "Luân Đôn", "name:wo": "Londar", "name:yi": "לאנדאן", 
"name:yo": "Lọndọnu", "name:zh": "倫敦", "name:zu": "ILondon", "name:ang": "Lunden", "name:arc": "ܠܘܢܕܘܢ", "name:arz": "لندن", "name:ast": "Londres", "name:bcl": "Londres", "name:cdo": "Lùng-dŭng", "name:ckb": "لەندەن", "name:diq": "Londra", "name:eml": "Lòndra", "name:ext": "Londri", "name:frp": "Londres", "name:gan": "倫敦", "name:haw": "Lākana", "name:ilo": "Londres", "name:jbo": "london", "name:koi": "Лондон", "name:krc": "Лондон", "name:lad": "Londra", "name:lbe": "Лондон", "name:lez": "Лондон", "name:lij": "Londra", "name:lmo": "Lundra", "name:mhr": "Лондон", "name:mrj": "Лондон", "name:mwl": "Londres", "name:mzn": "لندن", "name:nah": "Londres", "name:nap": "Londra", "name:new": "लण्डन", "name:nrm": "Londres", "name:pcd": "Londe", "name:pms": "Londra", "name:pnb": "لندن", "name:pnt": "Λονδίνο", "name:rue": "Лондон", "name:sah": "Лондон", "name:scn": "Londra", "name:sco": "Lunnon", "name:szl": "Lůndůn", "name:tet": "Londres", "name:tpi": "Landen", "name:tzl": "Londra", "name:udm": "Лондон", "name:vec": "Łondra", "name:vls": "Londn", "name:wuu": "伦敦", "name:xmf": "ლონდონი", "name:yue": "倫敦", "name:zea": "Londen", "name:nds-nl": "Londen", "name:bat-smg": "Londons", "name:roa-rup": "Londra", "name:roa-tara": "Londre", "name:be-tarask": "Лёндан", "name:zh_pinyin": "Lúndūn", "name:zh-classical": "倫敦", "name:zh-simplified": "伦敦", "name:zh-traditional": "倫敦"},
- "class":"place",
- "type":"city",
- "admin_level":2,
- "fromarea":true,
- "isaddress":false,
- "rank_address":16,
- "distance":0.00412384196971048,
- "localname":"London"},
-{"place_id":193774423,
- "osm_type":"R",
- "osm_id":65606,
- "name":{"name": "London", "ISO3166-2": "GB-LND", "name:be": "Лондан", "name:ca": "Londres", "name:el": "Λονδίνο", "name:en": "London", "name:eo": "Londono", "name:es": "Londres", "name:fa": "لندن", "name:fi": "Lontoo", "name:fr": "Londres", "name:fy": "Londen", "name:gl": "Londres", "name:hi": "लंदन", "name:lt": "Londonas", "name:nl": "Londen", "name:pl": "Londyn", "name:pt": "Londres", "name:ru": "Лондон", "name:uk": "Лондон", "name:vi": "Luân Đôn", "name:zh": "伦敦", "int_name": "London", "name:szl": "Lůndůn", "name:tzl": "Londra", "name:be-tarask": "Лёндан"},
- "class":"place",
- "type":"city",
- "admin_level":6,
- "fromarea":true,
- "isaddress":true,
- "rank_address":12,
- "distance":0.0172243361058611,
- "localname":"London"},
-{"place_id":194000080,
- "osm_type":"R",
- "osm_id":175342,
- "name":{"name": "Greater London", "name:be": "Вялікі Лондан", "name:de": "Groß-London", "name:en": "Greater London", "name:fr": "Grand Londres", "name:lt": "Didysis Londonas", "name:ru": "Большой Лондон", "name:uk": "Великий Лондон", "official_name": "Greater London (incl. City of London)", "name:be-tarask": "Вялікі Лёндан"},
- "class":"boundary",
- "type":"administrative",
- "admin_level":5,
- "fromarea":true,
- "isaddress":true,
- "rank_address":10,
- "distance":0.0172532381571105,
- "localname":"Greater London"},
-{"place_id":194325361,
- "osm_type":"R",
- "osm_id":58447,
- "name":{"ref": "ENG", "ISO3166-2": "GB-ENG", "name": "England", "name:be": "Англія", "name:br": "Bro-Saoz", "name:ca": "Anglaterra", "name:cs": "Anglie", "name:cy": "Lloegr", "name:de": "England", "name:el": "Αγγλία", "name:en": "England", "name:eo": "Anglujo", "name:es": "Inglaterra", "name:fi": "Englanti", "name:fr": "Angleterre", "name:fy": "Ingelân", "name:ga": "Sasana", "name:gd": "Sasainn", "name:gv": "Sostyn", "name:he": "אנגליה", "name:hu": "Anglia", "name:ia": "Anglaterra", "name:io": "Anglia", "name:it": "Inghilterra", "name:la": "Anglia", "name:lt": "Anglija", "name:nl": "Engeland", "name:pl": "Anglia", "name:pt": "Inglaterra", "name:ru": "Англия", "name:sk": "Anglicko", "name:sv": "England", "name:tr": "İngiltere", "name:uk": "Англія", "name:vi": "Anh", "name:vo": "Linglän", "name:zh": "英格蘭", "name:hsb": "Jendźelska", "name:nds": "England", "name:tok": "ma Inli", "name:tzl": "Anglatzara", "alt_name:eo": "Anglio", "alt_name:ia": "Anglia", "old_name:vi": "Anh Quốc", "alt_name:nds": "Ingland", "name:be-tarask": "Ангельшчына", "name:zh-classical": "英格蘭", "name:zh-simplified": "英格兰", "name:zh-traditional": "英格蘭"},
- "class":"boundary",
- "type":"administrative",
- "admin_level":4,
- "fromarea":true,
- "isaddress":true,
- "rank_address":8,
- "distance":1.75192967136328,
- "localname":"England"},
-{"place_id":null,
- "osm_type":null,
- "osm_id":null,
- "name":{"ref": "SW1A 2AA"},
- "class":"place",
- "type":"postcode",
- "admin_level":null,
- "fromarea":true,
- "isaddress":true,
- "rank_address":5,
- "distance":0,
- "localname":"SW1A 2AA"},
-{"place_id":40715006,
- "osm_type":"N",
- "osm_id":3055075992,
- "name":{"ref": "SW1A 2AQ"},
- "class":"place",
- "type":"postcode",
- "admin_level":15,
- "fromarea":true,
- "isaddress":false,
- "rank_address":5,
- "distance":0.00172905579146705,
- "localname":"SW1A 2AQ"},
-{"place_id":194354400,
- "osm_type":"R",
- "osm_id":62149,
- "name":{"name": "United Kingdom", "name:ab": "Британиа Ду", "name:af": "Verenigde Koninkryk", "name:ak": "United Kingdom", "name:am": "ዩናይትድ ኪንግደም", "name:an": "Reino Unito", "name:ar": "المملكة المتحدة", "name:az": "Böyük Britaniya", "name:ba": "Бөйөк Британия", "name:be": "Вялікабрытанія", "name:bg": "Обединено кралство Великобритания и Северна Ирландия", "name:bi": "Unaeted Kingdom", "name:bm": "Angilɛtɛri", "name:bn": "যুক্তরাজ্য", "name:bo": "དབྱིན་ཇི་མཉམ་འབྲེལ།", "name:br": "Rouantelezh-Unanet", "name:bs": "Ujedinjeno Kraljevstvo Velike Britanije i Sjeverne Irske", "name:ca": "Regne Unit", "name:ce": "Йоккха Британи", "name:co": "Regnu Unitu", "name:cs": "Spojené království", "name:cu": "Вєлика Британїꙗ", "name:cv": "Аслă Британи", "name:cy": "Deyrnas Unedig", "name:da": "Storbritannien", "name:de": "Vereinigtes Königreich", "name:dv": "ޔުނައިޓެޑް ކިންގްޑަމް", "name:dz": "ཡུ་ནའི་ཊེཊ་ཀིང་ཌམ", "name:ee": "United Kingdom", "name:el": "Ηνωμένο Βασίλειο", "name:en": "United Kingdom", "name:eo": "Britujo", "name:es": "Reino Unido", "name:et": "Suurbritannia", "name:eu": "Erresuma Batua", "name:fa": "بریتانیا", "name:ff": "Laamateeri Rentundi", "name:fi": "Yhdistynyt kuningaskunta", "name:fo": "Stóra Bretland", "name:fr": "Royaume-Uni", "name:fy": "Feriene Keninkryk", "name:ga": "An Ríocht Aontaithe", "name:gd": "An Rìoghachd Aonaichte", "name:gl": "Reino Unido", "name:gn": "Tavetã Joaju", "name:gu": "યુનાઇટેડ કિંગડમ", "name:gv": "Reeriaght Unnaneysit", "name:ha": "Birtaniya", "name:he": "הממלכה המאוחדת", "name:hi": "यूनाइटेड किंगडम", "name:hr": "Ujedinjeno Kraljevstvo", "name:ht": "Wayòm Ini", "name:hu": "Egyesült Királyság", "name:hy": "Միացյալ Թագավորություն", "name:ia": "Regno Unite", "name:id": "Britania Raya", "name:ie": "Reyatu Unit", "name:ig": "Obodoézè Nà Ofú", "name:ii": "ꑱꇩ", "name:io": "Unionita Rejio", "name:is": "Bretland", "name:it": "Regno Unito", "name:ja": "イギリス", "name:jv": "Britania Raya", "name:ka": "გაერთიანებული სამეფო", "name:kg": 
"Royaume-Uni", "name:ki": "Ngeretha", "name:kk": "Ұлыбритания", "name:kl": "Tuluit Nunaat", "name:km": "រាជាណាចក្ររួម", "name:kn": "ಯುನೈಟೆಡ್ ಕಿಂಗ್ಡಂ", "name:ko": "영국", "name:ks": "یُنایٹِڑ کِنٛگڈَم", "name:ku": "Keyaniya Yekbûyî", "name:kv": "Ыджыд Британия", "name:kw": "Ruwvaneth Unys", "name:ky": "Улуу Британия жана Түндүк Ирландия", "name:la": "Britanniarum Regnum", "name:lb": "Groussbritannien an Nordirland", "name:lg": "Bungereza", "name:li": "Vereineg Keuninkriek", "name:ln": "Ingɛlɛ́tɛlɛ", "name:lo": "ສະຫະລາດຊະອານາຈັກ", "name:lt": "Jungtinė Karalystė", "name:lv": "Apvienotā Karaliste", "name:mg": "Fanjakana Mitambatra", "name:mi": "Kīngitanga Kotahi", "name:mk": "Обединето Кралство", "name:ml": "യുണൈറ്റഡ് കിങ്ഡം", "name:mn": "Их Британи", "name:mr": "युनायटेड किंग्डम", "name:ms": "United Kingdom", "name:mt": "Renju Unit", "name:my": "ယူနိုက်တက်ကင်းဒမ်းနိုင်ငံ", "name:na": "Ingerand", "name:ne": "संयुक्त अधिराज्य", "name:nl": "Verenigd Koninkrijk", "name:nn": "Storbritannia", "name:no": "Storbritannia", "name:nv": "Tótaʼ Dinéʼiʼ Bikéyah", "name:oc": "Reialme Unit", "name:or": "ଯୁକ୍ତରାଜ୍ୟ", "name:os": "Стыр Британи", "name:pa": "ਸੰਯੁਕਤ ਬਾਦਸ਼ਾਹੀ", "name:pl": "Wielka Brytania", "name:ps": "بريتانيا", "name:pt": "Reino Unido", "name:qu": "Hukllachasqa Qhapaq Suyu", "name:rm": "Reginavel Unì", "name:rn": "Ubwongereza", "name:ro": "Regatul Unit al Marii Britanii și al Irlandei de Nord", "name:ru": "Великобритания", "name:rw": "Ubwongereza", "name:sa": "संयुक्त अधिराज्य", "name:sc": "Rennu Auniadu", "name:se": "Ovttastuvvan gonagasriika", "name:sg": "Ködörögbïä--Ôko", "name:sh": "Ujedinjeno Kraljevstvo", "name:si": "එක්සත් රාජධානිය", "name:sk": "Spojené kráľovstvo", "name:sl": "Združeno kraljestvo Velike Britanije in Severne Irske", "name:sn": "United Kingdom", "name:so": "Midowga boqortooyada Britan", "name:sq": "Mbretëria e Bashkuar e Britanisë dhe Irlandës së Veriut", "name:sr": "Уједињено Краљевство", "name:ss": "United Kingdom", "name:su": "Britania", 
"name:sv": "Storbritannien", "name:sw": "Ufalme wa Muungano", "name:ta": "ஐக்கிய இராச்சியம்", "name:te": "యునైటెడ్ కింగ్డమ్", "name:tg": "Подшоҳии Муттаҳида", "name:th": "สหราชอาณาจักร", "name:ti": "እንግሊዝ", "name:tl": "Nagkakaisang Kaharian", "name:to": "Pilitānia", "name:tr": "Birleşik Krallık", "name:tt": "Бөекбритания", "name:tw": "United Kingdom", "name:ty": "Paratāne", "name:ug": "بۈيۈك بېرىتانىيە", "name:uk": "Велика Британія", "name:ur": "برطانیہ", "name:uz": "Birlashgan Qirollik", "name:vi": "Vương quốc Anh", "name:vo": "Regän Pebalöl", "name:wo": "Nguur-Yu-Bennoo", "name:yi": "פאראייניגטע קעניגרייך", "name:yo": "Ilẹ̀ọba Aṣọ̀kan", "name:za": "Yinghgoz", "name:zh": "英國", "name:zu": "Umbuso Ohlangeneyo", "alt_name": "United Kingdom; UK; Britain; Great Britain", "int_name": "United Kingdom", "name:als": "Vereinigtes Königreich", "name:ang": "Geāned Cynerīce", "name:arc": "ܡܠܟܘܬܐ ܡܚܝܕܬܐ", "name:arz": "المملكه المتحده", "name:ast": "Reinu Xuníu", "name:bar": "Vaeinigts Kinireich", "name:bcl": "Reyno Unido", "name:bjn": "Britania Raya", "name:bpy": "তিলপারাজ্য", "name:bug": "United Kingdom", "name:bxr": "Нэгдсэн Вант Улс", "name:cdo": "Ĭng-guók", "name:ceb": "Hiniusang Gingharian", "name:chr": "ᎡᎵᏏᎯ", "name:chy": "United Kingdom", "name:ckb": "شانشینی یەکگرتوو", "name:crh": "Büyük Britaniya", "name:csb": "Wiôlgô Britanijô", "name:diq": "Qraliya Yewbiyayiye", "name:dsb": "Wjelika Britaniska", "name:eml": "Régn Unî", "name:ext": "Réinu Uniu", "name:frp": "Royômo-Uni", "name:frr": "Feriind Kiningrik", "name:fur": "Ream Unît", "name:gag": "Büük Britaniya", "name:gan": "英國", "name:hak": "Yîn-koet", "name:haw": "Aupuni Mōʻī Hui Pū ʻia", "name:hif": "United Kingdom", "name:hsb": "Zjednoćene kralestwo", "name:ilo": "Nagkaykaysa a Pagarian", "name:jbo": "ritygu'e", "name:kab": "Legliz", "name:kbd": "Британиэшхуэ", "name:koi": "Ыджыт Бритму", "name:krc": "Уллу Британия", "name:ksh": "Jrußbritannie", "name:lad": "Reyno Unido", "name:lez": "ЧIехибритания", "name:lfn": "Rena 
Unida", "name:lij": "Regno Unïo", "name:lmo": "Regn Ünì", "name:ltg": "Lelbrytaneja", "name:mhr": "Ушымо Королевстве", "name:mrj": "Кого Британи", "name:mwl": "Reino Ounido", "name:mzn": "بریتانیا", "name:nah": "Tlacetilīlli Huēyitlahtohcāyōtl", "name:nap": "Gran Vretagna", "name:nds": "Vereenigt Königriek vun Grootbritannien un Noordirland", "name:nov": "Unionati Regia", "name:nrm": "Rouoyaume Unni", "name:pag": "Reino Unido", "name:pam": "Pisanmetung a Ka-arian", "name:pap": "Reino Uni", "name:pcd": "Roéyôme-Uni", "name:pih": "Yunitid Kingdum", "name:pms": "Regn Unì", "name:pnb": "برطانیہ", "name:pnt": "Ηνωμένο Βασίλειο", "name:rmy": "Phandlo Thagaripen la Bare Britaniyako thai le Nordutne Irlandesko", "name:rue": "Велика Брітанія", "name:sah": "Холбоhуктаах Хоруоллук", "name:scn": "Regnu Unitu", "name:sco": "Unitit Kinrick", "name:srn": "Ingriskondre", "name:stq": "Fereeniged Köönichriek fon Groot-Britannien un Noudirlound", "name:szl": "Wjelgo Brytańijo", "name:tet": "Reinu Naklibur", "name:tok": "ma Juke", "name:tpi": "Yunaitet Kingdom", "name:tzl": "Regipäts Viensiçat", "name:udm": "Великобритания", "name:vec": "Regno Unìo", "name:vep": "Sur' Britanii", "name:vls": "Verênigd Keunienkryk", "name:war": "Reino Unido", "name:wuu": "英国", "name:xal": "Ик Бритишин болн Ар Гәәлгүдин Ниицәтә Нутг", "name:xmf": "გოართოიანაფილი ომაფე", "name:yue": "英國", "name:zea": "Vereênigd Konienkriek", "name:zh_py": "Yingguo", "short_name": "UK", "alt_name:eo": "Britio", "alt_name:sr": "УК;У.К.", "alt_name:vi": "Vương quốc Liên hiệp Anh", "name:nds-nl": "Verienigd Keuninkriek", "name:zh_pyt": "Yīngguó", "name:bat-smg": "Jongtėnė Karalīstė", "name:cbk-zam": "Reinos Unidos de Gran Britania y Norte Irelandia", "name:fiu-vro": "Ütiskuningriik", "name:roa-rup": "Britania Mari", "name:roa-tara": "Regne Aunìte", "official_name": "United Kingdom of Great Britain and Northern Ireland", "short_name:el": "ΗΒ", "short_name:vo": "Britän", "name:be-tarask": "Вялікабрытанія", "name:zh-min-nan": 
"Liân-ha̍p Ông-kok", "official_name:be": "Злучанае Каралеўства Вялікабрытаніі і Паўночнай Ірландыі", "official_name:br": "Rouantelezh Unanet Breizh-Veur ha Norzhiwerzhon", "official_name:ca": "Regne Unit de Gran Bretanya i Irlanda del Nord", "official_name:cs": "Spojené království Velké Británie a Severního Irska", "official_name:de": "Vereinigtes Königreich Großbritannien und Nordirland", "official_name:el": "Ηνωμένο Βασίλειο της Μεγάλης Βρετανίας και της Βόρειας Ιρλανδίας", "official_name:en": "United Kingdom of Great Britain and Northern Ireland", "official_name:eo": "Unuiĝinta Reĝlando de Granda Britujo kaj Nord-Irlando", "official_name:es": "Reino Unido de Gran Bretaña", "official_name:et": "Suurbritannia ja Põhja-Iiri Ühendkuningriik", "official_name:fr": "Royaume-Uni de Grande-Bretagne et d'Irlande du Nord", "official_name:hr": "Ujedinjeno Kraljevstvo Velike Britanije i Sjeverne Irske", "official_name:id": "Perserikatan Kerajaan Britania Raya dan Irlandia Utara", "official_name:it": "Regno Unito di Gran Bretagna e Irlanda del Nord", "official_name:ja": "グレートブリテン及び北アイルランド連合王国", "official_name:ku": "Keyaniya Yekbûyî ya Brîtaniya Mezin û Bakurê Îrlandê", "official_name:lb": "Vereenegt Kinnekräich vu Groussbritannien an Nordirland", "official_name:no": "Det forente kongeriket Storbritannia og Nord-Irland", "official_name:pl": "Zjednoczone Królestwo Wielkiej Brytanii i Irlandii Północnej", "official_name:pt": "Reino Unido da Grã-Bretanha e Irlanda do Norte", "official_name:ru": "Соединённое королевство Великобритании и Северной Ирландии", "official_name:sk": "Spojené kráľovstvo Veľkej Británie a Severného Írska", "official_name:sl": "Združeno kraljestvo Velike Britanije in Severne Irske", "official_name:sr": "Уједињено Краљевство Велике Британије и Северне Ирске", "official_name:sv": "Förenade konungariket Storbritannien och Nordirland", "official_name:vi": "Vương quốc Liên hiệp Anh và Bắc Ireland", "name:abbreviation": "UK", "name:zh-classical": "英國", 
"official_name:scn": "Regnu Unitu di Gran Britagna e Irlanna dû Nord", "name:zh-simplified": "英国", "name:zh-traditional": "英國"},
- "class":"place",
- "type":"country",
- "admin_level":2,
- "fromarea":true,
- "isaddress":true,
- "rank_address":4,
- "distance":4.56060933645498,
- "localname":"United Kingdom"},
-{"place_id":null,
- "osm_type":null,
- "osm_id":null,
- "name":{"ref": "gb"},
- "class":"place",
- "type":"country_code",
- "admin_level":null,
- "fromarea":true,
- "isaddress":false,
- "rank_address":4,
- "distance":0,
- "localname":"gb"}
-]
+++ /dev/null
-<?xml version="1.0" encoding="UTF-8"?>
-<phpunit xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
- backupGlobals="false"
- backupStaticAttributes="false"
- colors="true"
- convertErrorsToExceptions="true"
- convertNoticesToExceptions="true"
- convertWarningsToExceptions="true"
- processIsolation="false"
- stopOnFailure="false"
- bootstrap="./bootstrap.php"
- beStrictAboutTestsThatDoNotTestAnything="true"
- xsi:noNamespaceSchemaLocation="https://schema.phpunit.de/9.3/phpunit.xsd">
- <coverage>
- <include>
- <directory>../../lib-php/</directory>
- </include>
- </coverage>
- <php>
- </php>
- <testsuites>
- <testsuite name="Nominatim PHP Test Suite">
- <directory>./Nominatim</directory>
- </testsuite>
- </testsuites>
-</phpunit>
--- /dev/null
+# SPDX-License-Identifier: GPL-3.0-or-later
+#
+# This file is part of Nominatim. (https://nominatim.org)
+#
+# Copyright (C) 2024 by the Nominatim developer community.
+# For a full list of authors see the git log.
+"""
+Helper fixtures for API call tests.
+"""
+import pytest
+import pytest_asyncio
+import time
+import datetime as dt
+
+import sqlalchemy as sa
+
+import nominatim_api as napi
+from nominatim_db.db.sql_preprocessor import SQLPreprocessor
+from nominatim_api.search.query_analyzer_factory import make_query_analyzer
+from nominatim_db.tools import convert_sqlite
+import nominatim_api.logging as loglib
+
+class APITester:
+    """ Wrapper around a live NominatimAPI instance that offers
+        synchronous convenience methods for inserting test data
+        into the (initially empty) test database.
+    """
+
+    def __init__(self):
+        self.api = napi.NominatimAPI()
+        self.async_to_sync(self.api._async_api.setup_database())
+
+
+    def async_to_sync(self, func):
+        """ Run an asynchronous function until completion using the
+            internal loop of the API.
+        """
+        return self.api._loop.run_until_complete(func)
+
+
+    def add_data(self, table, data):
+        """ Insert data into the given table.
+        """
+        sql = getattr(self.api._async_api._tables, table).insert()
+        self.async_to_sync(self.exec_async(sql, data))
+
+
+    def add_placex(self, **kw):
+        # Insert a row into 'placex'; every column not given in 'kw'
+        # is filled with a usable default value.
+        name = kw.get('name')
+        if isinstance(name, str):
+            name = {'name': name}
+
+        centroid = kw.get('centroid', (23.0, 34.0))
+        geometry = kw.get('geometry', 'POINT(%f %f)' % centroid)
+
+        self.add_data('placex',
+                      {'place_id': kw.get('place_id', 1000),
+                       'osm_type': kw.get('osm_type', 'W'),
+                       'osm_id': kw.get('osm_id', 4),
+                       'class_': kw.get('class_', 'highway'),
+                       'type': kw.get('type', 'residential'),
+                       'name': name,
+                       'address': kw.get('address'),
+                       'extratags': kw.get('extratags'),
+                       'parent_place_id': kw.get('parent_place_id'),
+                       'linked_place_id': kw.get('linked_place_id'),
+                       'admin_level': kw.get('admin_level', 15),
+                       'country_code': kw.get('country_code'),
+                       'housenumber': kw.get('housenumber'),
+                       'postcode': kw.get('postcode'),
+                       'wikipedia': kw.get('wikipedia'),
+                       'rank_search': kw.get('rank_search', 30),
+                       'rank_address': kw.get('rank_address', 30),
+                       'importance': kw.get('importance'),
+                       'centroid': 'POINT(%f %f)' % centroid,
+                       'indexed_status': kw.get('indexed_status', 0),
+                       'indexed_date': kw.get('indexed_date',
+                                              dt.datetime(2022, 12, 7, 14, 14, 46, 0)),
+                       'geometry': geometry})
+
+
+    def add_address_placex(self, object_id, **kw):
+        # Insert a placex row plus an 'addressline' entry linking it
+        # as an address part of the place with id 'object_id'.
+        self.add_placex(**kw)
+        self.add_data('addressline',
+                      {'place_id': object_id,
+                       'address_place_id': kw.get('place_id', 1000),
+                       'distance': kw.get('distance', 0.0),
+                       'cached_rank_address': kw.get('rank_address', 30),
+                       'fromarea': kw.get('fromarea', False),
+                       'isaddress': kw.get('isaddress', True)})
+
+
+    def add_osmline(self, **kw):
+        # Insert an address interpolation row into 'osmline'.
+        self.add_data('osmline',
+                      {'place_id': kw.get('place_id', 10000),
+                       'osm_id': kw.get('osm_id', 4004),
+                       'parent_place_id': kw.get('parent_place_id'),
+                       'indexed_date': kw.get('indexed_date',
+                                              dt.datetime(2022, 12, 7, 14, 14, 46, 0)),
+                       'startnumber': kw.get('startnumber', 2),
+                       'endnumber': kw.get('endnumber', 6),
+                       'step': kw.get('step', 2),
+                       'address': kw.get('address'),
+                       'postcode': kw.get('postcode'),
+                       'country_code': kw.get('country_code'),
+                       'linegeo': kw.get('geometry', 'LINESTRING(1.1 -0.2, 1.09 -0.22)')})
+
+
+    def add_tiger(self, **kw):
+        # Insert a US TIGER house number interpolation row.
+        self.add_data('tiger',
+                      {'place_id': kw.get('place_id', 30000),
+                       'parent_place_id': kw.get('parent_place_id'),
+                       'startnumber': kw.get('startnumber', 2),
+                       'endnumber': kw.get('endnumber', 6),
+                       'step': kw.get('step', 2),
+                       'postcode': kw.get('postcode'),
+                       'linegeo': kw.get('geometry', 'LINESTRING(1.1 -0.2, 1.09 -0.22)')})
+
+
+    def add_postcode(self, **kw):
+        # Insert a row into the artificial postcode table.
+        self.add_data('postcode',
+                      {'place_id': kw.get('place_id', 1000),
+                       'parent_place_id': kw.get('parent_place_id'),
+                       'country_code': kw.get('country_code'),
+                       'postcode': kw.get('postcode'),
+                       'rank_search': kw.get('rank_search', 20),
+                       'rank_address': kw.get('rank_address', 22),
+                       'indexed_date': kw.get('indexed_date',
+                                              dt.datetime(2022, 12, 7, 14, 14, 46, 0)),
+                       'geometry': kw.get('geometry', 'POINT(23 34)')})
+
+
+    def add_country(self, country_code, geometry):
+        # Register a country area in the country grid.
+        self.add_data('country_grid',
+                      {'country_code': country_code,
+                       'area': 0.1,
+                       'geometry': geometry})
+
+
+    def add_country_name(self, country_code, names, partition=0):
+        # Register names for a country ('names' is a name dictionary).
+        self.add_data('country_name',
+                      {'country_code': country_code,
+                       'name': names,
+                       'partition': partition})
+
+
+    def add_search_name(self, place_id, **kw):
+        # Insert a row into the 'search_name' lookup table.
+        centroid = kw.get('centroid', (23.0, 34.0))
+        self.add_data('search_name',
+                      {'place_id': place_id,
+                       'importance': kw.get('importance', 0.00001),
+                       'search_rank': kw.get('search_rank', 30),
+                       'address_rank': kw.get('address_rank', 30),
+                       'name_vector': kw.get('names', []),
+                       'nameaddress_vector': kw.get('address', []),
+                       'country_code': kw.get('country_code', 'xx'),
+                       'centroid': 'POINT(%f %f)' % centroid})
+
+
+    def add_class_type_table(self, cls, typ):
+        # Materialise the special lookup table for the given
+        # class/type combination from the current placex content.
+        self.async_to_sync(
+            self.exec_async(sa.text(f"""CREATE TABLE place_classtype_{cls}_{typ}
+                                         AS (SELECT place_id, centroid FROM placex
+                                             WHERE class = '{cls}' AND type = '{typ}')
+                                     """)))
+
+
+    def add_word_table(self, content):
+        # Create (if necessary) and fill the tokenizer's word table.
+        # 'content' is a sequence of tuples in column order.
+        data = [dict(zip(['word_id', 'word_token', 'type', 'word', 'info'], c))
+                for c in content]
+
+        async def _do_sql():
+            async with self.api._async_api.begin() as conn:
+                if 'word' not in conn.t.meta.tables:
+                    await make_query_analyzer(conn)
+                    word_table = conn.t.meta.tables['word']
+                    await conn.connection.run_sync(word_table.create)
+                if data:
+                    await conn.execute(conn.t.meta.tables['word'].insert(), data)
+
+        self.async_to_sync(_do_sql())
+
+
+    async def exec_async(self, sql, *args, **kwargs):
+        # Execute a single SQL statement in its own transaction.
+        async with self.api._async_api.begin() as conn:
+            return await conn.execute(sql, *args, **kwargs)
+
+
+    async def create_tables(self):
+        # Create all frontend tables known to the API in the test DB.
+        async with self.api._async_api._engine.begin() as conn:
+            await conn.run_sync(self.api._async_api._tables.meta.create_all)
+
+
+@pytest.fixture
+def apiobj(temp_db_with_extensions, temp_db_conn, monkeypatch):
+    """ Create a Nominatim API against the test database together with
+        the APITester helpers for inserting test data.
+    """
+    monkeypatch.setenv('NOMINATIM_USE_US_TIGER_DATA', 'yes')
+    testapi = APITester()
+    testapi.async_to_sync(testapi.create_tables())
+
+    proc = SQLPreprocessor(temp_db_conn, testapi.api.config)
+    proc.run_sql_file(temp_db_conn, 'functions/ranking.sql')
+
+    loglib.set_log_output('text')
+    yield testapi
+    # Dump the collected debug log so it shows up on test failure.
+    print(loglib.get_and_disable())
+
+    testapi.api.close()
+
+
+@pytest.fixture(params=['postgres_db', 'sqlite_db'])
+def frontend(request, event_loop, tmp_path):
+    """ Provide a function that yields a NominatimAPI instance for the
+        data set up through an APITester. Parametrised so that every
+        test runs once against PostgreSQL and once against a SQLite
+        conversion of the same data.
+    """
+    testapis = []
+    if request.param == 'sqlite_db':
+        db = str(tmp_path / 'test_nominatim_python_unittest.sqlite')
+
+        def mkapi(apiobj, options=None):
+            # None sentinel instead of a mutable default argument
+            # ('options={'reverse'}' would share one set object
+            # between all calls).
+            if options is None:
+                options = {'reverse'}
+            apiobj.add_data('properties',
+                        [{'property': 'tokenizer', 'value': 'icu'},
+                         {'property': 'tokenizer_import_normalisation',
+                          'value': ':: lower();'},
+                         {'property': 'tokenizer_import_transliteration',
+                          'value': "'1' > '/1/'; 'ä' > 'ä '"},
+                        ])
+
+            async def _do_sql():
+                async with apiobj.api._async_api.begin() as conn:
+                    if 'word' in conn.t.meta.tables:
+                        return
+                    await make_query_analyzer(conn)
+                    word_table = conn.t.meta.tables['word']
+                    await conn.connection.run_sync(word_table.create)
+
+            apiobj.async_to_sync(_do_sql())
+
+            event_loop.run_until_complete(convert_sqlite.convert(None, db, options))
+            outapi = napi.NominatimAPI(environ={'NOMINATIM_DATABASE_DSN': f"sqlite:dbname={db}",
+                                                'NOMINATIM_USE_US_TIGER_DATA': 'yes'})
+            testapis.append(outapi)
+
+            return outapi
+    elif request.param == 'postgres_db':
+        def mkapi(apiobj, options=None):
+            return apiobj.api
+
+    yield mkapi
+
+    for api in testapis:
+        api.close()
+
+
+@pytest_asyncio.fixture
+async def api(temp_db):
+    # Plain asynchronous API handle against the (empty) test database.
+    async with napi.NominatimAPIAsync() as api:
+        yield api
--- /dev/null
+# SPDX-License-Identifier: GPL-3.0-or-later
+#
+# This file is part of Nominatim. (https://nominatim.org)
+#
+# Copyright (C) 2024 by the Nominatim developer community.
+# For a full list of authors see the git log.
+"""
+Provides dummy implementations of ASGIAdaptor for testing.
+"""
+from collections import namedtuple
+
+import nominatim_api.v1.server_glue as glue
+from nominatim_api.v1.format import dispatch as formatting
+from nominatim_api.config import Configuration
+
+class FakeError(BaseException):
+    """ Error object returned by FakeAdaptor.error().
+
+        NOTE(review): derives from BaseException, presumably so it
+        cannot be accidentally swallowed by broad 'except Exception'
+        handlers in the code under test — confirm against server_glue.
+    """
+
+    def __init__(self, msg, status):
+        self.msg = msg
+        self.status = status
+
+    def __str__(self):
+        return f'{self.status} -- {self.msg}'
+
+FakeResponse = namedtuple('FakeResponse', ['status', 'output', 'content_type'])
+
+class FakeAdaptor(glue.ASGIAdaptor):
+    """ Minimal ASGIAdaptor implementation for testing: request
+        parameters and headers come from plain dicts, responses are
+        returned as FakeResponse tuples instead of being sent.
+    """
+
+    def __init__(self, params=None, headers=None, config=None):
+        self.params = params or {}
+        self.headers = headers or {}
+        self._config = config or Configuration(None)
+
+
+    def get(self, name, default=None):
+        return self.params.get(name, default)
+
+
+    def get_header(self, name, default=None):
+        return self.headers.get(name, default)
+
+
+    def error(self, msg, status=400):
+        return FakeError(msg, status)
+
+
+    def create_response(self, status, output, num_results):
+        return FakeResponse(status, output, self.content_type)
+
+
+    def base_uri(self):
+        return 'http://test'
+
+    def config(self):
+        return self._config
+
+    def formatting(self):
+        return formatting
+
+
--- /dev/null
+# SPDX-License-Identifier: GPL-3.0-or-later
+#
+# This file is part of Nominatim. (https://nominatim.org)
+#
+# Copyright (C) 2024 by the Nominatim developer community.
+# For a full list of authors see the git log.
+"""
+Tests for normalizing search queries.
+"""
+from pathlib import Path
+
+import pytest
+
+from icu import Transliterator
+
+import nominatim_api.search.query as qmod
+from nominatim_api.query_preprocessing.config import QueryConfig
+from nominatim_api.query_preprocessing import normalize
+
+def run_preprocessor_on(query, norm):
+    # Build a normalize preprocessor from the given ICU normalization
+    # rules and apply it to the phrase list 'query'.
+    normalizer = Transliterator.createFromRules("normalization", norm)
+    proc = normalize.create(QueryConfig().set_normalizer(normalizer))
+
+    return proc(query)
+
+
+def test_normalize_simple():
+    norm = ':: lower();'
+    query = [qmod.Phrase(qmod.PhraseType.NONE, 'Hallo')]
+
+    out = run_preprocessor_on(query, norm)
+
+    assert len(out) == 1
+    assert out == [qmod.Phrase(qmod.PhraseType.NONE, 'hallo')]
--- /dev/null
+# SPDX-License-Identifier: GPL-3.0-or-later
+#
+# This file is part of Nominatim. (https://nominatim.org)
+#
+# Copyright (C) 2025 by the Nominatim developer community.
+# For a full list of authors see the git log.
+"""
+Tests for japanese phrase splitting.
+"""
+from pathlib import Path
+
+import pytest
+
+from icu import Transliterator
+
+import nominatim_api.search.query as qmod
+from nominatim_api.query_preprocessing.config import QueryConfig
+from nominatim_api.query_preprocessing import split_japanese_phrases
+
+def run_preprocessor_on(query):
+    # Apply the Japanese phrase-splitting preprocessor to 'query'.
+    # No normalizer is needed for this step.
+    proc = split_japanese_phrases.create(QueryConfig().set_normalizer(None))
+
+    return proc(query)
+
+
+@pytest.mark.parametrize('inp,outp', [('大阪府大阪市大阪', '大阪府:大阪市:大阪'),
+                                      ('大阪府大阪', '大阪府:大阪'),
+                                      ('大阪市大阪', '大阪市:大阪')])
+def test_split_phrases(inp, outp):
+    # Administrative suffixes (prefecture/city) mark split points.
+    query = [qmod.Phrase(qmod.PhraseType.NONE, inp)]
+
+    out = run_preprocessor_on(query)
+
+    assert out == [qmod.Phrase(qmod.PhraseType.NONE, outp)]
--- /dev/null
+# SPDX-License-Identifier: GPL-3.0-or-later
+#
+# This file is part of Nominatim. (https://nominatim.org)
+#
+# Copyright (C) 2024 by the Nominatim developer community.
+# For a full list of authors see the git log.
+"""
+Tests for tokenized query data structures.
+"""
+import pytest
+
+from nominatim_api.search import query
+
+class MyToken(query.Token):
+    # Minimal concrete Token for building test queries.
+
+    def get_category(self):
+        return 'this', 'that'
+
+
+def mktoken(tid: int):
+    # Token with fixed penalty/counts; only the id varies.
+    return MyToken(penalty=3.0, token=tid, count=1, addr_count=1,
+                   lookup_word='foo')
+
+
+@pytest.mark.parametrize('ptype,ttype', [('NONE', 'WORD'),
+                                         ('AMENITY', 'QUALIFIER'),
+                                         ('STREET', 'PARTIAL'),
+                                         ('CITY', 'WORD'),
+                                         ('COUNTRY', 'COUNTRY'),
+                                         ('POSTCODE', 'POSTCODE')])
+def test_phrase_compatible(ptype, ttype):
+    assert query.PhraseType[ptype].compatible_with(query.TokenType[ttype], False)
+
+
+@pytest.mark.parametrize('ptype', ['COUNTRY', 'POSTCODE'])
+def test_phrase_incompatible(ptype):
+    assert not query.PhraseType[ptype].compatible_with(query.TokenType.PARTIAL, True)
+
+
+def test_query_node_empty():
+    qn = query.QueryNode(query.BreakType.PHRASE, query.PhraseType.NONE)
+
+    assert not qn.has_tokens(3, query.TokenType.PARTIAL)
+    assert qn.get_tokens(3, query.TokenType.WORD) is None
+
+
+def test_query_node_with_content():
+    qn = query.QueryNode(query.BreakType.PHRASE, query.PhraseType.NONE)
+    qn.starting.append(query.TokenList(2, query.TokenType.PARTIAL, [mktoken(100), mktoken(101)]))
+    qn.starting.append(query.TokenList(2, query.TokenType.WORD, [mktoken(1000)]))
+
+    assert not qn.has_tokens(3, query.TokenType.PARTIAL)
+    assert not qn.has_tokens(2, query.TokenType.COUNTRY)
+    assert qn.has_tokens(2, query.TokenType.PARTIAL)
+    assert qn.has_tokens(2, query.TokenType.WORD)
+
+    assert qn.get_tokens(3, query.TokenType.PARTIAL) is None
+    assert qn.get_tokens(2, query.TokenType.COUNTRY) is None
+    assert len(qn.get_tokens(2, query.TokenType.PARTIAL)) == 2
+    assert len(qn.get_tokens(2, query.TokenType.WORD)) == 1
+
+
+def test_query_struct_empty():
+    q = query.QueryStruct([])
+
+    assert q.num_token_slots() == 0
+
+
+def test_query_struct_with_tokens():
+    q = query.QueryStruct([query.Phrase(query.PhraseType.NONE, 'foo bar')])
+    q.add_node(query.BreakType.WORD, query.PhraseType.NONE)
+    q.add_node(query.BreakType.END, query.PhraseType.NONE)
+
+    assert q.num_token_slots() == 2
+
+    q.add_token(query.TokenRange(0, 1), query.TokenType.PARTIAL, mktoken(1))
+    q.add_token(query.TokenRange(1, 2), query.TokenType.PARTIAL, mktoken(2))
+    q.add_token(query.TokenRange(1, 2), query.TokenType.WORD, mktoken(99))
+    q.add_token(query.TokenRange(1, 2), query.TokenType.WORD, mktoken(98))
+
+    assert q.get_tokens(query.TokenRange(0, 2), query.TokenType.WORD) == []
+    assert len(q.get_tokens(query.TokenRange(1, 2), query.TokenType.WORD)) == 2
+
+    partials = q.get_partials_list(query.TokenRange(0, 2))
+
+    assert len(partials) == 2
+    assert [t.token for t in partials] == [1, 2]
+
+    # Unknown ids render as the string 'None'.
+    assert q.find_lookup_word_by_id(4) == 'None'
+    assert q.find_lookup_word_by_id(99) == '[W]foo'
+
+
+def test_query_struct_incompatible_token():
+    # Tokens incompatible with the phrase type are silently dropped.
+    q = query.QueryStruct([query.Phrase(query.PhraseType.COUNTRY, 'foo bar')])
+    q.add_node(query.BreakType.WORD, query.PhraseType.COUNTRY)
+    q.add_node(query.BreakType.END, query.PhraseType.NONE)
+
+    q.add_token(query.TokenRange(0, 1), query.TokenType.PARTIAL, mktoken(1))
+    q.add_token(query.TokenRange(1, 2), query.TokenType.COUNTRY, mktoken(100))
+
+    assert q.get_tokens(query.TokenRange(0, 1), query.TokenType.PARTIAL) == []
+    assert len(q.get_tokens(query.TokenRange(1, 2), query.TokenType.COUNTRY)) == 1
+
+
+def test_query_struct_amenity_single_word():
+    q = query.QueryStruct([query.Phrase(query.PhraseType.AMENITY, 'bar')])
+    q.add_node(query.BreakType.END, query.PhraseType.NONE)
+
+    q.add_token(query.TokenRange(0, 1), query.TokenType.PARTIAL, mktoken(1))
+    q.add_token(query.TokenRange(0, 1), query.TokenType.NEAR_ITEM, mktoken(2))
+    q.add_token(query.TokenRange(0, 1), query.TokenType.QUALIFIER, mktoken(3))
+
+    assert len(q.get_tokens(query.TokenRange(0, 1), query.TokenType.PARTIAL)) == 1
+    assert len(q.get_tokens(query.TokenRange(0, 1), query.TokenType.NEAR_ITEM)) == 1
+    assert len(q.get_tokens(query.TokenRange(0, 1), query.TokenType.QUALIFIER)) == 0
+
+
+def test_query_struct_amenity_two_words():
+    q = query.QueryStruct([query.Phrase(query.PhraseType.AMENITY, 'foo bar')])
+    q.add_node(query.BreakType.WORD, query.PhraseType.AMENITY)
+    q.add_node(query.BreakType.END, query.PhraseType.NONE)
+
+    for trange in [(0, 1), (1, 2)]:
+        q.add_token(query.TokenRange(*trange), query.TokenType.PARTIAL, mktoken(1))
+        q.add_token(query.TokenRange(*trange), query.TokenType.NEAR_ITEM, mktoken(2))
+        q.add_token(query.TokenRange(*trange), query.TokenType.QUALIFIER, mktoken(3))
+
+    assert len(q.get_tokens(query.TokenRange(0, 1), query.TokenType.PARTIAL)) == 1
+    assert len(q.get_tokens(query.TokenRange(0, 1), query.TokenType.NEAR_ITEM)) == 0
+    assert len(q.get_tokens(query.TokenRange(0, 1), query.TokenType.QUALIFIER)) == 1
+
+    assert len(q.get_tokens(query.TokenRange(1, 2), query.TokenType.PARTIAL)) == 1
+    assert len(q.get_tokens(query.TokenRange(1, 2), query.TokenType.NEAR_ITEM)) == 0
+    assert len(q.get_tokens(query.TokenRange(1, 2), query.TokenType.QUALIFIER)) == 1
+
--- /dev/null
+# SPDX-License-Identifier: GPL-3.0-or-later
+#
+# This file is part of Nominatim. (https://nominatim.org)
+#
+# Copyright (C) 2023 by the Nominatim developer community.
+# For a full list of authors see the git log.
+"""
+Tests for creating abstract searches from token assignments.
+"""
+import pytest
+
+from nominatim_api.search.query import Token, TokenRange, BreakType, PhraseType, TokenType, QueryStruct, Phrase
+from nominatim_api.search.db_search_builder import SearchBuilder
+from nominatim_api.search.token_assignment import TokenAssignment
+from nominatim_api.types import SearchDetails
+import nominatim_api.search.db_searches as dbs
+
+class MyToken(Token):
+    # Minimal concrete Token implementation for search-builder tests.
+    def get_category(self):
+        return 'this', 'that'
+
+
+def make_query(*args):
+    """ Create a QueryStruct from token descriptions.
+
+        Each positional argument describes the tokens starting at one
+        node position as a list of (end, token type, [(id, word), ...])
+        tuples. PARTIAL tokens get a penalty of 0.5, all others 0.0.
+    """
+    q = QueryStruct([Phrase(PhraseType.NONE, '')])
+
+    # 'default=0' keeps an empty call from raising ValueError on max().
+    for _ in range(max((inner[0] for tlist in args for inner in tlist), default=0)):
+        q.add_node(BreakType.WORD, PhraseType.NONE)
+    q.add_node(BreakType.END, PhraseType.NONE)
+
+    for start, tlist in enumerate(args):
+        for end, ttype, tinfo in tlist:
+            for tid, word in tinfo:
+                q.add_token(TokenRange(start, end), ttype,
+                            MyToken(penalty=0.5 if ttype == TokenType.PARTIAL else 0.0,
+                                    token=tid, count=1, addr_count=1,
+                                    lookup_word=word))
+
+    return q
+
+
+# --- country searches -------------------------------------------------
+
+def test_country_search():
+    q = make_query([(1, TokenType.COUNTRY, [(2, 'de'), (3, 'en')])])
+    builder = SearchBuilder(q, SearchDetails())
+
+    searches = list(builder.build(TokenAssignment(country=TokenRange(0, 1))))
+
+    assert len(searches) == 1
+
+    search = searches[0]
+
+    assert isinstance(search, dbs.CountrySearch)
+    assert set(search.countries.values) == {'de', 'en'}
+
+
+def test_country_search_with_country_restriction():
+    q = make_query([(1, TokenType.COUNTRY, [(2, 'de'), (3, 'en')])])
+    builder = SearchBuilder(q, SearchDetails.from_kwargs({'countries': 'en,fr'}))
+
+    searches = list(builder.build(TokenAssignment(country=TokenRange(0, 1))))
+
+    assert len(searches) == 1
+
+    search = searches[0]
+
+    assert isinstance(search, dbs.CountrySearch)
+    assert set(search.countries.values) == {'en'}
+
+
+def test_country_search_with_conflicting_country_restriction():
+    q = make_query([(1, TokenType.COUNTRY, [(2, 'de'), (3, 'en')])])
+    builder = SearchBuilder(q, SearchDetails.from_kwargs({'countries': 'fr'}))
+
+    searches = list(builder.build(TokenAssignment(country=TokenRange(0, 1))))
+
+    assert len(searches) == 0
+
+
+# --- postcode searches ------------------------------------------------
+
+def test_postcode_search_simple():
+    q = make_query([(1, TokenType.POSTCODE, [(34, '2367')])])
+    builder = SearchBuilder(q, SearchDetails())
+
+    searches = list(builder.build(TokenAssignment(postcode=TokenRange(0, 1))))
+
+    assert len(searches) == 1
+    search = searches[0]
+
+    assert isinstance(search, dbs.PostcodeSearch)
+    assert search.postcodes.values == ['2367']
+    assert not search.countries.values
+    assert not search.lookups
+    assert not search.rankings
+
+
+def test_postcode_with_country():
+    q = make_query([(1, TokenType.POSTCODE, [(34, '2367')])],
+                   [(2, TokenType.COUNTRY, [(1, 'xx')])])
+    builder = SearchBuilder(q, SearchDetails())
+
+    searches = list(builder.build(TokenAssignment(postcode=TokenRange(0, 1),
+                                                  country=TokenRange(1, 2))))
+
+    assert len(searches) == 1
+    search = searches[0]
+
+    assert isinstance(search, dbs.PostcodeSearch)
+    assert search.postcodes.values == ['2367']
+    assert search.countries.values == ['xx']
+    assert not search.lookups
+    assert not search.rankings
+
+
+def test_postcode_with_address():
+    q = make_query([(1, TokenType.POSTCODE, [(34, '2367')])],
+                   [(2, TokenType.PARTIAL, [(100, 'word')])])
+    builder = SearchBuilder(q, SearchDetails())
+
+    searches = list(builder.build(TokenAssignment(postcode=TokenRange(0, 1),
+                                                  address=[TokenRange(1, 2)])))
+
+    assert len(searches) == 1
+    search = searches[0]
+
+    assert isinstance(search, dbs.PostcodeSearch)
+    assert search.postcodes.values == ['2367']
+    assert not search.countries
+    assert search.lookups
+    assert not search.rankings
+
+
+def test_postcode_with_address_with_full_word():
+    q = make_query([(1, TokenType.POSTCODE, [(34, '2367')])],
+                   [(2, TokenType.PARTIAL, [(100, 'word')]),
+                    (2, TokenType.WORD, [(1, 'full')])])
+    builder = SearchBuilder(q, SearchDetails())
+
+    searches = list(builder.build(TokenAssignment(postcode=TokenRange(0, 1),
+                                                  address=[TokenRange(1, 2)])))
+
+    assert len(searches) == 1
+    search = searches[0]
+
+    assert isinstance(search, dbs.PostcodeSearch)
+    assert search.postcodes.values == ['2367']
+    assert not search.countries
+    assert search.lookups
+    assert len(search.rankings) == 1
+
+
+# --- near/POI searches ------------------------------------------------
+
+@pytest.mark.parametrize('kwargs', [{'viewbox': '0,0,1,1', 'bounded_viewbox': True},
+                                    {'near': '10,10'}])
+def test_near_item_only(kwargs):
+    # A near item on its own needs a restricted area (bounded viewbox
+    # or near point) to produce a search.
+    q = make_query([(1, TokenType.NEAR_ITEM, [(2, 'foo')])])
+    builder = SearchBuilder(q, SearchDetails.from_kwargs(kwargs))
+
+    searches = list(builder.build(TokenAssignment(near_item=TokenRange(0, 1))))
+
+    assert len(searches) == 1
+
+    search = searches[0]
+
+    assert isinstance(search, dbs.PoiSearch)
+    assert search.qualifiers.values == [('this', 'that')]
+
+
+@pytest.mark.parametrize('kwargs', [{'viewbox': '0,0,1,1'},
+                                    {}])
+def test_near_item_skipped(kwargs):
+    q = make_query([(1, TokenType.NEAR_ITEM, [(2, 'foo')])])
+    builder = SearchBuilder(q, SearchDetails.from_kwargs(kwargs))
+
+    searches = list(builder.build(TokenAssignment(near_item=TokenRange(0, 1))))
+
+    assert len(searches) == 0
+
+
+# --- name searches ----------------------------------------------------
+
+def test_name_only_search():
+    q = make_query([(1, TokenType.PARTIAL, [(1, 'a')]),
+                    (1, TokenType.WORD, [(100, 'a')])])
+    builder = SearchBuilder(q, SearchDetails())
+
+    searches = list(builder.build(TokenAssignment(name=TokenRange(0, 1))))
+
+    assert len(searches) == 1
+    search = searches[0]
+
+    assert isinstance(search, dbs.PlaceSearch)
+    assert not search.postcodes.values
+    assert not search.countries.values
+    assert not search.housenumbers.values
+    assert not search.qualifiers.values
+    assert len(search.lookups) == 1
+    assert len(search.rankings) == 1
+
+
+def test_name_with_qualifier():
+    q = make_query([(1, TokenType.PARTIAL, [(1, 'a')]),
+                    (1, TokenType.WORD, [(100, 'a')])],
+                   [(2, TokenType.QUALIFIER, [(55, 'hotel')])])
+    builder = SearchBuilder(q, SearchDetails())
+
+    searches = list(builder.build(TokenAssignment(name=TokenRange(0, 1),
+                                                  qualifier=TokenRange(1, 2))))
+
+    assert len(searches) == 1
+    search = searches[0]
+
+    assert isinstance(search, dbs.PlaceSearch)
+    assert not search.postcodes.values
+    assert not search.countries.values
+    assert not search.housenumbers.values
+    assert search.qualifiers.values == [('this', 'that')]
+    assert len(search.lookups) == 1
+    assert len(search.rankings) == 1
+
+
+def test_name_with_housenumber_search():
+    q = make_query([(1, TokenType.PARTIAL, [(1, 'a')]),
+                    (1, TokenType.WORD, [(100, 'a')])],
+                   [(2, TokenType.HOUSENUMBER, [(66, '66')])])
+    builder = SearchBuilder(q, SearchDetails())
+
+    searches = list(builder.build(TokenAssignment(name=TokenRange(0, 1),
+                                                  housenumber=TokenRange(1, 2))))
+
+    assert len(searches) == 1
+    search = searches[0]
+
+    assert isinstance(search, dbs.PlaceSearch)
+    assert not search.postcodes.values
+    assert not search.countries.values
+    assert search.housenumbers.values == ['66']
+    assert len(search.lookups) == 1
+    assert len(search.rankings) == 1
+
+
+def test_name_and_address():
+    q = make_query([(1, TokenType.PARTIAL, [(1, 'a')]),
+                    (1, TokenType.WORD, [(100, 'a')])],
+                   [(2, TokenType.PARTIAL, [(2, 'b')]),
+                    (2, TokenType.WORD, [(101, 'b')])],
+                   [(3, TokenType.PARTIAL, [(3, 'c')]),
+                    (3, TokenType.WORD, [(102, 'c')])]
+                  )
+    builder = SearchBuilder(q, SearchDetails())
+
+    searches = list(builder.build(TokenAssignment(name=TokenRange(0, 1),
+                                                  address=[TokenRange(1, 2),
+                                                           TokenRange(2, 3)])))
+
+    assert len(searches) == 1
+    search = searches[0]
+
+    assert isinstance(search, dbs.PlaceSearch)
+    assert not search.postcodes.values
+    assert not search.countries.values
+    assert not search.housenumbers.values
+    assert len(search.lookups) == 2
+    assert len(search.rankings) == 3
+
+
+def test_name_and_complex_address():
+    q = make_query([(1, TokenType.PARTIAL, [(1, 'a')]),
+                    (1, TokenType.WORD, [(100, 'a')])],
+                   [(2, TokenType.PARTIAL, [(2, 'b')]),
+                    (3, TokenType.WORD, [(101, 'bc')])],
+                   [(3, TokenType.PARTIAL, [(3, 'c')])],
+                   [(4, TokenType.PARTIAL, [(4, 'd')]),
+                    (4, TokenType.WORD, [(103, 'd')])]
+                  )
+    builder = SearchBuilder(q, SearchDetails())
+
+    searches = list(builder.build(TokenAssignment(name=TokenRange(0, 1),
+                                                  address=[TokenRange(1, 2),
+                                                           TokenRange(2, 4)])))
+
+    assert len(searches) == 1
+    search = searches[0]
+
+    assert isinstance(search, dbs.PlaceSearch)
+    assert not search.postcodes.values
+    assert not search.countries.values
+    assert not search.housenumbers.values
+    assert len(search.lookups) == 2
+    assert len(search.rankings) == 2
+
+
+def test_name_only_near_search():
+    q = make_query([(1, TokenType.NEAR_ITEM, [(88, 'g')])],
+                   [(2, TokenType.PARTIAL, [(1, 'a')]),
+                    (2, TokenType.WORD, [(100, 'a')])])
+    builder = SearchBuilder(q, SearchDetails())
+
+    searches = list(builder.build(TokenAssignment(name=TokenRange(1, 2),
+                                                  near_item=TokenRange(0, 1))))
+
+    assert len(searches) == 1
+    search = searches[0]
+
+    assert isinstance(search, dbs.NearSearch)
+    assert isinstance(search.search, dbs.PlaceSearch)
+
+
+def test_name_only_search_with_category():
+    q = make_query([(1, TokenType.PARTIAL, [(1, 'a')]),
+                    (1, TokenType.WORD, [(100, 'a')])])
+    builder = SearchBuilder(q, SearchDetails.from_kwargs({'categories': [('foo', 'bar')]}))
+
+    searches = list(builder.build(TokenAssignment(name=TokenRange(0, 1))))
+
+    assert len(searches) == 1
+    search = searches[0]
+
+    assert isinstance(search, dbs.PlaceSearch)
+    assert search.qualifiers.values == [('foo', 'bar')]
+
+
+def test_name_with_near_item_search_with_category_mismatch():
+    q = make_query([(1, TokenType.NEAR_ITEM, [(88, 'g')])],
+                   [(2, TokenType.PARTIAL, [(1, 'a')]),
+                    (2, TokenType.WORD, [(100, 'a')])])
+    builder = SearchBuilder(q, SearchDetails.from_kwargs({'categories': [('foo', 'bar')]}))
+
+    searches = list(builder.build(TokenAssignment(name=TokenRange(1, 2),
+                                                  near_item=TokenRange(0, 1))))
+
+    assert len(searches) == 0
+
+
+def test_name_with_near_item_search_with_category_match():
+    q = make_query([(1, TokenType.NEAR_ITEM, [(88, 'g')])],
+                   [(2, TokenType.PARTIAL, [(1, 'a')]),
+                    (2, TokenType.WORD, [(100, 'a')])])
+    builder = SearchBuilder(q, SearchDetails.from_kwargs({'categories': [('foo', 'bar'),
+                                                                        ('this', 'that')]}))
+
+    searches = list(builder.build(TokenAssignment(name=TokenRange(1, 2),
+                                                  near_item=TokenRange(0, 1))))
+
+    assert len(searches) == 1
+    search = searches[0]
+
+    assert isinstance(search, dbs.NearSearch)
+    assert isinstance(search.search, dbs.PlaceSearch)
+
+
+def test_name_with_qualifier_search_with_category_mismatch():
+    q = make_query([(1, TokenType.QUALIFIER, [(88, 'g')])],
+                   [(2, TokenType.PARTIAL, [(1, 'a')]),
+                    (2, TokenType.WORD, [(100, 'a')])])
+    builder = SearchBuilder(q, SearchDetails.from_kwargs({'categories': [('foo', 'bar')]}))
+
+    searches = list(builder.build(TokenAssignment(name=TokenRange(1, 2),
+                                                  qualifier=TokenRange(0, 1))))
+
+    assert len(searches) == 0
+
+
+def test_name_with_qualifier_search_with_category_match():
+    q = make_query([(1, TokenType.QUALIFIER, [(88, 'g')])],
+                   [(2, TokenType.PARTIAL, [(1, 'a')]),
+                    (2, TokenType.WORD, [(100, 'a')])])
+    builder = SearchBuilder(q, SearchDetails.from_kwargs({'categories': [('foo', 'bar'),
+                                                                        ('this', 'that')]}))
+
+    searches = list(builder.build(TokenAssignment(name=TokenRange(1, 2),
+                                                  qualifier=TokenRange(0, 1))))
+
+    assert len(searches) == 1
+    search = searches[0]
+
+    assert isinstance(search, dbs.PlaceSearch)
+    assert search.qualifiers.values == [('this', 'that')]
+
+
+def test_name_only_search_with_countries():
+    q = make_query([(1, TokenType.PARTIAL, [(1, 'a')]),
+                    (1, TokenType.WORD, [(100, 'a')])])
+    builder = SearchBuilder(q, SearchDetails.from_kwargs({'countries': 'de,en'}))
+
+    searches = list(builder.build(TokenAssignment(name=TokenRange(0, 1))))
+
+    assert len(searches) == 1
+    search = searches[0]
+
+    assert isinstance(search, dbs.PlaceSearch)
+    assert not search.postcodes.values
+    assert set(search.countries.values) == {'de', 'en'}
+    assert not search.housenumbers.values
+
+
+def make_counted_searches(name_part, name_full, address_part, address_full,
+                          num_address_parts=1):
+    # Build searches for a one-word name plus address parts where the
+    # arguments give the occurrence counts of the partial/full tokens.
+    # Used to test how frequency influences lookup strategy.
+    q = QueryStruct([Phrase(PhraseType.NONE, '')])
+    for i in range(1 + num_address_parts):
+        q.add_node(BreakType.WORD, PhraseType.NONE)
+    q.add_node(BreakType.END, PhraseType.NONE)
+
+    q.add_token(TokenRange(0, 1), TokenType.PARTIAL,
+                MyToken(0.5, 1, name_part, 1, 'name_part'))
+    q.add_token(TokenRange(0, 1), TokenType.WORD,
+                MyToken(0, 101, name_full, 1, 'name_full'))
+    for i in range(num_address_parts):
+        q.add_token(TokenRange(i + 1, i + 2), TokenType.PARTIAL,
+                    MyToken(0.5, 2, address_part, 1, 'address_part'))
+        q.add_token(TokenRange(i + 1, i + 2), TokenType.WORD,
+                    MyToken(0, 102, address_full, 1, 'address_full'))
+
+    builder = SearchBuilder(q, SearchDetails())
+
+    return list(builder.build(TokenAssignment(name=TokenRange(0, 1),
+                                              address=[TokenRange(1, 1 + num_address_parts)])))
+
+
+def test_infrequent_partials_in_name():
+    # Rare partials: full lookup on the name vector is cheap enough.
+    searches = make_counted_searches(1, 1, 1, 1)
+
+    assert len(searches) == 1
+    search = searches[0]
+
+    assert isinstance(search, dbs.PlaceSearch)
+    assert len(search.lookups) == 2
+    assert len(search.rankings) == 2
+
+    assert set((l.column, l.lookup_type.__name__) for l in search.lookups) == \
+           {('name_vector', 'LookupAll'), ('nameaddress_vector', 'Restrict')}
+
+
+def test_frequent_partials_in_name_and_address():
+    # Frequent partials: an additional any-match variant is produced.
+    searches = make_counted_searches(9999, 1, 9999, 1)
+
+    assert len(searches) == 2
+
+    assert all(isinstance(s, dbs.PlaceSearch) for s in searches)
+    searches.sort(key=lambda s: s.penalty)
+
+    assert set((l.column, l.lookup_type.__name__) for l in searches[0].lookups) == \
+           {('name_vector', 'LookupAny'), ('nameaddress_vector', 'Restrict')}
+    assert set((l.column, l.lookup_type.__name__) for l in searches[1].lookups) == \
+           {('nameaddress_vector', 'LookupAll'), ('name_vector', 'LookupAll')}
+
+
+def test_too_frequent_partials_in_name_and_address():
+    # Very frequent partials: only the any-match strategy survives.
+    searches = make_counted_searches(20000, 1, 10000, 1)
+
+    assert len(searches) == 1
+
+    assert all(isinstance(s, dbs.PlaceSearch) for s in searches)
+    searches.sort(key=lambda s: s.penalty)
+
+    assert set((l.column, l.lookup_type.__name__) for l in searches[0].lookups) == \
+           {('name_vector', 'LookupAny'), ('nameaddress_vector', 'Restrict')}
--- /dev/null
+# SPDX-License-Identifier: GPL-3.0-or-later
+#
+# This file is part of Nominatim. (https://nominatim.org)
+#
+# Copyright (C) 2024 by the Nominatim developer community.
+# For a full list of authors see the git log.
+"""
+Tests for query analyzer for ICU tokenizer.
+"""
+import pytest
+import pytest_asyncio
+
+from nominatim_api import NominatimAPIAsync
+from nominatim_api.search.query import Phrase, PhraseType, TokenType, BreakType
+import nominatim_api.search.icu_tokenizer as tok
+from nominatim_api.logging import set_log_output, get_and_disable
+
+async def add_word(conn, word_id, word_token, wtype, word, info = None):
+ t = conn.t.meta.tables['word']
+ await conn.execute(t.insert(), {'word_id': word_id,
+ 'word_token': word_token,
+ 'type': wtype,
+ 'word': word,
+ 'info': info})
+
+
+def make_phrase(query):
+ return [Phrase(PhraseType.NONE, s) for s in query.split(',')]
+
@pytest_asyncio.fixture
async def conn(table_factory):
    """ Create an asynchronous SQLAlchemy engine for the test DB.

        Sets up the minimal 'nominatim_properties' (normalisation and
        transliteration rules) and 'word' tables required by the ICU
        tokenizer, then yields an open connection.
    """
    table_factory('nominatim_properties',
                  definition='property TEXT, value TEXT',
                  content=(('tokenizer_import_normalisation', ':: lower();'),
                           ('tokenizer_import_transliteration', "'1' > '/1/'; 'ä' > 'ä '")))
    table_factory('word',
                  definition='word_id INT, word_token TEXT, type TEXT, word TEXT, info JSONB')

    async with NominatimAPIAsync() as api:
        async with api.begin() as conn:
            yield conn
+
+
@pytest.mark.asyncio
async def test_empty_phrase(conn):
    """ Analyzing an empty phrase list yields an empty query structure.
    """
    analyzer = await tok.create_query_analyzer(conn)

    query = await analyzer.analyze_query([])

    assert not query.source
    assert query.num_token_slots() == 0
+
+
@pytest.mark.asyncio
async def test_single_phrase_with_unknown_terms(conn):
    """ Unknown terms stay in the normalised phrase text but
        produce no starting tokens.
    """
    analyzer = await tok.create_query_analyzer(conn)
    await add_word(conn, 1, 'foo', 'w', 'FOO')

    query = await analyzer.analyze_query(make_phrase('foo BAR'))

    assert len(query.source) == 1
    phrase = query.source[0]
    assert phrase.ptype == PhraseType.NONE
    assert phrase.text == 'foo bar'

    assert query.num_token_slots() == 2
    assert len(query.nodes[0].starting) == 1
    assert not query.nodes[1].starting
+
+
@pytest.mark.asyncio
async def test_multiple_phrases(conn):
    """ Comma-separated parts of the input become separate phrases.
    """
    analyzer = await tok.create_query_analyzer(conn)
    for wid, token in enumerate(('one', 'two'), start=1):
        await add_word(conn, wid, token, 'w', token)
    await add_word(conn, 100, 'one two', 'W', 'one two')
    await add_word(conn, 3, 'three', 'w', 'three')

    query = await analyzer.analyze_query(make_phrase('one two,three'))

    assert len(query.source) == 2
+
+
@pytest.mark.asyncio
async def test_splitting_in_transliteration(conn):
    """ A term split up by transliteration gets an additional break
        of type TOKEN between the parts.
    """
    analyzer = await tok.create_query_analyzer(conn)
    await add_word(conn, 1, 'mä', 'W', 'ma')
    await add_word(conn, 2, 'fo', 'W', 'fo')

    query = await analyzer.analyze_query(make_phrase('mäfo'))

    assert query.num_token_slots() == 2
    assert all(query.nodes[i].starting for i in (0, 1))
    assert query.nodes[1].btype == BreakType.TOKEN
+
+
@pytest.mark.asyncio
@pytest.mark.parametrize('term,order', [('23456', ['POSTCODE', 'HOUSENUMBER', 'WORD', 'PARTIAL']),
                                        ('3', ['HOUSENUMBER', 'POSTCODE', 'WORD', 'PARTIAL'])
                                        ])
async def test_penalty_postcodes_and_housenumbers(conn, term, order):
    """ Penalties sort the possible interpretations of a number-like
        term into the expected order.
    """
    analyzer = await tok.create_query_analyzer(conn)
    for word_id, wtype, word in ((1, 'P', None), (2, 'H', term),
                                 (3, 'w', term), (4, 'W', term)):
        await add_word(conn, word_id, term, wtype, word)

    query = await analyzer.analyze_query(make_phrase(term))

    assert query.num_token_slots() == 1

    ranked = sorted((tl.tokens[0].penalty, tl.ttype.name)
                    for tl in query.nodes[0].starting)

    assert [name for _, name in ranked] == order
+
@pytest.mark.asyncio
async def test_category_words_only_at_beginning(conn):
    """ A category word ('in' operator) only yields a NEAR_ITEM token
        at the start of the query, not in later positions.
    """
    analyzer = await tok.create_query_analyzer(conn)
    await add_word(conn, 1, 'foo', 'S', 'FOO', {'op': 'in'})
    await add_word(conn, 2, 'bar', 'w', 'BAR')

    query = await analyzer.analyze_query(make_phrase('foo BAR foo'))

    assert query.num_token_slots() == 3
    first = query.nodes[0].starting
    assert len(first) == 1
    assert first[0].ttype == TokenType.NEAR_ITEM
    assert not query.nodes[2].starting
+
+
@pytest.mark.asyncio
async def test_freestanding_qualifier_words_become_category(conn):
    """ A qualifier term standing alone in the query is reinterpreted
        as a NEAR_ITEM (category) token.
    """
    analyzer = await tok.create_query_analyzer(conn)
    await add_word(conn, 1, 'foo', 'S', 'FOO', {'op': '-'})

    query = await analyzer.analyze_query(make_phrase('foo'))

    assert query.num_token_slots() == 1
    starting = query.nodes[0].starting
    assert len(starting) == 1
    assert starting[0].ttype == TokenType.NEAR_ITEM
+
+
@pytest.mark.asyncio
async def test_qualifier_words(conn):
    """ Qualifier words combined with other terms stay QUALIFIER tokens
        at every position they occur in.
    """
    analyzer = await tok.create_query_analyzer(conn)
    await add_word(conn, 1, 'foo', 'S', None, {'op': '-'})
    await add_word(conn, 2, 'bar', 'w', None)

    query = await analyzer.analyze_query(make_phrase('foo BAR foo BAR foo'))

    assert query.num_token_slots() == 5
    for pos in (0, 2, 4):
        assert {t.ttype for t in query.nodes[pos].starting} == {TokenType.QUALIFIER}
+
+
@pytest.mark.asyncio
async def test_add_unknown_housenumbers(conn):
    """ Housenumber tokens are produced for selected number-like terms
        even when they have no entry in the word table; the remaining
        terms get no starting tokens at all.
    """
    analyzer = await tok.create_query_analyzer(conn)
    await add_word(conn, 1, '23', 'H', '23')

    query = await analyzer.analyze_query(make_phrase('466 23 99834 34a'))

    assert query.num_token_slots() == 4
    for slot, token_id in ((0, 0), (1, 1)):
        starting = query.nodes[slot].starting
        assert starting[0].ttype == TokenType.HOUSENUMBER
        assert len(starting[0].tokens) == 1
        assert starting[0].tokens[0].token == token_id
    assert not query.nodes[2].starting
    assert not query.nodes[3].starting
+
+
@pytest.mark.asyncio
@pytest.mark.parametrize('logtype', ['text', 'html'])
async def test_log_output(conn, logtype):
    """ Query analysis produces debug log output in both log formats.
    """
    analyzer = await tok.create_query_analyzer(conn)
    await add_word(conn, 1, 'foo', 'w', 'FOO')

    set_log_output(logtype)
    await analyzer.analyze_query(make_phrase('foo'))

    assert get_and_disable()
--- /dev/null
+# SPDX-License-Identifier: GPL-3.0-or-later
+#
+# This file is part of Nominatim. (https://nominatim.org)
+#
+# Copyright (C) 2024 by the Nominatim developer community.
+# For a full list of authors see the git log.
+"""
+Test data types for search queries.
+"""
+import pytest
+
+import nominatim_api.search.query as nq
+
def test_token_range_equal():
    """ Ranges with identical start and end compare equal.
    """
    lhs = nq.TokenRange(2, 3)
    rhs = nq.TokenRange(2, 3)

    assert lhs == rhs
    assert not (lhs != rhs)
+
+
@pytest.mark.parametrize('lop,rop', [((1, 2), (3, 4)),
                                     ((3, 4), (3, 5)),
                                     ((10, 12), (11, 12))])
def test_token_range_unequal(lop, rop):
    """ Ranges differing in start or end compare unequal.
    """
    lhs = nq.TokenRange(*lop)
    rhs = nq.TokenRange(*rop)

    assert lhs != rhs
    assert not (lhs == rhs)
+
+
def test_token_range_lt():
    """ '<' holds exactly when the left range ends at or before the
        start of the right range.
    """
    for lop, rop in (((1, 3), (10, 12)),
                     ((5, 6), (7, 8)),
                     ((1, 4), (4, 5))):
        assert nq.TokenRange(*lop) < nq.TokenRange(*rop)

    for lop, rop in (((5, 6), (5, 6)),
                     ((10, 11), (4, 5))):
        assert not (nq.TokenRange(*lop) < nq.TokenRange(*rop))
+
+
def test_token_rankge_gt():
    """ '>' holds exactly when the left range starts at or after the
        end of the right range.
    """
    # NOTE(review): 'rankge' in the function name is a typo for 'range';
    # kept unchanged to preserve the public test id.
    for lop, rop in (((3, 4), (1, 2)),
                     ((100, 200), (10, 11)),
                     ((10, 11), (4, 10))):
        assert nq.TokenRange(*lop) > nq.TokenRange(*rop)

    for lop, rop in (((5, 6), (5, 6)),
                     ((1, 2), (3, 4)),
                     ((4, 10), (3, 5))):
        assert not (nq.TokenRange(*lop) > nq.TokenRange(*rop))
+ assert not(nq.TokenRange(4, 10) > nq.TokenRange(3, 5))
+
+
def test_token_range_unimplemented_ops():
    """ Ordering operators other than '<' and '>' raise TypeError.
    """
    lhs = nq.TokenRange(1, 3)
    rhs = nq.TokenRange(10, 12)

    with pytest.raises(TypeError):
        lhs <= rhs
    with pytest.raises(TypeError):
        lhs >= rhs
--- /dev/null
+# SPDX-License-Identifier: GPL-3.0-or-later
+#
+# This file is part of Nominatim. (https://nominatim.org)
+#
+# Copyright (C) 2024 by the Nominatim developer community.
+# For a full list of authors see the git log.
+"""
+Tests for query analyzer creation.
+"""
+from pathlib import Path
+
+import pytest
+
+from nominatim_api.search.query_analyzer_factory import make_query_analyzer
+from nominatim_api.search.icu_tokenizer import ICUQueryAnalyzer
+
@pytest.mark.asyncio
async def test_import_icu_tokenizer(table_factory, api):
    """ The factory returns an ICU analyzer when the property table
        names the 'icu' tokenizer.
    """
    table_factory('nominatim_properties',
                  definition='property TEXT, value TEXT',
                  content=(('tokenizer', 'icu'),
                           ('tokenizer_import_normalisation', ':: lower();'),
                           ('tokenizer_import_transliteration', "'1' > '/1/'; 'ä' > 'ä '")))

    async with api.begin() as conn:
        analyzer = await make_query_analyzer(conn)

        assert isinstance(analyzer, ICUQueryAnalyzer)
+
+
@pytest.mark.asyncio
async def test_import_missing_property(table_factory, api):
    """ A property table without tokenizer entries raises a ValueError.
    """
    table_factory('nominatim_properties',
                  definition='property TEXT, value TEXT')

    async with api.begin() as conn:
        with pytest.raises(ValueError, match='Property.*not found'):
            await make_query_analyzer(conn)
+
+
@pytest.mark.asyncio
async def test_import_missing_module(table_factory, api):
    """ An unknown tokenizer name in the property table raises
        a RuntimeError.
    """
    table_factory('nominatim_properties',
                  definition='property TEXT, value TEXT',
                  content=(('tokenizer', 'missing'),))

    async with api.begin() as conn:
        with pytest.raises(RuntimeError, match='Tokenizer not found'):
            await make_query_analyzer(conn)
--- /dev/null
+# SPDX-License-Identifier: GPL-3.0-or-later
+#
+# This file is part of Nominatim. (https://nominatim.org)
+#
+# Copyright (C) 2024 by the Nominatim developer community.
+# For a full list of authors see the git log.
+"""
+Tests for running the country searcher.
+"""
+import pytest
+
+import nominatim_api as napi
+from nominatim_api.types import SearchDetails
+from nominatim_api.search.db_searches import CountrySearch
+from nominatim_api.search.db_search_fields import WeightedStrings
+
+
def run_search(apiobj, frontend, global_penalty, ccodes,
               country_penalties=None, details=None):
    """ Build a CountrySearch for the given country codes and run it
        against the test database.

        Parameters:
          apiobj: test database wrapper
          frontend: fixture factory creating the API frontend
          global_penalty: base penalty of the search
          ccodes: list of country codes to search for
          country_penalties: optional per-country penalties
              (defaults to 0.0 for each code)
          details: optional SearchDetails; a fresh default instance is
              created per call

        Returns the list of results from CountrySearch.lookup().
    """
    # Create fresh default objects inside the function. A default of
    # 'details=SearchDetails()' in the signature would be instantiated
    # only once and shared between all invocations, risking state
    # leaking from one test to the next.
    if details is None:
        details = SearchDetails()
    if country_penalties is None:
        country_penalties = [0.0] * len(ccodes)

    class MySearchData:
        penalty = global_penalty
        countries = WeightedStrings(ccodes, country_penalties)

    search = CountrySearch(MySearchData())
    api = frontend(apiobj, options=['search'])

    async def run():
        async with api._async_api.begin() as conn:
            return await search.lookup(conn, details)

    return api._loop.run_until_complete(run())
+
+
def test_find_from_placex(apiobj, frontend):
    """ A country boundary stored in placex is found; global and
        per-country penalties add up to the final accuracy.
    """
    apiobj.add_placex(place_id=55, class_='boundary', type='administrative',
                      rank_search=4, rank_address=4,
                      name={'name': 'Lolaland'},
                      country_code='yw',
                      centroid=(10, 10),
                      geometry='POLYGON((9.5 9.5, 9.5 10.5, 10.5 10.5, 10.5 9.5, 9.5 9.5))')

    results = run_search(apiobj, frontend, 0.5, ['de', 'yw'], [0.0, 0.3])

    assert len(results) == 1
    found = results[0]
    assert found.place_id == 55
    assert found.accuracy == 0.8
+
def test_find_from_fallback_countries(apiobj, frontend):
    """ A country without a placex entry is served from the fallback
        country tables.
    """
    apiobj.add_country('ro', 'POLYGON((0 0, 0 1, 1 1, 1 0, 0 0))')
    apiobj.add_country_name('ro', {'name': 'România'})

    results = run_search(apiobj, frontend, 0.0, ['ro'])

    assert len(results) == 1
    assert results[0].names == {'name': 'România'}
+
+
def test_find_none(apiobj, frontend):
    """ An unknown country code produces an empty result list.
    """
    results = run_search(apiobj, frontend, 0.0, ['xx'])

    assert not results
+
+
@pytest.mark.parametrize('coord,numres', [((0.5, 1), 1), ((10, 10), 0)])
def test_find_near(apiobj, frontend, coord, numres):
    """ With a near filter, a fallback country only matches when the
        point lies within the search radius of its area.
    """
    apiobj.add_country('ro', 'POLYGON((0 0, 0 1, 1 1, 1 0, 0 0))')
    apiobj.add_country_name('ro', {'name': 'România'})

    details = SearchDetails(near=napi.Point(*coord), near_radius=0.1)
    results = run_search(apiobj, frontend, 0.0, ['ro'], details=details)

    assert len(results) == numres
+
+
class TestCountryParameters:
    """ Tests for search parameters applied to country searches:
        geometry output, excluded place ids and bounded viewboxes.
    """

    @pytest.fixture(autouse=True)
    def fill_database(self, apiobj):
        # One country backed by placex ('yw') and one served from the
        # fallback country tables ('ro').
        apiobj.add_placex(place_id=55, class_='boundary', type='administrative',
                          rank_search=4, rank_address=4,
                          name={'name': 'Lolaland'},
                          country_code='yw',
                          centroid=(10, 10),
                          geometry='POLYGON((9.5 9.5, 9.5 10.5, 10.5 10.5, 10.5 9.5, 9.5 9.5))')
        apiobj.add_country('ro', 'POLYGON((0 0, 0 1, 1 1, 1 0, 0 0))')
        apiobj.add_country_name('ro', {'name': 'România'})


    @pytest.mark.parametrize('geom', [napi.GeometryFormat.GEOJSON,
                                      napi.GeometryFormat.KML,
                                      napi.GeometryFormat.SVG,
                                      napi.GeometryFormat.TEXT])
    @pytest.mark.parametrize('cc', ['yw', 'ro'])
    def test_return_geometries(self, apiobj, frontend, geom, cc):
        # The requested geometry format must be present in the result
        # for placex-based as well as fallback countries.
        results = run_search(apiobj, frontend, 0.5, [cc],
                             details=SearchDetails(geometry_output=geom))

        assert len(results) == 1
        assert geom.name.lower() in results[0].geometry


    @pytest.mark.parametrize('pid,rids', [(76, [55]), (55, [])])
    def test_exclude_place_id(self, apiobj, frontend, pid, rids):
        # Excluding the found place id removes it from the results.
        results = run_search(apiobj, frontend, 0.5, ['yw', 'ro'],
                             details=SearchDetails(excluded=[pid]))

        assert [r.place_id for r in results] == rids


    @pytest.mark.parametrize('viewbox,rids', [((9, 9, 11, 11), [55]),
                                              ((-10, -10, -3, -3), [])])
    def test_bounded_viewbox_in_placex(self, apiobj, frontend, viewbox, rids):
        # A bounded viewbox restricts placex-based country results.
        results = run_search(apiobj, frontend, 0.5, ['yw'],
                             details=SearchDetails.from_kwargs({'viewbox': viewbox,
                                                                'bounded_viewbox': True}))

        assert [r.place_id for r in results] == rids


    @pytest.mark.parametrize('viewbox,numres', [((0, 0, 1, 1), 1),
                                                ((-10, -10, -3, -3), 0)])
    def test_bounded_viewbox_in_fallback(self, apiobj, frontend, viewbox, numres):
        # A bounded viewbox also restricts fallback-table results.
        results = run_search(apiobj, frontend, 0.5, ['ro'],
                             details=SearchDetails.from_kwargs({'viewbox': viewbox,
                                                                'bounded_viewbox': True}))

        assert len(results) == numres
--- /dev/null
+# SPDX-License-Identifier: GPL-3.0-or-later
+#
+# This file is part of Nominatim. (https://nominatim.org)
+#
+# Copyright (C) 2024 by the Nominatim developer community.
+# For a full list of authors see the git log.
+"""
+Tests for running the near searcher.
+"""
+import pytest
+
+import nominatim_api as napi
+from nominatim_api.types import SearchDetails
+from nominatim_api.search.db_searches import NearSearch, PlaceSearch
+from nominatim_api.search.db_search_fields import WeightedStrings, WeightedCategories,\
+ FieldLookup, FieldRanking, RankedTokens
+from nominatim_api.search.db_search_lookups import LookupAll
+
+
def run_search(apiobj, frontend, global_penalty, cat, cat_penalty=None, ccodes=None,
               details=None):
    """ Build a NearSearch wrapped around a simple PlaceSearch on name
        term 56 and run it against the test database.

        Parameters:
          apiobj: test database wrapper
          frontend: fixture factory creating the API frontend
          global_penalty: currently unused by the inner search data
          cat: list of (class, type) category tuples for the near search
          cat_penalty: optional per-category penalties (defaults to 0.0)
          ccodes: optional list of country codes restricting the search
          details: optional SearchDetails; a fresh instance is created
              per call

        Returns the results sorted by accuracy.
    """
    # Normalise mutable defaults per call. The previous signature used
    # 'ccodes=[]' and 'details=SearchDetails()'; since details.countries
    # is assigned below, the single shared default instance was mutated
    # and leaked state between invocations.
    if ccodes is None:
        ccodes = []
    if details is None:
        details = SearchDetails()

    class PlaceSearchData:
        penalty = 0.0
        postcodes = WeightedStrings([], [])
        countries = WeightedStrings(ccodes, [0.0] * len(ccodes))
        housenumbers = WeightedStrings([], [])
        qualifiers = WeightedStrings([], [])
        lookups = [FieldLookup('name_vector', [56], LookupAll)]
        rankings = []

    details.countries = ccodes

    place_search = PlaceSearch(0.0, PlaceSearchData(), 2)

    if cat_penalty is None:
        cat_penalty = [0.0] * len(cat)

    near_search = NearSearch(0.1, WeightedCategories(cat, cat_penalty), place_search)

    api = frontend(apiobj, options=['search'])

    async def run():
        async with api._async_api.begin() as conn:
            return await near_search.lookup(conn, details)

    results = api._loop.run_until_complete(run())
    results.sort(key=lambda r: r.accuracy)

    return results
+
+
def test_no_results_inner_query(apiobj, frontend):
    """ When the inner place search matches nothing, the near search
        returns no results either.
    """
    assert not run_search(apiobj, frontend, 0.4, [('this', 'that')])
+
+
def test_no_appropriate_results_inner_query(apiobj, frontend):
    """ A place of the requested category is not returned when it does
        not fit the area of the inner search result.
    """
    apiobj.add_placex(place_id=100, country_code='us',
                      centroid=(5.6, 4.3),
                      geometry='POLYGON((0.0 0.0, 10.0 0.0, 10.0 2.0, 0.0 2.0, 0.0 0.0))')
    apiobj.add_search_name(100, names=[56], country_code='us',
                           centroid=(5.6, 4.3))
    apiobj.add_placex(place_id=22, class_='amenity', type='bank',
                      centroid=(5.6001, 4.2994))

    assert not run_search(apiobj, frontend, 0.4, [('amenity', 'bank')])
+
+
class TestNearSearch:
    """ Tests for near searches against a database with two inner-search
        places for name term 56: one in the US, one in Mexico.
    """

    @pytest.fixture(autouse=True)
    def fill_database(self, apiobj):
        apiobj.add_placex(place_id=100, country_code='us',
                          centroid=(5.6, 4.3))
        apiobj.add_search_name(100, names=[56], country_code='us',
                               centroid=(5.6, 4.3))
        apiobj.add_placex(place_id=101, country_code='mx',
                          centroid=(-10.3, 56.9))
        apiobj.add_search_name(101, names=[56], country_code='mx',
                               centroid=(-10.3, 56.9))


    def test_near_in_placex(self, apiobj, frontend):
        # Only places of the requested category are returned
        # (bank, not bench).
        apiobj.add_placex(place_id=22, class_='amenity', type='bank',
                          centroid=(5.6001, 4.2994))
        apiobj.add_placex(place_id=23, class_='amenity', type='bench',
                          centroid=(5.6001, 4.2994))

        results = run_search(apiobj, frontend, 0.1, [('amenity', 'bank')])

        assert [r.place_id for r in results] == [22]


    def test_multiple_types_near_in_placex(self, apiobj, frontend):
        # With multiple requested categories, the result with higher
        # importance comes first.
        apiobj.add_placex(place_id=22, class_='amenity', type='bank',
                          importance=0.002,
                          centroid=(5.6001, 4.2994))
        apiobj.add_placex(place_id=23, class_='amenity', type='bench',
                          importance=0.001,
                          centroid=(5.6001, 4.2994))

        results = run_search(apiobj, frontend, 0.1, [('amenity', 'bank'),
                                                     ('amenity', 'bench')])

        assert [r.place_id for r in results] == [22, 23]


    def test_near_in_classtype(self, apiobj, frontend):
        # Categories registered in the class-type helper tables are
        # matched as well; again only the requested category is returned.
        apiobj.add_placex(place_id=22, class_='amenity', type='bank',
                          centroid=(5.6, 4.34))
        apiobj.add_placex(place_id=23, class_='amenity', type='bench',
                          centroid=(5.6, 4.34))
        apiobj.add_class_type_table('amenity', 'bank')
        apiobj.add_class_type_table('amenity', 'bench')

        results = run_search(apiobj, frontend, 0.1, [('amenity', 'bank')])

        assert [r.place_id for r in results] == [22]


    @pytest.mark.parametrize('cc,rid', [('us', 22), ('mx', 23)])
    def test_restrict_by_country(self, apiobj, frontend, cc, rid):
        # The country restriction filters the near results: only the
        # bank in the requested country and near its inner result.
        apiobj.add_placex(place_id=22, class_='amenity', type='bank',
                          centroid=(5.6001, 4.2994),
                          country_code='us')
        apiobj.add_placex(place_id=122, class_='amenity', type='bank',
                          centroid=(5.6001, 4.2994),
                          country_code='mx')
        apiobj.add_placex(place_id=23, class_='amenity', type='bank',
                          centroid=(-10.3001, 56.9),
                          country_code='mx')
        apiobj.add_placex(place_id=123, class_='amenity', type='bank',
                          centroid=(-10.3001, 56.9),
                          country_code='us')

        results = run_search(apiobj, frontend, 0.1, [('amenity', 'bank')], ccodes=[cc, 'fr'])

        assert [r.place_id for r in results] == [rid]


    @pytest.mark.parametrize('excluded,rid', [(22, 122), (122, 22)])
    def test_exclude_place_by_id(self, apiobj, frontend, excluded, rid):
        # Excluded place ids are dropped from the near results.
        apiobj.add_placex(place_id=22, class_='amenity', type='bank',
                          centroid=(5.6001, 4.2994),
                          country_code='us')
        apiobj.add_placex(place_id=122, class_='amenity', type='bank',
                          centroid=(5.6001, 4.2994),
                          country_code='us')


        results = run_search(apiobj, frontend, 0.1, [('amenity', 'bank')],
                             details=SearchDetails(excluded=[excluded]))

        assert [r.place_id for r in results] == [rid]


    @pytest.mark.parametrize('layer,rids', [(napi.DataLayer.POI, [22]),
                                            (napi.DataLayer.MANMADE, [])])
    def test_with_layer(self, apiobj, frontend, layer, rids):
        # The layer filter is applied to the near results.
        apiobj.add_placex(place_id=22, class_='amenity', type='bank',
                          centroid=(5.6001, 4.2994),
                          country_code='us')

        results = run_search(apiobj, frontend, 0.1, [('amenity', 'bank')],
                             details=SearchDetails(layers=layer))

        assert [r.place_id for r in results] == rids
--- /dev/null
+# SPDX-License-Identifier: GPL-3.0-or-later
+#
+# This file is part of Nominatim. (https://nominatim.org)
+#
+# Copyright (C) 2024 by the Nominatim developer community.
+# For a full list of authors see the git log.
+"""
+Tests for running the generic place searcher.
+"""
+import json
+
+import pytest
+
+import nominatim_api as napi
+from nominatim_api.types import SearchDetails
+from nominatim_api.search.db_searches import PlaceSearch
+from nominatim_api.search.db_search_fields import WeightedStrings, WeightedCategories,\
+ FieldLookup, FieldRanking, RankedTokens
+from nominatim_api.search.db_search_lookups import LookupAll, LookupAny, Restrict
+
# API load options used by the test frontend; only the search endpoint
# is exercised in this module.
APIOPTIONS = ['search']
+
def run_search(apiobj, frontend, global_penalty, lookup, ranking, count=2,
               hnrs=None, pcs=None, ccodes=None, quals=None,
               details=None):
    """ Build a PlaceSearch from the given lookups/rankings and run it
        against the test database.

        Parameters:
          apiobj: test database wrapper, or a ready-made API object
              when frontend is None
          frontend: fixture factory creating the API frontend, or None
              to reuse apiobj directly
          global_penalty: base penalty of the search
          lookup: list of FieldLookup objects
          ranking: list of FieldRanking objects
          count: expected number of results used by the searcher
          hnrs, pcs, ccodes, quals: optional housenumbers, postcodes,
              country codes and qualifier categories (default: empty)
          details: optional SearchDetails; a fresh instance is created
              per call

        Returns the results sorted by accuracy.
    """
    # Normalise mutable defaults inside the function. Signature defaults
    # like 'hnrs=[]' or 'details=SearchDetails()' are instantiated once
    # and shared between all calls, risking state leaking between tests.
    hnrs = [] if hnrs is None else hnrs
    pcs = [] if pcs is None else pcs
    ccodes = [] if ccodes is None else ccodes
    quals = [] if quals is None else quals
    if details is None:
        details = SearchDetails()

    class MySearchData:
        penalty = global_penalty
        postcodes = WeightedStrings(pcs, [0.0] * len(pcs))
        countries = WeightedStrings(ccodes, [0.0] * len(ccodes))
        housenumbers = WeightedStrings(hnrs, [0.0] * len(hnrs))
        qualifiers = WeightedCategories(quals, [0.0] * len(quals))
        lookups = lookup
        rankings = ranking

    search = PlaceSearch(0.0, MySearchData(), count)

    # Some tests pass a prepared API object as 'apiobj' with
    # frontend=None to reuse the same frontend across calls.
    if frontend is None:
        api = apiobj
    else:
        api = frontend(apiobj, options=APIOPTIONS)

    async def run():
        async with api._async_api.begin() as conn:
            return await search.lookup(conn, details)

    results = api._loop.run_until_complete(run())
    results.sort(key=lambda r: r.accuracy)

    return results
+
+
class TestNameOnlySearches:
    """ Tests for plain name-term searches without housenumbers or
        qualifiers. Two places share partial terms 1 and 2: one in the
        US (full terms 10/11), one in Mexico (full terms 20/21).
    """

    @pytest.fixture(autouse=True)
    def fill_database(self, apiobj):
        apiobj.add_placex(place_id=100, country_code='us',
                          centroid=(5.6, 4.3))
        apiobj.add_search_name(100, names=[1,2,10,11], country_code='us',
                               centroid=(5.6, 4.3))
        apiobj.add_placex(place_id=101, country_code='mx',
                          centroid=(-10.3, 56.9))
        apiobj.add_search_name(101, names=[1,2,20,21], country_code='mx',
                               centroid=(-10.3, 56.9))


    @pytest.mark.parametrize('lookup_type', [LookupAll, Restrict])
    @pytest.mark.parametrize('rank,res', [([10], [100, 101]),
                                          ([20], [101, 100])])
    def test_lookup_all_match(self, apiobj, frontend, lookup_type, rank, res):
        # Both places contain terms 1 and 2; the ranking term decides
        # the order of the results.
        lookup = FieldLookup('name_vector', [1,2], lookup_type)
        ranking = FieldRanking('name_vector', 0.4, [RankedTokens(0.0, rank)])

        results = run_search(apiobj, frontend, 0.1, [lookup], [ranking])

        assert [r.place_id for r in results] == res


    @pytest.mark.parametrize('lookup_type', [LookupAll, Restrict])
    def test_lookup_all_partial_match(self, apiobj, frontend, lookup_type):
        # Only place 101 carries both terms 1 and 20.
        lookup = FieldLookup('name_vector', [1,20], lookup_type)
        ranking = FieldRanking('name_vector', 0.4, [RankedTokens(0.0, [21])])

        results = run_search(apiobj, frontend, 0.1, [lookup], [ranking])

        assert len(results) == 1
        assert results[0].place_id == 101

    @pytest.mark.parametrize('rank,res', [([10], [100, 101]),
                                          ([20], [101, 100])])
    def test_lookup_any_match(self, apiobj, frontend, rank, res):
        # LookupAny matches places containing any of the given terms.
        lookup = FieldLookup('name_vector', [11,21], LookupAny)
        ranking = FieldRanking('name_vector', 0.4, [RankedTokens(0.0, rank)])

        results = run_search(apiobj, frontend, 0.1, [lookup], [ranking])

        assert [r.place_id for r in results] == res


    def test_lookup_any_partial_match(self, apiobj, frontend):
        # Term 20 only appears in place 101.
        lookup = FieldLookup('name_vector', [20], LookupAll)
        ranking = FieldRanking('name_vector', 0.4, [RankedTokens(0.0, [21])])

        results = run_search(apiobj, frontend, 0.1, [lookup], [ranking])

        assert len(results) == 1
        assert results[0].place_id == 101


    @pytest.mark.parametrize('cc,res', [('us', 100), ('mx', 101)])
    def test_lookup_restrict_country(self, apiobj, frontend, cc, res):
        # Restricting by country code keeps only the place in that country.
        lookup = FieldLookup('name_vector', [1,2], LookupAll)
        ranking = FieldRanking('name_vector', 0.4, [RankedTokens(0.0, [10])])

        results = run_search(apiobj, frontend, 0.1, [lookup], [ranking], ccodes=[cc])

        assert [r.place_id for r in results] == [res]


    def test_lookup_restrict_placeid(self, apiobj, frontend):
        # Excluded place ids are dropped from the results.
        lookup = FieldLookup('name_vector', [1,2], LookupAll)
        ranking = FieldRanking('name_vector', 0.4, [RankedTokens(0.0, [10])])

        results = run_search(apiobj, frontend, 0.1, [lookup], [ranking],
                             details=SearchDetails(excluded=[101]))

        assert [r.place_id for r in results] == [100]


    @pytest.mark.parametrize('geom', [napi.GeometryFormat.GEOJSON,
                                      napi.GeometryFormat.KML,
                                      napi.GeometryFormat.SVG,
                                      napi.GeometryFormat.TEXT])
    def test_return_geometries(self, apiobj, frontend, geom):
        # The requested geometry format must show up in the result.
        lookup = FieldLookup('name_vector', [20], LookupAll)
        ranking = FieldRanking('name_vector', 0.4, [RankedTokens(0.0, [21])])

        results = run_search(apiobj, frontend, 0.1, [lookup], [ranking],
                             details=SearchDetails(geometry_output=geom))

        assert geom.name.lower() in results[0].geometry


    @pytest.mark.parametrize('factor,npoints', [(0.0, 3), (1.0, 2)])
    def test_return_simplified_geometry(self, apiobj, frontend, factor, npoints):
        # A higher simplification factor reduces the number of points
        # in the returned line geometry.
        apiobj.add_placex(place_id=333, country_code='us',
                          centroid=(9.0, 9.0),
                          geometry='LINESTRING(8.9 9.0, 9.0 9.0, 9.1 9.0)')
        apiobj.add_search_name(333, names=[55], country_code='us',
                               centroid=(5.6, 4.3))

        lookup = FieldLookup('name_vector', [55], LookupAll)
        ranking = FieldRanking('name_vector', 0.4, [RankedTokens(0.0, [21])])

        results = run_search(apiobj, frontend, 0.1, [lookup], [ranking],
                             details=SearchDetails(geometry_output=napi.GeometryFormat.GEOJSON,
                                                   geometry_simplification=factor))

        assert len(results) == 1
        result = results[0]
        geom = json.loads(result.geometry['geojson'])

        assert result.place_id == 333
        assert len(geom['coordinates']) == npoints


    @pytest.mark.parametrize('viewbox', ['5.0,4.0,6.0,5.0', '5.7,4.0,6.0,5.0'])
    @pytest.mark.parametrize('wcount,rids', [(2, [100, 101]), (20000, [100])])
    def test_prefer_viewbox(self, apiobj, frontend, viewbox, wcount, rids):
        # An unbounded viewbox changes the ranking of the results;
        # with a very high expected count only the viewbox hit remains.
        lookup = FieldLookup('name_vector', [1, 2], LookupAll)
        ranking = FieldRanking('name_vector', 0.2, [RankedTokens(0.0, [21])])

        api = frontend(apiobj, options=APIOPTIONS)
        results = run_search(api, None, 0.1, [lookup], [ranking])
        assert [r.place_id for r in results] == [101, 100]

        results = run_search(api, None, 0.1, [lookup], [ranking], count=wcount,
                             details=SearchDetails.from_kwargs({'viewbox': viewbox}))
        assert [r.place_id for r in results] == rids


    @pytest.mark.parametrize('viewbox', ['5.0,4.0,6.0,5.0', '5.55,4.27,5.62,4.31'])
    def test_force_viewbox(self, apiobj, frontend, viewbox):
        # A bounded viewbox drops results outside the box entirely.
        lookup = FieldLookup('name_vector', [1, 2], LookupAll)

        details=SearchDetails.from_kwargs({'viewbox': viewbox,
                                           'bounded_viewbox': True})

        results = run_search(apiobj, frontend, 0.1, [lookup], [], details=details)
        assert [r.place_id for r in results] == [100]


    def test_prefer_near(self, apiobj, frontend):
        # A near point without a radius only influences the ranking.
        lookup = FieldLookup('name_vector', [1, 2], LookupAll)
        ranking = FieldRanking('name_vector', 0.4, [RankedTokens(0.0, [21])])

        api = frontend(apiobj, options=APIOPTIONS)
        results = run_search(api, None, 0.1, [lookup], [ranking])
        assert [r.place_id for r in results] == [101, 100]

        results = run_search(api, None, 0.1, [lookup], [ranking],
                             details=SearchDetails.from_kwargs({'near': '5.6,4.3'}))
        results.sort(key=lambda r: -r.importance)
        assert [r.place_id for r in results] == [100, 101]


    @pytest.mark.parametrize('radius', [0.09, 0.11])
    def test_force_near(self, apiobj, frontend, radius):
        # A near point with an explicit radius restricts the results
        # to the surroundings of that point.
        lookup = FieldLookup('name_vector', [1, 2], LookupAll)

        details=SearchDetails.from_kwargs({'near': '5.6,4.3',
                                           'near_radius': radius})

        results = run_search(apiobj, frontend, 0.1, [lookup], [], details=details)

        assert [r.place_id for r in results] == [100]
+
+
class TestStreetWithHousenumber:
    """ Tests for searching a street name together with a housenumber.
        Fixture data: a street in Spain (1000) with houses '20 a' and
        '21;22', and a street in Portugal (2000) with houses 20, 22, 24.
    """

    @pytest.fixture(autouse=True)
    def fill_database(self, apiobj):
        apiobj.add_placex(place_id=1, class_='place', type='house',
                          parent_place_id=1000,
                          housenumber='20 a', country_code='es')
        apiobj.add_placex(place_id=2, class_='place', type='house',
                          parent_place_id=1000,
                          housenumber='21;22', country_code='es')
        apiobj.add_placex(place_id=1000, class_='highway', type='residential',
                          rank_search=26, rank_address=26,
                          country_code='es')
        apiobj.add_search_name(1000, names=[1,2,10,11],
                               search_rank=26, address_rank=26,
                               country_code='es')
        apiobj.add_placex(place_id=91, class_='place', type='house',
                          parent_place_id=2000,
                          housenumber='20', country_code='pt')
        apiobj.add_placex(place_id=92, class_='place', type='house',
                          parent_place_id=2000,
                          housenumber='22', country_code='pt')
        apiobj.add_placex(place_id=93, class_='place', type='house',
                          parent_place_id=2000,
                          housenumber='24', country_code='pt')
        apiobj.add_placex(place_id=2000, class_='highway', type='residential',
                          rank_search=26, rank_address=26,
                          country_code='pt')
        apiobj.add_search_name(2000, names=[1,2,20,21],
                               search_rank=26, address_rank=26,
                               country_code='pt')


    @pytest.mark.parametrize('hnr,res', [('20', [91, 1]), ('20 a', [1]),
                                         ('21', [2]), ('22', [2, 92]),
                                         ('24', [93]), ('25', [])])
    def test_lookup_by_single_housenumber(self, apiobj, frontend, hnr, res):
        # Matching houses come first; the streets themselves are
        # appended to the result list.
        lookup = FieldLookup('name_vector', [1,2], LookupAll)
        ranking = FieldRanking('name_vector', 0.3, [RankedTokens(0.0, [10])])

        results = run_search(apiobj, frontend, 0.1, [lookup], [ranking], hnrs=[hnr])

        assert [r.place_id for r in results] == res + [1000, 2000]


    @pytest.mark.parametrize('cc,res', [('es', [2, 1000]), ('pt', [92, 2000])])
    def test_lookup_with_country_restriction(self, apiobj, frontend, cc, res):
        # Country restriction keeps only house and street of that country.
        lookup = FieldLookup('name_vector', [1,2], LookupAll)
        ranking = FieldRanking('name_vector', 0.3, [RankedTokens(0.0, [10])])

        results = run_search(apiobj, frontend, 0.1, [lookup], [ranking], hnrs=['22'],
                             ccodes=[cc])

        assert [r.place_id for r in results] == res


    def test_lookup_exclude_housenumber_placeid(self, apiobj, frontend):
        # Excluding a house id removes only that house from the results.
        lookup = FieldLookup('name_vector', [1,2], LookupAll)
        ranking = FieldRanking('name_vector', 0.3, [RankedTokens(0.0, [10])])

        results = run_search(apiobj, frontend, 0.1, [lookup], [ranking], hnrs=['22'],
                             details=SearchDetails(excluded=[92]))

        assert [r.place_id for r in results] == [2, 1000, 2000]


    def test_lookup_exclude_street_placeid(self, apiobj, frontend):
        # Excluding a street id removes only that street from the results.
        lookup = FieldLookup('name_vector', [1,2], LookupAll)
        ranking = FieldRanking('name_vector', 0.3, [RankedTokens(0.0, [10])])

        results = run_search(apiobj, frontend, 0.1, [lookup], [ranking], hnrs=['22'],
                             details=SearchDetails(excluded=[1000]))

        assert [r.place_id for r in results] == [2, 92, 2000]


    def test_lookup_only_house_qualifier(self, apiobj, frontend):
        # A house qualifier restricts results to the house entries.
        lookup = FieldLookup('name_vector', [1,2], LookupAll)
        ranking = FieldRanking('name_vector', 0.3, [RankedTokens(0.0, [10])])

        results = run_search(apiobj, frontend, 0.1, [lookup], [ranking], hnrs=['22'],
                             quals=[('place', 'house')])

        assert [r.place_id for r in results] == [2, 92]


    def test_lookup_only_street_qualifier(self, apiobj, frontend):
        # A street qualifier restricts results to the street entries.
        lookup = FieldLookup('name_vector', [1,2], LookupAll)
        ranking = FieldRanking('name_vector', 0.3, [RankedTokens(0.0, [10])])

        results = run_search(apiobj, frontend, 0.1, [lookup], [ranking], hnrs=['22'],
                             quals=[('highway', 'residential')])

        assert [r.place_id for r in results] == [1000, 2000]


    @pytest.mark.parametrize('rank,found', [(26, True), (27, False), (30, False)])
    def test_lookup_min_rank(self, apiobj, frontend, rank, found):
        # Streets (rank 26) disappear when the minimum rank exceeds 26.
        lookup = FieldLookup('name_vector', [1,2], LookupAll)
        ranking = FieldRanking('name_vector', 0.3, [RankedTokens(0.0, [10])])

        results = run_search(apiobj, frontend, 0.1, [lookup], [ranking], hnrs=['22'],
                             details=SearchDetails(min_rank=rank))

        assert [r.place_id for r in results] == ([2, 92, 1000, 2000] if found else [2, 92])


    @pytest.mark.parametrize('geom', [napi.GeometryFormat.GEOJSON,
                                      napi.GeometryFormat.KML,
                                      napi.GeometryFormat.SVG,
                                      napi.GeometryFormat.TEXT])
    def test_return_geometries(self, apiobj, frontend, geom):
        # Geometry output works for housenumber searches in all formats.
        lookup = FieldLookup('name_vector', [1, 2], LookupAll)

        results = run_search(apiobj, frontend, 0.1, [lookup], [], hnrs=['20', '21', '22'],
                             details=SearchDetails(geometry_output=geom))

        assert results
        assert all(geom.name.lower() in r.geometry for r in results)
+
+
def test_very_large_housenumber(apiobj, frontend):
    """ Housenumbers well beyond the usual numeric range are still
        matched against their street.
    """
    apiobj.add_placex(place_id=93, class_='place', type='house',
                      parent_place_id=2000,
                      housenumber='2467463524544', country_code='pt')
    apiobj.add_placex(place_id=2000, class_='highway', type='residential',
                      rank_search=26, rank_address=26,
                      country_code='pt')
    apiobj.add_search_name(2000, names=[1,2],
                           search_rank=26, address_rank=26,
                           country_code='pt')

    lookup = FieldLookup('name_vector', [1, 2], LookupAll)

    results = run_search(apiobj, frontend, 0.1, [lookup], [],
                         hnrs=['2467463524544'], details=SearchDetails())

    assert results
    assert [r.place_id for r in results] == [93, 2000]
+
+
@pytest.mark.parametrize('wcount,rids', [(2, [990, 991]), (30000, [990])])
def test_name_and_postcode(apiobj, frontend, wcount, rids):
    """ Search with a name and a postcode; with a very high expected
        count only the result matching the postcode remains.
    """
    # Two streets with the same name term; only 990 carries the
    # searched postcode 11225.
    apiobj.add_placex(place_id=990, class_='highway', type='service',
                      rank_search=27, rank_address=27,
                      postcode='11225',
                      centroid=(10.0, 10.0),
                      geometry='LINESTRING(9.995 10, 10.005 10)')
    apiobj.add_search_name(990, names=[111], centroid=(10.0, 10.0),
                           search_rank=27, address_rank=27)
    apiobj.add_placex(place_id=991, class_='highway', type='service',
                      rank_search=27, rank_address=27,
                      postcode='11221',
                      centroid=(10.3, 10.3),
                      geometry='LINESTRING(9.995 10.3, 10.005 10.3)')
    apiobj.add_search_name(991, names=[111], centroid=(10.3, 10.3),
                           search_rank=27, address_rank=27)
    apiobj.add_postcode(place_id=100, country_code='ch', postcode='11225',
                        geometry='POINT(10 10)')

    lookup = FieldLookup('name_vector', [111], LookupAll)

    results = run_search(apiobj, frontend, 0.1, [lookup], [],
                         pcs=['11225'], count=wcount,
                         details=SearchDetails())

    assert results
    assert [r.place_id for r in results] == rids
+
+
class TestInterpolations:
    """ Tests for housenumber searches hitting interpolation lines.
        Fixture data: a street (990) with an exact house number 23 (991)
        and an interpolation covering odd numbers 21-29 (992).
    """

    @pytest.fixture(autouse=True)
    def fill_database(self, apiobj):
        apiobj.add_placex(place_id=990, class_='highway', type='service',
                          rank_search=27, rank_address=27,
                          centroid=(10.0, 10.0),
                          geometry='LINESTRING(9.995 10, 10.005 10)')
        apiobj.add_search_name(990, names=[111],
                               search_rank=27, address_rank=27)
        apiobj.add_placex(place_id=991, class_='place', type='house',
                          parent_place_id=990,
                          rank_search=30, rank_address=30,
                          housenumber='23',
                          centroid=(10.0, 10.00002))
        apiobj.add_osmline(place_id=992,
                           parent_place_id=990,
                           startnumber=21, endnumber=29, step=2,
                           centroid=(10.0, 10.00001),
                           geometry='LINESTRING(9.995 10.00001, 10.005 10.00001)')


    @pytest.mark.parametrize('hnr,res', [('21', [992]), ('22', []), ('23', [991])])
    def test_lookup_housenumber(self, apiobj, frontend, hnr, res):
        # Exact placex houses win over the interpolation; numbers not
        # covered by the interpolation step yield only the street.
        lookup = FieldLookup('name_vector', [111], LookupAll)

        results = run_search(apiobj, frontend, 0.1, [lookup], [], hnrs=[hnr])

        assert [r.place_id for r in results] == res + [990]


    @pytest.mark.parametrize('geom', [napi.GeometryFormat.GEOJSON,
                                      napi.GeometryFormat.KML,
                                      napi.GeometryFormat.SVG,
                                      napi.GeometryFormat.TEXT])
    def test_osmline_with_geometries(self, apiobj, frontend, geom):
        # Geometry output also works for interpolation results.
        lookup = FieldLookup('name_vector', [111], LookupAll)

        results = run_search(apiobj, frontend, 0.1, [lookup], [], hnrs=['21'],
                             details=SearchDetails(geometry_output=geom))

        assert results[0].place_id == 992
        assert geom.name.lower() in results[0].geometry
+
+
+
class TestTiger:
    """Tests for housenumber lookup against Tiger interpolation data."""

    @pytest.fixture(autouse=True)
    def fill_database(self, apiobj):
        # US street 990 with one explicit house (991, number 23) and a
        # Tiger interpolation line 992 covering numbers 21-29 in steps of 2.
        apiobj.add_placex(place_id=990, class_='highway', type='service',
                          rank_search=27, rank_address=27,
                          country_code='us',
                          centroid=(10.0, 10.0),
                          geometry='LINESTRING(9.995 10, 10.005 10)')
        apiobj.add_search_name(990, names=[111], country_code='us',
                               search_rank=27, address_rank=27)
        apiobj.add_placex(place_id=991, class_='place', type='house',
                          parent_place_id=990,
                          rank_search=30, rank_address=30,
                          housenumber='23',
                          country_code='us',
                          centroid=(10.0, 10.00002))
        apiobj.add_tiger(place_id=992,
                         parent_place_id=990,
                         startnumber=21, endnumber=29, step=2,
                         centroid=(10.0, 10.00001),
                         geometry='LINESTRING(9.995 10.00001, 10.005 10.00001)')


    @pytest.mark.parametrize('hnr,res', [('21', [992]), ('22', []), ('23', [991])])
    def test_lookup_housenumber(self, apiobj, frontend, hnr, res):
        # The parent street (990) is always appended as a fallback result.
        lookup = FieldLookup('name_vector', [111], LookupAll)

        results = run_search(apiobj, frontend, 0.1, [lookup], [], hnrs=[hnr])

        assert [r.place_id for r in results] == res + [990]


    @pytest.mark.parametrize('geom', [napi.GeometryFormat.GEOJSON,
                                      napi.GeometryFormat.KML,
                                      napi.GeometryFormat.SVG,
                                      napi.GeometryFormat.TEXT])
    def test_tiger_with_geometries(self, apiobj, frontend, geom):
        # Tiger results must honor the requested geometry output format.
        lookup = FieldLookup('name_vector', [111], LookupAll)

        results = run_search(apiobj, frontend, 0.1, [lookup], [], hnrs=['21'],
                             details=SearchDetails(geometry_output=geom))

        assert results[0].place_id == 992
        assert geom.name.lower() in results[0].geometry
+
+
class TestLayersRank30:
    """Tests for layer filtering among rank-30 objects of different classes."""

    @pytest.fixture(autouse=True)
    def fill_database(self, apiobj):
        # Five rank-30 places sharing search token 34, one per data layer,
        # with strictly decreasing importance so the result order is fixed.
        apiobj.add_placex(place_id=223, class_='place', type='house',
                          housenumber='1',
                          rank_address=30,
                          rank_search=30)
        apiobj.add_search_name(223, names=[34],
                               importance=0.0009,
                               address_rank=30, search_rank=30)
        apiobj.add_placex(place_id=224, class_='amenity', type='toilet',
                          rank_address=30,
                          rank_search=30)
        apiobj.add_search_name(224, names=[34],
                               importance=0.0008,
                               address_rank=30, search_rank=30)
        apiobj.add_placex(place_id=225, class_='man_made', type='tower',
                          rank_address=0,
                          rank_search=30)
        apiobj.add_search_name(225, names=[34],
                               importance=0.0007,
                               address_rank=0, search_rank=30)
        apiobj.add_placex(place_id=226, class_='railway', type='station',
                          rank_address=0,
                          rank_search=30)
        apiobj.add_search_name(226, names=[34],
                               importance=0.0006,
                               address_rank=0, search_rank=30)
        apiobj.add_placex(place_id=227, class_='natural', type='cave',
                          rank_address=0,
                          rank_search=30)
        apiobj.add_search_name(227, names=[34],
                               importance=0.0005,
                               address_rank=0, search_rank=30)


    @pytest.mark.parametrize('layer,res', [(napi.DataLayer.ADDRESS, [223]),
                                           (napi.DataLayer.POI, [224]),
                                           (napi.DataLayer.ADDRESS | napi.DataLayer.POI, [223, 224]),
                                           (napi.DataLayer.MANMADE, [225]),
                                           (napi.DataLayer.RAILWAY, [226]),
                                           (napi.DataLayer.NATURAL, [227]),
                                           (napi.DataLayer.MANMADE | napi.DataLayer.NATURAL, [225, 227]),
                                           (napi.DataLayer.MANMADE | napi.DataLayer.RAILWAY, [225, 226])])
    def test_layers_rank30(self, apiobj, frontend, layer, res):
        # Layer flags may be combined with '|'; only matching places are returned.
        lookup = FieldLookup('name_vector', [34], LookupAny)

        results = run_search(apiobj, frontend, 0.1, [lookup], [],
                             details=SearchDetails(layers=layer))

        assert [r.place_id for r in results] == res
--- /dev/null
+# SPDX-License-Identifier: GPL-3.0-or-later
+#
+# This file is part of Nominatim. (https://nominatim.org)
+#
+# Copyright (C) 2024 by the Nominatim developer community.
+# For a full list of authors see the git log.
+"""
+Tests for running the POI searcher.
+"""
+import pytest
+
+import nominatim_api as napi
+from nominatim_api.types import SearchDetails
+from nominatim_api.search.db_searches import PoiSearch
+from nominatim_api.search.db_search_fields import WeightedStrings, WeightedCategories
+
+
def run_search(apiobj, frontend, global_penalty, poitypes, poi_penalties=None,
               ccodes=None, details=None):
    """Build a PoiSearch from the given parameters and run it synchronously.

    poitypes is a list of (class, type) tuples; poi_penalties lists the
    matching per-type penalties (all 0.0 when omitted).  ccodes optionally
    restricts the search to the given country codes.

    Note: the previous defaults 'ccodes=[]' and 'details=SearchDetails()'
    were mutable/shared objects evaluated once at definition time; they are
    now created per call.  Passing explicit arguments behaves as before.
    """
    if poi_penalties is None:
        poi_penalties = [0.0] * len(poitypes)
    if ccodes is None:
        ccodes = []
    if details is None:
        details = SearchDetails()

    class MySearchData:
        penalty = global_penalty
        qualifiers = WeightedCategories(poitypes, poi_penalties)
        countries = WeightedStrings(ccodes, [0.0] * len(ccodes))

    search = PoiSearch(MySearchData())

    api = frontend(apiobj, options=['search'])

    async def run():
        async with api._async_api.begin() as conn:
            return await search.lookup(conn, details)

    return api._loop.run_until_complete(run())
+
+
@pytest.mark.parametrize('coord,pid', [('34.3, 56.100021', 2),
                                       ('5.0, 4.59933', 1)])
def test_simple_near_search_in_placex(apiobj, frontend, coord, pid):
    """Near search over placex returns the bus stop within the search radius."""
    apiobj.add_placex(place_id=1, class_='highway', type='bus_stop',
                      centroid=(5.0, 4.6))
    apiobj.add_placex(place_id=2, class_='highway', type='bus_stop',
                      centroid=(34.3, 56.1))

    search_details = SearchDetails.from_kwargs({'near': coord,
                                                'near_radius': 0.001})
    res = run_search(apiobj, frontend, 0.1, [('highway', 'bus_stop')], [0.5],
                     details=search_details)

    assert [r.place_id for r in res] == [pid]
+
+
@pytest.mark.parametrize('coord,pid', [('34.3, 56.100021', 2),
                                       ('34.3, 56.4', 2),
                                       ('5.0, 4.59933', 1)])
def test_simple_near_search_in_classtype(apiobj, frontend, coord, pid):
    """Near search with a class/type table finds stops within the radius."""
    apiobj.add_placex(place_id=1, class_='highway', type='bus_stop',
                      centroid=(5.0, 4.6))
    apiobj.add_placex(place_id=2, class_='highway', type='bus_stop',
                      centroid=(34.3, 56.1))
    apiobj.add_class_type_table('highway', 'bus_stop')

    search_details = SearchDetails.from_kwargs({'near': coord,
                                                'near_radius': 0.5})
    res = run_search(apiobj, frontend, 0.1, [('highway', 'bus_stop')], [0.5],
                     details=search_details)

    assert [r.place_id for r in res] == [pid]
+
+
class TestPoiSearchWithRestrictions:
    """POI near search with country-code and viewbox restrictions.

    The fixture is parametrized so that every test runs once against the
    placex table and once against a class/type lookup table.
    """

    @pytest.fixture(autouse=True, params=["placex", "classtype"])
    def fill_database(self, apiobj, request):
        apiobj.add_placex(place_id=1, class_='highway', type='bus_stop',
                          country_code='au',
                          centroid=(34.3, 56.10003))
        apiobj.add_placex(place_id=2, class_='highway', type='bus_stop',
                          country_code='nz',
                          centroid=(34.3, 56.1))
        if request.param == 'classtype':
            apiobj.add_class_type_table('highway', 'bus_stop')
            self.args = {'near': '34.3, 56.4', 'near_radius': 0.5}
        else:
            self.args = {'near': '34.3, 56.100021', 'near_radius': 0.001}


    def test_unrestricted(self, apiobj, frontend):
        results = run_search(apiobj, frontend, 0.1, [('highway', 'bus_stop')], [0.5],
                             details=SearchDetails.from_kwargs(self.args))

        assert [r.place_id for r in results] == [1, 2]


    def test_restrict_country(self, apiobj, frontend):
        # Renamed from 'test_restict_country' (typo). Only the place in one
        # of the requested countries ('nz') must be returned.
        results = run_search(apiobj, frontend, 0.1, [('highway', 'bus_stop')], [0.5],
                             ccodes=['de', 'nz'],
                             details=SearchDetails.from_kwargs(self.args))

        assert [r.place_id for r in results] == [2]


    def test_restrict_by_viewbox(self, apiobj, frontend):
        args = {'bounded_viewbox': True, 'viewbox': '34.299,56.0,34.3001,56.10001'}
        args.update(self.args)
        results = run_search(apiobj, frontend, 0.1, [('highway', 'bus_stop')], [0.5],
                             ccodes=['de', 'nz'],
                             details=SearchDetails.from_kwargs(args))

        assert [r.place_id for r in results] == [2]
--- /dev/null
+# SPDX-License-Identifier: GPL-3.0-or-later
+#
+# This file is part of Nominatim. (https://nominatim.org)
+#
+# Copyright (C) 2024 by the Nominatim developer community.
+# For a full list of authors see the git log.
+"""
+Tests for running the postcode searcher.
+"""
+import pytest
+
+import nominatim_api as napi
+from nominatim_api.types import SearchDetails
+from nominatim_api.search.db_searches import PostcodeSearch
+from nominatim_api.search.db_search_fields import WeightedStrings, FieldLookup, \
+ FieldRanking, RankedTokens
+
def run_search(apiobj, frontend, global_penalty, pcs, pc_penalties=None,
               ccodes=None, lookup=None, ranking=None, details=None):
    """Build a PostcodeSearch for the given postcodes and run it synchronously.

    pcs lists the postcode variants to search for, pc_penalties the matching
    penalties (all 0.0 when omitted).  ccodes, lookup and ranking optionally
    add country, name-lookup and ranking constraints.

    Note: the previous defaults ('[]' and 'SearchDetails()') were
    mutable/shared objects evaluated once at definition time; they are now
    created per call.  Passing explicit arguments behaves as before.
    """
    if pc_penalties is None:
        pc_penalties = [0.0] * len(pcs)
    if ccodes is None:
        ccodes = []
    if lookup is None:
        lookup = []
    if ranking is None:
        ranking = []
    if details is None:
        details = SearchDetails()

    class MySearchData:
        penalty = global_penalty
        postcodes = WeightedStrings(pcs, pc_penalties)
        countries = WeightedStrings(ccodes, [0.0] * len(ccodes))
        lookups = lookup
        rankings = ranking

    search = PostcodeSearch(0.0, MySearchData())

    api = frontend(apiobj, options=['search'])

    async def run():
        async with api._async_api.begin() as conn:
            return await search.lookup(conn, details)

    return api._loop.run_until_complete(run())
+
+
def test_postcode_only_search(apiobj, frontend):
    """Both postcode variants match; the lower-penalty variant comes first."""
    apiobj.add_postcode(place_id=100, country_code='ch', postcode='12345')
    apiobj.add_postcode(place_id=101, country_code='pl', postcode='12 345')

    res = run_search(apiobj, frontend, 0.3, ['12345', '12 345'], [0.0, 0.1])

    assert len(res) == 2
    assert [r.place_id for r in res] == [100, 101]
+
+
def test_postcode_with_country(apiobj, frontend):
    """A country restriction filters out postcodes from other countries."""
    apiobj.add_postcode(place_id=100, country_code='ch', postcode='12345')
    apiobj.add_postcode(place_id=101, country_code='pl', postcode='12 345')

    res = run_search(apiobj, frontend, 0.3, ['12345', '12 345'], [0.0, 0.1],
                     ccodes=['de', 'pl'])

    assert len(res) == 1
    assert res[0].place_id == 101
+
+
def test_postcode_area(apiobj, frontend):
    """A boundary=postal_code area is returned instead of the postcode point."""
    apiobj.add_postcode(place_id=100, country_code='ch', postcode='12345')
    apiobj.add_placex(place_id=200, country_code='ch', postcode='12345',
                      osm_type='R', osm_id=34, class_='boundary', type='postal_code',
                      geometry='POLYGON((0 0, 1 0, 1 1, 0 1, 0 0))')

    res = run_search(apiobj, frontend, 0.3, ['12345'], [0.0])

    assert len(res) == 1
    assert res[0].place_id == 200
    assert res[0].bbox.area == 1
+
+
class TestPostcodeSearchWithAddress:
    """Postcode search combined with address lookups, viewbox and near bias."""

    @pytest.fixture(autouse=True)
    def fill_database(self, apiobj):
        # The same postcode '12345' exists in two countries, each attached
        # to a parent village.  Village 1000 (ch) carries name tokens 10/11,
        # village 2000 (pl) tokens 20/21; both share tokens 1/2.
        apiobj.add_postcode(place_id=100, country_code='ch',
                            parent_place_id=1000, postcode='12345',
                            geometry='POINT(17 5)')
        apiobj.add_postcode(place_id=101, country_code='pl',
                            parent_place_id=2000, postcode='12345',
                            geometry='POINT(-45 7)')
        apiobj.add_placex(place_id=1000, class_='place', type='village',
                          rank_search=22, rank_address=22,
                          country_code='ch')
        apiobj.add_search_name(1000, names=[1,2,10,11],
                               search_rank=22, address_rank=22,
                               country_code='ch')
        apiobj.add_placex(place_id=2000, class_='place', type='village',
                          rank_search=22, rank_address=22,
                          country_code='pl')
        apiobj.add_search_name(2000, names=[1,2,20,21],
                               search_rank=22, address_rank=22,
                               country_code='pl')


    def test_lookup_both(self, apiobj, frontend):
        # Lookup plus ranking: both postcodes match, ranking prefers 100.
        lookup = FieldLookup('name_vector', [1,2], 'restrict')
        ranking = FieldRanking('name_vector', 0.3, [RankedTokens(0.0, [10])])

        results = run_search(apiobj, frontend, 0.1, ['12345'], lookup=[lookup], ranking=[ranking])

        assert [r.place_id for r in results] == [100, 101]


    def test_restrict_by_name(self, apiobj, frontend):
        # Token 10 only exists on the Swiss village, so only its postcode matches.
        lookup = FieldLookup('name_vector', [10], 'restrict')

        results = run_search(apiobj, frontend, 0.1, ['12345'], lookup=[lookup])

        assert [r.place_id for r in results] == [100]


    @pytest.mark.parametrize('coord,place_id', [((16.5, 5), 100),
                                                ((-45.1, 7.004), 101)])
    def test_lookup_near(self, apiobj, frontend, coord, place_id):
        # A near restriction limits the results to the given radius.
        lookup = FieldLookup('name_vector', [1,2], 'restrict')
        ranking = FieldRanking('name_vector', 0.3, [RankedTokens(0.0, [10])])

        results = run_search(apiobj, frontend, 0.1, ['12345'],
                             lookup=[lookup], ranking=[ranking],
                             details=SearchDetails(near=napi.Point(*coord),
                                                   near_radius=0.6))

        assert [r.place_id for r in results] == [place_id]


    @pytest.mark.parametrize('geom', [napi.GeometryFormat.GEOJSON,
                                      napi.GeometryFormat.KML,
                                      napi.GeometryFormat.SVG,
                                      napi.GeometryFormat.TEXT])
    def test_return_geometries(self, apiobj, frontend, geom):
        # Results must honor the requested geometry output format.
        results = run_search(apiobj, frontend, 0.1, ['12345'],
                             details=SearchDetails(geometry_output=geom))

        assert results
        assert all(geom.name.lower() in r.geometry for r in results)


    @pytest.mark.parametrize('viewbox, rids', [('-46,6,-44,8', [101,100]),
                                               ('16,4,18,6', [100,101])])
    def test_prefer_viewbox(self, apiobj, frontend, viewbox, rids):
        # An unbounded viewbox only changes the ranking, not the result set.
        results = run_search(apiobj, frontend, 0.1, ['12345'],
                             details=SearchDetails.from_kwargs({'viewbox': viewbox}))

        assert [r.place_id for r in results] == rids


    @pytest.mark.parametrize('viewbox, rid', [('-46,6,-44,8', 101),
                                              ('16,4,18,6', 100)])
    def test_restrict_to_viewbox(self, apiobj, frontend, viewbox, rid):
        # A bounded viewbox removes results outside the box.
        results = run_search(apiobj, frontend, 0.1, ['12345'],
                             details=SearchDetails.from_kwargs({'viewbox': viewbox,
                                                                'bounded_viewbox': True}))

        assert [r.place_id for r in results] == [rid]


    @pytest.mark.parametrize('coord,rids', [((17.05, 5), [100, 101]),
                                            ((-45, 7.1), [101, 100])])
    def test_prefer_near(self, apiobj, frontend, coord, rids):
        # A near point without radius only influences the ranking.
        results = run_search(apiobj, frontend, 0.1, ['12345'],
                             details=SearchDetails(near=napi.Point(*coord)))

        assert [r.place_id for r in results] == rids


    @pytest.mark.parametrize('pid,rid', [(100, 101), (101, 100)])
    def test_exclude(self, apiobj, frontend, pid, rid):
        # Excluded place ids must not appear in the result list.
        results = run_search(apiobj, frontend, 0.1, ['12345'],
                             details=SearchDetails(excluded=[pid]))

        assert [r.place_id for r in results] == [rid]
--- /dev/null
+# SPDX-License-Identifier: GPL-3.0-or-later
+#
+# This file is part of Nominatim. (https://nominatim.org)
+#
+# Copyright (C) 2024 by the Nominatim developer community.
+# For a full list of authors see the git log.
+"""
+Test for creation of token assignments from tokenized queries.
+"""
+import pytest
+
+from nominatim_api.search.query import QueryStruct, Phrase, PhraseType, BreakType, TokenType, TokenRange, Token
+from nominatim_api.search.token_assignment import yield_token_assignments, TokenAssignment, PENALTY_TOKENCHANGE
+
class MyToken(Token):
    """Token stub whose category is the fixed pair ('this', 'that')."""
    def get_category(self):
        return 'this', 'that'
+
+
def make_query(*args):
    """Build a QueryStruct from word descriptions.

    Each argument is a tuple (break_type, phrase_type, [(end, token_type), ...])
    describing one word position of the query.  All token positions share a
    single dummy token.
    """
    query = QueryStruct([Phrase(args[0][1], '')])
    token = MyToken(penalty=3.0, token=45, count=1, addr_count=1,
                    lookup_word='foo')

    for btype, ptype, _ in args[1:]:
        query.add_node(btype, ptype)
    query.add_node(BreakType.END, PhraseType.NONE)

    for start, (_, _, tokens) in enumerate(args):
        for end, ttype in tokens:
            query.add_token(TokenRange(start, end), ttype, token)

    return query
+
+
def check_assignments(actual, *expected):
    """Assert that 'actual' yields exactly the 'expected' assignments.

    Order is irrelevant but every expected assignment must be produced
    exactly once and nothing else may be produced.
    """
    todo = list(expected)
    for assignment in actual:
        assert assignment in todo, f"Unexpected assignment: {assignment}"
        todo.remove(assignment)

    # Report only the assignments that were never produced. (The previous
    # message printed the full 'expected' list, which made failures hard
    # to read.)
    assert not todo, f"Missing assignments: {todo}"
+
+
def test_query_with_missing_tokens():
    """A query without any tokens yields no assignments."""
    query = QueryStruct([Phrase(PhraseType.NONE, '')])
    query.add_node(BreakType.END, PhraseType.NONE)

    assert list(yield_token_assignments(query)) == []
+
+
def test_one_word_query():
    """A single word always goes into the name slot."""
    query = make_query((BreakType.START, PhraseType.NONE,
                        [(1, TokenType.PARTIAL),
                         (1, TokenType.WORD),
                         (1, TokenType.HOUSENUMBER)]))

    assert list(yield_token_assignments(query)) == \
        [TokenAssignment(name=TokenRange(0, 1))]
+
+
def test_single_postcode():
    """A lone postcode token fills the postcode slot."""
    query = make_query((BreakType.START, PhraseType.NONE,
                        [(1, TokenType.POSTCODE)]))

    assert list(yield_token_assignments(query)) == \
        [TokenAssignment(postcode=TokenRange(0, 1))]
+
+
def test_single_country_name():
    """A lone country token fills the country slot."""
    query = make_query((BreakType.START, PhraseType.NONE,
                        [(1, TokenType.COUNTRY)]))

    assert list(yield_token_assignments(query)) == \
        [TokenAssignment(country=TokenRange(0, 1))]
+
+
def test_single_word_poi_search():
    """With NEAR_ITEM and QUALIFIER available, only the near_item is assigned."""
    query = make_query((BreakType.START, PhraseType.NONE,
                        [(1, TokenType.NEAR_ITEM),
                         (1, TokenType.QUALIFIER)]))

    assert list(yield_token_assignments(query)) == \
        [TokenAssignment(near_item=TokenRange(0, 1))]
+
+
@pytest.mark.parametrize('btype', [BreakType.WORD, BreakType.PART, BreakType.TOKEN])
def test_multiple_simple_words(btype):
    """Three partials split into every possible name/address combination."""
    query = make_query((BreakType.START, PhraseType.NONE, [(1, TokenType.PARTIAL)]),
                       (btype, PhraseType.NONE, [(2, TokenType.PARTIAL)]),
                       (btype, PhraseType.NONE, [(3, TokenType.PARTIAL)]))

    penalty = PENALTY_TOKENCHANGE[btype]

    expected = [TokenAssignment(name=TokenRange(0, 3)),
                TokenAssignment(penalty=penalty, name=TokenRange(0, 2),
                                address=[TokenRange(2, 3)]),
                TokenAssignment(penalty=penalty, name=TokenRange(0, 1),
                                address=[TokenRange(1, 3)]),
                TokenAssignment(penalty=penalty, name=TokenRange(1, 3),
                                address=[TokenRange(0, 1)]),
                TokenAssignment(penalty=penalty, name=TokenRange(2, 3),
                                address=[TokenRange(0, 2)])]

    check_assignments(yield_token_assignments(query), *expected)
+
+
def test_multiple_words_respect_phrase_break():
    """A phrase break forbids name/address ranges spanning both phrases."""
    query = make_query((BreakType.START, PhraseType.NONE, [(1, TokenType.PARTIAL)]),
                       (BreakType.PHRASE, PhraseType.NONE, [(2, TokenType.PARTIAL)]))

    expected = [TokenAssignment(name=TokenRange(0, 1),
                                address=[TokenRange(1, 2)]),
                TokenAssignment(name=TokenRange(1, 2),
                                address=[TokenRange(0, 1)])]

    check_assignments(yield_token_assignments(query), *expected)
+
+
def test_housenumber_and_street():
    """Housenumber phrase before street: the street may be name or address."""
    query = make_query((BreakType.START, PhraseType.NONE, [(1, TokenType.HOUSENUMBER)]),
                       (BreakType.PHRASE, PhraseType.NONE, [(2, TokenType.PARTIAL)]))

    expected = [TokenAssignment(name=TokenRange(1, 2),
                                housenumber=TokenRange(0, 1)),
                TokenAssignment(address=[TokenRange(1, 2)],
                                housenumber=TokenRange(0, 1))]

    check_assignments(yield_token_assignments(query), *expected)
+
+
def test_housenumber_and_street_backwards():
    """Street phrase before housenumber: the street may be name or address."""
    query = make_query((BreakType.START, PhraseType.NONE, [(1, TokenType.PARTIAL)]),
                       (BreakType.PHRASE, PhraseType.NONE, [(2, TokenType.HOUSENUMBER)]))

    expected = [TokenAssignment(name=TokenRange(0, 1),
                                housenumber=TokenRange(1, 2)),
                TokenAssignment(address=[TokenRange(0, 1)],
                                housenumber=TokenRange(1, 2))]

    check_assignments(yield_token_assignments(query), *expected)
+
+
def test_housenumber_and_postcode():
    """Housenumber in the middle and a postcode at the end."""
    query = make_query((BreakType.START, PhraseType.NONE, [(1, TokenType.PARTIAL)]),
                       (BreakType.WORD, PhraseType.NONE, [(2, TokenType.HOUSENUMBER)]),
                       (BreakType.WORD, PhraseType.NONE, [(3, TokenType.PARTIAL)]),
                       (BreakType.WORD, PhraseType.NONE, [(4, TokenType.POSTCODE)]))

    expected = [TokenAssignment(penalty=pytest.approx(0.3),
                                name=TokenRange(0, 1),
                                housenumber=TokenRange(1, 2),
                                address=[TokenRange(2, 3)],
                                postcode=TokenRange(3, 4)),
                TokenAssignment(penalty=pytest.approx(0.3),
                                housenumber=TokenRange(1, 2),
                                address=[TokenRange(0, 1), TokenRange(2, 3)],
                                postcode=TokenRange(3, 4))]

    check_assignments(yield_token_assignments(query), *expected)
+
def test_postcode_and_housenumber():
    """Postcode near the start and a housenumber at the end."""
    query = make_query((BreakType.START, PhraseType.NONE, [(1, TokenType.PARTIAL)]),
                       (BreakType.WORD, PhraseType.NONE, [(2, TokenType.POSTCODE)]),
                       (BreakType.WORD, PhraseType.NONE, [(3, TokenType.PARTIAL)]),
                       (BreakType.WORD, PhraseType.NONE, [(4, TokenType.HOUSENUMBER)]))

    expected = [TokenAssignment(penalty=pytest.approx(0.3),
                                name=TokenRange(2, 3),
                                housenumber=TokenRange(3, 4),
                                address=[TokenRange(0, 1)],
                                postcode=TokenRange(1, 2)),
                TokenAssignment(penalty=pytest.approx(0.3),
                                housenumber=TokenRange(3, 4),
                                address=[TokenRange(0, 1), TokenRange(2, 3)],
                                postcode=TokenRange(1, 2))]

    check_assignments(yield_token_assignments(query), *expected)
+
+
def test_country_housenumber_postcode():
    """No valid assignment exists for this token combination."""
    query = make_query((BreakType.START, PhraseType.NONE, [(1, TokenType.COUNTRY)]),
                       (BreakType.WORD, PhraseType.NONE, [(2, TokenType.PARTIAL)]),
                       (BreakType.WORD, PhraseType.NONE, [(3, TokenType.HOUSENUMBER)]),
                       (BreakType.WORD, PhraseType.NONE, [(4, TokenType.POSTCODE)]))

    assert list(yield_token_assignments(query)) == []
+
+
@pytest.mark.parametrize('ttype', [TokenType.POSTCODE, TokenType.COUNTRY,
                                   TokenType.NEAR_ITEM, TokenType.QUALIFIER])
def test_housenumber_with_only_special_terms(ttype):
    """A housenumber combined only with a special term yields no assignment."""
    query = make_query((BreakType.START, PhraseType.NONE, [(1, TokenType.HOUSENUMBER)]),
                       (BreakType.WORD, PhraseType.NONE, [(2, ttype)]))

    assert list(yield_token_assignments(query)) == []
+
+
@pytest.mark.parametrize('ttype', [TokenType.POSTCODE, TokenType.HOUSENUMBER, TokenType.COUNTRY])
def test_multiple_special_tokens(ttype):
    """A special token type appearing twice yields no assignment."""
    query = make_query((BreakType.START, PhraseType.NONE, [(1, ttype)]),
                       (BreakType.PHRASE, PhraseType.NONE, [(2, TokenType.PARTIAL)]),
                       (BreakType.PHRASE, PhraseType.NONE, [(3, ttype)]))

    assert list(yield_token_assignments(query)) == []
+
+
def test_housenumber_many_phrases():
    """Housenumber in its own phrase with several address phrases around it."""
    query = make_query((BreakType.START, PhraseType.NONE, [(1, TokenType.PARTIAL)]),
                       (BreakType.PHRASE, PhraseType.NONE, [(2, TokenType.PARTIAL)]),
                       (BreakType.PHRASE, PhraseType.NONE, [(3, TokenType.PARTIAL)]),
                       (BreakType.PHRASE, PhraseType.NONE, [(4, TokenType.HOUSENUMBER)]),
                       (BreakType.WORD, PhraseType.NONE, [(5, TokenType.PARTIAL)]))

    expected = [TokenAssignment(penalty=0.1,
                                name=TokenRange(4, 5),
                                housenumber=TokenRange(3, 4),
                                address=[TokenRange(0, 1), TokenRange(1, 2),
                                         TokenRange(2, 3)]),
                TokenAssignment(penalty=0.1,
                                housenumber=TokenRange(3, 4),
                                address=[TokenRange(0, 1), TokenRange(1, 2),
                                         TokenRange(2, 3), TokenRange(4, 5)])]

    check_assignments(yield_token_assignments(query), *expected)
+
+
def test_country_at_beginning():
    """A leading country token combines with a trailing name."""
    query = make_query((BreakType.START, PhraseType.NONE, [(1, TokenType.COUNTRY)]),
                       (BreakType.WORD, PhraseType.NONE, [(2, TokenType.PARTIAL)]))

    assert list(yield_token_assignments(query)) == \
        [TokenAssignment(penalty=0.1, name=TokenRange(1, 2),
                         country=TokenRange(0, 1))]
+
+
def test_country_at_end():
    """A trailing country token combines with a leading name."""
    query = make_query((BreakType.START, PhraseType.NONE, [(1, TokenType.PARTIAL)]),
                       (BreakType.WORD, PhraseType.NONE, [(2, TokenType.COUNTRY)]))

    assert list(yield_token_assignments(query)) == \
        [TokenAssignment(penalty=0.1, name=TokenRange(0, 1),
                         country=TokenRange(1, 2))]
+
+
def test_country_in_middle():
    """A country token between two partials yields no assignment."""
    query = make_query((BreakType.START, PhraseType.NONE, [(1, TokenType.PARTIAL)]),
                       (BreakType.WORD, PhraseType.NONE, [(2, TokenType.COUNTRY)]),
                       (BreakType.WORD, PhraseType.NONE, [(3, TokenType.PARTIAL)]))

    assert list(yield_token_assignments(query)) == []
+
+
def test_postcode_with_designation():
    """Postcode phrase before a word phrase: the word is name or address."""
    query = make_query((BreakType.START, PhraseType.NONE, [(1, TokenType.POSTCODE)]),
                       (BreakType.PHRASE, PhraseType.NONE, [(2, TokenType.PARTIAL)]))

    expected = [TokenAssignment(penalty=0.1, name=TokenRange(1, 2),
                                postcode=TokenRange(0, 1)),
                TokenAssignment(postcode=TokenRange(0, 1),
                                address=[TokenRange(1, 2)])]

    check_assignments(yield_token_assignments(query), *expected)
+
+
def test_postcode_with_designation_backwards():
    """Word phrase before postcode phrase: the word is name or address."""
    query = make_query((BreakType.START, PhraseType.NONE, [(1, TokenType.PARTIAL)]),
                       (BreakType.PHRASE, PhraseType.NONE, [(2, TokenType.POSTCODE)]))

    expected = [TokenAssignment(name=TokenRange(0, 1),
                                postcode=TokenRange(1, 2)),
                TokenAssignment(penalty=0.1, postcode=TokenRange(1, 2),
                                address=[TokenRange(0, 1)])]

    check_assignments(yield_token_assignments(query), *expected)
+
+
def test_near_item_at_beginning():
    """A leading near_item token combines with a trailing name."""
    query = make_query((BreakType.START, PhraseType.NONE, [(1, TokenType.NEAR_ITEM)]),
                       (BreakType.WORD, PhraseType.NONE, [(2, TokenType.PARTIAL)]))

    assert list(yield_token_assignments(query)) == \
        [TokenAssignment(penalty=0.1, name=TokenRange(1, 2),
                         near_item=TokenRange(0, 1))]
+
+
def test_near_item_at_end():
    """A trailing near_item token combines with a leading name."""
    query = make_query((BreakType.START, PhraseType.NONE, [(1, TokenType.PARTIAL)]),
                       (BreakType.WORD, PhraseType.NONE, [(2, TokenType.NEAR_ITEM)]))

    assert list(yield_token_assignments(query)) == \
        [TokenAssignment(penalty=0.1, name=TokenRange(0, 1),
                         near_item=TokenRange(1, 2))]
+
+
def test_near_item_in_middle():
    """A near_item token between two partials yields no assignment."""
    query = make_query((BreakType.START, PhraseType.NONE, [(1, TokenType.PARTIAL)]),
                       (BreakType.WORD, PhraseType.NONE, [(2, TokenType.NEAR_ITEM)]),
                       (BreakType.WORD, PhraseType.NONE, [(3, TokenType.PARTIAL)]))

    assert list(yield_token_assignments(query)) == []
+
+
def test_qualifier_at_beginning():
    """A leading qualifier applies to the following name tokens."""
    query = make_query((BreakType.START, PhraseType.NONE, [(1, TokenType.QUALIFIER)]),
                       (BreakType.WORD, PhraseType.NONE, [(2, TokenType.PARTIAL)]),
                       (BreakType.WORD, PhraseType.NONE, [(3, TokenType.PARTIAL)]))

    expected = [TokenAssignment(penalty=0.1, name=TokenRange(1, 3),
                                qualifier=TokenRange(0, 1)),
                TokenAssignment(penalty=0.2, name=TokenRange(1, 2),
                                qualifier=TokenRange(0, 1),
                                address=[TokenRange(2, 3)])]

    check_assignments(yield_token_assignments(query), *expected)
+
+
def test_qualifier_after_name():
    """A qualifier between two word pairs: either side may be the name."""
    query = make_query((BreakType.START, PhraseType.NONE, [(1, TokenType.PARTIAL)]),
                       (BreakType.WORD, PhraseType.NONE, [(2, TokenType.PARTIAL)]),
                       (BreakType.WORD, PhraseType.NONE, [(3, TokenType.QUALIFIER)]),
                       (BreakType.WORD, PhraseType.NONE, [(4, TokenType.PARTIAL)]),
                       (BreakType.WORD, PhraseType.NONE, [(5, TokenType.PARTIAL)]))

    expected = [TokenAssignment(penalty=0.2, name=TokenRange(0, 2),
                                qualifier=TokenRange(2, 3),
                                address=[TokenRange(3, 5)]),
                TokenAssignment(penalty=0.2, name=TokenRange(3, 5),
                                qualifier=TokenRange(2, 3),
                                address=[TokenRange(0, 2)])]

    check_assignments(yield_token_assignments(query), *expected)
+
+
def test_qualifier_before_housenumber():
    """A qualifier directly before a housenumber yields no assignment."""
    query = make_query((BreakType.START, PhraseType.NONE, [(1, TokenType.QUALIFIER)]),
                       (BreakType.WORD, PhraseType.NONE, [(2, TokenType.HOUSENUMBER)]),
                       (BreakType.WORD, PhraseType.NONE, [(3, TokenType.PARTIAL)]))

    assert list(yield_token_assignments(query)) == []
+
+
def test_qualifier_after_housenumber():
    """A qualifier directly after a housenumber yields no assignment."""
    query = make_query((BreakType.START, PhraseType.NONE, [(1, TokenType.HOUSENUMBER)]),
                       (BreakType.WORD, PhraseType.NONE, [(2, TokenType.QUALIFIER)]),
                       (BreakType.WORD, PhraseType.NONE, [(3, TokenType.PARTIAL)]))

    assert list(yield_token_assignments(query)) == []
+
+
def test_qualifier_in_middle_of_phrase():
    """A qualifier inside a multi-word phrase yields no assignment."""
    query = make_query((BreakType.START, PhraseType.NONE, [(1, TokenType.PARTIAL)]),
                       (BreakType.PHRASE, PhraseType.NONE, [(2, TokenType.PARTIAL)]),
                       (BreakType.WORD, PhraseType.NONE, [(3, TokenType.QUALIFIER)]),
                       (BreakType.WORD, PhraseType.NONE, [(4, TokenType.PARTIAL)]),
                       (BreakType.PHRASE, PhraseType.NONE, [(5, TokenType.PARTIAL)]))

    assert list(yield_token_assignments(query)) == []
+
--- /dev/null
+# SPDX-License-Identifier: GPL-3.0-or-later
+#
+# This file is part of Nominatim. (https://nominatim.org)
+#
+# Copyright (C) 2024 by the Nominatim developer community.
+# For a full list of authors see the git log.
+"""
+Tests for enhanced connection class for API functions.
+"""
+from pathlib import Path
+import pytest
+
+import sqlalchemy as sa
+
+
@pytest.mark.asyncio
async def test_run_scalar(api, table_factory):
    """scalar() returns the first column of the first result row."""
    table_factory('foo', definition='that TEXT', content=(('a', ),))

    async with api.begin() as conn:
        value = await conn.scalar(sa.text('SELECT * FROM foo'))
        assert value == 'a'
+
+
@pytest.mark.asyncio
async def test_run_execute(api, table_factory):
    """execute() returns a result object with fetchable rows."""
    table_factory('foo', definition='that TEXT', content=(('a', ),))

    async with api.begin() as conn:
        rows = await conn.execute(sa.text('SELECT * FROM foo'))
        assert rows.fetchone()[0] == 'a'
+
+
@pytest.mark.asyncio
async def test_get_property_existing_cached(api, table_factory):
    """By default, a property value is cached after the first read."""
    table_factory('nominatim_properties',
                  definition='property TEXT, value TEXT',
                  content=(('dbv', '96723'), ))

    async with api.begin() as conn:
        assert await conn.get_property('dbv') == '96723'

        # Emptying the table must not affect the cached value.
        await conn.execute(sa.text('TRUNCATE nominatim_properties'))

        assert await conn.get_property('dbv') == '96723'
+
+
@pytest.mark.asyncio
async def test_get_property_existing_uncached(api, table_factory):
    """With cached=False, the property value is re-read from the database."""
    table_factory('nominatim_properties',
                  definition='property TEXT, value TEXT',
                  content=(('dbv', '96723'), ))

    async with api.begin() as conn:
        assert await conn.get_property('dbv') == '96723'

        # Change the value behind the cache's back.
        await conn.execute(sa.text("UPDATE nominatim_properties SET value = '1'"))

        assert await conn.get_property('dbv', cached=False) == '1'
+
+
@pytest.mark.asyncio
@pytest.mark.parametrize('param', ['foo', 'DB:server_version'])
async def test_get_property_missing(api, table_factory, param):
    """Looking up a property not present in the table raises ValueError."""
    table_factory('nominatim_properties',
                  definition='property TEXT, value TEXT')

    async with api.begin() as conn:
        with pytest.raises(ValueError):
            await conn.get_property(param)
+
+
@pytest.mark.asyncio
async def test_get_db_property_existing(api):
    """A known DB property ('server_version') yields a positive number."""
    async with api.begin() as conn:
        assert await conn.get_db_property('server_version') > 0
+
+
@pytest.mark.asyncio
async def test_get_db_property_bad_name(api):
    """Requesting an unknown DB property must raise a ValueError.

    Renamed from 'test_get_db_property_existing': the duplicate name
    silently shadowed the previous test function, so it never ran.
    """
    async with api.begin() as conn:
        with pytest.raises(ValueError):
            await conn.get_db_property('dfkgjd.rijg')
--- /dev/null
+# SPDX-License-Identifier: GPL-3.0-or-later
+#
+# This file is part of Nominatim. (https://nominatim.org)
+#
+# Copyright (C) 2024 by the Nominatim developer community.
+# For a full list of authors see the git log.
+"""
+Tests for the deletable v1 API call.
+"""
+import json
+from pathlib import Path
+
+import pytest
+
+from fake_adaptor import FakeAdaptor, FakeError, FakeResponse
+
+import nominatim_api.v1.server_glue as glue
+
class TestDeletableEndPoint:
    """Tests for the deletable v1 endpoint."""

    @pytest.fixture(autouse=True)
    def setup_deletable_table(self, temp_db_cursor, table_factory, temp_db_with_extensions):
        # Three deletion candidates. Places 1 and 3 have no 'name' entry in
        # their name column (place 1 only carries 'old_name'), so they must
        # be reported with name None.
        table_factory('import_polygon_delete',
                      definition='osm_id bigint, osm_type char(1), class text, type text',
                      content=[(345, 'N', 'boundary', 'administrative'),
                               (781, 'R', 'landuse', 'wood'),
                               (781, 'R', 'landcover', 'grass')])
        table_factory('placex',
                      definition="""place_id bigint, osm_id bigint, osm_type char(1),
                                    class text, type text, name HSTORE, country_code char(2)""",
                      content=[(1, 345, 'N', 'boundary', 'administrative', {'old_name': 'Former'}, 'ab'),
                               (2, 781, 'R', 'landuse', 'wood', {'name': 'Wood'}, 'cd'),
                               (3, 781, 'R', 'landcover', 'grass', None, 'cd')])



    @pytest.mark.asyncio
    async def test_deletable(self, api):
        a = FakeAdaptor()

        resp = await glue.deletable_endpoint(api, a)
        results = json.loads(resp.output)

        # Sort for a deterministic comparison; output order is not asserted.
        results.sort(key=lambda r: r['place_id'])

        assert results == [{'place_id': 1, 'country_code': 'ab', 'name': None,
                            'osm_id': 345, 'osm_type': 'N',
                            'class': 'boundary', 'type': 'administrative'},
                           {'place_id': 2, 'country_code': 'cd', 'name': 'Wood',
                            'osm_id': 781, 'osm_type': 'R',
                            'class': 'landuse', 'type': 'wood'},
                           {'place_id': 3, 'country_code': 'cd', 'name': None,
                            'osm_id': 781, 'osm_type': 'R',
                            'class': 'landcover', 'type': 'grass'}]
--- /dev/null
+# SPDX-License-Identifier: GPL-3.0-or-later
+#
+# This file is part of Nominatim. (https://nominatim.org)
+#
+# Copyright (C) 2024 by the Nominatim developer community.
+# For a full list of authors see the git log.
+"""
+Tests for details API call.
+"""
+import datetime as dt
+
+import pytest
+
+import nominatim_api as napi
+
# Lookup by internal place id and by OSM id (with and without class hint)
# must all resolve to the same placex row.
@pytest.mark.parametrize('idobj', (napi.PlaceID(332), napi.OsmID('W', 4),
                                   napi.OsmID('W', 4, 'highway')))
def test_lookup_in_placex(apiobj, frontend, idobj):
    """Details lookup of a fully populated placex row returns all fields."""
    import_date = dt.datetime(2022, 12, 7, 14, 14, 46, 0)
    apiobj.add_placex(place_id=332, osm_type='W', osm_id=4,
                      class_='highway', type='residential',
                      name={'name': 'Road'}, address={'city': 'Barrow'},
                      extratags={'surface': 'paved'},
                      parent_place_id=34, linked_place_id=55,
                      admin_level=15, country_code='gb',
                      housenumber='4',
                      postcode='34425', wikipedia='en:Faa',
                      rank_search=27, rank_address=26,
                      importance=0.01,
                      centroid=(23, 34),
                      indexed_date=import_date,
                      geometry='LINESTRING(23 34, 23.1 34, 23.1 34.1, 23 34)')

    api = frontend(apiobj, options={'details'})
    result = api.details(idobj)

    assert result is not None

    assert result.source_table.name == 'PLACEX'
    assert result.category == ('highway', 'residential')
    assert result.centroid == (pytest.approx(23.0), pytest.approx(34.0))

    assert result.place_id == 332
    assert result.parent_place_id == 34
    assert result.linked_place_id == 55
    assert result.osm_object == ('W', 4)
    assert result.admin_level == 15

    assert result.names == {'name': 'Road'}
    assert result.address == {'city': 'Barrow'}
    assert result.extratags == {'surface': 'paved'}

    assert result.housenumber == '4'
    assert result.postcode == '34425'
    assert result.wikipedia == 'en:Faa'

    assert result.rank_search == 27
    assert result.rank_address == 26
    assert result.importance == pytest.approx(0.01)

    assert result.country_code == 'gb'
    # The indexed date comes back timezone-aware in UTC.
    assert result.indexed_date == import_date.replace(tzinfo=dt.timezone.utc)

    # Extra detail sections are None unless explicitly requested.
    assert result.address_rows is None
    assert result.linked_rows is None
    assert result.parented_rows is None
    assert result.name_keywords is None
    assert result.address_keywords is None

    assert result.geometry == {'type': 'ST_LineString'}
+
+
+def test_lookup_in_placex_minimal_info(apiobj, frontend):
+    """ A placex row where only the mandatory columns are set returns
+        None for all optional fields.
+    """
+    import_date = dt.datetime(2022, 12, 7, 14, 14, 46, 0)
+    apiobj.add_placex(place_id=332, osm_type='W', osm_id=4,
+                      class_='highway', type='residential',
+                      admin_level=15,
+                      rank_search=27, rank_address=26,
+                      centroid=(23, 34),
+                      indexed_date=import_date,
+                      geometry='LINESTRING(23 34, 23.1 34, 23.1 34.1, 23 34)')
+
+    api = frontend(apiobj, options={'details'})
+    result = api.details(napi.PlaceID(332))
+
+    assert result is not None
+
+    assert result.source_table.name == 'PLACEX'
+    assert result.category == ('highway', 'residential')
+    assert result.centroid == (pytest.approx(23.0), pytest.approx(34.0))
+
+    assert result.place_id == 332
+    # Columns that were not set in the fixture must come back as None.
+    assert result.parent_place_id is None
+    assert result.linked_place_id is None
+    assert result.osm_object == ('W', 4)
+    assert result.admin_level == 15
+
+    assert result.names is None
+    assert result.address is None
+    assert result.extratags is None
+
+    assert result.housenumber is None
+    assert result.postcode is None
+    assert result.wikipedia is None
+
+    assert result.rank_search == 27
+    assert result.rank_address == 26
+    assert result.importance is None
+
+    assert result.country_code is None
+    assert result.indexed_date == import_date.replace(tzinfo=dt.timezone.utc)
+
+    assert result.address_rows is None
+    assert result.linked_rows is None
+    assert result.parented_rows is None
+    assert result.name_keywords is None
+    assert result.address_keywords is None
+
+    assert result.geometry == {'type': 'ST_LineString'}
+
+
+def test_lookup_in_placex_with_geometry(apiobj, frontend):
+ apiobj.add_placex(place_id=332,
+ geometry='LINESTRING(23 34, 23.1 34)')
+
+ api = frontend(apiobj, options={'details'})
+ result = api.details(napi.PlaceID(332), geometry_output=napi.GeometryFormat.GEOJSON)
+
+ assert result.geometry == {'geojson': '{"type":"LineString","coordinates":[[23,34],[23.1,34]]}'}
+
+
+def test_lookup_placex_with_address_details(apiobj, frontend):
+    """ With address_details=True, address_rows contains the place itself,
+        its address parents and a synthesized country entry, ordered by
+        descending address rank.
+    """
+    apiobj.add_placex(place_id=332, osm_type='W', osm_id=4,
+                      class_='highway', type='residential', name='Street',
+                      country_code='pl',
+                      rank_search=27, rank_address=26)
+    apiobj.add_address_placex(332, fromarea=False, isaddress=False,
+                              distance=0.0034,
+                              place_id=1000, osm_type='N', osm_id=3333,
+                              class_='place', type='suburb', name='Smallplace',
+                              country_code='pl', admin_level=13,
+                              rank_search=24, rank_address=23)
+    apiobj.add_address_placex(332, fromarea=True, isaddress=True,
+                              place_id=1001, osm_type='N', osm_id=3334,
+                              class_='place', type='city', name='Bigplace',
+                              country_code='pl',
+                              rank_search=17, rank_address=16)
+
+    api = frontend(apiobj, options={'details'})
+    result = api.details(napi.PlaceID(332), address_details=True)
+
+    assert result.address_rows == [
+            napi.AddressLine(place_id=332, osm_object=('W', 4),
+                             category=('highway', 'residential'),
+                             names={'name': 'Street'}, extratags={},
+                             admin_level=15, fromarea=True, isaddress=True,
+                             rank_address=26, distance=0.0,
+                             local_name='Street'),
+            napi.AddressLine(place_id=1000, osm_object=('N', 3333),
+                             category=('place', 'suburb'),
+                             names={'name': 'Smallplace'}, extratags={},
+                             admin_level=13, fromarea=False, isaddress=True,
+                             rank_address=23, distance=0.0034,
+                             local_name='Smallplace'),
+            napi.AddressLine(place_id=1001, osm_object=('N', 3334),
+                             category=('place', 'city'),
+                             names={'name': 'Bigplace'}, extratags={},
+                             admin_level=15, fromarea=True, isaddress=True,
+                             rank_address=16, distance=0.0,
+                             local_name='Bigplace'),
+            # The country entry has no backing database row.
+            napi.AddressLine(place_id=None, osm_object=None,
+                             category=('place', 'country_code'),
+                             names={'ref': 'pl'}, extratags={},
+                             admin_level=None, fromarea=True, isaddress=False,
+                             rank_address=4, distance=0.0)
+            ]
+
+
+def test_lookup_place_with_linked_places_none_existing(apiobj, frontend):
+ apiobj.add_placex(place_id=332, osm_type='W', osm_id=4,
+ class_='highway', type='residential', name='Street',
+ country_code='pl', linked_place_id=45,
+ rank_search=27, rank_address=26)
+
+ api = frontend(apiobj, options={'details'})
+ result = api.details(napi.PlaceID(332), linked_places=True)
+
+ assert result.linked_rows == []
+
+
+def test_lookup_place_with_linked_places_existing(apiobj, frontend):
+    """ linked_places=True lists every place whose linked_place_id refers
+        to the looked-up place.
+    """
+    apiobj.add_placex(place_id=332, osm_type='W', osm_id=4,
+                      class_='highway', type='residential', name='Street',
+                      country_code='pl', linked_place_id=45,
+                      rank_search=27, rank_address=26)
+    apiobj.add_placex(place_id=1001, osm_type='W', osm_id=5,
+                      class_='highway', type='residential', name='Street',
+                      country_code='pl', linked_place_id=332,
+                      rank_search=27, rank_address=26)
+    apiobj.add_placex(place_id=1002, osm_type='W', osm_id=6,
+                      class_='highway', type='residential', name='Street',
+                      country_code='pl', linked_place_id=332,
+                      rank_search=27, rank_address=26)
+
+    api = frontend(apiobj, options={'details'})
+    result = api.details(napi.PlaceID(332), linked_places=True)
+
+    assert result.linked_rows == [
+            napi.AddressLine(place_id=1001, osm_object=('W', 5),
+                             category=('highway', 'residential'),
+                             names={'name': 'Street'}, extratags={},
+                             admin_level=15, fromarea=False, isaddress=True,
+                             rank_address=26, distance=0.0),
+            napi.AddressLine(place_id=1002, osm_object=('W', 6),
+                             category=('highway', 'residential'),
+                             names={'name': 'Street'}, extratags={},
+                             admin_level=15, fromarea=False, isaddress=True,
+                             rank_address=26, distance=0.0),
+    ]
+
+
+def test_lookup_place_with_parented_places_not_existing(apiobj, frontend):
+ apiobj.add_placex(place_id=332, osm_type='W', osm_id=4,
+ class_='highway', type='residential', name='Street',
+ country_code='pl', parent_place_id=45,
+ rank_search=27, rank_address=26)
+
+ api = frontend(apiobj, options={'details'})
+ result = api.details(napi.PlaceID(332), parented_places=True)
+
+ assert result.parented_rows == []
+
+
+def test_lookup_place_with_parented_places_existing(apiobj, frontend):
+    """ parented_places=True lists house objects whose parent_place_id
+        refers to the looked-up place.
+    """
+    apiobj.add_placex(place_id=332, osm_type='W', osm_id=4,
+                      class_='highway', type='residential', name='Street',
+                      country_code='pl', parent_place_id=45,
+                      rank_search=27, rank_address=26)
+    apiobj.add_placex(place_id=1001, osm_type='N', osm_id=5,
+                      class_='place', type='house', housenumber='23',
+                      country_code='pl', parent_place_id=332,
+                      rank_search=30, rank_address=30)
+    apiobj.add_placex(place_id=1002, osm_type='W', osm_id=6,
+                      class_='highway', type='residential', name='Street',
+                      country_code='pl', parent_place_id=332,
+                      rank_search=27, rank_address=26)
+
+    api = frontend(apiobj, options={'details'})
+    result = api.details(napi.PlaceID(332), parented_places=True)
+
+    # Only the place/house child shows up; the highway child (1002) does not.
+    assert result.parented_rows == [
+            napi.AddressLine(place_id=1001, osm_object=('N', 5),
+                             category=('place', 'house'),
+                             names={'housenumber': '23'}, extratags={},
+                             admin_level=15, fromarea=False, isaddress=True,
+                             rank_address=30, distance=0.0),
+    ]
+
+
+@pytest.mark.parametrize('idobj', (napi.PlaceID(4924), napi.OsmID('W', 9928)))
+def test_lookup_in_osmline(apiobj, frontend, idobj):
+    """ An interpolation line is found via place ID as well as via its OSM
+        way ID. Interpolation bounds are reported through extratags.
+    """
+    import_date = dt.datetime(2022, 12, 7, 14, 14, 46, 0)
+    apiobj.add_osmline(place_id=4924, osm_id=9928,
+                       parent_place_id=12,
+                       startnumber=1, endnumber=4, step=1,
+                       country_code='gb', postcode='34425',
+                       address={'city': 'Big'},
+                       indexed_date=import_date,
+                       geometry='LINESTRING(23 34, 23 35)')
+
+    api = frontend(apiobj, options={'details'})
+    result = api.details(idobj)
+
+    assert result is not None
+
+    assert result.source_table.name == 'OSMLINE'
+    assert result.category == ('place', 'houses')
+    # The centroid lies at the midpoint of the interpolation line.
+    assert result.centroid == (pytest.approx(23.0), pytest.approx(34.5))
+
+    assert result.place_id == 4924
+    assert result.parent_place_id == 12
+    assert result.linked_place_id is None
+    assert result.osm_object == ('W', 9928)
+    assert result.admin_level == 15
+
+    assert result.names is None
+    assert result.address == {'city': 'Big'}
+    assert result.extratags == {'startnumber': '1', 'endnumber': '4', 'step': '1'}
+
+    assert result.housenumber is None
+    assert result.postcode == '34425'
+    assert result.wikipedia is None
+
+    assert result.rank_search == 30
+    assert result.rank_address == 30
+    assert result.importance is None
+
+    assert result.country_code == 'gb'
+    assert result.indexed_date == import_date.replace(tzinfo=dt.timezone.utc)
+
+    assert result.address_rows is None
+    assert result.linked_rows is None
+    assert result.parented_rows is None
+    assert result.name_keywords is None
+    assert result.address_keywords is None
+
+    assert result.geometry == {'type': 'ST_LineString'}
+
+
+def test_lookup_in_osmline_split_interpolation(apiobj, frontend):
+ apiobj.add_osmline(place_id=1000, osm_id=9,
+ startnumber=2, endnumber=4, step=1)
+ apiobj.add_osmline(place_id=1001, osm_id=9,
+ startnumber=6, endnumber=9, step=1)
+ apiobj.add_osmline(place_id=1002, osm_id=9,
+ startnumber=11, endnumber=20, step=1)
+
+ api = frontend(apiobj, options={'details'})
+ for i in range(1, 6):
+ result = api.details(napi.OsmID('W', 9, str(i)))
+ assert result.place_id == 1000
+ for i in range(7, 11):
+ result = api.details(napi.OsmID('W', 9, str(i)))
+ assert result.place_id == 1001
+ for i in range(12, 22):
+ result = api.details(napi.OsmID('W', 9, str(i)))
+ assert result.place_id == 1002
+
+
+def test_lookup_osmline_with_address_details(apiobj, frontend):
+    """ The address details of an interpolation line are taken from its
+        parent street and that street's address chain.
+    """
+    apiobj.add_osmline(place_id=9000, osm_id=9,
+                       startnumber=2, endnumber=4, step=1,
+                       parent_place_id=332)
+    apiobj.add_placex(place_id=332, osm_type='W', osm_id=4,
+                      class_='highway', type='residential', name='Street',
+                      country_code='pl',
+                      rank_search=27, rank_address=26)
+    apiobj.add_address_placex(332, fromarea=False, isaddress=False,
+                              distance=0.0034,
+                              place_id=1000, osm_type='N', osm_id=3333,
+                              class_='place', type='suburb', name='Smallplace',
+                              country_code='pl', admin_level=13,
+                              rank_search=24, rank_address=23)
+    apiobj.add_address_placex(332, fromarea=True, isaddress=True,
+                              place_id=1001, osm_type='N', osm_id=3334,
+                              class_='place', type='city', name='Bigplace',
+                              country_code='pl',
+                              rank_search=17, rank_address=16)
+
+    api = frontend(apiobj, options={'details'})
+    result = api.details(napi.PlaceID(9000), address_details=True)
+
+    assert result.address_rows == [
+            napi.AddressLine(place_id=332, osm_object=('W', 4),
+                             category=('highway', 'residential'),
+                             names={'name': 'Street'}, extratags={},
+                             admin_level=15, fromarea=True, isaddress=True,
+                             rank_address=26, distance=0.0,
+                             local_name='Street'),
+            napi.AddressLine(place_id=1000, osm_object=('N', 3333),
+                             category=('place', 'suburb'),
+                             names={'name': 'Smallplace'}, extratags={},
+                             admin_level=13, fromarea=False, isaddress=True,
+                             rank_address=23, distance=0.0034,
+                             local_name='Smallplace'),
+            napi.AddressLine(place_id=1001, osm_object=('N', 3334),
+                             category=('place', 'city'),
+                             names={'name': 'Bigplace'}, extratags={},
+                             admin_level=15, fromarea=True, isaddress=True,
+                             rank_address=16, distance=0.0,
+                             local_name='Bigplace'),
+            napi.AddressLine(place_id=None, osm_object=None,
+                             category=('place', 'country_code'),
+                             names={'ref': 'pl'}, extratags={},
+                             admin_level=None, fromarea=True, isaddress=False,
+                             rank_address=4, distance=0.0)
+            ]
+
+
+def test_lookup_in_tiger(apiobj, frontend):
+    """ A Tiger interpolation is found via its place ID. OSM type and ID
+        are reported from the parent street; the country code is 'us'.
+    """
+    apiobj.add_tiger(place_id=4924,
+                     parent_place_id=12,
+                     startnumber=1, endnumber=4, step=1,
+                     postcode='34425',
+                     geometry='LINESTRING(23 34, 23 35)')
+    apiobj.add_placex(place_id=12,
+                      category=('highway', 'residential'),
+                      osm_type='W', osm_id=6601223,
+                      geometry='LINESTRING(23 34, 23 35)')
+
+    api = frontend(apiobj, options={'details'})
+    result = api.details(napi.PlaceID(4924))
+
+    assert result is not None
+
+    assert result.source_table.name == 'TIGER'
+    assert result.category == ('place', 'houses')
+    assert result.centroid == (pytest.approx(23.0), pytest.approx(34.5))
+
+    assert result.place_id == 4924
+    assert result.parent_place_id == 12
+    assert result.linked_place_id is None
+    # The OSM object of the parent street is reported.
+    assert result.osm_object == ('W', 6601223)
+    assert result.admin_level == 15
+
+    assert result.names is None
+    assert result.address is None
+    assert result.extratags == {'startnumber': '1', 'endnumber': '4', 'step': '1'}
+
+    assert result.housenumber is None
+    assert result.postcode == '34425'
+    assert result.wikipedia is None
+
+    assert result.rank_search == 30
+    assert result.rank_address == 30
+    assert result.importance is None
+
+    assert result.country_code == 'us'
+    assert result.indexed_date is None
+
+    assert result.address_rows is None
+    assert result.linked_rows is None
+    assert result.parented_rows is None
+    assert result.name_keywords is None
+    assert result.address_keywords is None
+
+    assert result.geometry == {'type': 'ST_LineString'}
+
+
+def test_lookup_tiger_with_address_details(apiobj, frontend):
+    """ The address details of a Tiger interpolation are taken from its
+        parent street and that street's address chain.
+    """
+    apiobj.add_tiger(place_id=9000,
+                     startnumber=2, endnumber=4, step=1,
+                     parent_place_id=332)
+    apiobj.add_placex(place_id=332, osm_type='W', osm_id=4,
+                      class_='highway', type='residential', name='Street',
+                      country_code='us',
+                      rank_search=27, rank_address=26)
+    apiobj.add_address_placex(332, fromarea=False, isaddress=False,
+                              distance=0.0034,
+                              place_id=1000, osm_type='N', osm_id=3333,
+                              class_='place', type='suburb', name='Smallplace',
+                              country_code='us', admin_level=13,
+                              rank_search=24, rank_address=23)
+    apiobj.add_address_placex(332, fromarea=True, isaddress=True,
+                              place_id=1001, osm_type='N', osm_id=3334,
+                              class_='place', type='city', name='Bigplace',
+                              country_code='us',
+                              rank_search=17, rank_address=16)
+
+    api = frontend(apiobj, options={'details'})
+    result = api.details(napi.PlaceID(9000), address_details=True)
+
+    assert result.address_rows == [
+            napi.AddressLine(place_id=332, osm_object=('W', 4),
+                             category=('highway', 'residential'),
+                             names={'name': 'Street'}, extratags={},
+                             admin_level=15, fromarea=True, isaddress=True,
+                             rank_address=26, distance=0.0,
+                             local_name='Street'),
+            napi.AddressLine(place_id=1000, osm_object=('N', 3333),
+                             category=('place', 'suburb'),
+                             names={'name': 'Smallplace'}, extratags={},
+                             admin_level=13, fromarea=False, isaddress=True,
+                             rank_address=23, distance=0.0034,
+                             local_name='Smallplace'),
+            napi.AddressLine(place_id=1001, osm_object=('N', 3334),
+                             category=('place', 'city'),
+                             names={'name': 'Bigplace'}, extratags={},
+                             admin_level=15, fromarea=True, isaddress=True,
+                             rank_address=16, distance=0.0,
+                             local_name='Bigplace'),
+            napi.AddressLine(place_id=None, osm_object=None,
+                             category=('place', 'country_code'),
+                             names={'ref': 'us'}, extratags={},
+                             admin_level=None, fromarea=True, isaddress=False,
+                             rank_address=4, distance=0.0)
+            ]
+
+
+def test_lookup_in_postcode(apiobj, frontend):
+    """ A postcode entry is found via its place ID. The postcode string is
+        reported as the 'ref' name, not in the postcode field.
+    """
+    import_date = dt.datetime(2022, 12, 7, 14, 14, 46, 0)
+    apiobj.add_postcode(place_id=554,
+                        parent_place_id=152,
+                        postcode='34 425',
+                        country_code='gb',
+                        rank_search=20, rank_address=22,
+                        indexed_date=import_date,
+                        geometry='POINT(-9.45 5.6)')
+
+    api = frontend(apiobj, options={'details'})
+    result = api.details(napi.PlaceID(554))
+
+    assert result is not None
+
+    assert result.source_table.name == 'POSTCODE'
+    assert result.category == ('place', 'postcode')
+    assert result.centroid == (pytest.approx(-9.45), pytest.approx(5.6))
+
+    assert result.place_id == 554
+    assert result.parent_place_id == 152
+    assert result.linked_place_id is None
+    assert result.osm_object is None
+    assert result.admin_level == 15
+
+    assert result.names == {'ref': '34 425'}
+    assert result.address is None
+    assert result.extratags is None
+
+    assert result.housenumber is None
+    assert result.postcode is None
+    assert result.wikipedia is None
+
+    assert result.rank_search == 20
+    assert result.rank_address == 22
+    assert result.importance is None
+
+    assert result.country_code == 'gb'
+    assert result.indexed_date == import_date.replace(tzinfo=dt.timezone.utc)
+
+    assert result.address_rows is None
+    assert result.linked_rows is None
+    assert result.parented_rows is None
+    assert result.name_keywords is None
+    assert result.address_keywords is None
+
+    assert result.geometry == {'type': 'ST_Point'}
+
+
+def test_lookup_postcode_with_address_details(apiobj, frontend):
+    """ The address rows of a postcode entry start with the postcode
+        itself, followed by the parent's address chain and the
+        synthesized country entry.
+    """
+    apiobj.add_postcode(place_id=9000,
+                        parent_place_id=332,
+                        postcode='34 425',
+                        country_code='gb',
+                        rank_search=25, rank_address=25)
+    apiobj.add_placex(place_id=332, osm_type='N', osm_id=3333,
+                      class_='place', type='suburb', name='Smallplace',
+                      country_code='gb', admin_level=13,
+                      rank_search=24, rank_address=23)
+    apiobj.add_address_placex(332, fromarea=True, isaddress=True,
+                              place_id=1001, osm_type='N', osm_id=3334,
+                              class_='place', type='city', name='Bigplace',
+                              country_code='gb',
+                              rank_search=17, rank_address=16)
+
+    api = frontend(apiobj, options={'details'})
+    result = api.details(napi.PlaceID(9000), address_details=True)
+
+    assert result.address_rows == [
+            napi.AddressLine(place_id=9000, osm_object=None,
+                             category=('place', 'postcode'),
+                             names={'ref': '34 425'}, extratags={},
+                             admin_level=15, fromarea=True, isaddress=True,
+                             rank_address=25, distance=0.0,
+                             local_name='34 425'),
+            napi.AddressLine(place_id=332, osm_object=('N', 3333),
+                             category=('place', 'suburb'),
+                             names={'name': 'Smallplace'}, extratags={},
+                             admin_level=13, fromarea=True, isaddress=True,
+                             rank_address=23, distance=0.0,
+                             local_name='Smallplace'),
+            napi.AddressLine(place_id=1001, osm_object=('N', 3334),
+                             category=('place', 'city'),
+                             names={'name': 'Bigplace'}, extratags={},
+                             admin_level=15, fromarea=True, isaddress=True,
+                             rank_address=16, distance=0.0,
+                             local_name='Bigplace'),
+            napi.AddressLine(place_id=None, osm_object=None,
+                             category=('place', 'country_code'),
+                             names={'ref': 'gb'}, extratags={},
+                             admin_level=None, fromarea=True, isaddress=False,
+                             rank_address=4, distance=0.0)
+            ]
+
+@pytest.mark.parametrize('objid', [napi.PlaceID(1736),
+ napi.OsmID('W', 55),
+ napi.OsmID('N', 55, 'amenity')])
+def test_lookup_missing_object(apiobj, frontend, objid):
+ apiobj.add_placex(place_id=1, osm_type='N', osm_id=55,
+ class_='place', type='suburb')
+
+ api = frontend(apiobj, options={'details'})
+ assert api.details(objid) is None
+
+
+@pytest.mark.parametrize('gtype', (napi.GeometryFormat.KML,
+ napi.GeometryFormat.SVG,
+ napi.GeometryFormat.TEXT))
+def test_lookup_unsupported_geometry(apiobj, frontend, gtype):
+ apiobj.add_placex(place_id=332)
+
+ api = frontend(apiobj, options={'details'})
+ with pytest.raises(ValueError):
+ api.details(napi.PlaceID(332), geometry_output=gtype)
--- /dev/null
+# SPDX-License-Identifier: GPL-3.0-or-later
+#
+# This file is part of Nominatim. (https://nominatim.org)
+#
+# Copyright (C) 2024 by the Nominatim developer community.
+# For a full list of authors see the git log.
+"""
+Tests for lookup API call.
+"""
+import json
+
+import pytest
+
+import nominatim_api as napi
+
+def test_lookup_empty_list(apiobj, frontend):
+ api = frontend(apiobj, options={'details'})
+ assert api.lookup([]) == []
+
+
+def test_lookup_non_existing(apiobj, frontend):
+ api = frontend(apiobj, options={'details'})
+ assert api.lookup((napi.PlaceID(332), napi.OsmID('W', 4),
+ napi.OsmID('W', 4, 'highway'))) == []
+
+
+@pytest.mark.parametrize('idobj', (napi.PlaceID(332), napi.OsmID('W', 4),
+                                   napi.OsmID('W', 4, 'highway')))
+def test_lookup_single_placex(apiobj, frontend, idobj):
+    """ lookup() of a single existing ID returns exactly one result with
+        all stored placex fields and no geometry by default.
+    """
+    apiobj.add_placex(place_id=332, osm_type='W', osm_id=4,
+                      class_='highway', type='residential',
+                      name={'name': 'Road'}, address={'city': 'Barrow'},
+                      extratags={'surface': 'paved'},
+                      parent_place_id=34, linked_place_id=55,
+                      admin_level=15, country_code='gb',
+                      housenumber='4',
+                      postcode='34425', wikipedia='en:Faa',
+                      rank_search=27, rank_address=26,
+                      importance=0.01,
+                      centroid=(23, 34),
+                      geometry='LINESTRING(23 34, 23.1 34, 23.1 34.1, 23 34)')
+
+    api = frontend(apiobj, options={'details'})
+    result = api.lookup([idobj])
+
+    assert len(result) == 1
+
+    result = result[0]
+
+    assert result.source_table.name == 'PLACEX'
+    assert result.category == ('highway', 'residential')
+    assert result.centroid == (pytest.approx(23.0), pytest.approx(34.0))
+
+    assert result.place_id == 332
+    assert result.osm_object == ('W', 4)
+
+    assert result.names == {'name': 'Road'}
+    assert result.address == {'city': 'Barrow'}
+    assert result.extratags == {'surface': 'paved'}
+
+    assert result.housenumber == '4'
+    assert result.postcode == '34425'
+    assert result.wikipedia == 'en:Faa'
+
+    assert result.rank_search == 27
+    assert result.rank_address == 26
+    assert result.importance == pytest.approx(0.01)
+
+    assert result.country_code == 'gb'
+
+    assert result.address_rows is None
+    assert result.linked_rows is None
+    assert result.parented_rows is None
+    assert result.name_keywords is None
+    assert result.address_keywords is None
+
+    # Unlike details(), lookup() returns an empty geometry dict by default.
+    assert result.geometry == {}
+
+
+def test_lookup_multiple_places(apiobj, frontend):
+ apiobj.add_placex(place_id=332, osm_type='W', osm_id=4,
+ class_='highway', type='residential',
+ name={'name': 'Road'}, address={'city': 'Barrow'},
+ extratags={'surface': 'paved'},
+ parent_place_id=34, linked_place_id=55,
+ admin_level=15, country_code='gb',
+ housenumber='4',
+ postcode='34425', wikipedia='en:Faa',
+ rank_search=27, rank_address=26,
+ importance=0.01,
+ centroid=(23, 34),
+ geometry='LINESTRING(23 34, 23.1 34, 23.1 34.1, 23 34)')
+ apiobj.add_osmline(place_id=4924, osm_id=9928,
+ parent_place_id=12,
+ startnumber=1, endnumber=4, step=1,
+ country_code='gb', postcode='34425',
+ address={'city': 'Big'},
+ geometry='LINESTRING(23 34, 23 35)')
+
+
+ api = frontend(apiobj, options={'details'})
+ result = api.lookup((napi.OsmID('W', 1),
+ napi.OsmID('W', 4),
+ napi.OsmID('W', 9928)))
+
+ assert len(result) == 2
+
+ assert set(r.place_id for r in result) == {332, 4924}
+
+
+@pytest.mark.parametrize('gtype', list(napi.GeometryFormat))
+def test_simple_place_with_geometry(apiobj, frontend, gtype):
+    """ Each geometry format fills exactly one key in the geometry dict,
+        named after the format; NONE produces an empty dict.
+    """
+    apiobj.add_placex(place_id=332, osm_type='W', osm_id=4,
+                      class_='highway', type='residential',
+                      name={'name': 'Road'}, address={'city': 'Barrow'},
+                      extratags={'surface': 'paved'},
+                      parent_place_id=34, linked_place_id=55,
+                      admin_level=15, country_code='gb',
+                      housenumber='4',
+                      postcode='34425', wikipedia='en:Faa',
+                      rank_search=27, rank_address=26,
+                      importance=0.01,
+                      centroid=(23, 34),
+                      geometry='POLYGON((23 34, 23.1 34, 23.1 34.1, 23 34))')
+
+    api = frontend(apiobj, options={'details'})
+    result = api.lookup([napi.OsmID('W', 4)], geometry_output=gtype)
+
+    assert len(result) == 1
+    assert result[0].place_id == 332
+
+    if gtype == napi.GeometryFormat.NONE:
+        assert list(result[0].geometry.keys()) == []
+    else:
+        assert list(result[0].geometry.keys()) == [gtype.name.lower()]
+
+
+def test_simple_place_with_geometry_simplified(apiobj, frontend):
+    """ geometry_simplification reduces the number of vertices in the
+        returned geometry.
+    """
+    apiobj.add_placex(place_id=332, osm_type='W', osm_id=4,
+                      class_='highway', type='residential',
+                      name={'name': 'Road'}, address={'city': 'Barrow'},
+                      extratags={'surface': 'paved'},
+                      parent_place_id=34, linked_place_id=55,
+                      admin_level=15, country_code='gb',
+                      housenumber='4',
+                      postcode='34425', wikipedia='en:Faa',
+                      rank_search=27, rank_address=26,
+                      importance=0.01,
+                      centroid=(23, 34),
+                      geometry='POLYGON((23 34, 22.999 34, 23.1 34, 23.1 34.1, 23 34))')
+
+    api = frontend(apiobj, options={'details'})
+    result = api.lookup([napi.OsmID('W', 4)],
+                        geometry_output=napi.GeometryFormat.GEOJSON,
+                        geometry_simplification=0.1)
+
+    assert len(result) == 1
+    assert result[0].place_id == 332
+
+    geom = json.loads(result[0].geometry['geojson'])
+
+    # The extra vertex 22.999 34 must have been simplified away.
+    assert geom['type'] == 'Polygon'
+    assert geom['coordinates'] == [[[23, 34], [23.1, 34], [23.1, 34.1], [23, 34]]]
--- /dev/null
+# SPDX-License-Identifier: GPL-3.0-or-later
+#
+# This file is part of Nominatim. (https://nominatim.org)
+#
+# Copyright (C) 2024 by the Nominatim developer community.
+# For a full list of authors see the git log.
+"""
+Tests for the deletable v1 API call.
+"""
+import json
+import datetime as dt
+from pathlib import Path
+
+import pytest
+
+from fake_adaptor import FakeAdaptor, FakeError, FakeResponse
+
+import nominatim_api.v1.server_glue as glue
+
+class TestPolygonsEndPoint:
+
+ @pytest.fixture(autouse=True)
+ def setup_deletable_table(self, temp_db_cursor, table_factory, temp_db_with_extensions):
+ self.now = dt.datetime.now()
+ self.recent = dt.datetime.now() - dt.timedelta(days=3)
+
+ table_factory('import_polygon_error',
+ definition="""osm_id bigint,
+ osm_type character(1),
+ class text,
+ type text,
+ name hstore,
+ country_code character varying(2),
+ updated timestamp without time zone,
+ errormessage text,
+ prevgeometry geometry(Geometry,4326),
+ newgeometry geometry(Geometry,4326)""",
+ content=[(345, 'N', 'boundary', 'administrative',
+ {'name': 'Foo'}, 'xx', self.recent,
+ 'some text', None, None),
+ (781, 'R', 'landuse', 'wood',
+ None, 'ds', self.now,
+ 'Area reduced by lots', None, None)])
+
+
+ @pytest.mark.asyncio
+ async def test_polygons_simple(self, api):
+ a = FakeAdaptor()
+
+ resp = await glue.polygons_endpoint(api, a)
+ results = json.loads(resp.output)
+
+ results.sort(key=lambda r: (r['osm_type'], r['osm_id']))
+
+ assert results == [{'osm_type': 'N', 'osm_id': 345,
+ 'class': 'boundary', 'type': 'administrative',
+ 'name': 'Foo', 'country_code': 'xx',
+ 'errormessage': 'some text',
+ 'updated': self.recent.isoformat(sep=' ', timespec='seconds')},
+ {'osm_type': 'R', 'osm_id': 781,
+ 'class': 'landuse', 'type': 'wood',
+ 'name': None, 'country_code': 'ds',
+ 'errormessage': 'Area reduced by lots',
+ 'updated': self.now.isoformat(sep=' ', timespec='seconds')}]
+
+
+ @pytest.mark.asyncio
+ async def test_polygons_days(self, api):
+ a = FakeAdaptor()
+ a.params['days'] = '2'
+
+ resp = await glue.polygons_endpoint(api, a)
+ results = json.loads(resp.output)
+
+ assert [r['osm_id'] for r in results] == [781]
+
+
+ @pytest.mark.asyncio
+ async def test_polygons_class(self, api):
+ a = FakeAdaptor()
+ a.params['class'] = 'landuse'
+
+ resp = await glue.polygons_endpoint(api, a)
+ results = json.loads(resp.output)
+
+ assert [r['osm_id'] for r in results] == [781]
+
+
+
+ @pytest.mark.asyncio
+ async def test_polygons_reduced(self, api):
+ a = FakeAdaptor()
+ a.params['reduced'] = '1'
+
+ resp = await glue.polygons_endpoint(api, a)
+ results = json.loads(resp.output)
+
+ assert [r['osm_id'] for r in results] == [781]
--- /dev/null
+# SPDX-License-Identifier: GPL-3.0-or-later
+#
+# This file is part of Nominatim. (https://nominatim.org)
+#
+# Copyright (C) 2024 by the Nominatim developer community.
+# For a full list of authors see the git log.
+"""
+Tests for reverse API call.
+
+These tests make sure that all Python code is correct and executable.
+Functional tests can be found in the BDD test suite.
+"""
+import json
+
+import pytest
+
+import nominatim_api as napi
+
+API_OPTIONS = {'reverse'}
+
+def test_reverse_rank_30(apiobj, frontend):
+ apiobj.add_placex(place_id=223, class_='place', type='house',
+ housenumber='1',
+ centroid=(1.3, 0.7),
+ geometry='POINT(1.3 0.7)')
+
+ api = frontend(apiobj, options=API_OPTIONS)
+ result = api.reverse((1.3, 0.7))
+
+ assert result is not None
+ assert result.place_id == 223
+
+
+@pytest.mark.parametrize('country', ['de', 'us'])
+def test_reverse_street(apiobj, frontend, country):
+ apiobj.add_placex(place_id=990, class_='highway', type='service',
+ rank_search=27, rank_address=27,
+ name = {'name': 'My Street'},
+ centroid=(10.0, 10.0),
+ country_code=country,
+ geometry='LINESTRING(9.995 10, 10.005 10)')
+
+ api = frontend(apiobj, options=API_OPTIONS)
+ assert api.reverse((9.995, 10)).place_id == 990
+
+
+def test_reverse_ignore_unindexed(apiobj, frontend):
+ apiobj.add_placex(place_id=223, class_='place', type='house',
+ housenumber='1',
+ indexed_status=2,
+ centroid=(1.3, 0.7),
+ geometry='POINT(1.3 0.7)')
+
+ api = frontend(apiobj, options=API_OPTIONS)
+ result = api.reverse((1.3, 0.7))
+
+ assert result is None
+
+
+@pytest.mark.parametrize('y,layer,place_id', [(0.7, napi.DataLayer.ADDRESS, 223),
+                                              (0.70001, napi.DataLayer.POI, 224),
+                                              (0.7, napi.DataLayer.ADDRESS | napi.DataLayer.POI, 224),
+                                              (0.70001, napi.DataLayer.ADDRESS | napi.DataLayer.POI, 223),
+                                              (0.7, napi.DataLayer.MANMADE, 225),
+                                              (0.7, napi.DataLayer.RAILWAY, 226),
+                                              (0.7, napi.DataLayer.NATURAL, 227),
+                                              (0.70003, napi.DataLayer.MANMADE | napi.DataLayer.RAILWAY, 225),
+                                              (0.70003, napi.DataLayer.MANMADE | napi.DataLayer.NATURAL, 225),
+                                              (5, napi.DataLayer.ADDRESS, 229)])
+def test_reverse_rank_30_layers(apiobj, frontend, y, layer, place_id):
+    """ The layers parameter restricts which of several nearby rank-30
+        objects reverse search may return.
+    """
+    # house with housenumber (address layer)
+    apiobj.add_placex(place_id=223, osm_type='N', class_='place', type='house',
+                      housenumber='1',
+                      rank_address=30,
+                      rank_search=30,
+                      centroid=(1.3, 0.70001))
+    # amenity (POI layer)
+    apiobj.add_placex(place_id=224, osm_type='N', class_='amenity', type='toilet',
+                      rank_address=30,
+                      rank_search=30,
+                      centroid=(1.3, 0.7))
+    # man_made object
+    apiobj.add_placex(place_id=225, osm_type='N', class_='man_made', type='tower',
+                      rank_address=0,
+                      rank_search=30,
+                      centroid=(1.3, 0.70003))
+    # railway object
+    apiobj.add_placex(place_id=226, osm_type='N', class_='railway', type='station',
+                      rank_address=0,
+                      rank_search=30,
+                      centroid=(1.3, 0.70004))
+    # natural object
+    apiobj.add_placex(place_id=227, osm_type='N', class_='natural', type='cave',
+                      rank_address=0,
+                      rank_search=30,
+                      centroid=(1.3, 0.70005))
+    # house named only via addr:housename, far away from the others (y=5)
+    apiobj.add_placex(place_id=229, class_='place', type='house',
+                      name={'addr:housename': 'Old Cottage'},
+                      rank_address=30,
+                      rank_search=30,
+                      centroid=(1.3, 5))
+
+    api = frontend(apiobj, options=API_OPTIONS)
+    assert api.reverse((1.3, y), layers=layer).place_id == place_id
+
+
+def test_reverse_poi_layer_with_no_pois(apiobj, frontend):
+ apiobj.add_placex(place_id=223, class_='place', type='house',
+ housenumber='1',
+ rank_address=30,
+ rank_search=30,
+ centroid=(1.3, 0.70001))
+
+ api = frontend(apiobj, options=API_OPTIONS)
+ assert api.reverse((1.3, 0.70001), max_rank=29,
+ layers=napi.DataLayer.POI) is None
+
+
+@pytest.mark.parametrize('with_geom', [True, False])
+def test_reverse_housenumber_on_street(apiobj, frontend, with_geom):
+ apiobj.add_placex(place_id=990, class_='highway', type='service',
+ rank_search=27, rank_address=27,
+ name = {'name': 'My Street'},
+ centroid=(10.0, 10.0),
+ geometry='LINESTRING(9.995 10, 10.005 10)')
+ apiobj.add_placex(place_id=991, class_='place', type='house',
+ parent_place_id=990,
+ rank_search=30, rank_address=30,
+ housenumber='23',
+ centroid=(10.0, 10.00001))
+ apiobj.add_placex(place_id=1990, class_='highway', type='service',
+ rank_search=27, rank_address=27,
+ name = {'name': 'Other Street'},
+ centroid=(10.0, 1.0),
+ geometry='LINESTRING(9.995 1, 10.005 1)')
+ apiobj.add_placex(place_id=1991, class_='place', type='house',
+ parent_place_id=1990,
+ rank_search=30, rank_address=30,
+ housenumber='23',
+ centroid=(10.0, 1.00001))
+
+ params = {'geometry_output': napi.GeometryFormat.TEXT} if with_geom else {}
+
+ api = frontend(apiobj, options=API_OPTIONS)
+ assert api.reverse((10.0, 10.0), max_rank=30, **params).place_id == 991
+ assert api.reverse((10.0, 10.0), max_rank=27).place_id == 990
+ assert api.reverse((10.0, 10.00001), max_rank=30).place_id == 991
+ assert api.reverse((10.0, 1.0), **params).place_id == 1991
+
+
+@pytest.mark.parametrize('with_geom', [True, False])
+def test_reverse_housenumber_interpolation(apiobj, frontend, with_geom):
+ apiobj.add_placex(place_id=990, class_='highway', type='service',
+ rank_search=27, rank_address=27,
+ name = {'name': 'My Street'},
+ centroid=(10.0, 10.0),
+ geometry='LINESTRING(9.995 10, 10.005 10)')
+ apiobj.add_placex(place_id=991, class_='place', type='house',
+ parent_place_id=990,
+ rank_search=30, rank_address=30,
+ housenumber='23',
+ centroid=(10.0, 10.00002))
+ apiobj.add_osmline(place_id=992,
+ parent_place_id=990,
+ startnumber=1, endnumber=3, step=1,
+ centroid=(10.0, 10.00001),
+ geometry='LINESTRING(9.995 10.00001, 10.005 10.00001)')
+ apiobj.add_placex(place_id=1990, class_='highway', type='service',
+ rank_search=27, rank_address=27,
+ name = {'name': 'Other Street'},
+ centroid=(10.0, 20.0),
+ geometry='LINESTRING(9.995 20, 10.005 20)')
+ apiobj.add_osmline(place_id=1992,
+ parent_place_id=1990,
+ startnumber=1, endnumber=3, step=1,
+ centroid=(10.0, 20.00001),
+ geometry='LINESTRING(9.995 20.00001, 10.005 20.00001)')
+
+ params = {'geometry_output': napi.GeometryFormat.TEXT} if with_geom else {}
+
+ api = frontend(apiobj, options=API_OPTIONS)
+ assert api.reverse((10.0, 10.0), **params).place_id == 992
+ assert api.reverse((10.0, 20.0), **params).place_id == 1992
+
+
+def test_reverse_housenumber_point_interpolation(apiobj, frontend):
+ apiobj.add_placex(place_id=990, class_='highway', type='service',
+ rank_search=27, rank_address=27,
+ name = {'name': 'My Street'},
+ centroid=(10.0, 10.0),
+ geometry='LINESTRING(9.995 10, 10.005 10)')
+ apiobj.add_osmline(place_id=992,
+ parent_place_id=990,
+ startnumber=42, endnumber=42, step=1,
+ centroid=(10.0, 10.00001),
+ geometry='POINT(10.0 10.00001)')
+
+ api = frontend(apiobj, options=API_OPTIONS)
+ res = api.reverse((10.0, 10.0))
+ assert res.place_id == 992
+ assert res.housenumber == '42'
+
+
+def test_reverse_tiger_number(apiobj, frontend):
+ apiobj.add_placex(place_id=990, class_='highway', type='service',
+ rank_search=27, rank_address=27,
+ name = {'name': 'My Street'},
+ centroid=(10.0, 10.0),
+ country_code='us',
+ geometry='LINESTRING(9.995 10, 10.005 10)')
+ apiobj.add_tiger(place_id=992,
+ parent_place_id=990,
+ startnumber=1, endnumber=3, step=1,
+ centroid=(10.0, 10.00001),
+ geometry='LINESTRING(9.995 10.00001, 10.005 10.00001)')
+
+ api = frontend(apiobj, options=API_OPTIONS)
+ assert api.reverse((10.0, 10.0)).place_id == 992
+ assert api.reverse((10.0, 10.00001)).place_id == 992
+
+
+def test_reverse_point_tiger(apiobj, frontend):
+ apiobj.add_placex(place_id=990, class_='highway', type='service',
+ rank_search=27, rank_address=27,
+ name = {'name': 'My Street'},
+ centroid=(10.0, 10.0),
+ country_code='us',
+ geometry='LINESTRING(9.995 10, 10.005 10)')
+ apiobj.add_tiger(place_id=992,
+ parent_place_id=990,
+ startnumber=1, endnumber=1, step=1,
+ centroid=(10.0, 10.00001),
+ geometry='POINT(10.0 10.00001)')
+
+ api = frontend(apiobj, options=API_OPTIONS)
+ res = api.reverse((10.0, 10.0))
+ assert res.place_id == 992
+ assert res.housenumber == '1'
+
+
+def test_reverse_low_zoom_address(apiobj, frontend):
+ apiobj.add_placex(place_id=1001, class_='place', type='house',
+ housenumber='1',
+ rank_address=30,
+ rank_search=30,
+ centroid=(59.3, 80.70001))
+ apiobj.add_placex(place_id=1002, class_='place', type='town',
+ name={'name': 'Town'},
+ rank_address=16,
+ rank_search=16,
+ centroid=(59.3, 80.70001),
+ geometry="""POLYGON((59.3 80.70001, 59.3001 80.70001,
+ 59.3001 80.70101, 59.3 80.70101, 59.3 80.70001))""")
+
+ api = frontend(apiobj, options=API_OPTIONS)
+ assert api.reverse((59.30005, 80.7005)).place_id == 1001
+ assert api.reverse((59.30005, 80.7005), max_rank=18).place_id == 1002
+
+
+def test_reverse_place_node_in_area(apiobj, frontend):
+ apiobj.add_placex(place_id=1002, class_='place', type='town',
+ name={'name': 'Town Area'},
+ rank_address=16,
+ rank_search=16,
+ centroid=(59.3, 80.70001),
+ geometry="""POLYGON((59.3 80.70001, 59.3001 80.70001,
+ 59.3001 80.70101, 59.3 80.70101, 59.3 80.70001))""")
+ apiobj.add_placex(place_id=1003, class_='place', type='suburb',
+ name={'name': 'Suburb Point'},
+ osm_type='N',
+ rank_address=18,
+ rank_search=18,
+ centroid=(59.30004, 80.70055))
+
+ api = frontend(apiobj, options=API_OPTIONS)
+ assert api.reverse((59.30004, 80.70055)).place_id == 1003
+
+
+@pytest.mark.parametrize('layer,place_id', [(napi.DataLayer.MANMADE, 225),
+ (napi.DataLayer.RAILWAY, 226),
+ (napi.DataLayer.NATURAL, 227),
+ (napi.DataLayer.MANMADE | napi.DataLayer.RAILWAY, 225),
+ (napi.DataLayer.MANMADE | napi.DataLayer.NATURAL, 225)])
+def test_reverse_larger_area_layers(apiobj, frontend, layer, place_id):
+ apiobj.add_placex(place_id=225, class_='man_made', type='dam',
+ name={'name': 'Dam'},
+ rank_address=0,
+ rank_search=25,
+ centroid=(1.3, 0.70003))
+ apiobj.add_placex(place_id=226, class_='railway', type='yard',
+ name={'name': 'Dam'},
+ rank_address=0,
+ rank_search=20,
+ centroid=(1.3, 0.70004))
+ apiobj.add_placex(place_id=227, class_='natural', type='spring',
+ name={'name': 'Dam'},
+ rank_address=0,
+ rank_search=16,
+ centroid=(1.3, 0.70005))
+
+ api = frontend(apiobj, options=API_OPTIONS)
+ assert api.reverse((1.3, 0.7), layers=layer).place_id == place_id
+
+
+def test_reverse_country_lookup_no_objects(apiobj, frontend):
+    """ A country area without any searchable places yields no result.
+    """
+    apiobj.add_country('xx', 'POLYGON((0 0, 0 1, 1 1, 1 0, 0 0))')
+
+    api = frontend(apiobj, options=API_OPTIONS)
+    assert api.reverse((0.5, 0.5)) is None
+
+
+@pytest.mark.parametrize('rank', [4, 30])
+@pytest.mark.parametrize('with_geom', [True, False])
+def test_reverse_country_lookup_country_only(apiobj, frontend, rank, with_geom):
+ apiobj.add_country('xx', 'POLYGON((0 0, 0 1, 1 1, 1 0, 0 0))')
+ apiobj.add_country('yy', 'POLYGON((10 0, 10 1, 11 1, 11 0, 10 0))')
+ apiobj.add_placex(place_id=225, class_='place', type='country',
+ name={'name': 'My Country'},
+ rank_address=4,
+ rank_search=4,
+ country_code='xx',
+ centroid=(0.7, 0.7))
+
+ params = {'max_rank': rank}
+ if with_geom:
+ params['geometry_output'] = napi.GeometryFormat.TEXT
+
+ api = frontend(apiobj, options=API_OPTIONS)
+ assert api.reverse((0.5, 0.5), **params).place_id == 225
+ assert api.reverse((10.5, 0.5), **params) is None
+
+
+@pytest.mark.parametrize('with_geom', [True, False])
+def test_reverse_country_lookup_place_node_inside(apiobj, frontend, with_geom):
+ apiobj.add_country('xx', 'POLYGON((0 0, 0 1, 1 1, 1 0, 0 0))')
+ apiobj.add_country('yy', 'POLYGON((10 0, 10 1, 11 1, 11 0, 10 0))')
+ apiobj.add_placex(place_id=225, class_='place', type='state',
+ osm_type='N',
+ name={'name': 'My State'},
+ rank_address=6,
+ rank_search=6,
+ country_code='xx',
+ centroid=(0.5, 0.505))
+ apiobj.add_placex(place_id=425, class_='place', type='state',
+ osm_type='N',
+ name={'name': 'Other State'},
+ rank_address=6,
+ rank_search=6,
+ country_code='yy',
+ centroid=(10.5, 0.505))
+
+ params = {'geometry_output': napi.GeometryFormat.KML} if with_geom else {}
+
+ api = frontend(apiobj, options=API_OPTIONS)
+ assert api.reverse((0.5, 0.5), **params).place_id == 225
+ assert api.reverse((10.5, 0.5), **params).place_id == 425
+
+
+@pytest.mark.parametrize('gtype', list(napi.GeometryFormat))
+def test_reverse_geometry_output_placex(apiobj, frontend, gtype):
+ apiobj.add_country('xx', 'POLYGON((0 0, 0 1, 1 1, 1 0, 0 0))')
+ apiobj.add_placex(place_id=1001, class_='place', type='house',
+ housenumber='1',
+ rank_address=30,
+ rank_search=30,
+ centroid=(59.3, 80.70001))
+ apiobj.add_placex(place_id=1003, class_='place', type='suburb',
+ name={'name': 'Suburb Point'},
+ osm_type='N',
+ rank_address=18,
+ rank_search=18,
+ country_code='xx',
+ centroid=(0.5, 0.5))
+
+ api = frontend(apiobj, options=API_OPTIONS)
+ assert api.reverse((59.3, 80.70001), geometry_output=gtype).place_id == 1001
+ assert api.reverse((0.5, 0.5), geometry_output=gtype).place_id == 1003
+
+
+def test_reverse_simplified_geometry(apiobj, frontend):
+ apiobj.add_placex(place_id=1001, class_='place', type='house',
+ housenumber='1',
+ rank_address=30,
+ rank_search=30,
+ centroid=(59.3, 80.70001))
+
+ api = frontend(apiobj, options=API_OPTIONS)
+ details = dict(geometry_output=napi.GeometryFormat.GEOJSON,
+ geometry_simplification=0.1)
+ assert api.reverse((59.3, 80.70001), **details).place_id == 1001
+
+
+def test_reverse_interpolation_geometry(apiobj, frontend):
+ apiobj.add_osmline(place_id=992,
+ parent_place_id=990,
+ startnumber=1, endnumber=3, step=1,
+ centroid=(10.0, 10.00001),
+ geometry='LINESTRING(9.995 10.00001, 10.005 10.00001)')
+
+ api = frontend(apiobj, options=API_OPTIONS)
+ assert api.reverse((10.0, 10.0), geometry_output=napi.GeometryFormat.TEXT)\
+ .geometry['text'] == 'POINT(10 10.00001)'
+
+
+def test_reverse_tiger_geometry(apiobj, frontend):
+ apiobj.add_placex(place_id=990, class_='highway', type='service',
+ rank_search=27, rank_address=27,
+ name = {'name': 'My Street'},
+ centroid=(10.0, 10.0),
+ country_code='us',
+ geometry='LINESTRING(9.995 10, 10.005 10)')
+ apiobj.add_tiger(place_id=992,
+ parent_place_id=990,
+ startnumber=1, endnumber=3, step=1,
+ centroid=(10.0, 10.00001),
+ geometry='LINESTRING(9.995 10.00001, 10.005 10.00001)')
+ apiobj.add_placex(place_id=1000, class_='highway', type='service',
+ rank_search=27, rank_address=27,
+ name = {'name': 'My Street'},
+ centroid=(11.0, 11.0),
+ country_code='us',
+ geometry='LINESTRING(10.995 11, 11.005 11)')
+ apiobj.add_tiger(place_id=1001,
+ parent_place_id=1000,
+ startnumber=1, endnumber=3, step=1,
+ centroid=(11.0, 11.00001),
+ geometry='LINESTRING(10.995 11.00001, 11.005 11.00001)')
+
+ api = frontend(apiobj, options=API_OPTIONS)
+
+ params = {'geometry_output': napi.GeometryFormat.GEOJSON}
+
+ output = api.reverse((10.0, 10.0), **params)
+ assert json.loads(output.geometry['geojson']) == {'coordinates': [10, 10.00001], 'type': 'Point'}
+
+ output = api.reverse((11.0, 11.0), **params)
+ assert json.loads(output.geometry['geojson']) == {'coordinates': [11, 11.00001], 'type': 'Point'}
+
--- /dev/null
+# SPDX-License-Identifier: GPL-3.0-or-later
+#
+# This file is part of Nominatim. (https://nominatim.org)
+#
+# Copyright (C) 2024 by the Nominatim developer community.
+# For a full list of authors see the git log.
+"""
+Tests for search API calls.
+
+These tests make sure that all Python code is correct and executable.
+Functional tests can be found in the BDD test suite.
+"""
+import json
+
+import pytest
+
+import sqlalchemy as sa
+
+import nominatim_api as napi
+import nominatim_api.logging as loglib
+
+API_OPTIONS = {'search'}
+
+@pytest.fixture(autouse=True)
+def setup_icu_tokenizer(apiobj):
+    """ Setup the properties needed for using the ICU tokenizer.
+
+        Runs automatically for every test in this module so the API
+        object finds a complete tokenizer configuration in the
+        'properties' table.
+    """
+    apiobj.add_data('properties',
+                    [{'property': 'tokenizer', 'value': 'icu'},
+                     {'property': 'tokenizer_import_normalisation', 'value': ':: lower();'},
+                     {'property': 'tokenizer_import_transliteration', 'value': "'1' > '/1/'; 'ä' > 'ä '"},
+                    ])
+
+
+def test_search_no_content(apiobj, frontend):
+ apiobj.add_word_table([])
+
+ api = frontend(apiobj, options=API_OPTIONS)
+ assert api.search('foo') == []
+
+
+def test_search_simple_word(apiobj, frontend):
+ apiobj.add_word_table([(55, 'test', 'W', 'test', None),
+ (2, 'test', 'w', 'test', None)])
+
+ apiobj.add_placex(place_id=444, class_='place', type='village',
+ centroid=(1.3, 0.7))
+ apiobj.add_search_name(444, names=[2, 55])
+
+ api = frontend(apiobj, options=API_OPTIONS)
+ results = api.search('TEST')
+
+ assert [r.place_id for r in results] == [444]
+
+
+@pytest.mark.parametrize('logtype', ['text', 'html'])
+def test_search_with_debug(apiobj, frontend, logtype):
+ apiobj.add_word_table([(55, 'test', 'W', 'test', None),
+ (2, 'test', 'w', 'test', None)])
+
+ apiobj.add_placex(place_id=444, class_='place', type='village',
+ centroid=(1.3, 0.7))
+ apiobj.add_search_name(444, names=[2, 55])
+
+ api = frontend(apiobj, options=API_OPTIONS)
+ loglib.set_log_output(logtype)
+ results = api.search('TEST')
+
+ assert loglib.get_and_disable()
+
+
+def test_address_no_content(apiobj, frontend):
+ apiobj.add_word_table([])
+
+ api = frontend(apiobj, options=API_OPTIONS)
+ assert api.search_address(amenity='hotel',
+ street='Main St 34',
+ city='Happyville',
+ county='Wideland',
+ state='Praerie',
+ postalcode='55648',
+ country='xx') == []
+
+
+@pytest.mark.parametrize('atype,address,search', [('street', 26, 26),
+ ('city', 16, 18),
+ ('county', 12, 12),
+ ('state', 8, 8)])
+def test_address_simple_places(apiobj, frontend, atype, address, search):
+ apiobj.add_word_table([(55, 'test', 'W', 'test', None),
+ (2, 'test', 'w', 'test', None)])
+
+ apiobj.add_placex(place_id=444,
+ rank_address=address, rank_search=search,
+ centroid=(1.3, 0.7))
+ apiobj.add_search_name(444, names=[2, 55], address_rank=address, search_rank=search)
+
+ api = frontend(apiobj, options=API_OPTIONS)
+ results = api.search_address(**{atype: 'TEST'})
+
+ assert [r.place_id for r in results] == [444]
+
+
+def test_address_country(apiobj, frontend):
+ apiobj.add_word_table([(None, 'ro', 'C', 'ro', None)])
+ apiobj.add_country('ro', 'POLYGON((0 0, 0 1, 1 1, 1 0, 0 0))')
+ apiobj.add_country_name('ro', {'name': 'România'})
+
+ api = frontend(apiobj, options=API_OPTIONS)
+ assert len(api.search_address(country='ro')) == 1
+
+
+def test_category_no_categories(apiobj, frontend):
+ apiobj.add_word_table([])
+
+ api = frontend(apiobj, options=API_OPTIONS)
+ assert api.search_category([], near_query='Berlin') == []
+
+
+def test_category_no_content(apiobj, frontend):
+ apiobj.add_word_table([])
+
+ api = frontend(apiobj, options=API_OPTIONS)
+ assert api.search_category([('amenity', 'restaurant')]) == []
+
+
+def test_category_simple_restaurant(apiobj, frontend):
+ apiobj.add_word_table([])
+
+ apiobj.add_placex(place_id=444, class_='amenity', type='restaurant',
+ centroid=(1.3, 0.7))
+ apiobj.add_search_name(444, names=[2, 55], address_rank=16, search_rank=18)
+
+ api = frontend(apiobj, options=API_OPTIONS)
+ results = api.search_category([('amenity', 'restaurant')],
+ near=(1.3, 0.701), near_radius=0.015)
+
+ assert [r.place_id for r in results] == [444]
+
+
+def test_category_with_search_phrase(apiobj, frontend):
+ apiobj.add_word_table([(55, 'test', 'W', 'test', None),
+ (2, 'test', 'w', 'test', None)])
+
+ apiobj.add_placex(place_id=444, class_='place', type='village',
+ rank_address=16, rank_search=18,
+ centroid=(1.3, 0.7))
+ apiobj.add_search_name(444, names=[2, 55], address_rank=16, search_rank=18)
+ apiobj.add_placex(place_id=95, class_='amenity', type='restaurant',
+ centroid=(1.3, 0.7003))
+
+ api = frontend(apiobj, options=API_OPTIONS)
+ results = api.search_category([('amenity', 'restaurant')], near_query='TEST')
+
+ assert [r.place_id for r in results] == [95]
--- /dev/null
+# SPDX-License-Identifier: GPL-3.0-or-later
+#
+# This file is part of Nominatim. (https://nominatim.org)
+#
+# Copyright (C) 2024 by the Nominatim developer community.
+# For a full list of authors see the git log.
+"""
+Tests for the status API call.
+"""
+import datetime as dt
+import pytest
+
+from nominatim_db.version import NominatimVersion
+from nominatim_api.version import NOMINATIM_API_VERSION
+import nominatim_api as napi
+
+def test_status_no_extra_info(apiobj, frontend):
+ api = frontend(apiobj)
+ result = api.status()
+
+ assert result.status == 0
+ assert result.message == 'OK'
+ assert result.software_version == NOMINATIM_API_VERSION
+ assert result.database_version is None
+ assert result.data_updated is None
+
+
+def test_status_full(apiobj, frontend):
+ import_date = dt.datetime(2022, 12, 7, 14, 14, 46, 0, tzinfo=dt.timezone.utc)
+ apiobj.add_data('import_status',
+ [{'lastimportdate': import_date}])
+ apiobj.add_data('properties',
+ [{'property': 'database_version', 'value': '99.5.4-2'}])
+
+ api = frontend(apiobj)
+ result = api.status()
+
+ assert result.status == 0
+ assert result.message == 'OK'
+ assert result.software_version == NOMINATIM_API_VERSION
+ assert result.database_version == '99.5.4-2'
+ assert result.data_updated == import_date
+
+
+def test_status_database_not_found(monkeypatch):
+ monkeypatch.setenv('NOMINATIM_DATABASE_DSN', 'dbname=rgjdfkgjedkrgdfkngdfkg')
+
+ api = napi.NominatimAPI()
+
+ result = api.status()
+
+ assert result.status == 700
+ assert result.message == 'Database connection failed'
+ assert result.software_version == NOMINATIM_API_VERSION
+ assert result.database_version is None
+ assert result.data_updated is None
--- /dev/null
+# SPDX-License-Identifier: GPL-3.0-or-later
+#
+# This file is part of Nominatim. (https://nominatim.org)
+#
+# Copyright (C) 2024 by the Nominatim developer community.
+# For a full list of authors see the git log.
+"""
+Tests for loading of parameter dataclasses.
+"""
+import pytest
+
+from nominatim_api.errors import UsageError
+import nominatim_api.types as typ
+
+def test_no_params_defaults():
+ params = typ.LookupDetails.from_kwargs({})
+
+ assert not params.parented_places
+ assert params.geometry_simplification == 0.0
+
+
+@pytest.mark.parametrize('k,v', [('geometry_output', 'a'),
+ ('linked_places', 0),
+ ('geometry_simplification', 'NaN')])
+def test_bad_format_reverse(k, v):
+ with pytest.raises(UsageError):
+ params = typ.ReverseDetails.from_kwargs({k: v})
+
+
+@pytest.mark.parametrize('rin,rout', [(-23, 0), (0, 0), (1, 1),
+ (15, 15), (30, 30), (31, 30)])
+def test_rank_params(rin, rout):
+ params = typ.ReverseDetails.from_kwargs({'max_rank': rin})
+
+ assert params.max_rank == rout
--- /dev/null
+# SPDX-License-Identifier: GPL-3.0-or-later
+#
+# This file is part of Nominatim. (https://nominatim.org)
+#
+# Copyright (C) 2024 by the Nominatim developer community.
+# For a full list of authors see the git log.
+"""
+Tests for export CLI function.
+"""
+import pytest
+
+import nominatim_db.cli
+
+@pytest.fixture
+def run_export(tmp_path, capsys):
+    """ Return a function that runs the 'export' CLI command with the
+        given extra arguments and returns captured stdout split into
+        lines (the command uses CRLF line endings).
+    """
+    def _exec(args):
+        # NOTE(review): a marker string is passed as osm2pgsql_path —
+        # presumably 'export' never invokes osm2pgsql; confirm.
+        assert 0 == nominatim_db.cli.nominatim(osm2pgsql_path='OSM2PGSQL NOT AVAILABLE',
+                                               cli_args=['export', '--project-dir', str(tmp_path)]
+                                                        + args)
+        return capsys.readouterr().out.split('\r\n')
+
+    return _exec
+
+
+@pytest.fixture(autouse=True)
+def setup_database_with_context(apiobj):
+ apiobj.add_placex(place_id=332, osm_type='W', osm_id=4,
+ class_='highway', type='residential', name='Street',
+ country_code='pl', postcode='55674',
+ rank_search=27, rank_address=26)
+ apiobj.add_address_placex(332, fromarea=False, isaddress=False,
+ distance=0.0034,
+ place_id=1000, osm_type='N', osm_id=3333,
+ class_='place', type='suburb', name='Smallplace',
+ country_code='pl', admin_level=13,
+ rank_search=24, rank_address=23)
+ apiobj.add_address_placex(332, fromarea=True, isaddress=True,
+ place_id=1001, osm_type='N', osm_id=3334,
+ class_='place', type='city', name='Bigplace',
+ country_code='pl',
+ rank_search=17, rank_address=16)
+
+
+def test_export_default(run_export):
+ csv = run_export([])
+
+ assert csv == ['street,suburb,city,county,state,country', 'Street,,Bigplace,,,', '']
+
+
+def test_export_output_type(run_export):
+ csv = run_export(['--output-type', 'city'])
+
+ assert csv == ['street,suburb,city,county,state,country', ',,Bigplace,,,', '']
+
+
+def test_export_output_format(run_export):
+ csv = run_export(['--output-format', 'placeid;street;nothing;postcode'])
+
+ assert csv == ['placeid,street,nothing,postcode', '332,Street,,55674', '']
+
+
+def test_export_restrict_to_node_good(run_export):
+ csv = run_export(['--restrict-to-osm-node', '3334'])
+
+ assert csv == ['street,suburb,city,county,state,country', 'Street,,Bigplace,,,', '']
+
+
+def test_export_restrict_to_node_not_address(run_export):
+ csv = run_export(['--restrict-to-osm-node', '3333'])
+
+ assert csv == ['street,suburb,city,county,state,country', '']
--- /dev/null
+# SPDX-License-Identifier: GPL-3.0-or-later
+#
+# This file is part of Nominatim. (https://nominatim.org)
+#
+# Copyright (C) 2024 by the Nominatim developer community.
+# For a full list of authors see the git log.
+"""
+Tests for the helper functions for v1 API.
+"""
+import pytest
+
+import nominatim_api.v1.helpers as helper
+
+@pytest.mark.parametrize('inp', ['',
+ 'abc',
+ '12 23',
+ 'abc -78.90, 12.456 def',
+ '40 N 60 W'])
+def test_extract_coords_no_coords(inp):
+ query, x, y = helper.extract_coords_from_query(inp)
+
+ assert query == inp
+ assert x is None
+ assert y is None
+
+
+def test_extract_coords_null_island():
+ assert ('', 0.0, 0.0) == helper.extract_coords_from_query('0.0 -0.0')
+
+
+def test_extract_coords_with_text_before():
+ assert ('abc', 12.456, -78.90) == helper.extract_coords_from_query('abc -78.90, 12.456')
+
+
+def test_extract_coords_with_text_after():
+ assert ('abc', 12.456, -78.90) == helper.extract_coords_from_query('-78.90, 12.456 abc')
+
+@pytest.mark.parametrize('inp', [' [12.456,-78.90] ', ' 12.456,-78.90 '])
+def test_extract_coords_with_spaces(inp):
+ assert ('', -78.90, 12.456) == helper.extract_coords_from_query(inp)
+
+@pytest.mark.parametrize('inp', ['40 26.767 N 79 58.933 W',
+ '40° 26.767′ N 79° 58.933′ W',
+ "40° 26.767' N 79° 58.933' W",
+ "40° 26.767'\n"
+ " N 79° 58.933' W",
+ 'N 40 26.767, W 79 58.933',
+ 'N 40°26.767′, W 79°58.933′',
+ ' N 40°26.767′, W 79°58.933′',
+ "N 40°26.767', W 79°58.933'",
+
+ '40 26 46 N 79 58 56 W',
+ '40° 26′ 46″ N 79° 58′ 56″ W',
+ '40° 26′ 46.00″ N 79° 58′ 56.00″ W',
+ '40°26′46″N 79°58′56″W',
+ 'N 40 26 46 W 79 58 56',
+ 'N 40° 26′ 46″, W 79° 58′ 56″',
+ 'N 40° 26\' 46", W 79° 58\' 56"',
+ 'N 40° 26\' 46", W 79° 58\' 56"',
+
+ '40.446 -79.982',
+ '40.446,-79.982',
+ '40.446° N 79.982° W',
+ 'N 40.446° W 79.982°',
+
+ '[40.446 -79.982]',
+ '[40.446,\v-79.982]',
+ ' 40.446 , -79.982 ',
+ ' 40.446 , -79.982 ',
+ ' 40.446 , -79.982 ',
+ ' 40.446\v, -79.982 '])
+def test_extract_coords_formats(inp):
+ query, x, y = helper.extract_coords_from_query(inp)
+
+ assert query == ''
+ assert pytest.approx(x, abs=0.001) == -79.982
+ assert pytest.approx(y, abs=0.001) == 40.446
+
+ query, x, y = helper.extract_coords_from_query('foo bar ' + inp)
+
+ assert query == 'foo bar'
+ assert pytest.approx(x, abs=0.001) == -79.982
+ assert pytest.approx(y, abs=0.001) == 40.446
+
+ query, x, y = helper.extract_coords_from_query(inp + ' x')
+
+ assert query == 'x'
+ assert pytest.approx(x, abs=0.001) == -79.982
+ assert pytest.approx(y, abs=0.001) == 40.446
+
+
+def test_extract_coords_formats_southeast():
+ query, x, y = helper.extract_coords_from_query('S 40 26.767, E 79 58.933')
+
+ assert query == ''
+ assert pytest.approx(x, abs=0.001) == 79.982
+ assert pytest.approx(y, abs=0.001) == -40.446
+
+
+@pytest.mark.parametrize('inp', ['[shop=fish] foo bar',
+ 'foo [shop=fish] bar',
+ 'foo [shop=fish]bar',
+ 'foo bar [shop=fish]'])
+def test_extract_category_good(inp):
+ query, cls, typ = helper.extract_category_from_query(inp)
+
+ assert query == 'foo bar'
+ assert cls == 'shop'
+ assert typ == 'fish'
+
+def test_extract_category_only():
+ assert helper.extract_category_from_query('[shop=market]') == ('', 'shop', 'market')
+
+@pytest.mark.parametrize('inp', ['house []', 'nothing', '[352]'])
+def test_extract_category_no_match(inp):
+ assert helper.extract_category_from_query(inp) == (inp, None, None)
--- /dev/null
+# SPDX-License-Identifier: GPL-3.0-or-later
+#
+# This file is part of Nominatim. (https://nominatim.org)
+#
+# Copyright (C) 2024 by the Nominatim developer community.
+# For a full list of authors see the git log.
+"""
+Test functions for adapting results to the user's locale.
+"""
+import pytest
+
+from nominatim_api import Locales
+
+def test_display_name_empty_names():
+ l = Locales(['en', 'de'])
+
+ assert l.display_name(None) == ''
+ assert l.display_name({}) == ''
+
+def test_display_name_none_localized():
+ l = Locales()
+
+ assert l.display_name({}) == ''
+ assert l.display_name({'name:de': 'DE', 'name': 'ALL'}) == 'ALL'
+ assert l.display_name({'ref': '34', 'name:de': 'DE'}) == '34'
+
+
+def test_display_name_localized():
+ l = Locales(['en', 'de'])
+
+ assert l.display_name({}) == ''
+ assert l.display_name({'name:de': 'DE', 'name': 'ALL'}) == 'DE'
+ assert l.display_name({'ref': '34', 'name:de': 'DE'}) == 'DE'
+
+
+def test_display_name_preference():
+ l = Locales(['en', 'de'])
+
+ assert l.display_name({}) == ''
+ assert l.display_name({'name:de': 'DE', 'name:en': 'EN'}) == 'EN'
+ assert l.display_name({'official_name:en': 'EN', 'name:de': 'DE'}) == 'DE'
+
+
+@pytest.mark.parametrize('langstr,langlist',
+ [('fr', ['fr']),
+ ('fr-FR', ['fr-FR', 'fr']),
+ ('de,fr-FR', ['de', 'fr-FR', 'fr']),
+ ('fr,de,fr-FR', ['fr', 'de', 'fr-FR']),
+ ('en;q=0.5,fr', ['fr', 'en']),
+ ('en;q=0.5,fr,en-US', ['fr', 'en-US', 'en']),
+ ('en,fr;garbage,de', ['en', 'de'])])
+def test_from_language_preferences(langstr, langlist):
+ assert Locales.from_accept_languages(langstr).languages == langlist
--- /dev/null
+# SPDX-License-Identifier: GPL-3.0-or-later
+#
+# This file is part of Nominatim. (https://nominatim.org)
+#
+# Copyright (C) 2024 by the Nominatim developer community.
+# For a full list of authors see the git log.
+"""
+Tests for formatting results for the V1 API.
+
+These test only ensure that the Python code is correct.
+For functional tests see BDD test suite.
+"""
+import datetime as dt
+import json
+
+import pytest
+
+from nominatim_api.v1.format import dispatch as v1_format
+import nominatim_api as napi
+
+STATUS_FORMATS = {'text', 'json'}
+
+# StatusResult
+
+def test_status_format_list():
+ assert set(v1_format.list_formats(napi.StatusResult)) == STATUS_FORMATS
+
+
+@pytest.mark.parametrize('fmt', list(STATUS_FORMATS))
+def test_status_supported(fmt):
+ assert v1_format.supports_format(napi.StatusResult, fmt)
+
+
+def test_status_unsupported():
+ assert not v1_format.supports_format(napi.StatusResult, 'gagaga')
+
+
+def test_status_format_text():
+ assert v1_format.format_result(napi.StatusResult(0, 'message here'), 'text', {}) == 'OK'
+
+
+def test_status_format_text():
+ assert v1_format.format_result(napi.StatusResult(500, 'message here'), 'text', {}) == 'ERROR: message here'
+
+
+def test_status_format_json_minimal():
+ status = napi.StatusResult(700, 'Bad format.')
+
+ result = v1_format.format_result(status, 'json', {})
+
+ assert result == \
+ f'{{"status":700,"message":"Bad format.","software_version":"{napi.__version__}"}}'
+
+
+def test_status_format_json_full():
+ status = napi.StatusResult(0, 'OK')
+ status.data_updated = dt.datetime(2010, 2, 7, 20, 20, 3, 0, tzinfo=dt.timezone.utc)
+ status.database_version = '5.6'
+
+ result = v1_format.format_result(status, 'json', {})
+
+ assert result == \
+ f'{{"status":0,"message":"OK","data_updated":"2010-02-07T20:20:03+00:00","software_version":"{napi.__version__}","database_version":"5.6"}}'
+
+
+# DetailedResult
+
+def test_search_details_minimal():
+ search = napi.DetailedResult(napi.SourceTable.PLACEX,
+ ('place', 'thing'),
+ napi.Point(1.0, 2.0))
+
+ result = v1_format.format_result(search, 'json', {})
+
+ assert json.loads(result) == \
+ {'category': 'place',
+ 'type': 'thing',
+ 'admin_level': 15,
+ 'names': {},
+ 'localname': '',
+ 'calculated_importance': pytest.approx(0.00001),
+ 'rank_address': 30,
+ 'rank_search': 30,
+ 'isarea': False,
+ 'addresstags': {},
+ 'extratags': {},
+ 'centroid': {'type': 'Point', 'coordinates': [1.0, 2.0]},
+ 'geometry': {'type': 'Point', 'coordinates': [1.0, 2.0]},
+ }
+
+
+def test_search_details_full():
+ import_date = dt.datetime(2010, 2, 7, 20, 20, 3, 0, tzinfo=dt.timezone.utc)
+ search = napi.DetailedResult(
+ source_table=napi.SourceTable.PLACEX,
+ category=('amenity', 'bank'),
+ centroid=napi.Point(56.947, -87.44),
+ place_id=37563,
+ parent_place_id=114,
+ linked_place_id=55693,
+ osm_object=('W', 442100),
+ admin_level=14,
+ names={'name': 'Bank', 'name:fr': 'Banque'},
+ address={'city': 'Niento', 'housenumber': ' 3'},
+ extratags={'atm': 'yes'},
+ housenumber='3',
+ postcode='556 X23',
+ wikipedia='en:Bank',
+ rank_address=29,
+ rank_search=28,
+ importance=0.0443,
+ country_code='ll',
+ indexed_date = import_date
+ )
+ search.localize(napi.Locales())
+
+ result = v1_format.format_result(search, 'json', {})
+
+ assert json.loads(result) == \
+ {'place_id': 37563,
+ 'parent_place_id': 114,
+ 'osm_type': 'W',
+ 'osm_id': 442100,
+ 'category': 'amenity',
+ 'type': 'bank',
+ 'admin_level': 14,
+ 'localname': 'Bank',
+ 'names': {'name': 'Bank', 'name:fr': 'Banque'},
+ 'addresstags': {'city': 'Niento', 'housenumber': ' 3'},
+ 'housenumber': '3',
+ 'calculated_postcode': '556 X23',
+ 'country_code': 'll',
+ 'indexed_date': '2010-02-07T20:20:03+00:00',
+ 'importance': pytest.approx(0.0443),
+ 'calculated_importance': pytest.approx(0.0443),
+ 'extratags': {'atm': 'yes'},
+ 'calculated_wikipedia': 'en:Bank',
+ 'rank_address': 29,
+ 'rank_search': 28,
+ 'isarea': False,
+ 'centroid': {'type': 'Point', 'coordinates': [56.947, -87.44]},
+ 'geometry': {'type': 'Point', 'coordinates': [56.947, -87.44]},
+ }
+
+
+@pytest.mark.parametrize('gtype,isarea', [('ST_Point', False),
+ ('ST_LineString', False),
+ ('ST_Polygon', True),
+ ('ST_MultiPolygon', True)])
+def test_search_details_no_geometry(gtype, isarea):
+ search = napi.DetailedResult(napi.SourceTable.PLACEX,
+ ('place', 'thing'),
+ napi.Point(1.0, 2.0),
+ geometry={'type': gtype})
+
+ result = v1_format.format_result(search, 'json', {})
+ js = json.loads(result)
+
+ assert js['geometry'] == {'type': 'Point', 'coordinates': [1.0, 2.0]}
+ assert js['isarea'] == isarea
+
+
+def test_search_details_with_geometry():
+ search = napi.DetailedResult(napi.SourceTable.PLACEX,
+ ('place', 'thing'),
+ napi.Point(1.0, 2.0),
+ geometry={'geojson': '{"type":"Point","coordinates":[56.947,-87.44]}'})
+
+ result = v1_format.format_result(search, 'json', {})
+ js = json.loads(result)
+
+ assert js['geometry'] == {'type': 'Point', 'coordinates': [56.947, -87.44]}
+ assert js['isarea'] == False
+
+
+def test_search_details_with_icon_available():
+ search = napi.DetailedResult(napi.SourceTable.PLACEX,
+ ('amenity', 'restaurant'),
+ napi.Point(1.0, 2.0))
+
+ result = v1_format.format_result(search, 'json', {'icon_base_url': 'foo'})
+ js = json.loads(result)
+
+ assert js['icon'] == 'foo/food_restaurant.p.20.png'
+
+
+def test_search_details_with_icon_not_available():
+ search = napi.DetailedResult(napi.SourceTable.PLACEX,
+ ('amenity', 'tree'),
+ napi.Point(1.0, 2.0))
+
+ result = v1_format.format_result(search, 'json', {'icon_base_url': 'foo'})
+ js = json.loads(result)
+
+ assert 'icon' not in js
+
+
+def test_search_details_with_address_minimal():
+ search = napi.DetailedResult(napi.SourceTable.PLACEX,
+ ('place', 'thing'),
+ napi.Point(1.0, 2.0),
+ address_rows=[
+ napi.AddressLine(place_id=None,
+ osm_object=None,
+ category=('bnd', 'note'),
+ names={},
+ extratags=None,
+ admin_level=None,
+ fromarea=False,
+ isaddress=False,
+ rank_address=10,
+ distance=0.0)
+ ])
+
+ result = v1_format.format_result(search, 'json', {})
+ js = json.loads(result)
+
+ assert js['address'] == [{'localname': '',
+ 'class': 'bnd',
+ 'type': 'note',
+ 'rank_address': 10,
+ 'distance': 0.0,
+ 'isaddress': False}]
+
+
+@pytest.mark.parametrize('field,outfield', [('address_rows', 'address'),
+ ('linked_rows', 'linked_places'),
+ ('parented_rows', 'hierarchy')
+ ])
+def test_search_details_with_further_infos(field, outfield):
+ search = napi.DetailedResult(napi.SourceTable.PLACEX,
+ ('place', 'thing'),
+ napi.Point(1.0, 2.0))
+
+ setattr(search, field, [napi.AddressLine(place_id=3498,
+ osm_object=('R', 442),
+ category=('bnd', 'note'),
+ names={'name': 'Trespass'},
+ extratags={'access': 'no',
+ 'place_type': 'spec'},
+ admin_level=4,
+ fromarea=True,
+ isaddress=True,
+ rank_address=10,
+ distance=0.034)
+ ])
+
+ result = v1_format.format_result(search, 'json', {})
+ js = json.loads(result)
+
+ assert js[outfield] == [{'localname': 'Trespass',
+ 'place_id': 3498,
+ 'osm_id': 442,
+ 'osm_type': 'R',
+ 'place_type': 'spec',
+ 'class': 'bnd',
+ 'type': 'note',
+ 'admin_level': 4,
+ 'rank_address': 10,
+ 'distance': 0.034,
+ 'isaddress': True}]
+
+
+def test_search_details_grouped_hierarchy():
+ search = napi.DetailedResult(napi.SourceTable.PLACEX,
+ ('place', 'thing'),
+ napi.Point(1.0, 2.0),
+ parented_rows =
+ [napi.AddressLine(place_id=3498,
+ osm_object=('R', 442),
+ category=('bnd', 'note'),
+ names={'name': 'Trespass'},
+ extratags={'access': 'no',
+ 'place_type': 'spec'},
+ admin_level=4,
+ fromarea=True,
+ isaddress=True,
+ rank_address=10,
+ distance=0.034)
+ ])
+
+ result = v1_format.format_result(search, 'json', {'group_hierarchy': True})
+ js = json.loads(result)
+
+ assert js['hierarchy'] == {'note': [{'localname': 'Trespass',
+ 'place_id': 3498,
+ 'osm_id': 442,
+ 'osm_type': 'R',
+ 'place_type': 'spec',
+ 'class': 'bnd',
+ 'type': 'note',
+ 'admin_level': 4,
+ 'rank_address': 10,
+ 'distance': 0.034,
+ 'isaddress': True}]}
+
+
+def test_search_details_keywords_name():
+ search = napi.DetailedResult(napi.SourceTable.PLACEX,
+ ('place', 'thing'),
+ napi.Point(1.0, 2.0),
+ name_keywords=[
+ napi.WordInfo(23, 'foo', 'mefoo'),
+ napi.WordInfo(24, 'foo', 'bafoo')])
+
+ result = v1_format.format_result(search, 'json', {'keywords': True})
+ js = json.loads(result)
+
+ assert js['keywords'] == {'name': [{'id': 23, 'token': 'foo'},
+ {'id': 24, 'token': 'foo'}],
+ 'address': []}
+
+
+def test_search_details_keywords_address():
+ search = napi.DetailedResult(napi.SourceTable.PLACEX,
+ ('place', 'thing'),
+ napi.Point(1.0, 2.0),
+ address_keywords=[
+ napi.WordInfo(23, 'foo', 'mefoo'),
+ napi.WordInfo(24, 'foo', 'bafoo')])
+
+ result = v1_format.format_result(search, 'json', {'keywords': True})
+ js = json.loads(result)
+
+ assert js['keywords'] == {'address': [{'id': 23, 'token': 'foo'},
+ {'id': 24, 'token': 'foo'}],
+ 'name': []}
+
--- /dev/null
+# SPDX-License-Identifier: GPL-3.0-or-later
+#
+# This file is part of Nominatim. (https://nominatim.org)
+#
+# Copyright (C) 2024 by the Nominatim developer community.
+# For a full list of authors see the git log.
+"""
+Tests for formatting reverse results for the V1 API.
+
+These test only ensure that the Python code is correct.
+For functional tests see BDD test suite.
+"""
+import json
+import xml.etree.ElementTree as ET
+
+import pytest
+
+from nominatim_api.v1.format import dispatch as v1_format
+import nominatim_api as napi
+
+FORMATS = ['json', 'jsonv2', 'geojson', 'geocodejson', 'xml']
+
+@pytest.mark.parametrize('fmt', FORMATS)
+def test_format_reverse_minimal(fmt):
+ reverse = napi.ReverseResult(napi.SourceTable.PLACEX,
+ ('amenity', 'post_box'),
+ napi.Point(0.3, -8.9))
+
+ raw = v1_format.format_result(napi.ReverseResults([reverse]), fmt, {})
+
+ if fmt == 'xml':
+ root = ET.fromstring(raw)
+ assert root.tag == 'reversegeocode'
+ else:
+ result = json.loads(raw)
+ assert isinstance(result, dict)
+
+
+@pytest.mark.parametrize('fmt', FORMATS)
+def test_format_reverse_no_result(fmt):
+ raw = v1_format.format_result(napi.ReverseResults(), fmt, {})
+
+ if fmt == 'xml':
+ root = ET.fromstring(raw)
+ assert root.find('error').text == 'Unable to geocode'
+ else:
+ assert json.loads(raw) == {'error': 'Unable to geocode'}
+
+
+@pytest.mark.parametrize('fmt', FORMATS)
+def test_format_reverse_with_osm_id(fmt):
+ reverse = napi.ReverseResult(napi.SourceTable.PLACEX,
+ ('amenity', 'post_box'),
+ napi.Point(0.3, -8.9),
+ place_id=5564,
+ osm_object=('N', 23))
+
+ raw = v1_format.format_result(napi.ReverseResults([reverse]), fmt, {})
+
+ if fmt == 'xml':
+ root = ET.fromstring(raw).find('result')
+ assert root.attrib['osm_type'] == 'node'
+ assert root.attrib['osm_id'] == '23'
+ else:
+ result = json.loads(raw)
+ if fmt == 'geocodejson':
+ props = result['features'][0]['properties']['geocoding']
+ elif fmt == 'geojson':
+ props = result['features'][0]['properties']
+ else:
+ props = result
+ assert props['osm_type'] == 'node'
+ assert props['osm_id'] == 23
+
+
+@pytest.mark.parametrize('fmt', FORMATS)
+def test_format_reverse_with_address(fmt):
+ reverse = napi.ReverseResult(napi.SourceTable.PLACEX,
+ ('place', 'thing'),
+ napi.Point(1.0, 2.0),
+ country_code='fe',
+ address_rows=napi.AddressLines([
+ napi.AddressLine(place_id=None,
+ osm_object=None,
+ category=('place', 'county'),
+ names={'name': 'Hello'},
+ extratags=None,
+ admin_level=5,
+ fromarea=False,
+ isaddress=True,
+ rank_address=10,
+ distance=0.0),
+ napi.AddressLine(place_id=None,
+ osm_object=None,
+ category=('place', 'county'),
+ names={'name': 'ByeBye'},
+ extratags=None,
+ admin_level=5,
+ fromarea=False,
+ isaddress=False,
+ rank_address=10,
+ distance=0.0)
+ ]))
+ reverse.localize(napi.Locales())
+
+ raw = v1_format.format_result(napi.ReverseResults([reverse]), fmt,
+ {'addressdetails': True})
+
+
+ if fmt == 'xml':
+ root = ET.fromstring(raw)
+ assert root.find('addressparts').find('county').text == 'Hello'
+ else:
+ result = json.loads(raw)
+ assert isinstance(result, dict)
+
+ if fmt == 'geocodejson':
+ props = result['features'][0]['properties']['geocoding']
+ assert 'admin' in props
+ assert props['county'] == 'Hello'
+ else:
+ if fmt == 'geojson':
+ props = result['features'][0]['properties']
+ else:
+ props = result
+ assert 'address' in props
+
+
+def test_format_reverse_geocodejson_special_parts():
+ reverse = napi.ReverseResult(napi.SourceTable.PLACEX,
+ ('place', 'house'),
+ napi.Point(1.0, 2.0),
+ place_id=33,
+ country_code='fe',
+ address_rows=napi.AddressLines([
+ napi.AddressLine(place_id=None,
+ osm_object=None,
+ category=('place', 'house_number'),
+ names={'ref': '1'},
+ extratags=None,
+ admin_level=15,
+ fromarea=False,
+ isaddress=True,
+ rank_address=10,
+ distance=0.0),
+ napi.AddressLine(place_id=None,
+ osm_object=None,
+ category=('place', 'postcode'),
+ names={'ref': '99446'},
+ extratags=None,
+ admin_level=11,
+ fromarea=False,
+ isaddress=True,
+ rank_address=10,
+ distance=0.0),
+ napi.AddressLine(place_id=33,
+ osm_object=None,
+ category=('place', 'county'),
+ names={'name': 'Hello'},
+ extratags=None,
+ admin_level=5,
+ fromarea=False,
+ isaddress=True,
+ rank_address=10,
+ distance=0.0)
+ ]))
+
+ reverse.localize(napi.Locales())
+
+ raw = v1_format.format_result(napi.ReverseResults([reverse]), 'geocodejson',
+ {'addressdetails': True})
+
+ props = json.loads(raw)['features'][0]['properties']['geocoding']
+ assert props['housenumber'] == '1'
+ assert props['postcode'] == '99446'
+ assert 'county' not in props
+
+
+@pytest.mark.parametrize('fmt', FORMATS)
+def test_format_reverse_with_address_none(fmt):
+ reverse = napi.ReverseResult(napi.SourceTable.PLACEX,
+ ('place', 'thing'),
+ napi.Point(1.0, 2.0),
+ address_rows=napi.AddressLines())
+
+ raw = v1_format.format_result(napi.ReverseResults([reverse]), fmt,
+ {'addressdetails': True})
+
+
+ if fmt == 'xml':
+ root = ET.fromstring(raw)
+ assert root.find('addressparts') is None
+ else:
+ result = json.loads(raw)
+ assert isinstance(result, dict)
+
+ if fmt == 'geocodejson':
+ props = result['features'][0]['properties']['geocoding']
+ print(props)
+ assert 'admin' in props
+ else:
+ if fmt == 'geojson':
+ props = result['features'][0]['properties']
+ else:
+ props = result
+ assert 'address' in props
+
+
+@pytest.mark.parametrize('fmt', ['json', 'jsonv2', 'geojson', 'xml'])
+def test_format_reverse_with_extratags(fmt):
+ reverse = napi.ReverseResult(napi.SourceTable.PLACEX,
+ ('place', 'thing'),
+ napi.Point(1.0, 2.0),
+ extratags={'one': 'A', 'two':'B'})
+
+ raw = v1_format.format_result(napi.ReverseResults([reverse]), fmt,
+ {'extratags': True})
+
+ if fmt == 'xml':
+ root = ET.fromstring(raw)
+ assert root.find('extratags').find('tag').attrib['key'] == 'one'
+ else:
+ result = json.loads(raw)
+ if fmt == 'geojson':
+ extra = result['features'][0]['properties']['extratags']
+ else:
+ extra = result['extratags']
+
+ assert extra == {'one': 'A', 'two':'B'}
+
+
+@pytest.mark.parametrize('fmt', ['json', 'jsonv2', 'geojson', 'xml'])
+def test_format_reverse_with_extratags_none(fmt):
+ reverse = napi.ReverseResult(napi.SourceTable.PLACEX,
+ ('place', 'thing'),
+ napi.Point(1.0, 2.0))
+
+ raw = v1_format.format_result(napi.ReverseResults([reverse]), fmt,
+ {'extratags': True})
+
+ if fmt == 'xml':
+ root = ET.fromstring(raw)
+ assert root.find('extratags') is not None
+ else:
+ result = json.loads(raw)
+ if fmt == 'geojson':
+ extra = result['features'][0]['properties']['extratags']
+ else:
+ extra = result['extratags']
+
+ assert extra is None
+
+
+@pytest.mark.parametrize('fmt', ['json', 'jsonv2', 'geojson', 'xml'])
+def test_format_reverse_with_namedetails_with_name(fmt):
+ reverse = napi.ReverseResult(napi.SourceTable.PLACEX,
+ ('place', 'thing'),
+ napi.Point(1.0, 2.0),
+ names={'name': 'A', 'ref':'1'})
+
+ raw = v1_format.format_result(napi.ReverseResults([reverse]), fmt,
+ {'namedetails': True})
+
+ if fmt == 'xml':
+ root = ET.fromstring(raw)
+ assert root.find('namedetails').find('name').text == 'A'
+ else:
+ result = json.loads(raw)
+ if fmt == 'geojson':
+ extra = result['features'][0]['properties']['namedetails']
+ else:
+ extra = result['namedetails']
+
+ assert extra == {'name': 'A', 'ref':'1'}
+
+
+@pytest.mark.parametrize('fmt', ['json', 'jsonv2', 'geojson', 'xml'])
+def test_format_reverse_with_namedetails_without_name(fmt):
+ reverse = napi.ReverseResult(napi.SourceTable.PLACEX,
+ ('place', 'thing'),
+ napi.Point(1.0, 2.0))
+
+ raw = v1_format.format_result(napi.ReverseResults([reverse]), fmt,
+ {'namedetails': True})
+
+ if fmt == 'xml':
+ root = ET.fromstring(raw)
+ assert root.find('namedetails') is not None
+ else:
+ result = json.loads(raw)
+ if fmt == 'geojson':
+ extra = result['features'][0]['properties']['namedetails']
+ else:
+ extra = result['namedetails']
+
+ assert extra is None
+
+
+@pytest.mark.parametrize('fmt', ['json', 'jsonv2'])
+def test_search_details_with_icon_available(fmt):
+ reverse = napi.ReverseResult(napi.SourceTable.PLACEX,
+ ('amenity', 'restaurant'),
+ napi.Point(1.0, 2.0))
+
+ result = v1_format.format_result(napi.ReverseResults([reverse]), fmt,
+ {'icon_base_url': 'foo'})
+
+ js = json.loads(result)
+
+ assert js['icon'] == 'foo/food_restaurant.p.20.png'
+
+
+@pytest.mark.parametrize('fmt', ['json', 'jsonv2'])
+def test_search_details_with_icon_not_available(fmt):
+ reverse = napi.ReverseResult(napi.SourceTable.PLACEX,
+ ('amenity', 'tree'),
+ napi.Point(1.0, 2.0))
+
+ result = v1_format.format_result(napi.ReverseResults([reverse]), fmt,
+ {'icon_base_url': 'foo'})
+
+ assert 'icon' not in json.loads(result)
+
--- /dev/null
+# SPDX-License-Identifier: GPL-3.0-or-later
+#
+# This file is part of Nominatim. (https://nominatim.org)
+#
+# Copyright (C) 2024 by the Nominatim developer community.
+# For a full list of authors see the git log.
+"""
+Tests for result datatype helper functions.
+"""
+import struct
+from binascii import hexlify
+
+import pytest
+import pytest_asyncio
+import sqlalchemy as sa
+
+
+from nominatim_api import SourceTable, DetailedResult, Point
+import nominatim_api.results as nresults
+
+def mkpoint(x, y):
+    # Build a hex-encoded EWKB point as it would come out of PostGIS:
+    # byte-order marker 1 (little-endian), geometry type 0x20000001
+    # (point with the SRID flag set), SRID 4326, then x and y doubles.
+    return hexlify(struct.pack("=biidd", 1, 0x20000001, 4326, x, y)).decode('utf-8')
+
+class FakeRow:
+    """ Minimal stand-in for a SQLAlchemy result row: keyword arguments
+        become attributes and are also exposed through ``_mapping``.
+    """
+    def __init__(self, **kwargs):
+        # Default parent_place_id to None so callers reading the column
+        # do not need to supply it in every test.
+        if 'parent_place_id' not in kwargs:
+            kwargs['parent_place_id'] = None
+        for k, v in kwargs.items():
+            setattr(self, k, v)
+        self._mapping = kwargs
+
+
+def test_minimal_detailed_result():
+ res = DetailedResult(SourceTable.PLACEX,
+ ('amenity', 'post_box'),
+ Point(23.1, 0.5))
+
+ assert res.lon == 23.1
+ assert res.lat == 0.5
+ assert res.calculated_importance() == pytest.approx(0.00001)
+
+def test_detailed_result_custom_importance():
+ res = DetailedResult(SourceTable.PLACEX,
+ ('amenity', 'post_box'),
+ Point(23.1, 0.5),
+ importance=0.4563)
+
+ assert res.calculated_importance() == 0.4563
+
+
+@pytest.mark.parametrize('func', (nresults.create_from_placex_row,
+ nresults.create_from_osmline_row,
+ nresults.create_from_tiger_row,
+ nresults.create_from_postcode_row))
+def test_create_row_none(func):
+ assert func(None, DetailedResult) is None
+
+
+@pytest.mark.parametrize('func', (nresults.create_from_osmline_row,
+ nresults.create_from_tiger_row))
+def test_create_row_with_housenumber(func):
+ row = FakeRow(place_id=2345, osm_type='W', osm_id=111, housenumber=4,
+ address=None, postcode='99900', country_code='xd',
+ centroid=mkpoint(0, 0))
+
+ res = func(row, DetailedResult)
+
+ assert res.housenumber == '4'
+ assert res.extratags is None
+ assert res.category == ('place', 'house')
+
+
+@pytest.mark.parametrize('func', (nresults.create_from_osmline_row,
+ nresults.create_from_tiger_row))
+def test_create_row_without_housenumber(func):
+ row = FakeRow(place_id=2345, osm_type='W', osm_id=111,
+ startnumber=1, endnumber=11, step=2,
+ address=None, postcode='99900', country_code='xd',
+ centroid=mkpoint(0, 0))
+
+ res = func(row, DetailedResult)
+
+ assert res.housenumber is None
+ assert res.extratags == {'startnumber': '1', 'endnumber': '11', 'step': '2'}
+ assert res.category == ('place', 'houses')
--- /dev/null
+# SPDX-License-Identifier: GPL-3.0-or-later
+#
+# This file is part of Nominatim. (https://nominatim.org)
+#
+# Copyright (C) 2024 by the Nominatim developer community.
+# For a full list of authors see the git log.
+"""
+Tests for the Python web frameworks adaptor, v1 API.
+"""
+import json
+import xml.etree.ElementTree as ET
+
+import pytest
+
+from fake_adaptor import FakeAdaptor, FakeError, FakeResponse
+
+import nominatim_api.v1.server_glue as glue
+import nominatim_api as napi
+import nominatim_api.logging as loglib
+
+
+# ASGIAdaptor.get_int/bool()
+
+@pytest.mark.parametrize('func', ['get_int', 'get_bool'])
+def test_adaptor_get_int_missing_but_required(func):
+ with pytest.raises(FakeError, match='^400 -- .*missing'):
+ getattr(FakeAdaptor(), func)('something')
+
+
+@pytest.mark.parametrize('func, val', [('get_int', 23), ('get_bool', True)])
+def test_adaptor_get_int_missing_with_default(func, val):
+ assert getattr(FakeAdaptor(), func)('something', val) == val
+
+
+@pytest.mark.parametrize('inp', ['0', '234', '-4566953498567934876'])
+def test_adaptor_get_int_success(inp):
+ assert FakeAdaptor(params={'foo': inp}).get_int('foo') == int(inp)
+ assert FakeAdaptor(params={'foo': inp}).get_int('foo', 4) == int(inp)
+
+
+@pytest.mark.parametrize('inp', ['rs', '4.5', '6f'])
+def test_adaptor_get_int_bad_number(inp):
+ with pytest.raises(FakeError, match='^400 -- .*must be a number'):
+ FakeAdaptor(params={'foo': inp}).get_int('foo')
+
+
+@pytest.mark.parametrize('inp', ['1', 'true', 'whatever', 'false'])
+def test_adaptor_get_bool_trueish(inp):
+ assert FakeAdaptor(params={'foo': inp}).get_bool('foo')
+
+
+def test_adaptor_get_bool_falsish():
+ assert not FakeAdaptor(params={'foo': '0'}).get_bool('foo')
+
+
+# ASGIAdaptor.parse_format()
+
+def test_adaptor_parse_format_use_default():
+ adaptor = FakeAdaptor()
+
+ assert glue.parse_format(adaptor, napi.StatusResult, 'text') == 'text'
+ assert adaptor.content_type == 'text/plain; charset=utf-8'
+
+
+def test_adaptor_parse_format_use_configured():
+ adaptor = FakeAdaptor(params={'format': 'json'})
+
+ assert glue.parse_format(adaptor, napi.StatusResult, 'text') == 'json'
+ assert adaptor.content_type == 'application/json; charset=utf-8'
+
+
+def test_adaptor_parse_format_invalid_value():
+ adaptor = FakeAdaptor(params={'format': '@!#'})
+
+ with pytest.raises(FakeError, match='^400 -- .*must be one of'):
+ glue.parse_format(adaptor, napi.StatusResult, 'text')
+
+
+# ASGIAdaptor.get_accepted_languages()
+
+def test_accepted_languages_from_param():
+ a = FakeAdaptor(params={'accept-language': 'de'})
+ assert glue.get_accepted_languages(a) == 'de'
+
+
+def test_accepted_languages_from_header():
+ a = FakeAdaptor(headers={'accept-language': 'de'})
+ assert glue.get_accepted_languages(a) == 'de'
+
+
+def test_accepted_languages_from_default(monkeypatch):
+ monkeypatch.setenv('NOMINATIM_DEFAULT_LANGUAGE', 'de')
+ a = FakeAdaptor()
+ assert glue.get_accepted_languages(a) == 'de'
+
+
+def test_accepted_languages_param_over_header():
+ a = FakeAdaptor(params={'accept-language': 'de'},
+ headers={'accept-language': 'en'})
+ assert glue.get_accepted_languages(a) == 'de'
+
+
+def test_accepted_languages_header_over_default(monkeypatch):
+ monkeypatch.setenv('NOMINATIM_DEFAULT_LANGUAGE', 'en')
+ a = FakeAdaptor(headers={'accept-language': 'de'})
+ assert glue.get_accepted_languages(a) == 'de'
+
+
+# ASGIAdaptor.raise_error()
+
+class TestAdaptorRaiseError:
+
+ @pytest.fixture(autouse=True)
+ def init_adaptor(self):
+ self.adaptor = FakeAdaptor()
+ glue.setup_debugging(self.adaptor)
+
+ def run_raise_error(self, msg, status):
+ with pytest.raises(FakeError) as excinfo:
+ self.adaptor.raise_error(msg, status=status)
+
+ return excinfo.value
+
+
+ def test_without_content_set(self):
+ err = self.run_raise_error('TEST', 404)
+
+ assert self.adaptor.content_type == 'text/plain; charset=utf-8'
+ assert err.msg == 'ERROR 404: TEST'
+ assert err.status == 404
+
+
+ def test_json(self):
+ self.adaptor.content_type = 'application/json; charset=utf-8'
+
+ err = self.run_raise_error('TEST', 501)
+
+ content = json.loads(err.msg)['error']
+ assert content['code'] == 501
+ assert content['message'] == 'TEST'
+
+
+ def test_xml(self):
+ self.adaptor.content_type = 'text/xml; charset=utf-8'
+
+ err = self.run_raise_error('this!', 503)
+
+ content = ET.fromstring(err.msg)
+
+ assert content.tag == 'error'
+ assert content.find('code').text == '503'
+ assert content.find('message').text == 'this!'
+
+
+def test_raise_error_during_debug():
+ a = FakeAdaptor(params={'debug': '1'})
+ glue.setup_debugging(a)
+ loglib.log().section('Ongoing')
+
+ with pytest.raises(FakeError) as excinfo:
+ a.raise_error('badstate')
+
+ content = ET.fromstring(excinfo.value.msg)
+
+ assert content.tag == 'html'
+
+ assert '>Ongoing<' in excinfo.value.msg
+ assert 'badstate' in excinfo.value.msg
+
+
+# ASGIAdaptor.build_response
+
+def test_build_response_without_content_type():
+ resp = glue.build_response(FakeAdaptor(), 'attention')
+
+ assert isinstance(resp, FakeResponse)
+ assert resp.status == 200
+ assert resp.output == 'attention'
+ assert resp.content_type == 'text/plain; charset=utf-8'
+
+
+def test_build_response_with_status():
+ a = FakeAdaptor(params={'format': 'json'})
+ glue.parse_format(a, napi.StatusResult, 'text')
+
+ resp = glue.build_response(a, 'stuff\nmore stuff', status=404)
+
+ assert isinstance(resp, FakeResponse)
+ assert resp.status == 404
+ assert resp.output == 'stuff\nmore stuff'
+ assert resp.content_type == 'application/json; charset=utf-8'
+
+
+def test_build_response_jsonp_with_json():
+ a = FakeAdaptor(params={'format': 'json', 'json_callback': 'test.func'})
+ glue.parse_format(a, napi.StatusResult, 'text')
+
+ resp = glue.build_response(a, '{}')
+
+ assert isinstance(resp, FakeResponse)
+ assert resp.status == 200
+ assert resp.output == 'test.func({})'
+ assert resp.content_type == 'application/javascript; charset=utf-8'
+
+
+def test_build_response_jsonp_without_json():
+ a = FakeAdaptor(params={'format': 'text', 'json_callback': 'test.func'})
+ glue.parse_format(a, napi.StatusResult, 'text')
+
+ resp = glue.build_response(a, '{}')
+
+ assert isinstance(resp, FakeResponse)
+ assert resp.status == 200
+ assert resp.output == '{}'
+ assert resp.content_type == 'text/plain; charset=utf-8'
+
+
+@pytest.mark.parametrize('param', ['alert(); func', '\\n', '', 'a b'])
+def test_build_response_jsonp_bad_format(param):
+ a = FakeAdaptor(params={'format': 'json', 'json_callback': param})
+ glue.parse_format(a, napi.StatusResult, 'text')
+
+ with pytest.raises(FakeError, match='^400 -- .*Invalid'):
+ glue.build_response(a, '{}')
+
+
+# status_endpoint()
+
+class TestStatusEndpoint:
+
+ @pytest.fixture(autouse=True)
+ def patch_status_func(self, monkeypatch):
+ async def _status(*args, **kwargs):
+ return self.status
+
+ monkeypatch.setattr(napi.NominatimAPIAsync, 'status', _status)
+
+
+ @pytest.mark.asyncio
+ async def test_status_without_params(self):
+ a = FakeAdaptor()
+ self.status = napi.StatusResult(0, 'foo')
+
+ resp = await glue.status_endpoint(napi.NominatimAPIAsync(), a)
+
+ assert isinstance(resp, FakeResponse)
+ assert resp.status == 200
+ assert resp.content_type == 'text/plain; charset=utf-8'
+
+
+ @pytest.mark.asyncio
+ async def test_status_with_error(self):
+ a = FakeAdaptor()
+ self.status = napi.StatusResult(405, 'foo')
+
+ resp = await glue.status_endpoint(napi.NominatimAPIAsync(), a)
+
+ assert isinstance(resp, FakeResponse)
+ assert resp.status == 500
+ assert resp.content_type == 'text/plain; charset=utf-8'
+
+
+ @pytest.mark.asyncio
+ async def test_status_json_with_error(self):
+ a = FakeAdaptor(params={'format': 'json'})
+ self.status = napi.StatusResult(405, 'foo')
+
+ resp = await glue.status_endpoint(napi.NominatimAPIAsync(), a)
+
+ assert isinstance(resp, FakeResponse)
+ assert resp.status == 200
+ assert resp.content_type == 'application/json; charset=utf-8'
+
+
+ @pytest.mark.asyncio
+ async def test_status_bad_format(self):
+ a = FakeAdaptor(params={'format': 'foo'})
+ self.status = napi.StatusResult(0, 'foo')
+
+ with pytest.raises(FakeError):
+ await glue.status_endpoint(napi.NominatimAPIAsync(), a)
+
+
+# details_endpoint()
+
+class TestDetailsEndpoint:
+
+ @pytest.fixture(autouse=True)
+ def patch_lookup_func(self, monkeypatch):
+ self.result = napi.DetailedResult(napi.SourceTable.PLACEX,
+ ('place', 'thing'),
+ napi.Point(1.0, 2.0))
+ self.lookup_args = []
+
+ async def _lookup(*args, **kwargs):
+ self.lookup_args.extend(args[1:])
+ return self.result
+
+ monkeypatch.setattr(napi.NominatimAPIAsync, 'details', _lookup)
+
+
+ @pytest.mark.asyncio
+ async def test_details_no_params(self):
+ a = FakeAdaptor()
+
+ with pytest.raises(FakeError, match='^400 -- .*Missing'):
+ await glue.details_endpoint(napi.NominatimAPIAsync(), a)
+
+
+ @pytest.mark.asyncio
+ async def test_details_by_place_id(self):
+ a = FakeAdaptor(params={'place_id': '4573'})
+
+ await glue.details_endpoint(napi.NominatimAPIAsync(), a)
+
+ assert self.lookup_args[0].place_id == 4573
+
+
+ @pytest.mark.asyncio
+ async def test_details_by_osm_id(self):
+ a = FakeAdaptor(params={'osmtype': 'N', 'osmid': '45'})
+
+ await glue.details_endpoint(napi.NominatimAPIAsync(), a)
+
+ assert self.lookup_args[0].osm_type == 'N'
+ assert self.lookup_args[0].osm_id == 45
+ assert self.lookup_args[0].osm_class is None
+
+
+ @pytest.mark.asyncio
+ async def test_details_with_debugging(self):
+ a = FakeAdaptor(params={'osmtype': 'N', 'osmid': '45', 'debug': '1'})
+
+ resp = await glue.details_endpoint(napi.NominatimAPIAsync(), a)
+ content = ET.fromstring(resp.output)
+
+ assert resp.content_type == 'text/html; charset=utf-8'
+ assert content.tag == 'html'
+
+
+ @pytest.mark.asyncio
+ async def test_details_no_result(self):
+ a = FakeAdaptor(params={'place_id': '4573'})
+ self.result = None
+
+ with pytest.raises(FakeError, match='^404 -- .*found'):
+ await glue.details_endpoint(napi.NominatimAPIAsync(), a)
+
+
+# reverse_endpoint()
+class TestReverseEndPoint:
+
+ @pytest.fixture(autouse=True)
+ def patch_reverse_func(self, monkeypatch):
+ self.result = napi.ReverseResult(napi.SourceTable.PLACEX,
+ ('place', 'thing'),
+ napi.Point(1.0, 2.0))
+ async def _reverse(*args, **kwargs):
+ return self.result
+
+ monkeypatch.setattr(napi.NominatimAPIAsync, 'reverse', _reverse)
+
+
+ @pytest.mark.asyncio
+ @pytest.mark.parametrize('params', [{}, {'lat': '3.4'}, {'lon': '6.7'}])
+ async def test_reverse_no_params(self, params):
+ a = FakeAdaptor()
+ a.params = params
+ a.params['format'] = 'xml'
+
+ with pytest.raises(FakeError, match='^400 -- (?s:.*)missing'):
+ await glue.reverse_endpoint(napi.NominatimAPIAsync(), a)
+
+
+ @pytest.mark.asyncio
+ @pytest.mark.parametrize('params', [{'lat': '45.6', 'lon': '4563'}])
+ async def test_reverse_success(self, params):
+ a = FakeAdaptor()
+ a.params = params
+ a.params['format'] = 'json'
+
+ res = await glue.reverse_endpoint(napi.NominatimAPIAsync(), a)
+
+ assert res == ''
+
+
+ @pytest.mark.asyncio
+ async def test_reverse_success(self):
+ a = FakeAdaptor()
+ a.params['lat'] = '56.3'
+ a.params['lon'] = '6.8'
+
+ assert await glue.reverse_endpoint(napi.NominatimAPIAsync(), a)
+
+
+ @pytest.mark.asyncio
+ async def test_reverse_from_search(self):
+ a = FakeAdaptor()
+ a.params['q'] = '34.6 2.56'
+ a.params['format'] = 'json'
+
+ res = await glue.search_endpoint(napi.NominatimAPIAsync(), a)
+
+ assert len(json.loads(res.output)) == 1
+
+
+# lookup_endpoint()
+
+class TestLookupEndpoint:
+
+ @pytest.fixture(autouse=True)
+ def patch_lookup_func(self, monkeypatch):
+ self.results = [napi.SearchResult(napi.SourceTable.PLACEX,
+ ('place', 'thing'),
+ napi.Point(1.0, 2.0))]
+ async def _lookup(*args, **kwargs):
+ return napi.SearchResults(self.results)
+
+ monkeypatch.setattr(napi.NominatimAPIAsync, 'lookup', _lookup)
+
+
+ @pytest.mark.asyncio
+ async def test_lookup_no_params(self):
+ a = FakeAdaptor()
+ a.params['format'] = 'json'
+
+ res = await glue.lookup_endpoint(napi.NominatimAPIAsync(), a)
+
+ assert res.output == '[]'
+
+
+ @pytest.mark.asyncio
+ @pytest.mark.parametrize('param', ['w', 'bad', ''])
+ async def test_lookup_bad_params(self, param):
+ a = FakeAdaptor()
+ a.params['format'] = 'json'
+ a.params['osm_ids'] = f'W34,{param},N33333'
+
+ res = await glue.lookup_endpoint(napi.NominatimAPIAsync(), a)
+
+ assert len(json.loads(res.output)) == 1
+
+
+ @pytest.mark.asyncio
+ @pytest.mark.parametrize('param', ['p234234', '4563'])
+ async def test_lookup_bad_osm_type(self, param):
+ a = FakeAdaptor()
+ a.params['format'] = 'json'
+ a.params['osm_ids'] = f'W34,{param},N33333'
+
+ res = await glue.lookup_endpoint(napi.NominatimAPIAsync(), a)
+
+ assert len(json.loads(res.output)) == 1
+
+
+ @pytest.mark.asyncio
+ async def test_lookup_working(self):
+ a = FakeAdaptor()
+ a.params['format'] = 'json'
+ a.params['osm_ids'] = 'N23,W34'
+
+ res = await glue.lookup_endpoint(napi.NominatimAPIAsync(), a)
+
+ assert len(json.loads(res.output)) == 1
+
+
+# search_endpoint()
+
+class TestSearchEndPointSearch:
+
+ @pytest.fixture(autouse=True)
+ def patch_lookup_func(self, monkeypatch):
+ self.results = [napi.SearchResult(napi.SourceTable.PLACEX,
+ ('place', 'thing'),
+ napi.Point(1.0, 2.0))]
+ async def _search(*args, **kwargs):
+ return napi.SearchResults(self.results)
+
+ monkeypatch.setattr(napi.NominatimAPIAsync, 'search', _search)
+
+
+ @pytest.mark.asyncio
+ async def test_search_free_text(self):
+ a = FakeAdaptor()
+ a.params['q'] = 'something'
+
+ res = await glue.search_endpoint(napi.NominatimAPIAsync(), a)
+
+ assert len(json.loads(res.output)) == 1
+
+
+ @pytest.mark.asyncio
+ async def test_search_free_text_xml(self):
+ a = FakeAdaptor()
+ a.params['q'] = 'something'
+ a.params['format'] = 'xml'
+
+ res = await glue.search_endpoint(napi.NominatimAPIAsync(), a)
+
+ assert res.status == 200
+ assert res.output.index('something') > 0
+
+
+ @pytest.mark.asyncio
+ async def test_search_free_and_structured(self):
+ a = FakeAdaptor()
+ a.params['q'] = 'something'
+ a.params['city'] = 'ignored'
+
+ with pytest.raises(FakeError, match='^400 -- .*cannot be used together'):
+ res = await glue.search_endpoint(napi.NominatimAPIAsync(), a)
+
+
+ @pytest.mark.asyncio
+ @pytest.mark.parametrize('dedupe,numres', [(True, 1), (False, 2)])
+ async def test_search_dedupe(self, dedupe, numres):
+ self.results = self.results * 2
+ a = FakeAdaptor()
+ a.params['q'] = 'something'
+ if not dedupe:
+ a.params['dedupe'] = '0'
+
+ res = await glue.search_endpoint(napi.NominatimAPIAsync(), a)
+
+ assert len(json.loads(res.output)) == numres
+
+
+class TestSearchEndPointSearchAddress:
+
+ @pytest.fixture(autouse=True)
+ def patch_lookup_func(self, monkeypatch):
+ self.results = [napi.SearchResult(napi.SourceTable.PLACEX,
+ ('place', 'thing'),
+ napi.Point(1.0, 2.0))]
+ async def _search(*args, **kwargs):
+ return napi.SearchResults(self.results)
+
+ monkeypatch.setattr(napi.NominatimAPIAsync, 'search_address', _search)
+
+
+ @pytest.mark.asyncio
+ async def test_search_structured(self):
+ a = FakeAdaptor()
+ a.params['street'] = 'something'
+
+ res = await glue.search_endpoint(napi.NominatimAPIAsync(), a)
+
+ assert len(json.loads(res.output)) == 1
+
+
+class TestSearchEndPointSearchCategory:
+
+ @pytest.fixture(autouse=True)
+ def patch_lookup_func(self, monkeypatch):
+ self.results = [napi.SearchResult(napi.SourceTable.PLACEX,
+ ('place', 'thing'),
+ napi.Point(1.0, 2.0))]
+ async def _search(*args, **kwargs):
+ return napi.SearchResults(self.results)
+
+ monkeypatch.setattr(napi.NominatimAPIAsync, 'search_category', _search)
+
+
+ @pytest.mark.asyncio
+ async def test_search_category(self):
+ a = FakeAdaptor()
+ a.params['q'] = '[shop=fog]'
+
+ res = await glue.search_endpoint(napi.NominatimAPIAsync(), a)
+
+ assert len(json.loads(res.output)) == 1
--- /dev/null
+# SPDX-License-Identifier: GPL-3.0-or-later
+#
+# This file is part of Nominatim. (https://nominatim.org)
+#
+# Copyright (C) 2024 by the Nominatim developer community.
+# For a full list of authors see the git log.
+"""
+Tests for warm-up CLI function.
+"""
+import pytest
+
+import nominatim_db.cli
+
+@pytest.fixture(autouse=True)
+def setup_database_with_context(apiobj, table_factory):
+ table_factory('word',
+ definition='word_id INT, word_token TEXT, type TEXT, word TEXT, info JSONB',
+ content=[(55, 'test', 'W', 'test', None),
+ (2, 'test', 'w', 'test', None)])
+
+ apiobj.add_data('properties',
+ [{'property': 'tokenizer', 'value': 'icu'},
+ {'property': 'tokenizer_import_normalisation', 'value': ':: lower();'},
+ {'property': 'tokenizer_import_transliteration', 'value': "'1' > '/1/'; 'ä' > 'ä '"},
+ ])
+
+
+@pytest.mark.parametrize('args', [['--search-only'], ['--reverse-only']])
+def test_warm_all(tmp_path, args):
+ assert 0 == nominatim_db.cli.nominatim(osm2pgsql_path='OSM2PGSQL NOT AVAILABLE',
+ cli_args=['admin', '--project-dir', str(tmp_path),
+ '--warm'] + args)
-# SPDX-License-Identifier: GPL-2.0-only
+# SPDX-License-Identifier: GPL-3.0-or-later
#
# This file is part of Nominatim. (https://nominatim.org)
#
-# Copyright (C) 2022 by the Nominatim developer community.
+# Copyright (C) 2024 by the Nominatim developer community.
# For a full list of authors see the git log.
import pytest
-import nominatim.cli
+import nominatim_db.cli
class MockParamCapture:
""" Mock that records the parameters with which a function was called
return self.return_value
+class AsyncMockParamCapture:
+ """ Mock that records the parameters with which a function was called
+ as well as the number of calls.
+ """
+ def __init__(self, retval=0):
+ self.called = 0
+ self.return_value = retval
+ self.last_args = None
+ self.last_kwargs = None
+
+ async def __call__(self, *args, **kwargs):
+ self.called += 1
+ self.last_args = args
+ self.last_kwargs = kwargs
+ return self.return_value
+
+
class DummyTokenizer:
def __init__(self, *args, **kwargs):
self.update_sql_functions_called = False
self.update_statistics_called = False
self.update_word_tokens_called = False
- def update_sql_functions(self, *args):
+ def update_sql_functions(self, *args, **kwargs):
self.update_sql_functions_called = True
- def finalize_import(self, *args):
+ def finalize_import(self, *args, **kwargs):
self.finalize_import_called = True
- def update_statistics(self):
+ def update_statistics(self, *args, **kwargs):
self.update_statistics_called = True
- def update_word_tokens(self):
+ def update_word_tokens(self, *args, **kwargs):
self.update_word_tokens_called = True
@pytest.fixture
-def cli_call(src_dir):
+def cli_call():
""" Call the nominatim main function with the correct paths set.
Returns a function that can be called with the desired CLI arguments.
"""
def _call_nominatim(*args):
- return nominatim.cli.nominatim(module_dir='MODULE NOT AVAILABLE',
- osm2pgsql_path='OSM2PGSQL NOT AVAILABLE',
- phplib_dir=str(src_dir / 'lib-php'),
- data_dir=str(src_dir / 'data'),
- phpcgi_path='/usr/bin/php-cgi',
- sqllib_dir=str(src_dir / 'lib-sql'),
- config_dir=str(src_dir / 'settings'),
- cli_args=args)
+ return nominatim_db.cli.nominatim(osm2pgsql_path='OSM2PGSQL NOT AVAILABLE',
+ cli_args=args)
return _call_nominatim
@pytest.fixture
-def mock_run_legacy(monkeypatch):
- mock = MockParamCapture()
- monkeypatch.setattr(nominatim.cli, 'run_legacy_script', mock)
- return mock
+def mock_func_factory(monkeypatch):
+ def get_mock(module, func):
+ mock = MockParamCapture()
+ mock.func_name = func
+ monkeypatch.setattr(module, func, mock)
+ return mock
+
+ return get_mock
@pytest.fixture
-def mock_func_factory(monkeypatch):
+def async_mock_func_factory(monkeypatch):
def get_mock(module, func):
- mock = MockParamCapture()
+ mock = AsyncMockParamCapture()
mock.func_name = func
monkeypatch.setattr(module, func, mock)
return mock
@pytest.fixture
def cli_tokenizer_mock(monkeypatch):
tok = DummyTokenizer()
- monkeypatch.setattr(nominatim.tokenizer.factory, 'get_tokenizer_for_db',
+ monkeypatch.setattr(nominatim_db.tokenizer.factory, 'get_tokenizer_for_db',
lambda *args: tok)
- monkeypatch.setattr(nominatim.tokenizer.factory, 'create_tokenizer',
+ monkeypatch.setattr(nominatim_db.tokenizer.factory, 'create_tokenizer',
lambda *args: tok)
return tok
-# SPDX-License-Identifier: GPL-2.0-only
+# SPDX-License-Identifier: GPL-3.0-or-later
#
# This file is part of Nominatim. (https://nominatim.org)
#
-# Copyright (C) 2022 by the Nominatim developer community.
+# Copyright (C) 2024 by the Nominatim developer community.
# For a full list of authors see the git log.
"""
Tests for command line interface wrapper.
These tests just check that the various command line parameters route to the
-correct functionionality. They use a lot of monkeypatching to avoid executing
+correct functionality. They use a lot of monkeypatching to avoid executing
the actual functions.
"""
+import importlib
import pytest
-import nominatim.indexer.indexer
-import nominatim.tools.add_osm_data
-import nominatim.tools.freeze
+import nominatim_db.indexer.indexer
+import nominatim_db.tools.add_osm_data
+import nominatim_db.tools.freeze
+import nominatim_db.tools.tiger_data
def test_cli_help(cli_call, capsys):
captured = capsys.readouterr()
assert captured.out.startswith('Nominatim version')
-@pytest.mark.parametrize("name,oid", [('file', 'foo.osm'), ('diff', 'foo.osc')])
-def test_cli_add_data_file_command(cli_call, mock_func_factory, name, oid):
- mock_run_legacy = mock_func_factory(nominatim.tools.add_osm_data, 'add_data_from_file')
- assert cli_call('add-data', '--' + name, str(oid)) == 0
- assert mock_run_legacy.called == 1
-
-
-@pytest.mark.parametrize("name,oid", [('node', 12), ('way', 8), ('relation', 32)])
-def test_cli_add_data_object_command(cli_call, mock_func_factory, name, oid):
- mock_run_legacy = mock_func_factory(nominatim.tools.add_osm_data, 'add_osm_object')
- assert cli_call('add-data', '--' + name, str(oid)) == 0
-
- assert mock_run_legacy.called == 1
-
-
-
-def test_cli_add_data_tiger_data(cli_call, cli_tokenizer_mock, mock_func_factory):
- mock = mock_func_factory(nominatim.tools.tiger_data, 'add_tiger_data')
-
- assert cli_call('add-data', '--tiger-data', 'somewhere') == 0
-
- assert mock.called == 1
-
-
-def test_cli_serve_command(cli_call, mock_func_factory):
- func = mock_func_factory(nominatim.cli, 'run_php_server')
-
- cli_call('serve') == 0
+class TestCliWithDb:
- assert func.called == 1
+ @pytest.fixture(autouse=True)
+ def setup_cli_call(self, cli_call, temp_db, cli_tokenizer_mock, table_factory):
+ self.call_nominatim = cli_call
+ self.tokenizer_mock = cli_tokenizer_mock
+ # Make sure tools.freeze.is_frozen doesn't report database as frozen. Monkeypatching failed
+ table_factory('place')
-def test_cli_export_command(cli_call, mock_run_legacy):
- assert cli_call('export', '--output-all-postcodes') == 0
+ @pytest.mark.parametrize("name,oid", [('file', 'foo.osm'), ('diff', 'foo.osc')])
+ def test_cli_add_data_file_command(self, cli_call, mock_func_factory, name, oid):
+ mock_run_legacy = mock_func_factory(nominatim_db.tools.add_osm_data, 'add_data_from_file')
+ assert cli_call('add-data', '--' + name, str(oid)) == 0
- assert mock_run_legacy.called == 1
- assert mock_run_legacy.last_args[0] == 'export.php'
+ assert mock_run_legacy.called == 1
-@pytest.mark.parametrize("param,value", [('output-type', 'country'),
- ('output-format', 'street;city'),
- ('language', 'xf'),
- ('restrict-to-country', 'us'),
- ('restrict-to-osm-node', '536'),
- ('restrict-to-osm-way', '727'),
- ('restrict-to-osm-relation', '197532')
- ])
-def test_export_parameters(src_dir, tmp_path, param, value):
- (tmp_path / 'admin').mkdir()
- (tmp_path / 'admin' / 'export.php').write_text(f"""<?php
- exit(strpos(implode(' ', $_SERVER['argv']), '--{param} {value}') >= 0 ? 0 : 10);
- """)
+ @pytest.mark.parametrize("name,oid", [('node', 12), ('way', 8), ('relation', 32)])
+ def test_cli_add_data_object_command(self, cli_call, mock_func_factory, name, oid):
+ mock_run_legacy = mock_func_factory(nominatim_db.tools.add_osm_data, 'add_osm_object')
+ assert cli_call('add-data', '--' + name, str(oid)) == 0
- assert nominatim.cli.nominatim(module_dir='MODULE NOT AVAILABLE',
- osm2pgsql_path='OSM2PGSQL NOT AVAILABLE',
- phplib_dir=str(tmp_path),
- data_dir=str(src_dir / 'data'),
- phpcgi_path='/usr/bin/php-cgi',
- sqllib_dir=str(src_dir / 'lib-sql'),
- config_dir=str(src_dir / 'settings'),
- cli_args=['export', '--' + param, value]) == 0
+ assert mock_run_legacy.called == 1
-class TestCliWithDb:
+ def test_cli_add_data_tiger_data(self, cli_call, cli_tokenizer_mock, async_mock_func_factory):
+ mock = async_mock_func_factory(nominatim_db.tools.tiger_data, 'add_tiger_data')
- @pytest.fixture(autouse=True)
- def setup_cli_call(self, cli_call, temp_db, cli_tokenizer_mock):
- self.call_nominatim = cli_call
- self.tokenizer_mock = cli_tokenizer_mock
+ assert cli_call('add-data', '--tiger-data', 'somewhere') == 0
+ assert mock.called == 1
def test_freeze_command(self, mock_func_factory):
- mock_drop = mock_func_factory(nominatim.tools.freeze, 'drop_update_tables')
- mock_flatnode = mock_func_factory(nominatim.tools.freeze, 'drop_flatnode_file')
+ mock_drop = mock_func_factory(nominatim_db.tools.freeze, 'drop_update_tables')
+ mock_flatnode = mock_func_factory(nominatim_db.tools.freeze, 'drop_flatnode_file')
assert self.call_nominatim('freeze') == 0
@pytest.mark.parametrize("params,do_bnds,do_ranks", [
- ([], 1, 1),
- (['--boundaries-only'], 1, 0),
- (['--no-boundaries'], 0, 1),
+ ([], 2, 2),
+ (['--boundaries-only'], 2, 0),
+ (['--no-boundaries'], 0, 2),
(['--boundaries-only', '--no-boundaries'], 0, 0)])
- def test_index_command(self, mock_func_factory, table_factory,
+ def test_index_command(self, monkeypatch, async_mock_func_factory, table_factory,
params, do_bnds, do_ranks):
table_factory('import_status', 'indexed bool')
- bnd_mock = mock_func_factory(nominatim.indexer.indexer.Indexer, 'index_boundaries')
- rank_mock = mock_func_factory(nominatim.indexer.indexer.Indexer, 'index_by_rank')
+ bnd_mock = async_mock_func_factory(nominatim_db.indexer.indexer.Indexer, 'index_boundaries')
+ rank_mock = async_mock_func_factory(nominatim_db.indexer.indexer.Indexer, 'index_by_rank')
+ postcode_mock = async_mock_func_factory(nominatim_db.indexer.indexer.Indexer, 'index_postcodes')
+
+ monkeypatch.setattr(nominatim_db.indexer.indexer.Indexer, 'has_pending',
+ [False, True].pop)
assert self.call_nominatim('index', *params) == 0
assert bnd_mock.called == do_bnds
assert rank_mock.called == do_ranks
+ assert postcode_mock.called == do_ranks
def test_special_phrases_wiki_command(self, mock_func_factory):
- func = mock_func_factory(nominatim.clicmd.special_phrases.SPImporter, 'import_phrases')
+ func = mock_func_factory(nominatim_db.clicmd.special_phrases.SPImporter, 'import_phrases')
self.call_nominatim('special-phrases', '--import-from-wiki', '--no-replace')
def test_special_phrases_csv_command(self, src_dir, mock_func_factory):
- func = mock_func_factory(nominatim.clicmd.special_phrases.SPImporter, 'import_phrases')
+ func = mock_func_factory(nominatim_db.clicmd.special_phrases.SPImporter, 'import_phrases')
testdata = src_dir / 'test' / 'testdb'
csv_path = str((testdata / 'full_en_phrases_test.csv').resolve())
-# SPDX-License-Identifier: GPL-2.0-only
+# SPDX-License-Identifier: GPL-3.0-or-later
#
# This file is part of Nominatim. (https://nominatim.org)
#
-# Copyright (C) 2022 by the Nominatim developer community.
+# Copyright (C) 2024 by the Nominatim developer community.
# For a full list of authors see the git log.
"""
Test for the command line interface wrapper admin subcommand.
These tests just check that the various command line parameters route to the
-correct functionionality. They use a lot of monkeypatching to avoid executing
+correct functionality. They use a lot of monkeypatching to avoid executing
the actual functions.
"""
import pytest
-import nominatim.tools.admin
-import nominatim.tools.check_database
-import nominatim.tools.migration
-import nominatim.clicmd.admin
-
-
-@pytest.mark.parametrize("params", [('--warm', ),
- ('--warm', '--reverse-only'),
- ('--warm', '--search-only')])
-def test_admin_command_legacy(cli_call, mock_func_factory, params):
- mock_run_legacy = mock_func_factory(nominatim.clicmd.admin, 'run_legacy_script')
-
- assert cli_call('admin', *params) == 0
-
- assert mock_run_legacy.called == 1
+import nominatim_db.tools.admin
+import nominatim_db.tools.check_database
+import nominatim_db.tools.migration
+import nominatim_db.clicmd.admin
def test_admin_command_check_database(cli_call, mock_func_factory):
- mock = mock_func_factory(nominatim.tools.check_database, 'check_database')
+ mock = mock_func_factory(nominatim_db.tools.check_database, 'check_database')
assert cli_call('admin', '--check-database') == 0
assert mock.called == 1
def test_admin_migrate(cli_call, mock_func_factory):
- mock = mock_func_factory(nominatim.tools.migration, 'migrate')
+ mock = mock_func_factory(nominatim_db.tools.migration, 'migrate')
assert cli_call('admin', '--migrate') == 0
assert mock.called == 1
+def test_admin_clean_deleted_relations(cli_call, mock_func_factory):
+ mock = mock_func_factory(nominatim_db.tools.admin, 'clean_deleted_relations')
+
+ assert cli_call('admin', '--clean-deleted', '1 month') == 0
+ assert mock.called == 1
+
+def test_admin_clean_deleted_relations_no_age(cli_call, mock_func_factory):
+ mock = mock_func_factory(nominatim_db.tools.admin, 'clean_deleted_relations')
+
+ assert cli_call('admin', '--clean-deleted') == 1
+
class TestCliAdminWithDb:
@pytest.fixture(autouse=True)
@pytest.mark.parametrize("func, params", [('analyse_indexing', ('--analyse-indexing', ))])
def test_analyse_indexing(self, mock_func_factory, func, params):
- mock = mock_func_factory(nominatim.tools.admin, func)
+ mock = mock_func_factory(nominatim_db.tools.admin, func)
assert self.call_nominatim('admin', *params) == 0
assert mock.called == 1
-# SPDX-License-Identifier: GPL-2.0-only
+# SPDX-License-Identifier: GPL-3.0-or-later
#
# This file is part of Nominatim. (https://nominatim.org)
#
-# Copyright (C) 2022 by the Nominatim developer community.
+# Copyright (C) 2024 by the Nominatim developer community.
# For a full list of authors see the git log.
"""
Tests for API access commands of command-line interface wrapper.
"""
+import json
import pytest
-import nominatim.clicmd.api
+import nominatim_db.clicmd.api
+import nominatim_api as napi
+@pytest.mark.parametrize('call', ['search', 'reverse', 'lookup', 'details', 'status'])
+def test_list_format(cli_call, call):
+ assert 0 == cli_call(call, '--list-formats')
-@pytest.mark.parametrize("endpoint", (('search', 'reverse', 'lookup', 'details', 'status')))
-def test_no_api_without_phpcgi(src_dir, endpoint):
- assert nominatim.cli.nominatim(module_dir='MODULE NOT AVAILABLE',
- osm2pgsql_path='OSM2PGSQL NOT AVAILABLE',
- phplib_dir=str(src_dir / 'lib-php'),
- data_dir=str(src_dir / 'data'),
- phpcgi_path=None,
- sqllib_dir=str(src_dir / 'lib-sql'),
- config_dir=str(src_dir / 'settings'),
- cli_args=[endpoint]) == 1
+@pytest.mark.parametrize('call', ['search', 'reverse', 'lookup', 'details', 'status'])
+def test_bad_format(cli_call, call):
+ assert 1 == cli_call(call, '--format', 'rsdfsdfsdfsaefsdfsd')
-@pytest.mark.parametrize("params", [('search', '--query', 'new'),
- ('search', '--city', 'Berlin'),
- ('reverse', '--lat', '0', '--lon', '0', '--zoom', '13'),
- ('lookup', '--id', 'N1'),
- ('details', '--node', '1'),
- ('details', '--way', '1'),
- ('details', '--relation', '1'),
- ('details', '--place_id', '10001'),
- ('status',)])
-class TestCliApiCall:
- @pytest.fixture(autouse=True)
- def setup_cli_call(self, cli_call):
- self.call_nominatim = cli_call
-
- def test_api_commands_simple(self, mock_func_factory, params, tmp_path):
- (tmp_path / 'website').mkdir()
- (tmp_path / 'website' / (params[0] + '.php')).write_text('')
- mock_run_api = mock_func_factory(nominatim.clicmd.api, 'run_api_script')
-
- assert self.call_nominatim(*params, '--project-dir', str(tmp_path)) == 0
+class TestCliStatusCall:
- assert mock_run_api.called == 1
- assert mock_run_api.last_args[0] == params[0]
+ @pytest.fixture(autouse=True)
+ def setup_status_mock(self, monkeypatch):
+ monkeypatch.setattr(napi.NominatimAPI, 'status',
+ lambda self: napi.StatusResult(200, 'OK'))
- def test_bad_project_idr(self, mock_func_factory, params):
- mock_run_api = mock_func_factory(nominatim.clicmd.api, 'run_api_script')
+ def test_status_simple(self, cli_call, tmp_path):
+ result = cli_call('status', '--project-dir', str(tmp_path))
- assert self.call_nominatim(*params) == 1
+ assert result == 0
-QUERY_PARAMS = {
- 'search': ('--query', 'somewhere'),
- 'reverse': ('--lat', '20', '--lon', '30'),
- 'lookup': ('--id', 'R345345'),
- 'details': ('--node', '324')
-}
-@pytest.mark.parametrize("endpoint", (('search', 'reverse', 'lookup')))
-class TestCliApiCommonParameters:
+ def test_status_json_format(self, cli_call, tmp_path, capsys):
+ result = cli_call('status', '--project-dir', str(tmp_path),
+ '--format', 'json')
- @pytest.fixture(autouse=True)
- def setup_website_dir(self, cli_call, project_env, endpoint):
- self.endpoint = endpoint
- self.cli_call = cli_call
- self.project_dir = project_env.project_dir
- (self.project_dir / 'website').mkdir()
+ assert result == 0
+ json.loads(capsys.readouterr().out)
- def expect_param(self, param, expected):
- (self.project_dir / 'website' / (self.endpoint + '.php')).write_text(f"""<?php
- exit($_GET['{param}'] == '{expected}' ? 0 : 10);
- """)
+class TestCliDetailsCall:
- def call_nominatim(self, *params):
- return self.cli_call(self.endpoint, *QUERY_PARAMS[self.endpoint],
- '--project-dir', str(self.project_dir), *params)
-
+ @pytest.fixture(autouse=True)
+ def setup_status_mock(self, monkeypatch):
+ result = napi.DetailedResult(napi.SourceTable.PLACEX, ('place', 'thing'),
+ napi.Point(1.0, -3.0))
- def test_param_output(self):
- self.expect_param('format', 'xml')
- assert self.call_nominatim('--format', 'xml') == 0
+ monkeypatch.setattr(napi.NominatimAPI, 'details',
+ lambda *args, **kwargs: result)
+ @pytest.mark.parametrize("params", [('--node', '1'),
+ ('--way', '1'),
+ ('--relation', '1'),
+ ('--place_id', '10001')])
- def test_param_lang(self):
- self.expect_param('accept-language', 'de')
- assert self.call_nominatim('--lang', 'de') == 0
- assert self.call_nominatim('--accept-language', 'de') == 0
+ def test_details_json_format(self, cli_call, tmp_path, capsys, params):
+ result = cli_call('details', '--project-dir', str(tmp_path), *params)
+ assert result == 0
- @pytest.mark.parametrize("param", ('addressdetails', 'extratags', 'namedetails'))
- def test_param_extradata(self, param):
- self.expect_param(param, '1')
+ json.loads(capsys.readouterr().out)
- assert self.call_nominatim('--' + param) == 0
- def test_param_polygon_output(self):
- self.expect_param('polygon_geojson', '1')
+class TestCliReverseCall:
- assert self.call_nominatim('--polygon-output', 'geojson') == 0
+ @pytest.fixture(autouse=True)
+ def setup_reverse_mock(self, monkeypatch):
+ result = napi.ReverseResult(napi.SourceTable.PLACEX, ('place', 'thing'),
+ napi.Point(1.0, -3.0),
+ names={'name':'Name', 'name:fr': 'Nom'},
+ extratags={'extra':'Extra'},
+ locale_name='Name',
+ display_name='Name')
+ monkeypatch.setattr(napi.NominatimAPI, 'reverse',
+ lambda *args, **kwargs: result)
- def test_param_polygon_threshold(self):
- self.expect_param('polygon_threshold', '0.3452')
- assert self.call_nominatim('--polygon-threshold', '0.3452') == 0
+ def test_reverse_simple(self, cli_call, tmp_path, capsys):
+ result = cli_call('reverse', '--project-dir', str(tmp_path),
+ '--lat', '34', '--lon', '34')
+ assert result == 0
-def test_cli_search_param_bounded(cli_call, project_env):
- webdir = project_env.project_dir / 'website'
- webdir.mkdir()
- (webdir / 'search.php').write_text(f"""<?php
- exit($_GET['bounded'] == '1' ? 0 : 10);
- """)
+ out = json.loads(capsys.readouterr().out)
+ assert out['name'] == 'Name'
+ assert 'address' not in out
+ assert 'extratags' not in out
+ assert 'namedetails' not in out
- assert cli_call('search', *QUERY_PARAMS['search'], '--project-dir', str(project_env.project_dir),
- '--bounded') == 0
+ @pytest.mark.parametrize('param,field', [('--addressdetails', 'address'),
+ ('--extratags', 'extratags'),
+ ('--namedetails', 'namedetails')])
+ def test_reverse_extra_stuff(self, cli_call, tmp_path, capsys, param, field):
+ result = cli_call('reverse', '--project-dir', str(tmp_path),
+ '--lat', '34', '--lon', '34', param)
-def test_cli_search_param_dedupe(cli_call, project_env):
- webdir = project_env.project_dir / 'website'
- webdir.mkdir()
- (webdir / 'search.php').write_text(f"""<?php
- exit($_GET['dedupe'] == '0' ? 0 : 10);
- """)
+ assert result == 0
- assert cli_call('search', *QUERY_PARAMS['search'], '--project-dir', str(project_env.project_dir),
- '--no-dedupe') == 0
+ out = json.loads(capsys.readouterr().out)
+ assert field in out
-def test_cli_details_param_class(cli_call, project_env):
- webdir = project_env.project_dir / 'website'
- webdir.mkdir()
- (webdir / 'details.php').write_text(f"""<?php
- exit($_GET['class'] == 'highway' ? 0 : 10);
- """)
+ def test_reverse_format(self, cli_call, tmp_path, capsys):
+ result = cli_call('reverse', '--project-dir', str(tmp_path),
+ '--lat', '34', '--lon', '34', '--format', 'geojson')
- assert cli_call('details', *QUERY_PARAMS['details'], '--project-dir', str(project_env.project_dir),
- '--class', 'highway') == 0
+ assert result == 0
+ out = json.loads(capsys.readouterr().out)
+ assert out['type'] == 'FeatureCollection'
-@pytest.mark.parametrize('param', ('lang', 'accept-language'))
-def test_cli_details_param_lang(cli_call, project_env, param):
- webdir = project_env.project_dir / 'website'
- webdir.mkdir()
- (webdir / 'details.php').write_text(f"""<?php
- exit($_GET['accept-language'] == 'es' ? 0 : 10);
- """)
- assert cli_call('details', *QUERY_PARAMS['details'], '--project-dir', str(project_env.project_dir),
- '--' + param, 'es') == 0
+class TestCliLookupCall:
+ @pytest.fixture(autouse=True)
+ def setup_lookup_mock(self, monkeypatch):
+ result = napi.SearchResult(napi.SourceTable.PLACEX, ('place', 'thing'),
+ napi.Point(1.0, -3.0),
+ names={'name':'Name', 'name:fr': 'Nom'},
+ extratags={'extra':'Extra'},
+ locale_name='Name',
+ display_name='Name')
+
+ monkeypatch.setattr(napi.NominatimAPI, 'lookup',
+ lambda *args, **kwargs: napi.SearchResults([result]))
+
+ def test_lookup_simple(self, cli_call, tmp_path, capsys):
+ result = cli_call('lookup', '--project-dir', str(tmp_path),
+ '--id', 'N34')
+
+ assert result == 0
+
+ out = json.loads(capsys.readouterr().out)
+ assert len(out) == 1
+ assert out[0]['name'] == 'Name'
+ assert 'address' not in out[0]
+ assert 'extratags' not in out[0]
+ assert 'namedetails' not in out[0]
+
+
+@pytest.mark.parametrize('endpoint, params', [('search', ('--query', 'Berlin')),
+ ('search_address', ('--city', 'Berlin'))
+ ])
+def test_search(cli_call, tmp_path, capsys, monkeypatch, endpoint, params):
+ result = napi.SearchResult(napi.SourceTable.PLACEX, ('place', 'thing'),
+ napi.Point(1.0, -3.0),
+ names={'name':'Name', 'name:fr': 'Nom'},
+ extratags={'extra':'Extra'},
+ locale_name='Name',
+ display_name='Name')
+
+ monkeypatch.setattr(napi.NominatimAPI, endpoint,
+ lambda *args, **kwargs: napi.SearchResults([result]))
+
+
+ result = cli_call('search', '--project-dir', str(tmp_path), *params)
+
+ assert result == 0
+
+ out = json.loads(capsys.readouterr().out)
+ assert len(out) == 1
+ assert out[0]['name'] == 'Name'
+ assert 'address' not in out[0]
+ assert 'extratags' not in out[0]
+ assert 'namedetails' not in out[0]
-# SPDX-License-Identifier: GPL-2.0-only
+# SPDX-License-Identifier: GPL-3.0-or-later
#
# This file is part of Nominatim. (https://nominatim.org)
#
-# Copyright (C) 2022 by the Nominatim developer community.
+# Copyright (C) 2024 by the Nominatim developer community.
# For a full list of authors see the git log.
"""
Tests for import command of the command-line interface wrapper.
"""
import pytest
-import nominatim.tools.database_import
-import nominatim.data.country_info
-import nominatim.tools.refresh
-import nominatim.tools.postcodes
-import nominatim.indexer.indexer
-import nominatim.db.properties
+import nominatim_db.tools.database_import
+import nominatim_db.data.country_info
+import nominatim_db.tools.refresh
+import nominatim_db.tools.postcodes
+import nominatim_db.indexer.indexer
+import nominatim_db.db.properties
class TestCliImportWithDb:
@pytest.mark.parametrize('with_updates', [True, False])
- def test_import_full(self, mock_func_factory, with_updates, place_table, property_table):
+ def test_import_full(self, mock_func_factory, async_mock_func_factory,
+ with_updates, place_table, property_table):
mocks = [
- mock_func_factory(nominatim.tools.database_import, 'setup_database_skeleton'),
- mock_func_factory(nominatim.data.country_info, 'setup_country_tables'),
- mock_func_factory(nominatim.tools.database_import, 'import_osm_data'),
- mock_func_factory(nominatim.tools.refresh, 'import_wikipedia_articles'),
- mock_func_factory(nominatim.tools.refresh, 'import_secondary_importance'),
- mock_func_factory(nominatim.tools.database_import, 'truncate_data_tables'),
- mock_func_factory(nominatim.tools.database_import, 'load_data'),
- mock_func_factory(nominatim.tools.database_import, 'create_tables'),
- mock_func_factory(nominatim.tools.database_import, 'create_table_triggers'),
- mock_func_factory(nominatim.tools.database_import, 'create_partition_tables'),
- mock_func_factory(nominatim.tools.database_import, 'create_search_indices'),
- mock_func_factory(nominatim.data.country_info, 'create_country_names'),
- mock_func_factory(nominatim.tools.refresh, 'load_address_levels_from_config'),
- mock_func_factory(nominatim.tools.postcodes, 'update_postcodes'),
- mock_func_factory(nominatim.indexer.indexer.Indexer, 'index_full'),
- mock_func_factory(nominatim.tools.refresh, 'setup_website'),
+ mock_func_factory(nominatim_db.tools.database_import, 'setup_database_skeleton'),
+ mock_func_factory(nominatim_db.data.country_info, 'setup_country_tables'),
+ mock_func_factory(nominatim_db.tools.database_import, 'import_osm_data'),
+ mock_func_factory(nominatim_db.tools.refresh, 'import_wikipedia_articles'),
+ mock_func_factory(nominatim_db.tools.refresh, 'import_secondary_importance'),
+ mock_func_factory(nominatim_db.tools.database_import, 'truncate_data_tables'),
+ async_mock_func_factory(nominatim_db.tools.database_import, 'load_data'),
+ mock_func_factory(nominatim_db.tools.database_import, 'create_tables'),
+ mock_func_factory(nominatim_db.tools.database_import, 'create_table_triggers'),
+ mock_func_factory(nominatim_db.tools.database_import, 'create_partition_tables'),
+ async_mock_func_factory(nominatim_db.tools.database_import, 'create_search_indices'),
+ mock_func_factory(nominatim_db.data.country_info, 'create_country_names'),
+ mock_func_factory(nominatim_db.tools.refresh, 'load_address_levels_from_config'),
+ mock_func_factory(nominatim_db.tools.postcodes, 'update_postcodes'),
+ async_mock_func_factory(nominatim_db.indexer.indexer.Indexer, 'index_full'),
]
params = ['import', '--osm-file', __file__]
if with_updates:
- mocks.append(mock_func_factory(nominatim.tools.freeze, 'drop_update_tables'))
+ mocks.append(mock_func_factory(nominatim_db.tools.freeze, 'drop_update_tables'))
params.append('--no-updates')
- cf_mock = mock_func_factory(nominatim.tools.refresh, 'create_functions')
+ cf_mock = mock_func_factory(nominatim_db.tools.refresh, 'create_functions')
assert self.call_nominatim(*params) == 0
assert mock.called == 1, "Mock '{}' not called".format(mock.func_name)
- def test_import_continue_load_data(self, mock_func_factory):
+ def test_import_continue_load_data(self, mock_func_factory, async_mock_func_factory):
mocks = [
- mock_func_factory(nominatim.tools.database_import, 'truncate_data_tables'),
- mock_func_factory(nominatim.tools.database_import, 'load_data'),
- mock_func_factory(nominatim.tools.database_import, 'create_search_indices'),
- mock_func_factory(nominatim.data.country_info, 'create_country_names'),
- mock_func_factory(nominatim.tools.postcodes, 'update_postcodes'),
- mock_func_factory(nominatim.indexer.indexer.Indexer, 'index_full'),
- mock_func_factory(nominatim.tools.refresh, 'setup_website'),
- mock_func_factory(nominatim.db.properties, 'set_property')
+ mock_func_factory(nominatim_db.tools.database_import, 'truncate_data_tables'),
+ async_mock_func_factory(nominatim_db.tools.database_import, 'load_data'),
+ async_mock_func_factory(nominatim_db.tools.database_import, 'create_search_indices'),
+ mock_func_factory(nominatim_db.data.country_info, 'create_country_names'),
+ mock_func_factory(nominatim_db.tools.postcodes, 'update_postcodes'),
+ async_mock_func_factory(nominatim_db.indexer.indexer.Indexer, 'index_full'),
+ mock_func_factory(nominatim_db.db.properties, 'set_property')
]
assert self.call_nominatim('import', '--continue', 'load-data') == 0
assert mock.called == 1, "Mock '{}' not called".format(mock.func_name)
- def test_import_continue_indexing(self, mock_func_factory, placex_table,
- temp_db_conn):
+ def test_import_continue_indexing(self, mock_func_factory, async_mock_func_factory,
+ placex_table, temp_db_conn):
mocks = [
- mock_func_factory(nominatim.tools.database_import, 'create_search_indices'),
- mock_func_factory(nominatim.data.country_info, 'create_country_names'),
- mock_func_factory(nominatim.indexer.indexer.Indexer, 'index_full'),
- mock_func_factory(nominatim.tools.refresh, 'setup_website'),
- mock_func_factory(nominatim.db.properties, 'set_property')
+ async_mock_func_factory(nominatim_db.tools.database_import, 'create_search_indices'),
+ mock_func_factory(nominatim_db.data.country_info, 'create_country_names'),
+ async_mock_func_factory(nominatim_db.indexer.indexer.Indexer, 'index_full'),
+ mock_func_factory(nominatim_db.db.properties, 'set_property')
]
assert self.call_nominatim('import', '--continue', 'indexing') == 0
assert self.call_nominatim('import', '--continue', 'indexing') == 0
- def test_import_continue_postprocess(self, mock_func_factory):
+ def test_import_continue_postprocess(self, mock_func_factory, async_mock_func_factory):
mocks = [
- mock_func_factory(nominatim.tools.database_import, 'create_search_indices'),
- mock_func_factory(nominatim.data.country_info, 'create_country_names'),
- mock_func_factory(nominatim.tools.refresh, 'setup_website'),
- mock_func_factory(nominatim.db.properties, 'set_property')
+ async_mock_func_factory(nominatim_db.tools.database_import, 'create_search_indices'),
+ mock_func_factory(nominatim_db.data.country_info, 'create_country_names'),
+ mock_func_factory(nominatim_db.db.properties, 'set_property')
]
assert self.call_nominatim('import', '--continue', 'db-postprocess') == 0
-# SPDX-License-Identifier: GPL-2.0-only
+# SPDX-License-Identifier: GPL-3.0-or-later
#
# This file is part of Nominatim. (https://nominatim.org)
#
-# Copyright (C) 2022 by the Nominatim developer community.
+# Copyright (C) 2024 by the Nominatim developer community.
# For a full list of authors see the git log.
"""
Tests for command line interface wrapper for refresk command.
"""
import pytest
-import nominatim.tools.refresh
-import nominatim.tools.postcodes
-import nominatim.indexer.indexer
+import nominatim_db.tools.refresh
+import nominatim_db.tools.postcodes
+import nominatim_db.indexer.indexer
class TestRefresh:
('address-levels', 'load_address_levels_from_config'),
('wiki-data', 'import_wikipedia_articles'),
('importance', 'recompute_importance'),
- ('website', 'setup_website'),
])
def test_refresh_command(self, mock_func_factory, command, func):
- func_mock = mock_func_factory(nominatim.tools.refresh, func)
+ mock_func_factory(nominatim_db.tools.refresh, 'create_functions')
+ func_mock = mock_func_factory(nominatim_db.tools.refresh, func)
assert self.call_nominatim('refresh', '--' + command) == 0
assert func_mock.called == 1
assert self.tokenizer_mock.update_word_tokens_called
- def test_refresh_postcodes(self, mock_func_factory, place_table):
- func_mock = mock_func_factory(nominatim.tools.postcodes, 'update_postcodes')
- idx_mock = mock_func_factory(nominatim.indexer.indexer.Indexer, 'index_postcodes')
+ def test_refresh_postcodes(self, async_mock_func_factory, mock_func_factory, place_table):
+ func_mock = mock_func_factory(nominatim_db.tools.postcodes, 'update_postcodes')
+ idx_mock = async_mock_func_factory(nominatim_db.indexer.indexer.Indexer, 'index_postcodes')
assert self.call_nominatim('refresh', '--postcodes') == 0
assert func_mock.called == 1
def test_refresh_create_functions(self, mock_func_factory):
- func_mock = mock_func_factory(nominatim.tools.refresh, 'create_functions')
+ func_mock = mock_func_factory(nominatim_db.tools.refresh, 'create_functions')
assert self.call_nominatim('refresh', '--functions') == 0
assert func_mock.called == 1
assert self.call_nominatim('refresh', '--wiki-data') == 1
+
def test_refresh_secondary_importance_file_not_found(self):
assert self.call_nominatim('refresh', '--secondary-importance') == 1
def test_refresh_secondary_importance_new_table(self, mock_func_factory):
- mocks = [mock_func_factory(nominatim.tools.refresh, 'import_secondary_importance'),
- mock_func_factory(nominatim.tools.refresh, 'create_functions')]
+ mocks = [mock_func_factory(nominatim_db.tools.refresh, 'import_secondary_importance'),
+ mock_func_factory(nominatim_db.tools.refresh, 'create_functions')]
assert self.call_nominatim('refresh', '--secondary-importance') == 0
assert mocks[0].called == 1
assert mocks[1].called == 1
- def test_refresh_importance_computed_after_wiki_import(self, monkeypatch):
+ def test_refresh_importance_computed_after_wiki_import(self, monkeypatch, mock_func_factory):
calls = []
- monkeypatch.setattr(nominatim.tools.refresh, 'import_wikipedia_articles',
+ monkeypatch.setattr(nominatim_db.tools.refresh, 'import_wikipedia_articles',
lambda *args, **kwargs: calls.append('import') or 0)
- monkeypatch.setattr(nominatim.tools.refresh, 'recompute_importance',
+ monkeypatch.setattr(nominatim_db.tools.refresh, 'recompute_importance',
lambda *args, **kwargs: calls.append('update'))
+ func_mock = mock_func_factory(nominatim_db.tools.refresh, 'create_functions')
assert self.call_nominatim('refresh', '--importance', '--wiki-data') == 0
assert calls == ['import', 'update']
+ assert func_mock.called == 1
@pytest.mark.parametrize('params', [('--data-object', 'w234'),
('--data-object', 'N23', '--data-object', 'N24'),
('--data-area', 'r7723', '--data-area', 'r2'),
('--data-area', 'R9284425', '--data-object', 'n1234567894567')])
def test_refresh_objects(self, params, mock_func_factory):
- func_mock = mock_func_factory(nominatim.tools.refresh, 'invalidate_osm_object')
+ func_mock = mock_func_factory(nominatim_db.tools.refresh, 'invalidate_osm_object')
assert self.call_nominatim('refresh', *params) == 0
@pytest.mark.parametrize('func', ('--data-object', '--data-area'))
@pytest.mark.parametrize('param', ('234', 'a55', 'R 453', 'Rel'))
def test_refresh_objects_bad_param(self, func, param, mock_func_factory):
- func_mock = mock_func_factory(nominatim.tools.refresh, 'invalidate_osm_object')
+ func_mock = mock_func_factory(nominatim_db.tools.refresh, 'invalidate_osm_object')
self.call_nominatim('refresh', func, param) == 1
assert func_mock.called == 0
-# SPDX-License-Identifier: GPL-2.0-only
+# SPDX-License-Identifier: GPL-3.0-or-later
#
# This file is part of Nominatim. (https://nominatim.org)
#
-# Copyright (C) 2022 by the Nominatim developer community.
+# Copyright (C) 2023 by the Nominatim developer community.
# For a full list of authors see the git log.
"""
Tests for replication command of command-line interface wrapper.
import pytest
-import nominatim.cli
-import nominatim.indexer.indexer
-import nominatim.tools.replication
-from nominatim.db import status
+import nominatim_db.cli
+import nominatim_db.indexer.indexer
+import nominatim_db.tools.replication
+import nominatim_db.tools.refresh
+from nominatim_db.db import status
@pytest.fixture
def tokenizer_mock(monkeypatch):
self.finalize_import_called = True
tok = DummyTokenizer()
- monkeypatch.setattr(nominatim.tokenizer.factory, 'get_tokenizer_for_db',
+ monkeypatch.setattr(nominatim_db.tokenizer.factory, 'get_tokenizer_for_db',
lambda *args: tok)
- monkeypatch.setattr(nominatim.tokenizer.factory, 'create_tokenizer',
+ monkeypatch.setattr(nominatim_db.tokenizer.factory, 'create_tokenizer',
lambda *args: tok)
return tok
@pytest.fixture
-def index_mock(mock_func_factory, tokenizer_mock, init_status):
- return mock_func_factory(nominatim.indexer.indexer.Indexer, 'index_full')
+def index_mock(async_mock_func_factory, tokenizer_mock, init_status):
+ return async_mock_func_factory(nominatim_db.indexer.indexer.Indexer, 'index_full')
@pytest.fixture
def update_mock(mock_func_factory, init_status, tokenizer_mock):
- return mock_func_factory(nominatim.tools.replication, 'update')
+ return mock_func_factory(nominatim_db.tools.replication, 'update')
class TestCliReplication:
@pytest.fixture(autouse=True)
def setup_update_function(self, monkeypatch):
def _mock_updates(states):
- monkeypatch.setattr(nominatim.tools.replication, 'update',
+ monkeypatch.setattr(nominatim_db.tools.replication, 'update',
lambda *args, **kwargs: states.pop())
self.update_states = _mock_updates
(('--check-for-updates',), 'check_for_updates')
])
def test_replication_command(self, mock_func_factory, params, func):
- func_mock = mock_func_factory(nominatim.tools.replication, func)
+ func_mock = mock_func_factory(nominatim_db.tools.replication, func)
if params == ('--init',):
- umock = mock_func_factory(nominatim.tools.refresh, 'create_functions')
+ umock = mock_func_factory(nominatim_db.tools.refresh, 'create_functions')
assert self.call_nominatim(*params) == 0
assert func_mock.called == 1
def test_replication_update_once_no_index(self, update_mock):
assert self.call_nominatim('--once', '--no-index') == 0
- assert str(update_mock.last_args[1]['osm2pgsql']) == 'OSM2PGSQL NOT AVAILABLE'
+ assert str(update_mock.last_args[1]['osm2pgsql']).endswith('OSM2PGSQL NOT AVAILABLE')
def test_replication_update_custom_osm2pgsql(self, monkeypatch, update_mock):
@pytest.mark.parametrize("update_interval", [60, 3600])
def test_replication_catchup(self, placex_table, monkeypatch, index_mock, update_interval):
monkeypatch.setenv('NOMINATIM_REPLICATION_UPDATE_INTERVAL', str(update_interval))
- self.update_states([nominatim.tools.replication.UpdateState.NO_CHANGES])
+ self.update_states([nominatim_db.tools.replication.UpdateState.NO_CHANGES])
assert self.call_nominatim('--catch-up') == 0
def test_replication_update_continuous(self, index_mock):
- self.update_states([nominatim.tools.replication.UpdateState.UP_TO_DATE,
- nominatim.tools.replication.UpdateState.UP_TO_DATE])
+ self.update_states([nominatim_db.tools.replication.UpdateState.UP_TO_DATE,
+ nominatim_db.tools.replication.UpdateState.UP_TO_DATE])
with pytest.raises(IndexError):
self.call_nominatim()
def test_replication_update_continuous_no_change(self, mock_func_factory,
index_mock):
- self.update_states([nominatim.tools.replication.UpdateState.NO_CHANGES,
- nominatim.tools.replication.UpdateState.UP_TO_DATE])
+ self.update_states([nominatim_db.tools.replication.UpdateState.NO_CHANGES,
+ nominatim_db.tools.replication.UpdateState.UP_TO_DATE])
sleep_mock = mock_func_factory(time, 'sleep')
-# SPDX-License-Identifier: GPL-2.0-only
+# SPDX-License-Identifier: GPL-3.0-or-later
#
# This file is part of Nominatim. (https://nominatim.org)
#
-# Copyright (C) 2022 by the Nominatim developer community.
+# Copyright (C) 2024 by the Nominatim developer community.
# For a full list of authors see the git log.
"""
Test for loading dotenv configuration.
from pathlib import Path
import pytest
-from nominatim.config import Configuration, flatten_config_list
-from nominatim.errors import UsageError
+from nominatim_db.config import Configuration, flatten_config_list
+from nominatim_db.errors import UsageError
@pytest.fixture
-def make_config(src_dir):
+def make_config():
""" Create a configuration object from the given project directory.
"""
def _mk_config(project_dir=None):
- return Configuration(project_dir, src_dir / 'settings')
+ return Configuration(project_dir)
return _mk_config
@pytest.fixture
-def make_config_path(src_dir, tmp_path):
+def make_config_path(tmp_path):
""" Create a configuration object with project and config directories
in a temporary directory.
"""
def _mk_config():
(tmp_path / 'project').mkdir()
(tmp_path / 'config').mkdir()
- conf = Configuration(tmp_path / 'project', src_dir / 'settings')
+ conf = Configuration(tmp_path / 'project')
conf.config_dir = tmp_path / 'config'
return conf
def test_get_bool_empty(make_config):
config = make_config()
- assert config.DATABASE_MODULE_PATH == ''
- assert not config.get_bool('DATABASE_MODULE_PATH')
+ assert config.TOKENIZER_CONFIG == ''
+ assert not config.get_bool('TOKENIZER_CONFIG')
@pytest.mark.parametrize("value,result", [('0', 0), ('1', 1),
def test_get_int_empty(make_config):
config = make_config()
- assert config.DATABASE_MODULE_PATH == ''
+ assert config.TOKENIZER_CONFIG == ''
with pytest.raises(UsageError):
- config.get_int('DATABASE_MODULE_PATH')
+ config.get_int('TOKENIZER_CONFIG')
@pytest.mark.parametrize("value,outlist", [('sd', ['sd']),
def test_get_path_empty(make_config):
config = make_config()
- assert config.DATABASE_MODULE_PATH == ''
- assert not config.get_path('DATABASE_MODULE_PATH')
+ assert config.TOKENIZER_CONFIG == ''
+ assert not config.get_path('TOKENIZER_CONFIG')
def test_get_path_absolute(make_config, monkeypatch):
monkeypatch.setenv('NOMINATIM_IMPORT_STYLE', 'street')
- expected = src_dir / 'settings' / 'import-street.style'
+ expected = src_dir / 'lib-lua' / 'import-street.lua'
assert config.get_import_style_file() == expected
-# SPDX-License-Identifier: GPL-2.0-only
+# SPDX-License-Identifier: GPL-3.0-or-later
#
# This file is part of Nominatim. (https://nominatim.org)
#
-# Copyright (C) 2022 by the Nominatim developer community.
+# Copyright (C) 2024 by the Nominatim developer community.
# For a full list of authors see the git log.
"""
Test for loading extra Python modules.
import pytest
-from nominatim.config import Configuration
+from nominatim_db.config import Configuration
@pytest.fixture
def test_config(src_dir, tmp_path):
"""
(tmp_path / 'project').mkdir()
(tmp_path / 'config').mkdir()
- conf = Configuration(tmp_path / 'project', src_dir / 'settings')
+ conf = Configuration(tmp_path / 'project')
conf.config_dir = tmp_path / 'config'
return conf
def test_load_default_module(test_config):
- module = test_config.load_plugin_module('version', 'nominatim')
+ module = test_config.load_plugin_module('version', 'nominatim_db')
assert isinstance(module.NOMINATIM_VERSION, tuple)
def test_load_default_module_with_hyphen(test_config):
- module = test_config.load_plugin_module('place-info', 'nominatim.data')
+ module = test_config.load_plugin_module('place-info', 'nominatim_db.data')
assert isinstance(module.PlaceInfo, object)
-# SPDX-License-Identifier: GPL-2.0-only
+# SPDX-License-Identifier: GPL-3.0-or-later
#
# This file is part of Nominatim. (https://nominatim.org)
#
-# Copyright (C) 2022 by the Nominatim developer community.
+# Copyright (C) 2024 by the Nominatim developer community.
# For a full list of authors see the git log.
import itertools
import sys
from pathlib import Path
-import psycopg2
+import psycopg
+from psycopg import sql as pysql
import pytest
# always test against the source
SRC_DIR = (Path(__file__) / '..' / '..' / '..').resolve()
-sys.path.insert(0, str(SRC_DIR))
+sys.path.insert(0, str(SRC_DIR / 'src'))
-from nominatim.config import Configuration
-from nominatim.db import connection
-from nominatim.db.sql_preprocessor import SQLPreprocessor
-import nominatim.tokenizer.factory
+from nominatim_db.config import Configuration
+from nominatim_db.db import connection
+from nominatim_db.db.sql_preprocessor import SQLPreprocessor
+import nominatim_db.tokenizer.factory
import dummy_tokenizer
import mocks
exported into NOMINATIM_DATABASE_DSN.
"""
name = 'test_nominatim_python_unittest'
- conn = psycopg2.connect(database='postgres')
- conn.set_isolation_level(0)
- with conn.cursor() as cur:
- cur.execute('DROP DATABASE IF EXISTS {}'.format(name))
- cur.execute('CREATE DATABASE {}'.format(name))
-
- conn.close()
+ with psycopg.connect(dbname='postgres', autocommit=True) as conn:
+ with conn.cursor() as cur:
+ cur.execute(pysql.SQL('DROP DATABASE IF EXISTS') + pysql.Identifier(name))
+ cur.execute(pysql.SQL('CREATE DATABASE') + pysql.Identifier(name))
monkeypatch.setenv('NOMINATIM_DATABASE_DSN', 'dbname=' + name)
- yield name
-
- conn = psycopg2.connect(database='postgres')
+ with psycopg.connect(dbname=name) as conn:
+ with conn.cursor() as cur:
+ cur.execute('CREATE EXTENSION hstore')
- conn.set_isolation_level(0)
- with conn.cursor() as cur:
- cur.execute('DROP DATABASE IF EXISTS {}'.format(name))
+ yield name
- conn.close()
+ with psycopg.connect(dbname='postgres', autocommit=True) as conn:
+ with conn.cursor() as cur:
+ cur.execute('DROP DATABASE IF EXISTS {}'.format(name))
@pytest.fixture
@pytest.fixture
def temp_db_with_extensions(temp_db):
- conn = psycopg2.connect(database=temp_db)
- with conn.cursor() as cur:
- cur.execute('CREATE EXTENSION hstore; CREATE EXTENSION postgis;')
- conn.commit()
- conn.close()
+ with psycopg.connect(dbname=temp_db) as conn:
+ with conn.cursor() as cur:
+ cur.execute('CREATE EXTENSION postgis')
return temp_db
def temp_db_conn(temp_db):
""" Connection to the test database.
"""
- with connection.connect('dbname=' + temp_db) as conn:
+ with connection.connect('', autocommit=True, dbname=temp_db) as conn:
+ connection.register_hstore(conn)
yield conn
""" Connection and cursor towards the test database. The connection will
be in auto-commit mode.
"""
- conn = psycopg2.connect('dbname=' + temp_db)
- conn.set_isolation_level(0)
- with conn.cursor(cursor_factory=CursorForTesting) as cur:
- yield cur
- conn.close()
+ with psycopg.connect(dbname=temp_db, autocommit=True, cursor_factory=CursorForTesting) as conn:
+ connection.register_hstore(conn)
+ with conn.cursor() as cur:
+ yield cur
@pytest.fixture
-def table_factory(temp_db_cursor):
+def table_factory(temp_db_conn):
""" A fixture that creates new SQL tables, potentially filled with
content.
"""
def mk_table(name, definition='id INT', content=None):
- temp_db_cursor.execute('CREATE TABLE {} ({})'.format(name, definition))
- if content is not None:
- temp_db_cursor.execute_values("INSERT INTO {} VALUES %s".format(name), content)
+ with psycopg.ClientCursor(temp_db_conn) as cur:
+ cur.execute('CREATE TABLE {} ({})'.format(name, definition))
+ if content:
+ sql = pysql.SQL("INSERT INTO {} VALUES ({})")\
+ .format(pysql.Identifier(name),
+ pysql.SQL(',').join([pysql.Placeholder() for _ in range(len(content[0]))]))
+ cur.executemany(sql , content)
return mk_table
@pytest.fixture
-def def_config(src_dir):
- cfg = Configuration(None, src_dir / 'settings')
- cfg.set_libdirs(module='.', osm2pgsql='.',
- php=src_dir / 'lib-php',
- sql=src_dir / 'lib-sql',
- data=src_dir / 'data')
+def def_config():
+ cfg = Configuration(None)
+ cfg.set_libdirs(osm2pgsql=None)
return cfg
@pytest.fixture
-def project_env(src_dir, tmp_path):
+def project_env(tmp_path):
projdir = tmp_path / 'project'
projdir.mkdir()
- cfg = Configuration(projdir, src_dir / 'settings')
- cfg.set_libdirs(module='.', osm2pgsql='.',
- php=src_dir / 'lib-php',
- sql=src_dir / 'lib-sql',
- data=src_dir / 'data')
+ cfg = Configuration(projdir)
+ cfg.set_libdirs(osm2pgsql=None)
return cfg
""" A factory for rows in the place table. The table is created as a
prerequisite to the fixture.
"""
- psycopg2.extras.register_hstore(temp_db_cursor)
idseq = itertools.count(1001)
def _insert(osm_type='N', osm_id=None, cls='amenity', typ='cafe', names=None,
admin_level=None, address=None, extratags=None, geom=None):
@pytest.fixture
def sql_preprocessor_cfg(tmp_path, table_factory, temp_db_with_extensions):
table_factory('country_name', 'partition INT', ((0, ), (1, ), (2, )))
- cfg = Configuration(None, SRC_DIR.resolve() / 'settings')
- cfg.set_libdirs(module='.', osm2pgsql='.', php=SRC_DIR / 'lib-php',
- sql=tmp_path, data=SRC_DIR / 'data')
+ cfg = Configuration(None)
+ cfg.set_libdirs(osm2pgsql=None, sql=tmp_path)
return cfg
def _import_dummy(*args, **kwargs):
return dummy_tokenizer
- monkeypatch.setattr(nominatim.tokenizer.factory, "_import_tokenizer", _import_dummy)
+ monkeypatch.setattr(nominatim_db.tokenizer.factory,
+ "_import_tokenizer", _import_dummy)
property_table.set('tokenizer', 'dummy')
def _create_tokenizer():
-# SPDX-License-Identifier: GPL-2.0-only
+# SPDX-License-Identifier: GPL-3.0-or-later
#
# This file is part of Nominatim. (https://nominatim.org)
#
-# Copyright (C) 2022 by the Nominatim developer community.
+# Copyright (C) 2024 by the Nominatim developer community.
# For a full list of authors see the git log.
"""
-Specialised psycopg2 cursor with shortcut functions useful for testing.
+Specialised psycopg cursor with shortcut functions useful for testing.
"""
-import psycopg2.extras
+import psycopg
-class CursorForTesting(psycopg2.extras.DictCursor):
+class CursorForTesting(psycopg.Cursor):
    """ Extension to the Cursor class that provides execution
short-cuts that simplify writing assertions.
"""
return self.scalar('SELECT count(*) FROM ' + table)
return self.scalar('SELECT count(*) FROM {} WHERE {}'.format(table, where))
-
-
- def execute_values(self, *args, **kwargs):
- """ Execute the execute_values() function on the cursor.
- """
- psycopg2.extras.execute_values(self, *args, **kwargs)
-# SPDX-License-Identifier: GPL-2.0-only
+# SPDX-License-Identifier: GPL-3.0-or-later
#
# This file is part of Nominatim. (https://nominatim.org)
#
-# Copyright (C) 2022 by the Nominatim developer community.
+# Copyright (C) 2024 by the Nominatim developer community.
# For a full list of authors see the git log.
"""
Tests for function that handle country properties.
from textwrap import dedent
import pytest
-from nominatim.data import country_info
+from nominatim_db.data import country_info
@pytest.fixture
def loaded_country(def_config):
+++ /dev/null
-# SPDX-License-Identifier: GPL-2.0-only
-#
-# This file is part of Nominatim. (https://nominatim.org)
-#
-# Copyright (C) 2022 by the Nominatim developer community.
-# For a full list of authors see the git log.
-"""
-Tests for function providing a non-blocking query interface towards PostgreSQL.
-"""
-from contextlib import closing
-import concurrent.futures
-
-import pytest
-import psycopg2
-
-from nominatim.db.async_connection import DBConnection, DeadlockHandler
-
-
-@pytest.fixture
-def conn(temp_db):
- with closing(DBConnection('dbname=' + temp_db)) as connection:
- yield connection
-
-
-@pytest.fixture
-def simple_conns(temp_db):
- conn1 = psycopg2.connect('dbname=' + temp_db)
- conn2 = psycopg2.connect('dbname=' + temp_db)
-
- yield conn1.cursor(), conn2.cursor()
-
- conn1.close()
- conn2.close()
-
-
-def test_simple_query(conn, temp_db_conn):
- conn.connect()
-
- conn.perform('CREATE TABLE foo (id INT)')
- conn.wait()
-
- temp_db_conn.table_exists('foo')
-
-
-def test_wait_for_query(conn):
- conn.connect()
-
- conn.perform('SELECT pg_sleep(1)')
-
- assert not conn.is_done()
-
- conn.wait()
-
-
-def test_bad_query(conn):
- conn.connect()
-
- conn.perform('SELECT efasfjsea')
-
- with pytest.raises(psycopg2.ProgrammingError):
- conn.wait()
-
-
-def test_bad_query_ignore(temp_db):
- with closing(DBConnection('dbname=' + temp_db, ignore_sql_errors=True)) as conn:
- conn.connect()
-
- conn.perform('SELECT efasfjsea')
-
- conn.wait()
-
-
-def exec_with_deadlock(cur, sql, detector):
- with DeadlockHandler(lambda *args: detector.append(1)):
- cur.execute(sql)
-
-
-def test_deadlock(simple_conns):
- cur1, cur2 = simple_conns
-
- cur1.execute("""CREATE TABLE t1 (id INT PRIMARY KEY, t TEXT);
- INSERT into t1 VALUES (1, 'a'), (2, 'b')""")
- cur1.connection.commit()
-
- cur1.execute("UPDATE t1 SET t = 'x' WHERE id = 1")
- cur2.execute("UPDATE t1 SET t = 'x' WHERE id = 2")
-
- # This is the tricky part of the test. The first SQL command runs into
- # a lock and blocks, so we have to run it in a separate thread. When the
- # second deadlocking SQL statement is issued, Postgresql will abort one of
- # the two transactions that cause the deadlock. There is no way to tell
- # which one of the two. Therefore wrap both in a DeadlockHandler and
- # expect that exactly one of the two triggers.
- with concurrent.futures.ThreadPoolExecutor(max_workers=1) as executor:
- deadlock_check = []
- try:
- future = executor.submit(exec_with_deadlock, cur2,
- "UPDATE t1 SET t = 'y' WHERE id = 1",
- deadlock_check)
-
- while not future.running():
- pass
-
-
- exec_with_deadlock(cur1, "UPDATE t1 SET t = 'y' WHERE id = 2",
- deadlock_check)
- finally:
- # Whatever happens, make sure the deadlock gets resolved.
- cur1.connection.rollback()
-
- future.result()
-
- assert len(deadlock_check) == 1
-# SPDX-License-Identifier: GPL-2.0-only
+# SPDX-License-Identifier: GPL-3.0-or-later
#
# This file is part of Nominatim. (https://nominatim.org)
#
-# Copyright (C) 2022 by the Nominatim developer community.
+# Copyright (C) 2024 by the Nominatim developer community.
# For a full list of authors see the git log.
"""
Tests for specialised connection and cursor classes.
"""
import pytest
-import psycopg2
+import psycopg
-from nominatim.db.connection import connect, get_pg_env
+import nominatim_db.db.connection as nc
@pytest.fixture
def db(dsn):
- with connect(dsn) as conn:
+ with nc.connect(dsn) as conn:
yield conn
def test_connection_table_exists(db, table_factory):
- assert not db.table_exists('foobar')
+ assert not nc.table_exists(db, 'foobar')
table_factory('foobar')
- assert db.table_exists('foobar')
+ assert nc.table_exists(db, 'foobar')
def test_has_column_no_table(db):
- assert not db.table_has_column('sometable', 'somecolumn')
+ assert not nc.table_has_column(db, 'sometable', 'somecolumn')
@pytest.mark.parametrize('name,result', [('tram', True), ('car', False)])
def test_has_column(db, table_factory, name, result):
table_factory('stuff', 'tram TEXT')
- assert db.table_has_column('stuff', name) == result
+ assert nc.table_has_column(db, 'stuff', name) == result
def test_connection_index_exists(db, table_factory, temp_db_cursor):
- assert not db.index_exists('some_index')
+ assert not nc.index_exists(db, 'some_index')
table_factory('foobar')
temp_db_cursor.execute('CREATE INDEX some_index ON foobar(id)')
- assert db.index_exists('some_index')
- assert db.index_exists('some_index', table='foobar')
- assert not db.index_exists('some_index', table='bar')
+ assert nc.index_exists(db, 'some_index')
+ assert nc.index_exists(db, 'some_index', table='foobar')
+ assert not nc.index_exists(db, 'some_index', table='bar')
def test_drop_table_existing(db, table_factory):
table_factory('dummy')
- assert db.table_exists('dummy')
+ assert nc.table_exists(db, 'dummy')
- db.drop_table('dummy')
- assert not db.table_exists('dummy')
+ nc.drop_tables(db, 'dummy')
+ assert not nc.table_exists(db, 'dummy')
-def test_drop_table_non_existsing(db):
- db.drop_table('dfkjgjriogjigjgjrdghehtre')
+def test_drop_table_non_existing(db):
+ nc.drop_tables(db, 'dfkjgjriogjigjgjrdghehtre')
+
+
+def test_drop_many_tables(db, table_factory):
+ tables = [f'table{n}' for n in range(5)]
+
+ for t in tables:
+ table_factory(t)
+ assert nc.table_exists(db, t)
+
+ nc.drop_tables(db, *tables)
+
+ for t in tables:
+ assert not nc.table_exists(db, t)
def test_drop_table_non_existing_force(db):
- with pytest.raises(psycopg2.ProgrammingError, match='.*does not exist.*'):
- db.drop_table('dfkjgjriogjigjgjrdghehtre', if_exists=False)
+ with pytest.raises(psycopg.ProgrammingError, match='.*does not exist.*'):
+ nc.drop_tables(db, 'dfkjgjriogjigjgjrdghehtre', if_exists=False)
def test_connection_server_version_tuple(db):
- ver = db.server_version_tuple()
+ ver = nc.server_version_tuple(db)
assert isinstance(ver, tuple)
assert len(ver) == 2
def test_connection_postgis_version_tuple(db, temp_db_with_extensions):
- ver = db.postgis_version_tuple()
+ ver = nc.postgis_version_tuple(db)
assert isinstance(ver, tuple)
assert len(ver) == 2
def test_cursor_scalar(db, table_factory):
table_factory('dummy')
- with db.cursor() as cur:
- assert cur.scalar('SELECT count(*) FROM dummy') == 0
+ assert nc.execute_scalar(db, 'SELECT count(*) FROM dummy') == 0
def test_cursor_scalar_many_rows(db):
- with db.cursor() as cur:
- with pytest.raises(RuntimeError):
- cur.scalar('SELECT * FROM pg_tables')
+ with pytest.raises(RuntimeError, match='Query did not return a single row.'):
+ nc.execute_scalar(db, 'SELECT * FROM pg_tables')
def test_cursor_scalar_no_rows(db, table_factory):
table_factory('dummy')
- with db.cursor() as cur:
- with pytest.raises(RuntimeError):
- cur.scalar('SELECT id FROM dummy')
+ with pytest.raises(RuntimeError, match='Query did not return a single row.'):
+ nc.execute_scalar(db, 'SELECT id FROM dummy')
def test_get_pg_env_add_variable(monkeypatch):
monkeypatch.delenv('PGPASSWORD', raising=False)
- env = get_pg_env('user=fooF')
+ env = nc.get_pg_env('user=fooF')
assert env['PGUSER'] == 'fooF'
assert 'PGPASSWORD' not in env
def test_get_pg_env_overwrite_variable(monkeypatch):
monkeypatch.setenv('PGUSER', 'some default')
- env = get_pg_env('user=overwriter')
+ env = nc.get_pg_env('user=overwriter')
assert env['PGUSER'] == 'overwriter'
def test_get_pg_env_ignore_unknown():
- env = get_pg_env('client_encoding=stuff', base_env={})
+ env = nc.get_pg_env('client_encoding=stuff', base_env={})
assert env == {}
-# SPDX-License-Identifier: GPL-2.0-only
+# SPDX-License-Identifier: GPL-3.0-or-later
#
# This file is part of Nominatim. (https://nominatim.org)
#
-# Copyright (C) 2022 by the Nominatim developer community.
+# Copyright (C) 2024 by the Nominatim developer community.
# For a full list of authors see the git log.
"""
Tests for property table manipulation.
"""
import pytest
-from nominatim.db import properties
+from nominatim_db.db import properties
@pytest.fixture
def property_factory(property_table, temp_db_cursor):
-# SPDX-License-Identifier: GPL-2.0-only
+# SPDX-License-Identifier: GPL-3.0-or-later
#
# This file is part of Nominatim. (https://nominatim.org)
#
-# Copyright (C) 2022 by the Nominatim developer community.
+# Copyright (C) 2024 by the Nominatim developer community.
# For a full list of authors see the git log.
"""
Tests for SQL preprocessing.
"""
import pytest
+import pytest_asyncio
-from nominatim.db.sql_preprocessor import SQLPreprocessor
+from nominatim_db.db.sql_preprocessor import SQLPreprocessor
@pytest.fixture
def sql_factory(tmp_path):
sql_preprocessor.run_sql_file(temp_db_conn, sqlfile, bar='XX', foo='ZZ')
assert temp_db_cursor.scalar('SELECT test()') == 'ZZ XX'
+
+
+@pytest.mark.asyncio
+async def test_load_parallel_file(dsn, sql_preprocessor, tmp_path, temp_db_cursor):
+ (tmp_path / 'test.sql').write_text("""
+ CREATE TABLE foo (a TEXT);
+ CREATE TABLE foo2(a TEXT);""" +
+ "\n---\nCREATE TABLE bar (b INT);")
+
+ await sql_preprocessor.run_parallel_sql_file(dsn, 'test.sql', num_threads=4)
+
+ assert temp_db_cursor.table_exists('foo')
+ assert temp_db_cursor.table_exists('foo2')
+ assert temp_db_cursor.table_exists('bar')
-# SPDX-License-Identifier: GPL-2.0-only
+# SPDX-License-Identifier: GPL-3.0-or-later
#
# This file is part of Nominatim. (https://nominatim.org)
#
-# Copyright (C) 2022 by the Nominatim developer community.
+# Copyright (C) 2024 by the Nominatim developer community.
# For a full list of authors see the git log.
"""
Tests for status table manipulation.
import pytest
-import nominatim.db.status
-from nominatim.errors import UsageError
+import nominatim_db.db.status
+from nominatim_db.errors import UsageError
OSM_NODE_DATA = """\
<osm version="0.6" generator="OpenStreetMap server" copyright="OpenStreetMap and contributors" attribution="http://www.openstreetmap.org/copyright" license="http://opendatacommons.org/licenses/odbl/1-0/">
"""
def iso_date(date):
- return dt.datetime.strptime(date, nominatim.db.status.ISODATE_FORMAT)\
+ return dt.datetime.strptime(date, nominatim_db.db.status.ISODATE_FORMAT)\
.replace(tzinfo=dt.timezone.utc)
pass
+@pytest.mark.parametrize('offline', [True, False])
+def test_compute_database_date_from_osm2pgsql(table_factory, temp_db_conn, offline):
+ table_factory('osm2pgsql_properties', 'property TEXT, value TEXT',
+ content=(('current_timestamp', '2024-01-03T23:45:54Z'), ))
+
+ date = nominatim_db.db.status.compute_database_date(temp_db_conn, offline=offline)
+ assert date == iso_date('2024-01-03T23:45:54')
+
+
+def test_compute_database_date_from_osm2pgsql_nodata(table_factory, temp_db_conn):
+ table_factory('osm2pgsql_properties', 'property TEXT, value TEXT')
+
+ with pytest.raises(UsageError, match='Cannot determine database date from data in offline mode'):
+ nominatim_db.db.status.compute_database_date(temp_db_conn, offline=True)
+
+
def test_compute_database_date_place_empty(place_table, temp_db_conn):
with pytest.raises(UsageError):
- nominatim.db.status.compute_database_date(temp_db_conn)
+ nominatim_db.db.status.compute_database_date(temp_db_conn)
def test_compute_database_date_valid(monkeypatch, place_row, temp_db_conn):
requested_url.append(url)
return OSM_NODE_DATA
- monkeypatch.setattr(nominatim.db.status, "get_url", mock_url)
+ monkeypatch.setattr(nominatim_db.db.status, "get_url", mock_url)
- date = nominatim.db.status.compute_database_date(temp_db_conn)
+ date = nominatim_db.db.status.compute_database_date(temp_db_conn)
assert requested_url == ['https://www.openstreetmap.org/api/0.6/node/45673/1']
assert date == iso_date('2006-01-27T22:09:10')
requested_url.append(url)
return '<osm version="0.6" generator="OpenStre'
- monkeypatch.setattr(nominatim.db.status, "get_url", mock_url)
+ monkeypatch.setattr(nominatim_db.db.status, "get_url", mock_url)
with pytest.raises(UsageError):
- nominatim.db.status.compute_database_date(temp_db_conn)
+ nominatim_db.db.status.compute_database_date(temp_db_conn)
def test_set_status_empty_table(temp_db_conn, temp_db_cursor):
date = dt.datetime.fromordinal(1000000).replace(tzinfo=dt.timezone.utc)
- nominatim.db.status.set_status(temp_db_conn, date=date)
+ nominatim_db.db.status.set_status(temp_db_conn, date=date)
assert temp_db_cursor.row_set("SELECT * FROM import_status") == \
{(date, None, True)}
def test_set_status_filled_table(temp_db_conn, temp_db_cursor):
date = dt.datetime.fromordinal(1000000).replace(tzinfo=dt.timezone.utc)
- nominatim.db.status.set_status(temp_db_conn, date=date)
+ nominatim_db.db.status.set_status(temp_db_conn, date=date)
assert temp_db_cursor.table_rows('import_status') == 1
date = dt.datetime.fromordinal(1000100).replace(tzinfo=dt.timezone.utc)
- nominatim.db.status.set_status(temp_db_conn, date=date, seq=456, indexed=False)
+ nominatim_db.db.status.set_status(temp_db_conn, date=date, seq=456, indexed=False)
assert temp_db_cursor.row_set("SELECT * FROM import_status") == \
{(date, 456, False)}
def test_set_status_missing_date(temp_db_conn, temp_db_cursor):
date = dt.datetime.fromordinal(1000000).replace(tzinfo=dt.timezone.utc)
- nominatim.db.status.set_status(temp_db_conn, date=date)
+ nominatim_db.db.status.set_status(temp_db_conn, date=date)
assert temp_db_cursor.table_rows('import_status') == 1
- nominatim.db.status.set_status(temp_db_conn, date=None, seq=456, indexed=False)
+ nominatim_db.db.status.set_status(temp_db_conn, date=None, seq=456, indexed=False)
assert temp_db_cursor.row_set("SELECT * FROM import_status") == \
{(date, 456, False)}
def test_get_status_empty_table(temp_db_conn):
- assert nominatim.db.status.get_status(temp_db_conn) == (None, None, None)
+ assert nominatim_db.db.status.get_status(temp_db_conn) == (None, None, None)
def test_get_status_success(temp_db_conn):
date = dt.datetime.fromordinal(1000000).replace(tzinfo=dt.timezone.utc)
- nominatim.db.status.set_status(temp_db_conn, date=date, seq=667, indexed=False)
+ nominatim_db.db.status.set_status(temp_db_conn, date=date, seq=667, indexed=False)
- assert nominatim.db.status.get_status(temp_db_conn) == \
+ assert nominatim_db.db.status.get_status(temp_db_conn) == \
(date, 667, False)
@pytest.mark.parametrize("new_state", [True, False])
def test_set_indexed(temp_db_conn, temp_db_cursor, old_state, new_state):
date = dt.datetime.fromordinal(1000000).replace(tzinfo=dt.timezone.utc)
- nominatim.db.status.set_status(temp_db_conn, date=date, indexed=old_state)
- nominatim.db.status.set_indexed(temp_db_conn, new_state)
+ nominatim_db.db.status.set_status(temp_db_conn, date=date, indexed=old_state)
+ nominatim_db.db.status.set_indexed(temp_db_conn, new_state)
assert temp_db_cursor.scalar("SELECT indexed FROM import_status") == new_state
def test_set_indexed_empty_status(temp_db_conn, temp_db_cursor):
- nominatim.db.status.set_indexed(temp_db_conn, True)
+ nominatim_db.db.status.set_indexed(temp_db_conn, True)
assert temp_db_cursor.table_rows("import_status") == 0
date = dt.datetime.fromordinal(1000000).replace(tzinfo=dt.timezone.utc)
start = dt.datetime.now() - dt.timedelta(hours=1)
- nominatim.db.status.set_status(temp_db_conn, date=date, seq=56)
- nominatim.db.status.log_status(temp_db_conn, start, 'index')
+ nominatim_db.db.status.set_status(temp_db_conn, date=date, seq=56)
+ nominatim_db.db.status.log_status(temp_db_conn, start, 'index')
temp_db_conn.commit()
-# SPDX-License-Identifier: GPL-2.0-only
+# SPDX-License-Identifier: GPL-3.0-or-later
#
# This file is part of Nominatim. (https://nominatim.org)
#
-# Copyright (C) 2022 by the Nominatim developer community.
+# Copyright (C) 2024 by the Nominatim developer community.
# For a full list of authors see the git log.
"""
Tests for DB utility functions in db.utils
import pytest
-import nominatim.db.utils as db_utils
-from nominatim.errors import UsageError
+import nominatim_db.db.utils as db_utils
+from nominatim_db.errors import UsageError
def test_execute_file_success(dsn, temp_db_cursor, tmp_path):
tmpfile = tmp_path / 'test.sql'
db_utils.execute_file(dsn, tmpfile, post_code='INSERT INTO test VALUES(23)')
assert temp_db_cursor.row_set('SELECT * FROM test') == {(23, )}
-
-
-class TestCopyBuffer:
- TABLE_NAME = 'copytable'
-
- @pytest.fixture(autouse=True)
- def setup_test_table(self, table_factory):
- table_factory(self.TABLE_NAME, 'col_a INT, col_b TEXT')
-
-
- def table_rows(self, cursor):
- return cursor.row_set('SELECT * FROM ' + self.TABLE_NAME)
-
-
- def test_copybuffer_empty(self):
- with db_utils.CopyBuffer() as buf:
- buf.copy_out(None, "dummy")
-
-
- def test_all_columns(self, temp_db_cursor):
- with db_utils.CopyBuffer() as buf:
- buf.add(3, 'hum')
- buf.add(None, 'f\\t')
-
- buf.copy_out(temp_db_cursor, self.TABLE_NAME)
-
- assert self.table_rows(temp_db_cursor) == {(3, 'hum'), (None, 'f\\t')}
-
-
- def test_selected_columns(self, temp_db_cursor):
- with db_utils.CopyBuffer() as buf:
- buf.add('foo')
-
- buf.copy_out(temp_db_cursor, self.TABLE_NAME,
- columns=['col_b'])
-
- assert self.table_rows(temp_db_cursor) == {(None, 'foo')}
-
-
- def test_reordered_columns(self, temp_db_cursor):
- with db_utils.CopyBuffer() as buf:
- buf.add('one', 1)
- buf.add(' two ', 2)
-
- buf.copy_out(temp_db_cursor, self.TABLE_NAME,
- columns=['col_b', 'col_a'])
-
- assert self.table_rows(temp_db_cursor) == {(1, 'one'), (2, ' two ')}
-
-
- def test_special_characters(self, temp_db_cursor):
- with db_utils.CopyBuffer() as buf:
- buf.add('foo\tbar')
- buf.add('sun\nson')
- buf.add('\\N')
-
- buf.copy_out(temp_db_cursor, self.TABLE_NAME,
- columns=['col_b'])
-
- assert self.table_rows(temp_db_cursor) == {(None, 'foo\tbar'),
- (None, 'sun\nson'),
- (None, '\\N')}
-
-
-
-class TestCopyBufferJson:
- TABLE_NAME = 'copytable'
-
- @pytest.fixture(autouse=True)
- def setup_test_table(self, table_factory):
- table_factory(self.TABLE_NAME, 'col_a INT, col_b JSONB')
-
-
- def table_rows(self, cursor):
- cursor.execute('SELECT * FROM ' + self.TABLE_NAME)
- results = {k: v for k,v in cursor}
-
- assert len(results) == cursor.rowcount
-
- return results
-
-
- def test_json_object(self, temp_db_cursor):
- with db_utils.CopyBuffer() as buf:
- buf.add(1, json.dumps({'test': 'value', 'number': 1}))
-
- buf.copy_out(temp_db_cursor, self.TABLE_NAME)
-
- assert self.table_rows(temp_db_cursor) == \
- {1: {'test': 'value', 'number': 1}}
-
-
- def test_json_object_special_chras(self, temp_db_cursor):
- with db_utils.CopyBuffer() as buf:
- buf.add(1, json.dumps({'te\tst': 'va\nlue', 'nu"mber': None}))
-
- buf.copy_out(temp_db_cursor, self.TABLE_NAME)
-
- assert self.table_rows(temp_db_cursor) == \
- {1: {'te\tst': 'va\nlue', 'nu"mber': None}}
-# SPDX-License-Identifier: GPL-2.0-only
+# SPDX-License-Identifier: GPL-3.0-or-later
#
# This file is part of Nominatim. (https://nominatim.org)
#
-# Copyright (C) 2022 by the Nominatim developer community.
+# Copyright (C) 2024 by the Nominatim developer community.
# For a full list of authors see the git log.
"""
Tokenizer for testing.
"""
-from nominatim.data.place_info import PlaceInfo
-from nominatim.config import Configuration
+from nominatim_db.data.place_info import PlaceInfo
+from nominatim_db.config import Configuration
def create(dsn, data_dir):
""" Create a new instance of the tokenizer provided by this module.
-# SPDX-License-Identifier: GPL-2.0-only
+# SPDX-License-Identifier: GPL-3.0-or-later
#
# This file is part of Nominatim. (https://nominatim.org)
#
-# Copyright (C) 2022 by the Nominatim developer community.
+# Copyright (C) 2024 by the Nominatim developer community.
# For a full list of authors see the git log.
"""
Tests for running the indexing.
"""
import itertools
import pytest
+import pytest_asyncio
-from nominatim.indexer import indexer
-from nominatim.tokenizer import factory
+from nominatim_db.indexer import indexer
+from nominatim_db.tokenizer import factory
class IndexerTestDB:
self.postcode_id = itertools.count(700000)
self.conn = conn
- self.conn.set_isolation_level(0)
+ self.conn.autocommit = True
with self.conn.cursor() as cur:
- cur.execute('CREATE EXTENSION hstore')
cur.execute("""CREATE TABLE placex (place_id BIGINT,
name HSTORE,
class TEXT,
@pytest.mark.parametrize("threads", [1, 15])
-def test_index_all_by_rank(test_db, threads, test_tokenizer):
+@pytest.mark.asyncio
+async def test_index_all_by_rank(test_db, threads, test_tokenizer):
for rank in range(31):
test_db.add_place(rank_address=rank, rank_search=rank)
test_db.add_osmline()
assert test_db.osmline_unindexed() == 1
idx = indexer.Indexer('dbname=test_nominatim_python_unittest', test_tokenizer, threads)
- idx.index_by_rank(0, 30)
+ await idx.index_by_rank(0, 30)
assert test_db.placex_unindexed() == 0
assert test_db.osmline_unindexed() == 0
@pytest.mark.parametrize("threads", [1, 15])
-def test_index_partial_without_30(test_db, threads, test_tokenizer):
+@pytest.mark.asyncio
+async def test_index_partial_without_30(test_db, threads, test_tokenizer):
for rank in range(31):
test_db.add_place(rank_address=rank, rank_search=rank)
test_db.add_osmline()
idx = indexer.Indexer('dbname=test_nominatim_python_unittest',
test_tokenizer, threads)
- idx.index_by_rank(4, 15)
+ await idx.index_by_rank(4, 15)
assert test_db.placex_unindexed() == 19
assert test_db.osmline_unindexed() == 1
@pytest.mark.parametrize("threads", [1, 15])
-def test_index_partial_with_30(test_db, threads, test_tokenizer):
+@pytest.mark.asyncio
+async def test_index_partial_with_30(test_db, threads, test_tokenizer):
for rank in range(31):
test_db.add_place(rank_address=rank, rank_search=rank)
test_db.add_osmline()
assert test_db.osmline_unindexed() == 1
idx = indexer.Indexer('dbname=test_nominatim_python_unittest', test_tokenizer, threads)
- idx.index_by_rank(28, 30)
+ await idx.index_by_rank(28, 30)
assert test_db.placex_unindexed() == 27
assert test_db.osmline_unindexed() == 0
WHERE indexed_status = 0 AND rank_address between 1 and 27""") == 0
@pytest.mark.parametrize("threads", [1, 15])
-def test_index_boundaries(test_db, threads, test_tokenizer):
+@pytest.mark.asyncio
+async def test_index_boundaries(test_db, threads, test_tokenizer):
for rank in range(4, 10):
test_db.add_admin(rank_address=rank, rank_search=rank)
for rank in range(31):
assert test_db.osmline_unindexed() == 1
idx = indexer.Indexer('dbname=test_nominatim_python_unittest', test_tokenizer, threads)
- idx.index_boundaries(0, 30)
+ await idx.index_boundaries(0, 30)
assert test_db.placex_unindexed() == 31
assert test_db.osmline_unindexed() == 1
@pytest.mark.parametrize("threads", [1, 15])
-def test_index_postcodes(test_db, threads, test_tokenizer):
+@pytest.mark.asyncio
+async def test_index_postcodes(test_db, threads, test_tokenizer):
for postcode in range(1000):
test_db.add_postcode('de', postcode)
for postcode in range(32000, 33000):
test_db.add_postcode('us', postcode)
idx = indexer.Indexer('dbname=test_nominatim_python_unittest', test_tokenizer, threads)
- idx.index_postcodes()
+ await idx.index_postcodes()
assert test_db.scalar("""SELECT count(*) FROM location_postcode
WHERE indexed_status != 0""") == 0
@pytest.mark.parametrize("analyse", [True, False])
-def test_index_full(test_db, analyse, test_tokenizer):
+@pytest.mark.asyncio
+async def test_index_full(test_db, analyse, test_tokenizer):
for rank in range(4, 10):
test_db.add_admin(rank_address=rank, rank_search=rank)
for rank in range(31):
test_db.add_postcode('de', postcode)
idx = indexer.Indexer('dbname=test_nominatim_python_unittest', test_tokenizer, 4)
- idx.index_full(analyse=analyse)
+ await idx.index_full(analyse=analyse)
assert test_db.placex_unindexed() == 0
assert test_db.osmline_unindexed() == 0
assert test_db.scalar("""SELECT count(*) FROM location_postcode
WHERE indexed_status != 0""") == 0
-
-
-@pytest.mark.parametrize("threads", [1, 15])
-def test_index_reopen_connection(test_db, threads, monkeypatch, test_tokenizer):
- monkeypatch.setattr(indexer.WorkerPool, "REOPEN_CONNECTIONS_AFTER", 15)
-
- for _ in range(1000):
- test_db.add_place(rank_address=30, rank_search=30)
-
- idx = indexer.Indexer('dbname=test_nominatim_python_unittest', test_tokenizer, threads)
- idx.index_by_rank(28, 30)
-
- assert test_db.placex_unindexed() == 0
-# SPDX-License-Identifier: GPL-2.0-only
+# SPDX-License-Identifier: GPL-3.0-or-later
#
# This file is part of Nominatim. (https://nominatim.org)
#
-# Copyright (C) 2022 by the Nominatim developer community.
+# Copyright (C) 2024 by the Nominatim developer community.
# For a full list of authors see the git log.
"""
Legacy word table for testing with functions to prefil and test contents
of the table.
"""
+from nominatim_db.db.connection import execute_scalar
class MockIcuWordTable:
""" A word table for testing using legacy word table structure.
with self.conn.cursor() as cur:
cur.execute("""INSERT INTO word (word_token, type, word, info)
VALUES (%s, 'S', %s,
- json_build_object('class', %s,
- 'type', %s,
- 'op', %s))
+ json_build_object('class', %s::text,
+ 'type', %s::text,
+ 'op', %s::text))
""", (word_token, word, cls, typ, oper))
self.conn.commit()
word = word_tokens[0]
for token in word_tokens:
cur.execute("""INSERT INTO word (word_id, word_token, type, word, info)
- VALUES (%s, %s, 'H', %s, jsonb_build_object('lookup', %s))
+ VALUES (%s, %s, 'H', %s, jsonb_build_object('lookup', %s::text))
""", (word_id, token, word, word_tokens[0]))
self.conn.commit()
def count(self):
- with self.conn.cursor() as cur:
- return cur.scalar("SELECT count(*) FROM word")
+ return execute_scalar(self.conn, "SELECT count(*) FROM word")
def count_special(self):
- with self.conn.cursor() as cur:
- return cur.scalar("SELECT count(*) FROM word WHERE type = 'S'")
+ return execute_scalar(self.conn, "SELECT count(*) FROM word WHERE type = 'S'")
def count_housenumbers(self):
- with self.conn.cursor() as cur:
- return cur.scalar("SELECT count(*) FROM word WHERE type = 'H'")
+ return execute_scalar(self.conn, "SELECT count(*) FROM word WHERE type = 'H'")
def get_special(self):
+++ /dev/null
-# SPDX-License-Identifier: GPL-2.0-only
-#
-# This file is part of Nominatim. (https://nominatim.org)
-#
-# Copyright (C) 2022 by the Nominatim developer community.
-# For a full list of authors see the git log.
-"""
-Legacy word table for testing with functions to prefil and test contents
-of the table.
-"""
-
-class MockLegacyWordTable:
- """ A word table for testing using legacy word table structure.
- """
- def __init__(self, conn):
- self.conn = conn
- with conn.cursor() as cur:
- cur.execute("""CREATE TABLE word (word_id INTEGER,
- word_token text,
- word text,
- class text,
- type text,
- country_code varchar(2),
- search_name_count INTEGER,
- operator TEXT)""")
-
- conn.commit()
-
- def add_full_word(self, word_id, word, word_token=None):
- with self.conn.cursor() as cur:
- cur.execute("""INSERT INTO word (word_id, word_token, word)
- VALUES (%s, %s, %s)
- """, (word_id, ' ' + (word_token or word), word))
- self.conn.commit()
-
-
- def add_special(self, word_token, word, cls, typ, oper):
- with self.conn.cursor() as cur:
- cur.execute("""INSERT INTO word (word_token, word, class, type, operator)
- VALUES (%s, %s, %s, %s, %s)
- """, (word_token, word, cls, typ, oper))
- self.conn.commit()
-
-
- def add_country(self, country_code, word_token):
- with self.conn.cursor() as cur:
- cur.execute("INSERT INTO word (word_token, country_code) VALUES(%s, %s)",
- (word_token, country_code))
- self.conn.commit()
-
-
- def add_postcode(self, word_token, postcode):
- with self.conn.cursor() as cur:
- cur.execute("""INSERT INTO word (word_token, word, class, type)
- VALUES (%s, %s, 'place', 'postcode')
- """, (word_token, postcode))
- self.conn.commit()
-
-
- def count(self):
- with self.conn.cursor() as cur:
- return cur.scalar("SELECT count(*) FROM word")
-
-
- def count_special(self):
- with self.conn.cursor() as cur:
- return cur.scalar("SELECT count(*) FROM word WHERE class != 'place'")
-
-
- def get_special(self):
- with self.conn.cursor() as cur:
- cur.execute("""SELECT word_token, word, class, type, operator
- FROM word WHERE class != 'place'""")
- result = set((tuple(row) for row in cur))
- assert len(result) == cur.rowcount, "Word table has duplicates."
- return result
-
-
- def get_country(self):
- with self.conn.cursor() as cur:
- cur.execute("""SELECT country_code, word_token
- FROM word WHERE country_code is not null""")
- result = set((tuple(row) for row in cur))
- assert len(result) == cur.rowcount, "Word table has duplicates."
- return result
-
-
- def get_postcodes(self):
- with self.conn.cursor() as cur:
- cur.execute("""SELECT word FROM word
- WHERE class = 'place' and type = 'postcode'""")
- return set((row[0] for row in cur))
-
- def get_partial_words(self):
- with self.conn.cursor() as cur:
- cur.execute("""SELECT word_token, search_name_count FROM word
- WHERE class is null and country_code is null
- and not word_token like ' %'""")
- return set((tuple(row) for row in cur))
-
-# SPDX-License-Identifier: GPL-2.0-only
+# SPDX-License-Identifier: GPL-3.0-or-later
#
# This file is part of Nominatim. (https://nominatim.org)
#
-# Copyright (C) 2022 by the Nominatim developer community.
+# Copyright (C) 2024 by the Nominatim developer community.
# For a full list of authors see the git log.
"""
Custom mocks for testing.
"""
import itertools
-import psycopg2.extras
-
-from nominatim.db import properties
+from nominatim_db.db import properties
# This must always point to the mock word table for the default tokenizer.
from mock_icu_word_table import MockIcuWordTable as MockWordTable
def add(self, osm_type='N', osm_id=None, cls='amenity', typ='cafe', names=None,
admin_level=None, address=None, extratags=None, geom='POINT(10 4)',
- country=None, housenumber=None):
+ country=None, housenumber=None, rank_search=30):
with self.conn.cursor() as cur:
- psycopg2.extras.register_hstore(cur)
cur.execute("""INSERT INTO placex (place_id, osm_type, osm_id, class,
type, name, admin_level, address,
- housenumber,
+ housenumber, rank_search,
extratags, geometry, country_code)
- VALUES(nextval('seq_place'), %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)""",
+ VALUES(nextval('seq_place'), %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)""",
(osm_type, osm_id or next(self.idseq), cls, typ, names,
- admin_level, address, housenumber, extratags, 'SRID=4326;' + geom,
+ admin_level, address, housenumber, rank_search,
+ extratags, 'SRID=4326;' + geom,
country))
self.conn.commit()
-# SPDX-License-Identifier: GPL-2.0-only
+# SPDX-License-Identifier: GPL-3.0-or-later
#
# This file is part of Nominatim. (https://nominatim.org)
#
-# Copyright (C) 2022 by the Nominatim developer community.
+# Copyright (C) 2024 by the Nominatim developer community.
# For a full list of authors see the git log.
"""
Tests for the sanitizer that normalizes housenumbers.
"""
import pytest
-from nominatim.tokenizer.place_sanitizer import PlaceSanitizer
-from nominatim.data.place_info import PlaceInfo
+from nominatim_db.tokenizer.place_sanitizer import PlaceSanitizer
+from nominatim_db.data.place_info import PlaceInfo
@pytest.fixture
def sanitize(request, def_config):
-# SPDX-License-Identifier: GPL-2.0-only
+# SPDX-License-Identifier: GPL-3.0-or-later
#
# This file is part of Nominatim. (https://nominatim.org)
#
-# Copyright (C) 2022 by the Nominatim developer community.
+# Copyright (C) 2024 by the Nominatim developer community.
# For a full list of authors see the git log.
"""
Tests for the sanitizer that normalizes postcodes.
"""
import pytest
-from nominatim.tokenizer.place_sanitizer import PlaceSanitizer
-from nominatim.data.place_info import PlaceInfo
-from nominatim.data import country_info
+from nominatim_db.tokenizer.place_sanitizer import PlaceSanitizer
+from nominatim_db.data.place_info import PlaceInfo
+from nominatim_db.data import country_info
@pytest.fixture
def sanitize(def_config, request):
assert sanitize(country='se', postcode=postcode) == []
+@pytest.mark.parametrize("postcode", ('AD123', '123', 'AD 123', 'AD-123'))
+def test_postcode_andorra_pass(sanitize, postcode):
+ assert sanitize(country='ad', postcode=postcode) == [('postcode', 'AD123')]
+
+
+@pytest.mark.parametrize("postcode", ('AD1234', 'AD AD123', 'XX123'))
+@pytest.mark.sanitizer_params(convert_to_address=False)
+def test_postcode_andorra_fail(sanitize, postcode):
+ assert sanitize(country='ad', postcode=postcode) == []
+
+
+@pytest.mark.parametrize("postcode", ('AI-2640', '2640', 'AI 2640'))
+def test_postcode_anguilla_pass(sanitize, postcode):
+ assert sanitize(country='ai', postcode=postcode) == [('postcode', 'AI-2640')]
+
+
+@pytest.mark.parametrize("postcode", ('AI-2000', 'AI US-2640', 'AI AI-2640'))
+@pytest.mark.sanitizer_params(convert_to_address=False)
+def test_postcode_anguilla_fail(sanitize, postcode):
+ assert sanitize(country='ai', postcode=postcode) == []
+
+
+@pytest.mark.parametrize("postcode", ('BN1111', 'BN 1111', 'BN BN1111', 'BN BN 1111'))
+def test_postcode_brunei_pass(sanitize, postcode):
+ assert sanitize(country='bn', postcode=postcode) == [('postcode', 'BN1111')]
+
+
+@pytest.mark.parametrize("postcode", ('BN-1111', 'BNN1111'))
+@pytest.mark.sanitizer_params(convert_to_address=False)
+def test_postcode_brunei_fail(sanitize, postcode):
+ assert sanitize(country='bn', postcode=postcode) == []
+
+
+@pytest.mark.parametrize("postcode", ('IM1 1AA', 'IM11AA', 'IM IM11AA'))
+def test_postcode_isle_of_man_pass(sanitize, postcode):
+ assert sanitize(country='im', postcode=postcode) == [('postcode', 'IM1 1AA')]
+
+
+@pytest.mark.parametrize("postcode", ('IZ1 1AA', 'IM1 AA'))
+@pytest.mark.sanitizer_params(convert_to_address=False)
+def test_postcode_isle_of_man_fail(sanitize, postcode):
+ assert sanitize(country='im', postcode=postcode) == []
+
+
+@pytest.mark.parametrize("postcode", ('JE5 0LA', 'JE50LA', 'JE JE50LA', 'je JE5 0LA'))
+def test_postcode_jersey_pass(sanitize, postcode):
+ assert sanitize(country='je', postcode=postcode) == [('postcode', 'JE5 0LA')]
+
+
+@pytest.mark.parametrize("postcode", ('gb JE5 0LA', 'IM50LA', 'IM5 012'))
+@pytest.mark.sanitizer_params(convert_to_address=False)
+def test_postcode_jersey_fail(sanitize, postcode):
+ assert sanitize(country='je', postcode=postcode) == []
+
+
+@pytest.mark.parametrize("postcode", ('KY1-1234', '1-1234', 'KY 1-1234'))
+def test_postcode_cayman_islands_pass(sanitize, postcode):
+ assert sanitize(country='ky', postcode=postcode) == [('postcode', 'KY1-1234')]
+
+
+@pytest.mark.parametrize("postcode", ('KY-1234', 'KZ1-1234', 'KY1 1234', 'KY1-123', 'KY KY1-1234'))
+@pytest.mark.sanitizer_params(convert_to_address=False)
+def test_postcode_cayman_islands_fail(sanitize, postcode):
+ assert sanitize(country='ky', postcode=postcode) == []
+
+
+@pytest.mark.parametrize("postcode", ('LC11 222', '11 222', '11222', 'LC 11 222'))
+def test_postcode_saint_lucia_pass(sanitize, postcode):
+ assert sanitize(country='lc', postcode=postcode) == [('postcode', 'LC11 222')]
+
+
+@pytest.mark.parametrize("postcode", ('11 2222', 'LC LC11 222'))
+@pytest.mark.sanitizer_params(convert_to_address=False)
+def test_postcode_saint_lucia_fail(sanitize, postcode):
+ assert sanitize(country='lc', postcode=postcode) == []
+
+
+@pytest.mark.parametrize("postcode", ('LV-1111', '1111', 'LV 1111', 'LV1111',))
+def test_postcode_latvia_pass(sanitize, postcode):
+ assert sanitize(country='lv', postcode=postcode) == [('postcode', 'LV-1111')]
+
+
+@pytest.mark.parametrize("postcode", ('111', '11111', 'LV LV-1111'))
+@pytest.mark.sanitizer_params(convert_to_address=False)
+def test_postcode_latvia_fail(sanitize, postcode):
+ assert sanitize(country='lv', postcode=postcode) == []
+
+
+@pytest.mark.parametrize("postcode", ('MD-1111', '1111', 'MD 1111', 'MD1111'))
+def test_postcode_moldova_pass(sanitize, postcode):
+ assert sanitize(country='md', postcode=postcode) == [('postcode', 'MD-1111')]
+
+
+@pytest.mark.parametrize("postcode", ("MD MD-1111", "MD MD1111", "MD MD 1111"))
+@pytest.mark.sanitizer_params(convert_to_address=False)
+def test_postcode_moldova_fail(sanitize, postcode):
+ assert sanitize(country='md', postcode=postcode) == []
+
+
+@pytest.mark.parametrize("postcode", ('VLT 1117', 'GDJ 1234', 'BZN 2222'))
+def test_postcode_malta_pass(sanitize, postcode):
+ assert sanitize(country='mt', postcode=postcode) == [('postcode', postcode)]
+
+
+@pytest.mark.parametrize("postcode", ('MTF 1111', 'MT MTF 1111', 'MTF1111', 'MT MTF1111'))
+def test_postcode_malta_mtarfa_pass(sanitize, postcode):
+ assert sanitize(country='mt', postcode=postcode) == [('postcode', 'MTF 1111')]
+
+
+@pytest.mark.parametrize("postcode", ('1111', 'MTMT 1111'))
+@pytest.mark.sanitizer_params(convert_to_address=False)
+def test_postcode_malta_fail(sanitize, postcode):
+ assert sanitize(country='mt', postcode=postcode) == []
+
+
+@pytest.mark.parametrize("postcode", ('VC1111', '1111', 'VC-1111', 'VC 1111'))
+def test_postcode_saint_vincent_pass(sanitize, postcode):
+ assert sanitize(country='vc', postcode=postcode) == [('postcode', 'VC1111')]
+
+
+@pytest.mark.parametrize("postcode", ('VC11', 'VC VC1111'))
+@pytest.mark.sanitizer_params(convert_to_address=False)
+def test_postcode_saint_vincent_fail(sanitize, postcode):
+ assert sanitize(country='vc', postcode=postcode) == []
+
+
+@pytest.mark.parametrize("postcode", ('VG1111', '1111', 'VG 1111', 'VG-1111'))
+def test_postcode_virgin_islands_pass(sanitize, postcode):
+ assert sanitize(country='vg', postcode=postcode) == [('postcode', 'VG1111')]
+
+
+@pytest.mark.parametrize("postcode", ('111', '11111', 'VG VG1111'))
+@pytest.mark.sanitizer_params(convert_to_address=False)
+def test_postcode_virgin_islands_fail(sanitize, postcode):
+ assert sanitize(country='vg', postcode=postcode) == []
+
+
@pytest.mark.parametrize("postcode", ('AB1', '123-456-7890', '1 as 44'))
@pytest.mark.sanitizer_params(default_pattern='[A-Z0-9- ]{3,12}')
def test_postcode_default_pattern_pass(sanitize, postcode):
@pytest.mark.sanitizer_params(convert_to_address=False, default_pattern='[A-Z0-9- ]{3,12}')
def test_postcode_default_pattern_fail(sanitize, postcode):
assert sanitize(country='an', postcode=postcode) == []
-
--- /dev/null
+# SPDX-License-Identifier: GPL-3.0-or-later
+#
+# This file is part of Nominatim. (https://nominatim.org)
+#
+# Copyright (C) 2024 by the Nominatim developer community.
+# For a full list of authors see the git log.
+"""
+Tests for sanitizer that clean up TIGER tags.
+"""
+import pytest
+
+from nominatim_db.tokenizer.place_sanitizer import PlaceSanitizer
+from nominatim_db.data.place_info import PlaceInfo
+
+class TestCleanTigerTags:
+
+ @pytest.fixture(autouse=True)
+ def setup_country(self, def_config):
+ self.config = def_config
+
+
+ def run_sanitizer_on(self, addr):
+ place = PlaceInfo({'address': addr})
+ _, outaddr = PlaceSanitizer([{'step': 'clean-tiger-tags'}],
+ self.config).process_names(place)
+
+ return sorted([(p.name, p.kind, p.suffix) for p in outaddr])
+
+ @pytest.mark.parametrize('inname,outname', [('Hamilton, AL', 'Hamilton'),
+ ('Little, Borough, CA', 'Little, Borough')])
+ def test_well_formatted(self, inname, outname):
+ assert self.run_sanitizer_on({'tiger:county': inname})\
+ == [(outname, 'county', 'tiger')]
+
+
+ @pytest.mark.parametrize('name', ('Hamilton', 'Big, Road', ''))
+ def test_badly_formatted(self, name):
+ assert self.run_sanitizer_on({'tiger:county': name})\
+ == [(name, 'county', 'tiger')]
+
+
+ def test_unmatched(self):
+ assert self.run_sanitizer_on({'tiger:country': 'US'})\
+ == [('US', 'tiger', 'country')]
--- /dev/null
+# SPDX-License-Identifier: GPL-3.0-or-later\r
+#\r
+# This file is part of Nominatim. (https://nominatim.org)\r
+#\r
+# Copyright (C) 2024 by the Nominatim developer community.\r
+# For a full list of authors see the git log.\r
+"""\r
+Tests for the sanitizer that normalizes housenumbers.\r
+"""\r
+import pytest\r
+\r
+from nominatim_db.data.place_info import PlaceInfo\r
+from nominatim_db.tokenizer.place_sanitizer import PlaceSanitizer\r
+\r
+\r
+class TestWithDefault:\r
+\r
+ @pytest.fixture(autouse=True)\r
+ def setup_country(self, def_config):\r
+ self.config = def_config\r
+\r
+ def run_sanitizer_on(self, type, **kwargs):\r
+\r
+ place = PlaceInfo({type: {k.replace('_', ':'): v for k, v in kwargs.items()},\r
+ 'country_code': 'de', 'rank_address': 30})\r
+\r
+ sanitizer_args = {'step': 'delete-tags'}\r
+\r
+ name, address = PlaceSanitizer([sanitizer_args],\r
+ self.config).process_names(place)\r
+\r
+ return {\r
+ 'name': sorted([(p.name, p.kind, p.suffix or '') for p in name]),\r
+ 'address': sorted([(p.name, p.kind, p.suffix or '') for p in address])\r
+ }\r
+\r
+\r
+ def test_on_name(self):\r
+ res = self.run_sanitizer_on('name', name='foo', ref='bar', ref_abc='baz')\r
+\r
+ assert res.get('name') == []\r
+\r
+ def test_on_address(self):\r
+ res = self.run_sanitizer_on('address', name='foo', ref='bar', ref_abc='baz')\r
+\r
+ assert res.get('address') == [('bar', 'ref', ''), ('baz', 'ref', 'abc'),\r
+ ('foo', 'name', '')]\r
+\r
+\r
+class TestTypeField:\r
+\r
+ @pytest.fixture(autouse=True)\r
+ def setup_country(self, def_config):\r
+ self.config = def_config\r
+\r
+ def run_sanitizer_on(self, type, **kwargs):\r
+\r
+ place = PlaceInfo({'name': {k.replace('_', ':'): v for k, v in kwargs.items()},\r
+ 'country_code': 'de', 'rank_address': 30})\r
+\r
+ sanitizer_args = {\r
+ 'step': 'delete-tags',\r
+ 'type': type,\r
+ }\r
+\r
+ name, _ = PlaceSanitizer([sanitizer_args],\r
+ self.config).process_names(place)\r
+\r
+ return sorted([(p.name, p.kind, p.suffix or '') for p in name])\r
+\r
+ def test_name_type(self):\r
+ res = self.run_sanitizer_on('name', name='foo', ref='bar', ref_abc='baz')\r
+\r
+ assert res == []\r
+\r
+ def test_address_type(self):\r
+ res = self.run_sanitizer_on('address', name='foo', ref='bar', ref_abc='baz')\r
+\r
+ assert res == [('bar', 'ref', ''), ('baz', 'ref', 'abc'),\r
+ ('foo', 'name', '')]\r
+\r
+class TestFilterKind:\r
+\r
+ @pytest.fixture(autouse=True)\r
+ def setup_country(self, def_config):\r
+ self.config = def_config\r
+\r
+ def run_sanitizer_on(self, filt, **kwargs):\r
+\r
+ place = PlaceInfo({'name': {k.replace('_', ':'): v for k, v in kwargs.items()},\r
+ 'country_code': 'de', 'rank_address': 30})\r
+\r
+ sanitizer_args = {\r
+ 'step': 'delete-tags',\r
+ 'filter-kind': filt,\r
+ }\r
+\r
+ name, _ = PlaceSanitizer([sanitizer_args],\r
+ self.config).process_names(place)\r
+\r
+ return sorted([(p.name, p.kind, p.suffix or '') for p in name])\r
+\r
+ def test_single_exact_name(self):\r
+ res = self.run_sanitizer_on(['name'], ref='foo', name='foo',\r
+ name_abc='bar', ref_abc='bar')\r
+\r
+ assert res == [('bar', 'ref', 'abc'), ('foo', 'ref', '')]\r
+\r
+\r
+ def test_single_pattern(self):\r
+ res = self.run_sanitizer_on(['.*name'],\r
+ name_fr='foo', ref_fr='foo', namexx_fr='bar',\r
+ shortname_fr='bar', name='bar')\r
+\r
+ assert res == [('bar', 'namexx', 'fr'), ('foo', 'ref', 'fr')]\r
+\r
+\r
+ def test_multiple_patterns(self):\r
+ res = self.run_sanitizer_on(['.*name', 'ref'],\r
+ name_fr='foo', ref_fr='foo', oldref_fr='foo',\r
+ namexx_fr='bar', shortname_fr='baz', name='baz')\r
+\r
+ assert res == [('bar', 'namexx', 'fr'), ('foo', 'oldref', 'fr')]\r
+\r
+\r
+class TestRankAddress:\r
+\r
+ @pytest.fixture(autouse=True)\r
+ def setup_country(self, def_config):\r
+ self.config = def_config\r
+\r
+ def run_sanitizer_on(self, rank_addr, **kwargs):\r
+\r
+ place = PlaceInfo({'name': {k.replace('_', ':'): v for k, v in kwargs.items()},\r
+ 'country_code': 'de', 'rank_address': 30})\r
+\r
+ sanitizer_args = {\r
+ 'step': 'delete-tags',\r
+ 'rank_address': rank_addr\r
+ }\r
+\r
+ name, _ = PlaceSanitizer([sanitizer_args],\r
+ self.config).process_names(place)\r
+\r
+ return sorted([(p.name, p.kind, p.suffix or '') for p in name])\r
+\r
+\r
+ def test_single_rank(self):\r
+ res = self.run_sanitizer_on('30', name='foo', ref='bar')\r
+\r
+ assert res == []\r
+\r
+ def test_single_rank_fail(self):\r
+ res = self.run_sanitizer_on('28', name='foo', ref='bar')\r
+\r
+ assert res == [('bar', 'ref', ''), ('foo', 'name', '')]\r
+\r
+ def test_ranged_rank_pass(self):\r
+ res = self.run_sanitizer_on('26-30', name='foo', ref='bar')\r
+\r
+ assert res == []\r
+\r
+ def test_ranged_rank_fail(self):\r
+ res = self.run_sanitizer_on('26-29', name='foo', ref='bar')\r
+\r
+ assert res == [('bar', 'ref', ''), ('foo', 'name', '')]\r
+\r
+ def test_mixed_rank_pass(self):\r
+ res = self.run_sanitizer_on(['4', '20-28', '30', '10-12'], name='foo', ref='bar')\r
+\r
+ assert res == []\r
+\r
+ def test_mixed_rank_fail(self):\r
+ res = self.run_sanitizer_on(['4-8', '10', '26-29', '18'], name='foo', ref='bar')\r
+\r
+ assert res == [('bar', 'ref', ''), ('foo', 'name', '')]\r
+\r
+\r
+class TestSuffix:\r
+\r
+ @pytest.fixture(autouse=True)\r
+ def setup_country(self, def_config):\r
+ self.config = def_config\r
+\r
+ def run_sanitizer_on(self, suffix, **kwargs):\r
+\r
+ place = PlaceInfo({'name': {k.replace('_', ':'): v for k, v in kwargs.items()},\r
+ 'country_code': 'de', 'rank_address': 30})\r
+\r
+ sanitizer_args = {\r
+ 'step': 'delete-tags',\r
+ 'suffix': suffix,\r
+ }\r
+\r
+ name, _ = PlaceSanitizer([sanitizer_args],\r
+ self.config).process_names(place)\r
+\r
+ return sorted([(p.name, p.kind, p.suffix or '') for p in name])\r
+\r
+\r
+ def test_single_suffix(self):\r
+ res = self.run_sanitizer_on('abc', name='foo', name_abc='foo',\r
+ name_pqr='bar', ref='bar', ref_abc='baz')\r
+\r
+ assert res == [('bar', 'name', 'pqr'), ('bar', 'ref', ''), ('foo', 'name', '')]\r
+\r
+ def test_multiple_suffix(self):\r
+ res = self.run_sanitizer_on(['abc.*', 'pqr'], name='foo', name_abcxx='foo',\r
+ ref_pqr='bar', name_pqrxx='baz')\r
+\r
+ assert res == [('baz', 'name', 'pqrxx'), ('foo', 'name', '')]\r
+\r
+\r
+\r
+class TestCountryCodes:\r
+\r
+ @pytest.fixture(autouse=True)\r
+ def setup_country(self, def_config):\r
+ self.config = def_config\r
+\r
+ def run_sanitizer_on(self, country_code, **kwargs):\r
+\r
+ place = PlaceInfo({'name': {k.replace('_', ':'): v for k, v in kwargs.items()},\r
+ 'country_code': 'de', 'rank_address': 30})\r
+\r
+ sanitizer_args = {\r
+ 'step': 'delete-tags',\r
+ 'country_code': country_code,\r
+ }\r
+\r
+ name, _ = PlaceSanitizer([sanitizer_args],\r
+ self.config).process_names(place)\r
+\r
+ return sorted([(p.name, p.kind) for p in name])\r
+\r
+\r
+ def test_single_country_code_pass(self):\r
+ res = self.run_sanitizer_on('de', name='foo', ref='bar')\r
+\r
+ assert res == []\r
+\r
+ def test_single_country_code_fail(self):\r
+ res = self.run_sanitizer_on('in', name='foo', ref='bar')\r
+\r
+ assert res == [('bar', 'ref'), ('foo', 'name')]\r
+\r
+ def test_empty_country_code_list(self):\r
+ res = self.run_sanitizer_on([], name='foo', ref='bar')\r
+\r
+ assert res == [('bar', 'ref'), ('foo', 'name')]\r
+\r
+ def test_multiple_country_code_pass(self):\r
+ res = self.run_sanitizer_on(['in', 'de', 'fr'], name='foo', ref='bar')\r
+\r
+ assert res == []\r
+\r
+ def test_multiple_country_code_fail(self):\r
+ res = self.run_sanitizer_on(['in', 'au', 'fr'], name='foo', ref='bar')\r
+\r
+ assert res == [('bar', 'ref'), ('foo', 'name')]\r
+\r
+class TestAllParameters:\r
+\r
+ @pytest.fixture(autouse=True)\r
+ def setup_country(self, def_config):\r
+ self.config = def_config\r
+\r
+ def run_sanitizer_on(self, country_code, rank_addr, suffix, **kwargs):\r
+\r
+ place = PlaceInfo({'name': {k.replace('_', ':'): v for k, v in kwargs.items()},\r
+ 'country_code': 'de', 'rank_address': 30})\r
+\r
+ sanitizer_args = {\r
+ 'step': 'delete-tags',\r
+ 'type': 'name',\r
+ 'filter-kind': ['name', 'ref'],\r
+ 'country_code': country_code,\r
+ 'rank_address': rank_addr,\r
+ 'suffix': suffix,\r
+ 'name': r'[\s\S]*',\r
+ }\r
+\r
+ name, _ = PlaceSanitizer([sanitizer_args],\r
+ self.config).process_names(place)\r
+\r
+ return sorted([(p.name, p.kind, p.suffix or '') for p in name])\r
+\r
+\r
+ def test_string_arguments_pass(self):\r
+ res = self.run_sanitizer_on('de', '25-30', r'[\s\S]*',\r
+ name='foo', ref='foo', name_abc='bar', ref_abc='baz')\r
+\r
+ assert res == []\r
+\r
+ def test_string_arguments_fail(self):\r
+ res = self.run_sanitizer_on('in', '25-30', r'[\s\S]*',\r
+ name='foo', ref='foo', name_abc='bar', ref_abc='baz')\r
+\r
+ assert res == [('bar', 'name', 'abc'), ('baz', 'ref', 'abc'),\r
+ ('foo', 'name', ''), ('foo', 'ref', '')]\r
+\r
+ def test_list_arguments_pass(self):\r
+ res = self.run_sanitizer_on(['de', 'in'], ['20-28', '30'], [r'abc.*', r'[\s\S]*'],\r
+ name='foo', ref='foo', name_abcxx='bar', ref_pqr='baz')\r
+\r
+ assert res == []\r
+\r
+ def test_list_arguments_fail(self):\r
+ res = self.run_sanitizer_on(['de', 'in'], ['14', '20-29'], [r'abc.*', r'pqr'],\r
+ name='foo', ref_abc='foo', name_abcxx='bar', ref_pqr='baz')\r
+\r
+ assert res == [('bar', 'name', 'abcxx'), ('baz', 'ref', 'pqr'),\r
+ ('foo', 'name', ''), ('foo', 'ref', 'abc')]\r
+\r
+ def test_mix_arguments_pass(self):\r
+ res = self.run_sanitizer_on('de', ['10', '20-28', '30'], r'[\s\S]*',\r
+ name_abc='foo', ref_abc='foo', name_abcxx='bar', ref_pqr='baz')\r
+\r
+ assert res == []\r
+\r
+ def test_mix_arguments_fail(self):\r
+ res = self.run_sanitizer_on(['de', 'in'], ['10', '20-28', '30'], r'abc.*',\r
+ name='foo', ref='foo', name_pqr='bar', ref_pqr='baz')\r
+\r
+ assert res == [('bar', 'name', 'pqr'), ('baz', 'ref', 'pqr'),\r
+ ('foo', 'name', ''), ('foo', 'ref', '')]\r
-# SPDX-License-Identifier: GPL-2.0-only
+# SPDX-License-Identifier: GPL-3.0-or-later
#
# This file is part of Nominatim. (https://nominatim.org)
#
-# Copyright (C) 2022 by the Nominatim developer community.
+# Copyright (C) 2024 by the Nominatim developer community.
# For a full list of authors see the git log.
"""
Tests for sanitizer configuration helper functions.
"""
import pytest
-from nominatim.errors import UsageError
-from nominatim.tokenizer.place_sanitizer import PlaceName
-from nominatim.tokenizer.sanitizers.config import SanitizerConfig
+from nominatim_db.errors import UsageError
+from nominatim_db.tokenizer.sanitizers.config import SanitizerConfig
def test_string_list_default_empty():
assert SanitizerConfig().get_string_list('op') == []
-def test_string_list_default_none():
- assert SanitizerConfig().get_string_list('op', default=None) is None
-
-
def test_string_list_default_something():
assert SanitizerConfig().get_string_list('op', default=['a', 'b']) == ['a', 'b']
regex = SanitizerConfig({'delimiters': ''}).get_delimiter()
-@pytest.mark.parametrize('inp', ('name', 'name:de', 'na\\me', '.*'))
-def test_create_kind_filter_no_params(inp):
- filt = SanitizerConfig().get_filter_kind()
+@pytest.mark.parametrize('inp', ('name', 'name:de', 'na\\me', '.*', ''))
+def test_create_name_filter_no_param_no_default(inp):
+ filt = SanitizerConfig({'filter-kind': 'place'}).get_filter('name')
+
+ assert filt(inp)
+
+
+@pytest.mark.parametrize('inp', ('name', 'name:de', 'na\\me', '.*', ''))
+def test_create_name_filter_no_param_default_pass_all(inp):
+ filt = SanitizerConfig().get_filter('name', 'PASS_ALL')
assert filt(inp)
+@pytest.mark.parametrize('inp', ('name', 'name:de', 'na\\me', '.*', ''))
+def test_create_name_filter_no_param_default_fail_all(inp):
+ filt = SanitizerConfig().get_filter('name', 'FAIL_ALL')
+
+ assert not filt(inp)
+
+
+def test_create_name_filter_no_param_default_invalid_string():
+ with pytest.raises(ValueError):
+ filt = SanitizerConfig().get_filter('name', 'abc')
+
+
+def test_create_name_filter_no_param_default_empty_list():
+ with pytest.raises(ValueError):
+ filt = SanitizerConfig().get_filter('name', [])
+
+
@pytest.mark.parametrize('kind', ('de', 'name:de', 'ende'))
+def test_create_kind_filter_default_positive(kind):
+ filt = SanitizerConfig().get_filter('filter-kind', ['.*de'])
+
+ assert filt(kind)
+
+
+@pytest.mark.parametrize('kind', ('de', 'name:de', 'ende'))
+def test_create_kind_filter_default_negative(kind):
+ filt = SanitizerConfig().get_filter('filter-kind', ['.*fr'])
+
+ assert not filt(kind)
+
+
+@pytest.mark.parametrize('kind', ('lang', 'lang:de', 'langxx'))
def test_create_kind_filter_custom_regex_positive(kind):
- filt = SanitizerConfig({'filter-kind': '.*de'}).get_filter_kind()
+ filt = SanitizerConfig({'filter-kind': 'lang.*'}
+ ).get_filter('filter-kind', ['.*fr'])
assert filt(kind)
@pytest.mark.parametrize('kind', ('de ', '123', '', 'bedece'))
def test_create_kind_filter_custom_regex_negative(kind):
- filt = SanitizerConfig({'filter-kind': '.*de'}).get_filter_kind()
+ filt = SanitizerConfig({'filter-kind': '.*de'}).get_filter('filter-kind')
assert not filt(kind)
@pytest.mark.parametrize('kind', ('name', 'fr', 'name:fr', 'frfr', '34'))
def test_create_kind_filter_many_positive(kind):
- filt = SanitizerConfig({'filter-kind': ['.*fr', 'name', r'\d+']}).get_filter_kind()
+ filt = SanitizerConfig({'filter-kind': ['.*fr', 'name', r'\d+']}
+ ).get_filter('filter-kind')
assert filt(kind)
@pytest.mark.parametrize('kind', ('name:de', 'fridge', 'a34', '.*', '\\'))
def test_create_kind_filter_many_negative(kind):
- filt = SanitizerConfig({'filter-kind': ['.*fr', 'name', r'\d+']}).get_filter_kind()
+ filt = SanitizerConfig({'filter-kind': ['.*fr', 'name', r'\d+']}
+ ).get_filter('filter-kind')
assert not filt(kind)
-# SPDX-License-Identifier: GPL-2.0-only
+# SPDX-License-Identifier: GPL-3.0-or-later
#
# This file is part of Nominatim. (https://nominatim.org)
#
-# Copyright (C) 2022 by the Nominatim developer community.
+# Copyright (C) 2024 by the Nominatim developer community.
# For a full list of authors see the git log.
"""
Tests for the sanitizer that splits multivalue lists.
"""
import pytest
-from nominatim.tokenizer.place_sanitizer import PlaceSanitizer
-from nominatim.data.place_info import PlaceInfo
+from nominatim_db.tokenizer.place_sanitizer import PlaceSanitizer
+from nominatim_db.data.place_info import PlaceInfo
-from nominatim.errors import UsageError
+from nominatim_db.errors import UsageError
class TestSplitName:
-# SPDX-License-Identifier: GPL-2.0-only
+# SPDX-License-Identifier: GPL-3.0-or-later
#
# This file is part of Nominatim. (https://nominatim.org)
#
-# Copyright (C) 2022 by the Nominatim developer community.
+# Copyright (C) 2024 by the Nominatim developer community.
# For a full list of authors see the git log.
"""
Tests for the sanitizer that handles braced suffixes.
"""
import pytest
-from nominatim.tokenizer.place_sanitizer import PlaceSanitizer
-from nominatim.data.place_info import PlaceInfo
+from nominatim_db.tokenizer.place_sanitizer import PlaceSanitizer
+from nominatim_db.data.place_info import PlaceInfo
class TestStripBrace:
-# SPDX-License-Identifier: GPL-2.0-only
+# SPDX-License-Identifier: GPL-3.0-or-later
#
# This file is part of Nominatim. (https://nominatim.org)
#
-# Copyright (C) 2022 by the Nominatim developer community.
+# Copyright (C) 2024 by the Nominatim developer community.
# For a full list of authors see the git log.
"""
Tests for the sanitizer that enables language-dependent analyzers.
"""
import pytest
-from nominatim.data.place_info import PlaceInfo
-from nominatim.tokenizer.place_sanitizer import PlaceSanitizer
-from nominatim.data.country_info import setup_country_config
+from nominatim_db.data.place_info import PlaceInfo
+from nominatim_db.tokenizer.place_sanitizer import PlaceSanitizer
+from nominatim_db.data.country_info import setup_country_config
class TestWithDefaults:
--- /dev/null
+# SPDX-License-Identifier: GPL-3.0-or-later
+#
+# This file is part of Nominatim. (https://nominatim.org)
+#
+# Copyright (C) 2024 by the Nominatim developer community.
+# For a full list of authors see the git log.
+from typing import Mapping, Optional, List
+import pytest
+
+from nominatim_db.data.place_info import PlaceInfo
+from nominatim_db.data.place_name import PlaceName
+from nominatim_db.tokenizer.place_sanitizer import PlaceSanitizer
+
+class TestTagJapanese:
+ @pytest.fixture(autouse=True)
+ def setup_country(self, def_config):
+ self.config = def_config
+
+ def run_sanitizer_on(self,type, **kwargs):
+ place = PlaceInfo({
+ 'address': kwargs,
+ 'country_code': 'jp'
+ })
+ sanitizer_args = {'step': 'tag-japanese'}
+ _, address = PlaceSanitizer([sanitizer_args], self.config).process_names(place)
+ tmp_list = [(p.name,p.kind) for p in address]
+ return sorted(tmp_list)
+
+ def test_on_address(self):
+ res = self.run_sanitizer_on('address', name='foo', ref='bar', ref_abc='baz')
+ assert res == [('bar','ref'),('baz','ref_abc'),('foo','name')]
+
+ def test_housenumber(self):
+ res = self.run_sanitizer_on('address', housenumber='2')
+ assert res == [('2','housenumber')]
+
+ def test_blocknumber(self):
+ res = self.run_sanitizer_on('address', block_number='6')
+ assert res == [('6','housenumber')]
+
+ def test_neighbourhood(self):
+ res = self.run_sanitizer_on('address', neighbourhood='8')
+ assert res == [('8','place')]
+
+ def test_quarter(self):
+ res = self.run_sanitizer_on('address', quarter='kase')
+ assert res==[('kase','place')]
+
+ def test_housenumber_blocknumber(self):
+ res = self.run_sanitizer_on('address', housenumber='2', block_number='6')
+ assert res == [('6-2','housenumber')]
+
+ def test_quarter_neighbourhood(self):
+ res = self.run_sanitizer_on('address', quarter='kase', neighbourhood='8')
+ assert res == [('kase8','place')]
+
+ def test_blocknumber_housenumber_quarter(self):
+ res = self.run_sanitizer_on('address', block_number='6', housenumber='2', quarter='kase')
+ assert res == [('6-2','housenumber'),('kase','place')]
+
+    def test_blocknumber_housenumber_neighbourhood(self):
+ res = self.run_sanitizer_on('address', block_number='6', housenumber='2', neighbourhood='8')
+ assert res == [('6-2','housenumber'),('8','place')]
+
+ def test_blocknumber_quarter_neighbourhood(self):
+ res = self.run_sanitizer_on('address',block_number='6', quarter='kase', neighbourhood='8')
+ assert res == [('6','housenumber'),('kase8','place')]
+
+ def test_blocknumber_quarter(self):
+ res = self.run_sanitizer_on('address',block_number='6', quarter='kase')
+ assert res == [('6','housenumber'),('kase','place')]
+
+ def test_blocknumber_neighbourhood(self):
+ res = self.run_sanitizer_on('address',block_number='6', neighbourhood='8')
+ assert res == [('6','housenumber'),('8','place')]
+
+ def test_housenumber_quarter_neighbourhood(self):
+ res = self.run_sanitizer_on('address',housenumber='2', quarter='kase', neighbourhood='8')
+ assert res == [('2','housenumber'),('kase8','place')]
+
+ def test_housenumber_quarter(self):
+ res = self.run_sanitizer_on('address',housenumber='2', quarter='kase')
+ assert res == [('2','housenumber'),('kase','place')]
+
+ def test_housenumber_blocknumber_neighbourhood_quarter(self):
+ res = self.run_sanitizer_on('address', block_number='6', housenumber='2', quarter='kase', neighbourhood='8')
+ assert res == [('6-2','housenumber'),('kase8','place')]
-# SPDX-License-Identifier: GPL-2.0-only
+# SPDX-License-Identifier: GPL-3.0-or-later
#
# This file is part of Nominatim. (https://nominatim.org)
#
-# Copyright (C) 2022 by the Nominatim developer community.
+# Copyright (C) 2024 by the Nominatim developer community.
# For a full list of authors see the git log.
"""
Tests for creating new tokenizers.
"""
import pytest
-from nominatim.db import properties
-from nominatim.tokenizer import factory
-from nominatim.errors import UsageError
+from nominatim_db.db import properties
+from nominatim_db.tokenizer import factory
+from nominatim_db.errors import UsageError
from dummy_tokenizer import DummyTokenizer
-# SPDX-License-Identifier: GPL-2.0-only
+# SPDX-License-Identifier: GPL-3.0-or-later
#
# This file is part of Nominatim. (https://nominatim.org)
#
-# Copyright (C) 2022 by the Nominatim developer community.
+# Copyright (C) 2024 by the Nominatim developer community.
# For a full list of authors see the git log.
"""
Tests for ICU tokenizer.
"""
-import shutil
import yaml
import itertools
import pytest
-from nominatim.tokenizer import icu_tokenizer
-import nominatim.tokenizer.icu_rule_loader
-from nominatim.db import properties
-from nominatim.db.sql_preprocessor import SQLPreprocessor
-from nominatim.data.place_info import PlaceInfo
+from nominatim_db.tokenizer import icu_tokenizer
+import nominatim_db.tokenizer.icu_rule_loader
+from nominatim_db.db import properties
+from nominatim_db.db.sql_preprocessor import SQLPreprocessor
+from nominatim_db.data.place_info import PlaceInfo
from mock_icu_word_table import MockIcuWordTable
sqldir.mkdir()
(sqldir / 'tokenizer').mkdir()
(sqldir / 'tokenizer' / 'icu_tokenizer.sql').write_text("SELECT 'a'")
- shutil.copy(str(project_env.lib_dir.sql / 'tokenizer' / 'icu_tokenizer_tables.sql'),
- str(sqldir / 'tokenizer' / 'icu_tokenizer_tables.sql'))
project_env.lib_dir.sql = sqldir
cfgstr['token-analysis'].append({'id': '@postcode',
'analyzer': 'postcodes'})
(test_config.project_dir / 'icu_tokenizer.yaml').write_text(yaml.dump(cfgstr))
- tok.loader = nominatim.tokenizer.icu_rule_loader.ICURuleLoader(test_config)
+ tok.loader = nominatim_db.tokenizer.icu_rule_loader.ICURuleLoader(test_config)
return tok.name_analyzer()
tok = tokenizer_factory()
tok.init_new_db(test_config)
- assert db_prop(nominatim.tokenizer.icu_rule_loader.DBCFG_IMPORT_NORM_RULES) \
+ assert db_prop(nominatim_db.tokenizer.icu_rule_loader.DBCFG_IMPORT_NORM_RULES) \
.startswith(':: lower ();')
assert test_content == set((('1133', ), ))
-def test_finalize_import(tokenizer_factory, temp_db_conn,
- temp_db_cursor, test_config, sql_preprocessor_cfg):
- func_file = test_config.lib_dir.sql / 'tokenizer' / 'legacy_tokenizer_indices.sql'
- func_file.write_text("""CREATE FUNCTION test() RETURNS TEXT
- AS $$ SELECT 'b'::text $$ LANGUAGE SQL""")
-
+def test_finalize_import(tokenizer_factory, temp_db_cursor,
+ test_config, sql_preprocessor_cfg):
tok = tokenizer_factory()
tok.init_new_db(test_config)
+ assert not temp_db_cursor.index_exists('word', 'idx_word_word_id')
+
tok.finalize_import(test_config)
- temp_db_cursor.scalar('SELECT test()') == 'b'
+ assert temp_db_cursor.index_exists('word', 'idx_word_word_id')
def test_check_database(test_config, tokenizer_factory,
assert tok.check_database(test_config) is None
-def test_update_statistics_reverse_only(word_table, tokenizer_factory):
+def test_update_statistics_reverse_only(word_table, tokenizer_factory, test_config):
tok = tokenizer_factory()
- tok.update_statistics()
+ tok.update_statistics(test_config)
-def test_update_statistics(word_table, table_factory, temp_db_cursor, tokenizer_factory):
+def test_update_statistics(word_table, table_factory, temp_db_cursor,
+ tokenizer_factory, test_config):
word_table.add_full_word(1000, 'hello')
+ word_table.add_full_word(1001, 'bye')
table_factory('search_name',
- 'place_id BIGINT, name_vector INT[]',
- [(12, [1000])])
+ 'place_id BIGINT, name_vector INT[], nameaddress_vector INT[]',
+ [(12, [1000], [1001])])
tok = tokenizer_factory()
- tok.update_statistics()
+ tok.update_statistics(test_config)
assert temp_db_cursor.scalar("""SELECT count(*) FROM word
- WHERE type = 'W' and
- (info->>'count')::int > 0""") > 0
+ WHERE type = 'W' and word_id = 1000 and
+ (info->>'count')::int > 0""") == 1
+ assert temp_db_cursor.scalar("""SELECT count(*) FROM word
+ WHERE type = 'W' and word_id = 1001 and
+ (info->>'addr_count')::int > 0""") == 1
def test_normalize_postcode(analyzer):
def test_process_place_nonexisting_street(self):
info = self.process_address(street='Grand Road')
- assert 'street' not in info
+ assert info['street'] == '{}'
def test_process_place_multiple_street_tags(self):
def test_process_place_street_empty(self):
info = self.process_address(street='🜵')
- assert 'street' not in info
+ assert info['street'] == '{}'
def test_process_place_street_from_cache(self):
def test_process_place_place(self):
info = self.process_address(place='Honu Lulu')
- assert eval(info['place']) == self.name_token_set('HONU', 'LULU')
+ assert eval(info['place']) == self.name_token_set('HONU', 'LULU', '#HONU LULU')
def test_process_place_place_extra(self):
suburb='Zwickau', street='Hauptstr',
full='right behind the church')
- city = self.name_token_set('ZWICKAU')
- state = self.name_token_set('SACHSEN')
+ city = self.name_token_set('ZWICKAU', '#ZWICKAU')
+ state = self.name_token_set('SACHSEN', '#SACHSEN')
result = {k: eval(v) for k,v in info['addr'].items()}
result = {k: eval(v) for k,v in info['addr'].items()}
- assert result == {'city': self.name_token_set('Bruxelles')}
+ assert result == {'city': self.name_token_set('Bruxelles', '#Bruxelles')}
def test_process_place_address_terms_empty(self):
-# SPDX-License-Identifier: GPL-2.0-only
+# SPDX-License-Identifier: GPL-3.0-or-later
#
# This file is part of Nominatim. (https://nominatim.org)
#
-# Copyright (C) 2022 by the Nominatim developer community.
+# Copyright (C) 2024 by the Nominatim developer community.
# For a full list of authors see the git log.
"""
Tests for converting a config file to ICU rules.
import pytest
import yaml
-from nominatim.tokenizer.icu_rule_loader import ICURuleLoader
-from nominatim.errors import UsageError
+from nominatim_db.tokenizer.icu_rule_loader import ICURuleLoader
+from nominatim_db.errors import UsageError
from icu import Transliterator
+++ /dev/null
-# SPDX-License-Identifier: GPL-2.0-only
-#
-# This file is part of Nominatim. (https://nominatim.org)
-#
-# Copyright (C) 2022 by the Nominatim developer community.
-# For a full list of authors see the git log.
-"""
-Test for legacy tokenizer.
-"""
-import shutil
-import re
-
-import pytest
-
-from nominatim.data.place_info import PlaceInfo
-from nominatim.tokenizer import legacy_tokenizer
-from nominatim.db import properties
-from nominatim.errors import UsageError
-
-from mock_legacy_word_table import MockLegacyWordTable
-
-# Force use of legacy word table
-@pytest.fixture
-def word_table(temp_db_conn):
- return MockLegacyWordTable(temp_db_conn)
-
-
-@pytest.fixture
-def test_config(project_env, tmp_path):
- module_dir = tmp_path / 'module_src'
- module_dir.mkdir()
- (module_dir / 'nominatim.so').write_text('TEST nominatim.so')
-
- project_env.lib_dir.module = module_dir
-
- sqldir = tmp_path / 'sql'
- sqldir.mkdir()
- (sqldir / 'tokenizer').mkdir()
-
- # Get the original SQL but replace make_standard_name to avoid module use.
- init_sql = (project_env.lib_dir.sql / 'tokenizer' / 'legacy_tokenizer.sql').read_text()
- for fn in ('transliteration', 'gettokenstring'):
- init_sql = re.sub(f'CREATE OR REPLACE FUNCTION {fn}[^;]*;',
- '', init_sql, re.DOTALL)
- init_sql += """
- CREATE OR REPLACE FUNCTION make_standard_name(name TEXT)
- RETURNS TEXT AS $$ SELECT lower(name); $$ LANGUAGE SQL;
-
- """
- # Also load util functions. Some are needed by the tokenizer.
- init_sql += (project_env.lib_dir.sql / 'functions' / 'utils.sql').read_text()
- (sqldir / 'tokenizer' / 'legacy_tokenizer.sql').write_text(init_sql)
-
- (sqldir / 'words.sql').write_text("SELECT 'a'")
-
- shutil.copy(str(project_env.lib_dir.sql / 'tokenizer' / 'legacy_tokenizer_tables.sql'),
- str(sqldir / 'tokenizer' / 'legacy_tokenizer_tables.sql'))
-
- project_env.lib_dir.sql = sqldir
- project_env.lib_dir.data = sqldir
-
- return project_env
-
-
-@pytest.fixture
-def tokenizer_factory(dsn, tmp_path, property_table):
- (tmp_path / 'tokenizer').mkdir()
-
- def _maker():
- return legacy_tokenizer.create(dsn, tmp_path / 'tokenizer')
-
- return _maker
-
-
-@pytest.fixture
-def tokenizer_setup(tokenizer_factory, test_config, monkeypatch, sql_preprocessor):
- monkeypatch.setattr(legacy_tokenizer, '_check_module', lambda m, c: None)
- tok = tokenizer_factory()
- tok.init_new_db(test_config)
-
-
-@pytest.fixture
-def analyzer(tokenizer_factory, test_config, monkeypatch, sql_preprocessor,
- word_table, temp_db_with_extensions, tmp_path):
- monkeypatch.setattr(legacy_tokenizer, '_check_module', lambda m, c: None)
- monkeypatch.setenv('NOMINATIM_TERM_NORMALIZATION', ':: lower();')
- tok = tokenizer_factory()
- tok.init_new_db(test_config)
- monkeypatch.undo()
-
- with tok.name_analyzer() as analyzer:
- yield analyzer
-
-
-@pytest.fixture
-def make_standard_name(temp_db_cursor):
- temp_db_cursor.execute("""CREATE OR REPLACE FUNCTION make_standard_name(name TEXT)
- RETURNS TEXT AS $$ SELECT '#' || lower(name) || '#'; $$ LANGUAGE SQL""")
-
-
-@pytest.fixture
-def create_postcode_id(temp_db_cursor):
- temp_db_cursor.execute("""CREATE OR REPLACE FUNCTION create_postcode_id(postcode TEXT)
- RETURNS BOOLEAN AS $$
- INSERT INTO word (word_token, word, class, type)
- VALUES (' ' || postcode, postcode, 'place', 'postcode')
- RETURNING True;
- $$ LANGUAGE SQL""")
-
-
-def test_init_new(tokenizer_factory, test_config, monkeypatch,
- temp_db_conn, sql_preprocessor):
- monkeypatch.setenv('NOMINATIM_TERM_NORMALIZATION', 'xxvv')
- monkeypatch.setattr(legacy_tokenizer, '_check_module', lambda m, c: None)
-
- tok = tokenizer_factory()
- tok.init_new_db(test_config)
-
- assert properties.get_property(temp_db_conn, legacy_tokenizer.DBCFG_NORMALIZATION) == 'xxvv'
-
- outfile = test_config.project_dir / 'module' / 'nominatim.so'
-
- assert outfile.exists()
- assert outfile.read_text() == 'TEST nominatim.so'
- assert outfile.stat().st_mode == 33261
-
-
-def test_init_module_load_failed(tokenizer_factory, test_config):
- tok = tokenizer_factory()
-
- with pytest.raises(UsageError):
- tok.init_new_db(test_config)
-
-
-def test_init_module_custom(tokenizer_factory, test_config,
- monkeypatch, tmp_path, sql_preprocessor):
- module_dir = (tmp_path / 'custom').resolve()
- module_dir.mkdir()
- (module_dir/ 'nominatim.so').write_text('CUSTOM nomiantim.so')
-
- monkeypatch.setenv('NOMINATIM_DATABASE_MODULE_PATH', str(module_dir))
- monkeypatch.setattr(legacy_tokenizer, '_check_module', lambda m, c: None)
-
- tok = tokenizer_factory()
- tok.init_new_db(test_config)
-
- assert not (test_config.project_dir / 'module').exists()
-
-
-def test_init_from_project(tokenizer_setup, tokenizer_factory, test_config):
- tok = tokenizer_factory()
-
- tok.init_from_project(test_config)
-
- assert tok.normalization is not None
-
-
-def test_update_sql_functions(sql_preprocessor, temp_db_conn,
- tokenizer_factory, test_config, table_factory,
- monkeypatch, temp_db_cursor):
- monkeypatch.setenv('NOMINATIM_MAX_WORD_FREQUENCY', '1133')
- monkeypatch.setattr(legacy_tokenizer, '_check_module', lambda m, c: None)
- tok = tokenizer_factory()
- tok.init_new_db(test_config)
- monkeypatch.undo()
-
- assert properties.get_property(temp_db_conn, legacy_tokenizer.DBCFG_MAXWORDFREQ) == '1133'
-
- table_factory('test', 'txt TEXT')
-
- func_file = test_config.lib_dir.sql / 'tokenizer' / 'legacy_tokenizer.sql'
- func_file.write_text("""INSERT INTO test VALUES ('{{max_word_freq}}'),
- ('{{modulepath}}')""")
-
- tok.update_sql_functions(test_config)
-
- test_content = temp_db_cursor.row_set('SELECT * FROM test')
- assert test_content == set((('1133', ), (str(test_config.project_dir / 'module'), )))
-
-
-def test_finalize_import(tokenizer_factory, temp_db_conn,
- temp_db_cursor, test_config, monkeypatch,
- sql_preprocessor_cfg):
- monkeypatch.setattr(legacy_tokenizer, '_check_module', lambda m, c: None)
-
- func_file = test_config.lib_dir.sql / 'tokenizer' / 'legacy_tokenizer_indices.sql'
- func_file.write_text("""CREATE FUNCTION test() RETURNS TEXT
- AS $$ SELECT 'b'::text $$ LANGUAGE SQL""")
-
- tok = tokenizer_factory()
- tok.init_new_db(test_config)
-
- tok.finalize_import(test_config)
-
- temp_db_cursor.scalar('SELECT test()') == 'b'
-
-
-def test_migrate_database(tokenizer_factory, test_config, temp_db_conn, monkeypatch):
- monkeypatch.setattr(legacy_tokenizer, '_check_module', lambda m, c: None)
- tok = tokenizer_factory()
- tok.migrate_database(test_config)
-
- assert properties.get_property(temp_db_conn, legacy_tokenizer.DBCFG_MAXWORDFREQ) is not None
- assert properties.get_property(temp_db_conn, legacy_tokenizer.DBCFG_NORMALIZATION) is not None
-
- outfile = test_config.project_dir / 'module' / 'nominatim.so'
-
- assert outfile.exists()
- assert outfile.read_text() == 'TEST nominatim.so'
- assert outfile.stat().st_mode == 33261
-
-
-def test_check_database(test_config, tokenizer_factory, monkeypatch,
- temp_db_cursor, sql_preprocessor_cfg):
- monkeypatch.setattr(legacy_tokenizer, '_check_module', lambda m, c: None)
- tok = tokenizer_factory()
- tok.init_new_db(test_config)
-
- assert tok.check_database(False) is None
-
-
-def test_check_database_no_tokenizer(test_config, tokenizer_factory):
- tok = tokenizer_factory()
-
- assert tok.check_database(False) is not None
-
-
-def test_check_database_bad_setup(test_config, tokenizer_factory, monkeypatch,
- temp_db_cursor, sql_preprocessor_cfg):
- monkeypatch.setattr(legacy_tokenizer, '_check_module', lambda m, c: None)
- tok = tokenizer_factory()
- tok.init_new_db(test_config)
-
- # Inject a bad transliteration.
- temp_db_cursor.execute("""CREATE OR REPLACE FUNCTION make_standard_name(name TEXT)
- RETURNS TEXT AS $$ SELECT 'garbage'::text; $$ LANGUAGE SQL""")
-
- assert tok.check_database(False) is not None
-
-
-def test_update_statistics_reverse_only(word_table, tokenizer_factory):
- tok = tokenizer_factory()
- tok.update_statistics()
-
-
-def test_update_statistics(word_table, table_factory, temp_db_cursor, tokenizer_factory):
- word_table.add_full_word(1000, 'hello')
- table_factory('search_name',
- 'place_id BIGINT, name_vector INT[]',
- [(12, [1000])])
- tok = tokenizer_factory()
-
- tok.update_statistics()
-
- assert temp_db_cursor.scalar("""SELECT count(*) FROM word
- WHERE word_token like ' %' and
- search_name_count > 0""") > 0
-
-
-def test_update_word_tokens(tokenizer_factory):
- tok = tokenizer_factory()
-
- # This is a noop and should just pass.
- tok.update_word_tokens()
-
-
-def test_normalize(analyzer):
- assert analyzer.normalize('TEsT') == 'test'
-
-
-def test_update_postcodes_from_db_empty(analyzer, table_factory, word_table,
- create_postcode_id):
- table_factory('location_postcode', 'postcode TEXT',
- content=(('1234',), ('12 34',), ('AB23',), ('1234',)))
-
- analyzer.update_postcodes_from_db()
-
- assert word_table.get_postcodes() == {'1234', '12 34', 'AB23'}
-
-
-def test_update_postcodes_from_db_add_and_remove(analyzer, table_factory, word_table,
- create_postcode_id):
- table_factory('location_postcode', 'postcode TEXT',
- content=(('1234',), ('45BC', ), ('XX45', )))
- word_table.add_postcode(' 1234', '1234')
- word_table.add_postcode(' 5678', '5678')
-
- analyzer.update_postcodes_from_db()
-
- assert word_table.get_postcodes() == {'1234', '45BC', 'XX45'}
-
-
-def test_update_special_phrase_empty_table(analyzer, word_table, make_standard_name):
- analyzer.update_special_phrases([
- ("König bei", "amenity", "royal", "near"),
- ("Könige", "amenity", "royal", "-"),
- ("könige", "amenity", "royal", "-"),
- ("strasse", "highway", "primary", "in")
- ], True)
-
- assert word_table.get_special() \
- == set(((' #könig bei#', 'könig bei', 'amenity', 'royal', 'near'),
- (' #könige#', 'könige', 'amenity', 'royal', None),
- (' #strasse#', 'strasse', 'highway', 'primary', 'in')))
-
-
-def test_update_special_phrase_delete_all(analyzer, word_table, make_standard_name):
- word_table.add_special(' #foo#', 'foo', 'amenity', 'prison', 'in')
- word_table.add_special(' #bar#', 'bar', 'highway', 'road', None)
-
- assert word_table.count_special() == 2
-
- analyzer.update_special_phrases([], True)
-
- assert word_table.count_special() == 0
-
-
-def test_update_special_phrases_no_replace(analyzer, word_table, make_standard_name):
- word_table.add_special(' #foo#', 'foo', 'amenity', 'prison', 'in')
- word_table.add_special(' #bar#', 'bar', 'highway', 'road', None)
-
- assert word_table.count_special() == 2
-
- analyzer.update_special_phrases([], False)
-
- assert word_table.count_special() == 2
-
-
-def test_update_special_phrase_modify(analyzer, word_table, make_standard_name):
- word_table.add_special(' #foo#', 'foo', 'amenity', 'prison', 'in')
- word_table.add_special(' #bar#', 'bar', 'highway', 'road', None)
-
- assert word_table.count_special() == 2
-
- analyzer.update_special_phrases([
- ('prison', 'amenity', 'prison', 'in'),
- ('bar', 'highway', 'road', '-'),
- ('garden', 'leisure', 'garden', 'near')
- ], True)
-
- assert word_table.get_special() \
- == set(((' #prison#', 'prison', 'amenity', 'prison', 'in'),
- (' #bar#', 'bar', 'highway', 'road', None),
- (' #garden#', 'garden', 'leisure', 'garden', 'near')))
-
-
-def test_add_country_names(analyzer, word_table, make_standard_name):
- analyzer.add_country_names('de', {'name': 'Germany',
- 'name:de': 'Deutschland',
- 'short_name': 'germany'})
-
- assert word_table.get_country() \
- == {('de', ' #germany#'),
- ('de', ' #deutschland#')}
-
-
-def test_add_more_country_names(analyzer, word_table, make_standard_name):
- word_table.add_country('fr', ' #france#')
- word_table.add_country('it', ' #italy#')
- word_table.add_country('it', ' #itala#')
-
- analyzer.add_country_names('it', {'name': 'Italy', 'ref': 'IT'})
-
- assert word_table.get_country() \
- == {('fr', ' #france#'),
- ('it', ' #italy#'),
- ('it', ' #itala#'),
- ('it', ' #it#')}
-
-
-@pytest.mark.parametrize('pcode', ['12345', 'AB 123', '34-345'])
-def test_process_place_postcode(analyzer, create_postcode_id, word_table, pcode):
- analyzer.process_place(PlaceInfo({'address': {'postcode' : pcode}}))
-
- assert word_table.get_postcodes() == {pcode, }
-
-
-@pytest.mark.parametrize('pcode', ['12:23', 'ab;cd;f', '123;836'])
-def test_process_place_bad_postcode(analyzer, create_postcode_id, word_table, pcode):
- analyzer.process_place(PlaceInfo({'address': {'postcode' : pcode}}))
-
- assert not word_table.get_postcodes()
-
-
-class TestHousenumberName:
-
- @staticmethod
- @pytest.fixture(autouse=True)
- def setup_create_housenumbers(temp_db_cursor):
- temp_db_cursor.execute("""CREATE OR REPLACE FUNCTION create_housenumbers(
- housenumbers TEXT[],
- OUT tokens TEXT, OUT normtext TEXT)
- AS $$
- SELECT housenumbers::TEXT, array_to_string(housenumbers, ';')
- $$ LANGUAGE SQL""")
-
-
- @staticmethod
- @pytest.mark.parametrize('hnr', ['123a', '1', '101'])
- def test_process_place_housenumbers_simple(analyzer, hnr):
- info = analyzer.process_place(PlaceInfo({'address': {'housenumber' : hnr}}))
-
- assert info['hnr'] == hnr
- assert info['hnr_tokens'].startswith("{")
-
-
- @staticmethod
- def test_process_place_housenumbers_lists(analyzer):
- info = analyzer.process_place(PlaceInfo({'address': {'conscriptionnumber' : '1; 2;3'}}))
-
- assert set(info['hnr'].split(';')) == set(('1', '2', '3'))
-
-
- @staticmethod
- def test_process_place_housenumbers_duplicates(analyzer):
- info = analyzer.process_place(PlaceInfo({'address': {'housenumber' : '134',
- 'conscriptionnumber' : '134',
- 'streetnumber' : '99a'}}))
-
- assert set(info['hnr'].split(';')) == set(('134', '99a'))
-
-
-class TestPlaceNames:
-
- @pytest.fixture(autouse=True)
- def setup(self, analyzer):
- self.analyzer = analyzer
-
-
- def expect_name_terms(self, info, *expected_terms):
- tokens = self.analyzer.get_word_token_info(list(expected_terms))
- for token in tokens:
- assert token[2] is not None, "No token for {0}".format(token)
-
- assert eval(info['names']) == set((t[2] for t in tokens)),\
- f"Expected: {tokens}\nGot: {info['names']}"
-
-
- def process_named_place(self, names):
- return self.analyzer.process_place(PlaceInfo({'name': names}))
-
-
- def test_simple_names(self):
- info = self.process_named_place({'name': 'Soft bAr', 'ref': '34'})
-
- self.expect_name_terms(info, '#Soft bAr', '#34', 'Soft', 'bAr', '34')
-
-
- @pytest.mark.parametrize('sep', [',' , ';'])
- def test_names_with_separator(self, sep):
- info = self.process_named_place({'name': sep.join(('New York', 'Big Apple'))})
-
- self.expect_name_terms(info, '#New York', '#Big Apple',
- 'new', 'york', 'big', 'apple')
-
-
- def test_full_names_with_bracket(self):
- info = self.process_named_place({'name': 'Houseboat (left)'})
-
- self.expect_name_terms(info, '#Houseboat (left)', '#Houseboat',
- 'houseboat', '(left)')
-
-
- def test_country_name(self, word_table):
- place = PlaceInfo({'name' : {'name': 'Norge'},
- 'country_code': 'no',
- 'rank_address': 4,
- 'class': 'boundary',
- 'type': 'administrative'})
-
- info = self.analyzer.process_place(place)
-
- self.expect_name_terms(info, '#norge', 'norge')
- assert word_table.get_country() == {('no', ' norge')}
-
-
-class TestPlaceAddress:
-
- @pytest.fixture(autouse=True)
- def setup(self, analyzer):
- self.analyzer = analyzer
-
-
- @pytest.fixture
- def getorcreate_hnr_id(self, temp_db_cursor):
- temp_db_cursor.execute("""CREATE SEQUENCE seq_hnr start 1;
- CREATE OR REPLACE FUNCTION getorcreate_housenumber_id(lookup_word TEXT)
- RETURNS INTEGER AS $$
- SELECT -nextval('seq_hnr')::INTEGER; $$ LANGUAGE SQL""")
-
- def process_address(self, **kwargs):
- return self.analyzer.process_place(PlaceInfo({'address': kwargs}))
-
-
- def name_token_set(self, *expected_terms):
- tokens = self.analyzer.get_word_token_info(list(expected_terms))
- for token in tokens:
- assert token[2] is not None, "No token for {0}".format(token)
-
- return set((t[2] for t in tokens))
-
-
- @pytest.mark.parametrize('pcode', ['12345', 'AB 123', '34-345'])
- def test_process_place_postcode(self, word_table, pcode):
- self.process_address(postcode=pcode)
-
- assert word_table.get_postcodes() == {pcode, }
-
-
- @pytest.mark.parametrize('pcode', ['12:23', 'ab;cd;f', '123;836'])
- def test_process_place_bad_postcode(self, word_table, pcode):
- self.process_address(postcode=pcode)
-
- assert not word_table.get_postcodes()
-
-
- @pytest.mark.parametrize('hnr', ['123a', '0', '101'])
- def test_process_place_housenumbers_simple(self, hnr, getorcreate_hnr_id):
- info = self.process_address(housenumber=hnr)
-
- assert info['hnr'] == hnr.lower()
- assert info['hnr_tokens'] == "{-1}"
-
-
- def test_process_place_housenumbers_lists(self, getorcreate_hnr_id):
- info = self.process_address(conscriptionnumber='1; 2;3')
-
- assert set(info['hnr'].split(';')) == set(('1', '2', '3'))
- assert info['hnr_tokens'] == "{-1,-2,-3}"
-
-
- def test_process_place_housenumbers_duplicates(self, getorcreate_hnr_id):
- info = self.process_address(housenumber='134',
- conscriptionnumber='134',
- streetnumber='99A')
-
- assert set(info['hnr'].split(';')) == set(('134', '99a'))
- assert info['hnr_tokens'] == "{-1,-2}"
-
-
- def test_process_place_street(self):
- # legacy tokenizer only indexes known names
- self.analyzer.process_place(PlaceInfo({'name': {'name' : 'Grand Road'}}))
- info = self.process_address(street='Grand Road')
-
- assert eval(info['street']) == self.name_token_set('#Grand Road')
-
-
- def test_process_place_street_empty(self):
- info = self.process_address(street='🜵')
-
- assert 'street' not in info
-
-
- def test_process_place_place(self):
- self.analyzer.process_place(PlaceInfo({'name': {'name' : 'Honu Lulu'}}))
- info = self.process_address(place='Honu Lulu')
-
- assert eval(info['place_search']) == self.name_token_set('#Honu Lulu',
- 'Honu', 'Lulu')
- assert eval(info['place_match']) == self.name_token_set('#Honu Lulu')
-
-
- def test_process_place_place_empty(self):
- info = self.process_address(place='🜵')
-
- assert 'place' not in info
-
-
- def test_process_place_address_terms(self):
- for name in ('Zwickau', 'Haupstraße', 'Sachsen'):
- self.analyzer.process_place(PlaceInfo({'name': {'name' : name}}))
- info = self.process_address(country='de', city='Zwickau', state='Sachsen',
- suburb='Zwickau', street='Hauptstr',
- full='right behind the church')
-
- city = self.name_token_set('ZWICKAU')
- state = self.name_token_set('SACHSEN')
-
- print(info)
- result = {k: eval(v[0]) for k,v in info['addr'].items()}
-
- assert result == {'city': city, 'suburb': city, 'state': state}
-
-
- def test_process_place_address_terms_empty(self):
- info = self.process_address(country='de', city=' ', street='Hauptstr',
- full='right behind the church')
-
- assert 'addr' not in info
-
-# SPDX-License-Identifier: GPL-2.0-only
+# SPDX-License-Identifier: GPL-3.0-or-later
#
# This file is part of Nominatim. (https://nominatim.org)
#
-# Copyright (C) 2022 by the Nominatim developer community.
+# Copyright (C) 2024 by the Nominatim developer community.
# For a full list of authors see the git log.
"""
Tests for execution of the sanitztion step.
"""
import pytest
-from nominatim.errors import UsageError
-import nominatim.tokenizer.place_sanitizer as sanitizer
-from nominatim.data.place_info import PlaceInfo
+from nominatim_db.errors import UsageError
+import nominatim_db.tokenizer.place_sanitizer as sanitizer
+from nominatim_db.data.place_info import PlaceInfo
def test_placeinfo_clone_new_name():
-# SPDX-License-Identifier: GPL-2.0-only
+# SPDX-License-Identifier: GPL-3.0-or-later
#
# This file is part of Nominatim. (https://nominatim.org)
#
-# Copyright (C) 2022 by the Nominatim developer community.
+# Copyright (C) 2024 by the Nominatim developer community.
# For a full list of authors see the git log.
"""
Tests for special postcode analysis and variant generation.
from icu import Transliterator
-import nominatim.tokenizer.token_analysis.postcodes as module
-from nominatim.data.place_name import PlaceName
-from nominatim.errors import UsageError
+import nominatim_db.tokenizer.token_analysis.postcodes as module
+from nominatim_db.data.place_name import PlaceName
+from nominatim_db.errors import UsageError
DEFAULT_NORMALIZATION = """ :: NFD ();
'🜳' > ' ';
-# SPDX-License-Identifier: GPL-2.0-only
+# SPDX-License-Identifier: GPL-3.0-or-later
#
# This file is part of Nominatim. (https://nominatim.org)
#
-# Copyright (C) 2022 by the Nominatim developer community.
+# Copyright (C) 2024 by the Nominatim developer community.
# For a full list of authors see the git log.
"""
Tests for import name normalisation and variant generation.
from icu import Transliterator
-import nominatim.tokenizer.token_analysis.generic as module
-from nominatim.errors import UsageError
+import nominatim_db.tokenizer.token_analysis.generic as module
+from nominatim_db.errors import UsageError
DEFAULT_NORMALIZATION = """ :: NFD ();
'🜳' > ' ';
-# SPDX-License-Identifier: GPL-2.0-only
+# SPDX-License-Identifier: GPL-3.0-or-later
#
# This file is part of Nominatim. (https://nominatim.org)
#
-# Copyright (C) 2022 by the Nominatim developer community.
+# Copyright (C) 2024 by the Nominatim developer community.
# For a full list of authors see the git log.
"""
Tests for generic token analysis, mutation part.
from icu import Transliterator
-import nominatim.tokenizer.token_analysis.generic as module
-from nominatim.errors import UsageError
+import nominatim_db.tokenizer.token_analysis.generic as module
+from nominatim_db.errors import UsageError
DEFAULT_NORMALIZATION = """ '🜳' > ' ';
[[:Nonspacing Mark:] [:Cf:]] >;
-# SPDX-License-Identifier: GPL-2.0-only
+# SPDX-License-Identifier: GPL-3.0-or-later
#
# This file is part of Nominatim. (https://nominatim.org)
#
-# Copyright (C) 2022 by the Nominatim developer community.
+# Copyright (C) 2024 by the Nominatim developer community.
# For a full list of authors see the git log.
import pytest
@pytest.fixture
-def osm2pgsql_options(temp_db):
- """ A standard set of options for osm2pgsql.
+def osm2pgsql_options(temp_db, tmp_path):
+ """ A standard set of options for osm2pgsql
+ together with an osm2pgsql mock that just reflects the command line.
"""
- return dict(osm2pgsql='echo',
+ osm2pgsql_exec = tmp_path / 'osm2pgsql_mock'
+
+ osm2pgsql_exec.write_text("""#!/bin/sh
+
+if [ "$*" = "--version" ]; then
+ >&2 echo "2024-08-09 11:16:23 osm2pgsql version 11.7.2 (11.7.2)"
+else
+ echo "$@"
+fi
+ """)
+ osm2pgsql_exec.chmod(0o777)
+
+ return dict(osm2pgsql=str(osm2pgsql_exec),
osm2pgsql_cache=10,
osm2pgsql_style='style.file',
threads=1,
-# SPDX-License-Identifier: GPL-2.0-only
+# SPDX-License-Identifier: GPL-3.0-or-later
#
# This file is part of Nominatim. (https://nominatim.org)
#
-# Copyright (C) 2022 by the Nominatim developer community.
+# Copyright (C) 2024 by the Nominatim developer community.
# For a full list of authors see the git log.
"""
Tests for functions to add additional data to the database.
import pytest
-from nominatim.tools import add_osm_data
+from nominatim_db.tools import add_osm_data
class CaptureGetUrl:
return '<xml></xml>'
-def test_import_osm_file_simple(table_factory, osm2pgsql_options, capfd):
- table_factory('place', content=((1, ), ))
+@pytest.fixture(autouse=True)
+def setup_delete_postprocessing(temp_db_cursor):
+ temp_db_cursor.execute("""CREATE OR REPLACE FUNCTION flush_deleted_places()
+ RETURNS INTEGER AS $$ SELECT 1 $$ LANGUAGE SQL""")
- assert add_osm_data.add_data_from_file(Path('change.osm'), osm2pgsql_options) == 0
+def test_import_osm_file_simple(dsn, table_factory, osm2pgsql_options, capfd):
+
+ assert add_osm_data.add_data_from_file(dsn, Path('change.osm'), osm2pgsql_options) == 0
captured = capfd.readouterr()
assert '--append' in captured.out
@pytest.mark.parametrize("osm_type", ['node', 'way', 'relation'])
@pytest.mark.parametrize("main_api,url", [(True, 'https://www.openstreetmap.org/api'),
(False, 'https://overpass-api.de/api/interpreter?')])
-def test_import_osm_object_main_api(osm2pgsql_options, monkeypatch, capfd,
- osm_type, main_api, url):
+def test_import_osm_object_main_api(dsn, osm2pgsql_options, monkeypatch,
+ capfd, osm_type, main_api, url):
get_url_mock = CaptureGetUrl(monkeypatch)
- add_osm_data.add_osm_object(osm_type, 4536, main_api, osm2pgsql_options)
+ add_osm_data.add_osm_object(dsn, osm_type, 4536, main_api, osm2pgsql_options)
captured = capfd.readouterr()
assert get_url_mock.url.startswith(url)
-# SPDX-License-Identifier: GPL-2.0-only
+# SPDX-License-Identifier: GPL-3.0-or-later
#
# This file is part of Nominatim. (https://nominatim.org)
#
-# Copyright (C) 2022 by the Nominatim developer community.
+# Copyright (C) 2024 by the Nominatim developer community.
# For a full list of authors see the git log.
"""
Tests for maintenance and analysis functions.
"""
import pytest
-from nominatim.errors import UsageError
-from nominatim.tools import admin
-from nominatim.tokenizer import factory
+from nominatim_db.errors import UsageError
+from nominatim_db.tools import admin
+from nominatim_db.tokenizer import factory
+from nominatim_db.db.sql_preprocessor import SQLPreprocessor
@pytest.fixture(autouse=True)
def create_placex_table(project_env, tokenizer_mock, temp_db_cursor, placex_table):
VALUES(9988, 'N', 10000)""")
admin.analyse_indexing(project_env, osm_id='N10000')
+
+
+class TestAdminCleanDeleted:
+
+ @pytest.fixture(autouse=True)
+ def setup_polygon_delete(self, project_env, table_factory, place_table, osmline_table, temp_db_cursor, temp_db_conn, def_config, src_dir):
+ """ Set up place_force_delete function and related tables
+ """
+ self.project_env = project_env
+ self.temp_db_cursor = temp_db_cursor
+ table_factory('import_polygon_delete',
+ """osm_id BIGINT,
+ osm_type CHAR(1),
+ class TEXT NOT NULL,
+ type TEXT NOT NULL""",
+ ((100, 'N', 'boundary', 'administrative'),
+ (145, 'N', 'boundary', 'administrative'),
+ (175, 'R', 'landcover', 'grass')))
+ temp_db_cursor.execute("""INSERT INTO placex (place_id, osm_id, osm_type, class, type, indexed_date, indexed_status)
+ VALUES(1, 100, 'N', 'boundary', 'administrative', current_date - INTERVAL '1 month', 1),
+ (2, 145, 'N', 'boundary', 'administrative', current_date - INTERVAL '3 month', 1),
+ (3, 175, 'R', 'landcover', 'grass', current_date - INTERVAL '3 months', 1)""")
+ # set up tables and triggers for utils function
+ table_factory('place_to_be_deleted',
+ """osm_id BIGINT,
+ osm_type CHAR(1),
+ class TEXT NOT NULL,
+ type TEXT NOT NULL,
+ deferred BOOLEAN""")
+ table_factory('country_name', 'partition INT')
+ table_factory('import_polygon_error', """osm_id BIGINT,
+ osm_type CHAR(1),
+ class TEXT NOT NULL,
+ type TEXT NOT NULL""")
+ temp_db_cursor.execute("""CREATE OR REPLACE FUNCTION place_delete()
+ RETURNS TRIGGER AS $$
+ BEGIN RETURN NULL; END;
+ $$ LANGUAGE plpgsql;""")
+ temp_db_cursor.execute("""CREATE TRIGGER place_before_delete BEFORE DELETE ON place
+ FOR EACH ROW EXECUTE PROCEDURE place_delete();""")
+ orig_sql = def_config.lib_dir.sql
+ def_config.lib_dir.sql = src_dir / 'lib-sql'
+ sqlproc = SQLPreprocessor(temp_db_conn, def_config)
+ sqlproc.run_sql_file(temp_db_conn, 'functions/utils.sql')
+ def_config.lib_dir.sql = orig_sql
+
+
+ def test_admin_clean_deleted_no_records(self):
+ admin.clean_deleted_relations(self.project_env, age='1 year')
+ assert self.temp_db_cursor.row_set('SELECT osm_id, osm_type, class, type, indexed_status FROM placex') == {(100, 'N', 'boundary', 'administrative', 1),
+ (145, 'N', 'boundary', 'administrative', 1),
+ (175, 'R', 'landcover', 'grass', 1)}
+ assert self.temp_db_cursor.table_rows('import_polygon_delete') == 3
+
+
+ @pytest.mark.parametrize('test_age', ['T week', '1 welk', 'P1E'])
+ def test_admin_clean_deleted_bad_age(self, test_age):
+ with pytest.raises(UsageError):
+ admin.clean_deleted_relations(self.project_env, age = test_age)
+
+
+ def test_admin_clean_deleted_partial(self):
+ admin.clean_deleted_relations(self.project_env, age = '2 months')
+ assert self.temp_db_cursor.row_set('SELECT osm_id, osm_type, class, type, indexed_status FROM placex') == {(100, 'N', 'boundary', 'administrative', 1),
+ (145, 'N', 'boundary', 'administrative', 100),
+ (175, 'R', 'landcover', 'grass', 100)}
+ assert self.temp_db_cursor.table_rows('import_polygon_delete') == 1
+
+ @pytest.mark.parametrize('test_age', ['1 week', 'P3D', '5 hours'])
+ def test_admin_clean_deleted(self, test_age):
+ admin.clean_deleted_relations(self.project_env, age = test_age)
+ assert self.temp_db_cursor.row_set('SELECT osm_id, osm_type, class, type, indexed_status FROM placex') == {(100, 'N', 'boundary', 'administrative', 100),
+ (145, 'N', 'boundary', 'administrative', 100),
+ (175, 'R', 'landcover', 'grass', 100)}
+ assert self.temp_db_cursor.table_rows('import_polygon_delete') == 0
-# SPDX-License-Identifier: GPL-2.0-only
+# SPDX-License-Identifier: GPL-3.0-or-later
#
# This file is part of Nominatim. (https://nominatim.org)
#
-# Copyright (C) 2022 by the Nominatim developer community.
+# Copyright (C) 2024 by the Nominatim developer community.
# For a full list of authors see the git log.
"""
Tests for database integrity checks.
"""
import pytest
-from nominatim.tools import check_database as chkdb
+from nominatim_db.tools import check_database as chkdb
+import nominatim_db.version
def test_check_database_unknown_db(def_config, monkeypatch):
monkeypatch.setenv('NOMINATIM_DATABASE_DSN', 'pgsql:dbname=fjgkhughwgh2423gsags')
assert chkdb.check_database(def_config) == 1
-def test_check_conection_good(temp_db_conn, def_config):
+def test_check_connection_good(temp_db_conn, def_config):
assert chkdb.check_connection(temp_db_conn, def_config) == chkdb.CheckState.OK
-def test_check_conection_bad(def_config):
+def test_check_connection_bad(def_config):
badconn = chkdb._BadConnection('Error')
assert chkdb.check_connection(badconn, def_config) == chkdb.CheckState.FATAL
+def test_check_database_version_good(property_table, temp_db_conn, def_config):
+ property_table.set('database_version',
+ str(nominatim_db.version.NOMINATIM_VERSION))
+ assert chkdb.check_database_version(temp_db_conn, def_config) == chkdb.CheckState.OK
+
+def test_check_database_version_bad(property_table, temp_db_conn, def_config):
+ property_table.set('database_version', '3.9.9-9')
+ assert chkdb.check_database_version(temp_db_conn, def_config) == chkdb.CheckState.FATAL
+
+
def test_check_placex_table_good(table_factory, temp_db_conn, def_config):
table_factory('placex')
assert chkdb.check_placex_table(temp_db_conn, def_config) == chkdb.CheckState.OK
def test_check_indexing_bad(table_factory, temp_db_conn, def_config):
table_factory('placex', 'place_id int, indexed_status smallint',
content=((1, 0), (2, 2)))
- assert chkdb.check_indexing(temp_db_conn, def_config) == chkdb.CheckState.FAIL
+ assert chkdb.check_indexing(temp_db_conn, def_config) == chkdb.CheckState.WARN
def test_check_database_indexes_bad(temp_db_conn, def_config):
-# SPDX-License-Identifier: GPL-2.0-only
+# SPDX-License-Identifier: GPL-3.0-or-later
#
# This file is part of Nominatim. (https://nominatim.org)
#
-# Copyright (C) 2022 by the Nominatim developer community.
+# Copyright (C) 2024 by the Nominatim developer community.
# For a full list of authors see the git log.
"""
Tests for functions to import a new database.
"""
from pathlib import Path
-from contextlib import closing
import pytest
-import psycopg2
+import pytest_asyncio
+import psycopg
+from psycopg import sql as pysql
-from nominatim.tools import database_import
-from nominatim.errors import UsageError
+from nominatim_db.tools import database_import
+from nominatim_db.errors import UsageError
class TestDatabaseSetup:
DBNAME = 'test_nominatim_python_unittest'
@pytest.fixture(autouse=True)
def setup_nonexistant_db(self):
- conn = psycopg2.connect(database='postgres')
-
- try:
- conn.set_isolation_level(0)
+ with psycopg.connect(dbname='postgres', autocommit=True) as conn:
with conn.cursor() as cur:
cur.execute(f'DROP DATABASE IF EXISTS {self.DBNAME}')
with conn.cursor() as cur:
cur.execute(f'DROP DATABASE IF EXISTS {self.DBNAME}')
- finally:
- conn.close()
+
@pytest.fixture
def cursor(self):
- conn = psycopg2.connect(database=self.DBNAME)
-
- try:
+ with psycopg.connect(dbname=self.DBNAME) as conn:
with conn.cursor() as cur:
yield cur
- finally:
- conn.close()
def conn(self):
- return closing(psycopg2.connect(database=self.DBNAME))
+ return psycopg.connect(dbname=self.DBNAME)
def test_setup_skeleton(self):
ignore_errors=True)
-def test_import_osm_data_drop(table_factory, temp_db_conn, tmp_path, osm2pgsql_options):
+def test_import_osm_data_drop(table_factory, temp_db_cursor, tmp_path, osm2pgsql_options):
table_factory('place', content=((1, ), ))
table_factory('planet_osm_nodes')
database_import.import_osm_data(Path('file.pbf'), osm2pgsql_options, drop=True)
assert not flatfile.exists()
- assert not temp_db_conn.table_exists('planet_osm_nodes')
+ assert not temp_db_cursor.table_exists('planet_osm_nodes')
def test_import_osm_data_default_cache(table_factory, osm2pgsql_options, capfd):
@pytest.mark.parametrize("threads", (1, 5))
-def test_load_data(dsn, place_row, placex_table, osmline_table,
+@pytest.mark.asyncio
+async def test_load_data(dsn, place_row, placex_table, osmline_table,
temp_db_cursor, threads):
for func in ('precompute_words', 'getorcreate_housenumber_id', 'make_standard_name'):
- temp_db_cursor.execute(f"""CREATE FUNCTION {func} (src TEXT)
- RETURNS TEXT AS $$ SELECT 'a'::TEXT $$ LANGUAGE SQL
- """)
+ temp_db_cursor.execute(pysql.SQL("""CREATE FUNCTION {} (src TEXT)
+ RETURNS TEXT AS $$ SELECT 'a'::TEXT $$ LANGUAGE SQL
+ """).format(pysql.Identifier(func)))
for oid in range(100, 130):
place_row(osm_id=oid)
place_row(osm_type='W', osm_id=342, cls='place', typ='houses',
geom='SRID=4326;LINESTRING(0 0, 10 10)')
- database_import.load_data(dsn, threads)
+ await database_import.load_data(dsn, threads)
assert temp_db_cursor.table_rows('placex') == 30
assert temp_db_cursor.table_rows('location_property_osmline') == 1
@pytest.mark.parametrize("drop", [True, False])
- def test_create_search_indices(self, temp_db_conn, temp_db_cursor, drop):
+ @pytest.mark.asyncio
+ async def test_create_search_indices(self, temp_db_conn, temp_db_cursor, drop):
self.write_sql('indices.sql',
"""CREATE FUNCTION test() RETURNS bool
AS $$ SELECT {{drop}} $$ LANGUAGE SQL""")
- database_import.create_search_indices(temp_db_conn, self.config, drop)
+ await database_import.create_search_indices(temp_db_conn, self.config, drop)
temp_db_cursor.scalar('SELECT test()') == drop
-# SPDX-License-Identifier: GPL-2.0-only
+# SPDX-License-Identifier: GPL-3.0-or-later
#
# This file is part of Nominatim. (https://nominatim.org)
#
-# Copyright (C) 2022 by the Nominatim developer community.
+# Copyright (C) 2024 by the Nominatim developer community.
# For a full list of authors see the git log.
"""
Tests for tools.exec_utils module.
import pytest
-import nominatim.tools.exec_utils as exec_utils
-
-class TestRunLegacyScript:
-
- @pytest.fixture(autouse=True)
- def setup_nominatim_env(self, tmp_path, def_config):
- tmp_phplib_dir = tmp_path / 'phplib'
- tmp_phplib_dir.mkdir()
- (tmp_phplib_dir / 'admin').mkdir()
-
- class _NominatimEnv:
- config = def_config
- phplib_dir = tmp_phplib_dir
- data_dir = Path('data')
- project_dir = Path('.')
- sqllib_dir = Path('lib-sql')
- config_dir = Path('settings')
- module_dir = 'module'
- osm2pgsql_path = 'osm2pgsql'
-
- self.testenv = _NominatimEnv
-
-
- def mk_script(self, code):
- codefile = self.testenv.phplib_dir / 'admin' / 't.php'
- codefile.write_text('<?php\n' + code + '\n')
-
- return 't.php'
-
-
- @pytest.mark.parametrize("return_code", (0, 1, 15, 255))
- def test_run_legacy_return_exit_code(self, return_code):
- fname = self.mk_script('exit({});'.format(return_code))
- assert return_code == \
- exec_utils.run_legacy_script(fname, nominatim_env=self.testenv)
-
-
- def test_run_legacy_return_throw_on_fail(self):
- fname = self.mk_script('exit(11);')
- with pytest.raises(subprocess.CalledProcessError):
- exec_utils.run_legacy_script(fname, nominatim_env=self.testenv,
- throw_on_fail=True)
-
-
- def test_run_legacy_return_dont_throw_on_success(self):
- fname = self.mk_script('exit(0);')
- assert exec_utils.run_legacy_script(fname, nominatim_env=self.testenv,
- throw_on_fail=True) == 0
-
- def test_run_legacy_use_given_module_path(self):
- fname = self.mk_script("exit($_SERVER['NOMINATIM_DATABASE_MODULE_PATH'] == '' ? 0 : 23);")
-
- assert exec_utils.run_legacy_script(fname, nominatim_env=self.testenv) == 0
-
-
- def test_run_legacy_do_not_overwrite_module_path(self, monkeypatch):
- monkeypatch.setenv('NOMINATIM_DATABASE_MODULE_PATH', 'other')
- fname = self.mk_script(
- "exit($_SERVER['NOMINATIM_DATABASE_MODULE_PATH'] == 'other' ? 0 : 1);")
-
- assert exec_utils.run_legacy_script(fname, nominatim_env=self.testenv) == 0
-
-
- def test_run_legacy_default_osm2pgsql_binary(self, monkeypatch):
- fname = self.mk_script("exit($_SERVER['NOMINATIM_OSM2PGSQL_BINARY'] == 'osm2pgsql' ? 0 : 23);")
-
- assert exec_utils.run_legacy_script(fname, nominatim_env=self.testenv) == 0
-
-
- def test_run_legacy_override_osm2pgsql_binary(self, monkeypatch):
- monkeypatch.setenv('NOMINATIM_OSM2PGSQL_BINARY', 'somethingelse')
-
- fname = self.mk_script("exit($_SERVER['NOMINATIM_OSM2PGSQL_BINARY'] == 'somethingelse' ? 0 : 23);")
-
- assert exec_utils.run_legacy_script(fname, nominatim_env=self.testenv) == 0
-
-
-class TestRunApiScript:
-
- @staticmethod
- @pytest.fixture(autouse=True)
- def setup_project_dir(tmp_path):
- webdir = tmp_path / 'website'
- webdir.mkdir()
- (webdir / 'test.php').write_text("<?php\necho 'OK\n';")
-
-
- @staticmethod
- def test_run_api(tmp_path):
- assert exec_utils.run_api_script('test', tmp_path) == 0
-
- @staticmethod
- def test_run_api_execution_error(tmp_path):
- assert exec_utils.run_api_script('badname', tmp_path) != 0
-
- @staticmethod
- def test_run_api_with_extra_env(tmp_path):
- extra_env = dict(SCRIPT_FILENAME=str(tmp_path / 'website' / 'test.php'))
- assert exec_utils.run_api_script('badname', tmp_path, extra_env=extra_env) == 0
-
- @staticmethod
- def test_custom_phpcgi(tmp_path, capfd):
- assert exec_utils.run_api_script('test', tmp_path, phpcgi_bin='env',
- params={'q' : 'Berlin'}) == 0
- captured = capfd.readouterr()
-
- assert '?q=Berlin' in captured.out
-
- @staticmethod
- def test_fail_on_error_output(tmp_path):
- # Starting PHP 8 the PHP CLI no longer has STDERR defined as constant
- php = """
- <?php
- if(!defined('STDERR')) define('STDERR', fopen('php://stderr', 'wb'));
- fwrite(STDERR, 'WARNING'.PHP_EOL);
- """
- (tmp_path / 'website' / 'bad.php').write_text(php)
-
- assert exec_utils.run_api_script('bad', tmp_path) == 1
-
-### run_osm2pgsql
+from nominatim_db.config import Configuration
+import nominatim_db.tools.exec_utils as exec_utils
def test_run_osm2pgsql(osm2pgsql_options):
osm2pgsql_options['append'] = False
-# SPDX-License-Identifier: GPL-2.0-only
+# SPDX-License-Identifier: GPL-3.0-or-later
#
# This file is part of Nominatim. (https://nominatim.org)
#
-# Copyright (C) 2022 by the Nominatim developer community.
+# Copyright (C) 2024 by the Nominatim developer community.
# For a full list of authors see the git log.
"""
Tests for freeze functions (removing unused database parts).
"""
-from nominatim.tools import freeze
+from nominatim_db.tools import freeze
NOMINATIM_RUNTIME_TABLES = [
'country_name', 'country_osm_grid',
for table in NOMINATIM_RUNTIME_TABLES + NOMINATIM_DROP_TABLES:
table_factory(table)
+ assert not freeze.is_frozen(temp_db_conn)
+
freeze.drop_update_tables(temp_db_conn)
for table in NOMINATIM_RUNTIME_TABLES:
for table in NOMINATIM_DROP_TABLES:
assert not temp_db_cursor.table_exists(table)
+ assert freeze.is_frozen(temp_db_conn)
+
def test_drop_flatnode_file_no_file():
freeze.drop_flatnode_file(None)
freeze.drop_flatnode_file(tmp_path / 'something.store')
-def test_drop_flatnode_file_delte(tmp_path):
+def test_drop_flatnode_file_delete(tmp_path):
flatfile = tmp_path / 'flatnode.store'
flatfile.write_text('Some content')
-# SPDX-License-Identifier: GPL-2.0-only
+# SPDX-License-Identifier: GPL-3.0-or-later
#
# This file is part of Nominatim. (https://nominatim.org)
#
-# Copyright (C) 2022 by the Nominatim developer community.
+# Copyright (C) 2024 by the Nominatim developer community.
# For a full list of authors see the git log.
"""
Tests for import special phrases methods
"""
from shutil import copyfile
import pytest
-from nominatim.tools.special_phrases.sp_importer import SPImporter
-from nominatim.tools.special_phrases.sp_wiki_loader import SPWikiLoader
-from nominatim.tools.special_phrases.special_phrase import SpecialPhrase
-from nominatim.errors import UsageError
+from nominatim_db.tools.special_phrases.sp_importer import SPImporter
+from nominatim_db.tools.special_phrases.sp_wiki_loader import SPWikiLoader
+from nominatim_db.tools.special_phrases.special_phrase import SpecialPhrase
+from nominatim_db.errors import UsageError
from cursor import CursorForTesting
assert isinstance(black_list, dict) and isinstance(white_list, dict)
-def test_create_place_classtype_indexes(temp_db_with_extensions, temp_db_conn,
+def test_create_place_classtype_indexes(temp_db_with_extensions,
+ temp_db_conn, temp_db_cursor,
table_factory, sp_importer):
"""
Test that _create_place_classtype_indexes() create the
table_factory(table_name, 'place_id BIGINT, centroid GEOMETRY')
sp_importer._create_place_classtype_indexes('', phrase_class, phrase_type)
+ temp_db_conn.commit()
- assert check_placeid_and_centroid_indexes(temp_db_conn, phrase_class, phrase_type)
+ assert check_placeid_and_centroid_indexes(temp_db_cursor, phrase_class, phrase_type)
-def test_create_place_classtype_table(temp_db_conn, placex_table, sp_importer):
+def test_create_place_classtype_table(temp_db_conn, temp_db_cursor, placex_table, sp_importer):
"""
Test that _create_place_classtype_table() create
the right place_classtype table.
phrase_class = 'class'
phrase_type = 'type'
sp_importer._create_place_classtype_table('', phrase_class, phrase_type)
+ temp_db_conn.commit()
- assert check_table_exist(temp_db_conn, phrase_class, phrase_type)
+ assert check_table_exist(temp_db_cursor, phrase_class, phrase_type)
-def test_grant_access_to_web_user(temp_db_conn, table_factory, def_config, sp_importer):
+def test_grant_access_to_web_user(temp_db_conn, temp_db_cursor, table_factory,
+ def_config, sp_importer):
"""
Test that _grant_access_to_webuser() give
right access to the web user.
table_factory(table_name)
sp_importer._grant_access_to_webuser(phrase_class, phrase_type)
+ temp_db_conn.commit()
- assert check_grant_access(temp_db_conn, def_config.DATABASE_WEBUSER, phrase_class, phrase_type)
+ assert check_grant_access(temp_db_cursor, def_config.DATABASE_WEBUSER, phrase_class, phrase_type)
def test_create_place_classtype_table_and_indexes(
- temp_db_conn, def_config, placex_table,
- sp_importer):
+ temp_db_cursor, def_config, placex_table,
+ sp_importer, temp_db_conn):
"""
Test that _create_place_classtype_table_and_indexes()
create the right place_classtype tables and place_id indexes
pairs = set([('class1', 'type1'), ('class2', 'type2')])
sp_importer._create_classtype_table_and_indexes(pairs)
+ temp_db_conn.commit()
for pair in pairs:
- assert check_table_exist(temp_db_conn, pair[0], pair[1])
- assert check_placeid_and_centroid_indexes(temp_db_conn, pair[0], pair[1])
- assert check_grant_access(temp_db_conn, def_config.DATABASE_WEBUSER, pair[0], pair[1])
+ assert check_table_exist(temp_db_cursor, pair[0], pair[1])
+ assert check_placeid_and_centroid_indexes(temp_db_cursor, pair[0], pair[1])
+ assert check_grant_access(temp_db_cursor, def_config.DATABASE_WEBUSER, pair[0], pair[1])
def test_remove_non_existent_tables_from_db(sp_importer, default_phrases,
- temp_db_conn):
+ temp_db_conn, temp_db_cursor):
"""
Check for the remove_non_existent_phrases_from_db() method.
"""
sp_importer._remove_non_existent_tables_from_db()
+ temp_db_conn.commit()
- # Changes are not committed yet. Use temp_db_conn for checking results.
- with temp_db_conn.cursor(cursor_factory=CursorForTesting) as cur:
- assert cur.row_set(query_tables) \
+ assert temp_db_cursor.row_set(query_tables) \
== {('place_classtype_testclasstypetable_to_keep', )}
@pytest.mark.parametrize("should_replace", [(True), (False)])
-def test_import_phrases(monkeypatch, temp_db_conn, def_config, sp_importer,
+def test_import_phrases(monkeypatch, temp_db_cursor, def_config, sp_importer,
placex_table, table_factory, tokenizer_mock,
xml_wiki_content, should_replace):
"""
table_factory('place_classtype_amenity_animal_shelter')
table_factory('place_classtype_wrongclass_wrongtype')
- monkeypatch.setattr('nominatim.tools.special_phrases.sp_wiki_loader._get_wiki_content',
+ monkeypatch.setattr('nominatim_db.tools.special_phrases.sp_wiki_loader._get_wiki_content',
lambda lang: xml_wiki_content)
tokenizer = tokenizer_mock()
class_test = 'aerialway'
type_test = 'zip_line'
- assert check_table_exist(temp_db_conn, class_test, type_test)
- assert check_placeid_and_centroid_indexes(temp_db_conn, class_test, type_test)
- assert check_grant_access(temp_db_conn, def_config.DATABASE_WEBUSER, class_test, type_test)
- assert check_table_exist(temp_db_conn, 'amenity', 'animal_shelter')
+ assert check_table_exist(temp_db_cursor, class_test, type_test)
+ assert check_placeid_and_centroid_indexes(temp_db_cursor, class_test, type_test)
+ assert check_grant_access(temp_db_cursor, def_config.DATABASE_WEBUSER, class_test, type_test)
+ assert check_table_exist(temp_db_cursor, 'amenity', 'animal_shelter')
if should_replace:
- assert not check_table_exist(temp_db_conn, 'wrong_class', 'wrong_type')
+ assert not check_table_exist(temp_db_cursor, 'wrong_class', 'wrong_type')
- assert temp_db_conn.table_exists('place_classtype_amenity_animal_shelter')
+ assert temp_db_cursor.table_exists('place_classtype_amenity_animal_shelter')
if should_replace:
- assert not temp_db_conn.table_exists('place_classtype_wrongclass_wrongtype')
+ assert not temp_db_cursor.table_exists('place_classtype_wrongclass_wrongtype')
-def check_table_exist(temp_db_conn, phrase_class, phrase_type):
+def check_table_exist(temp_db_cursor, phrase_class, phrase_type):
"""
Verify that the place_classtype table exists for the given
phrase_class and phrase_type.
"""
- return temp_db_conn.table_exists('place_classtype_{}_{}'.format(phrase_class, phrase_type))
+ return temp_db_cursor.table_exists('place_classtype_{}_{}'.format(phrase_class, phrase_type))
-def check_grant_access(temp_db_conn, user, phrase_class, phrase_type):
+def check_grant_access(temp_db_cursor, user, phrase_class, phrase_type):
"""
Check that the web user has been granted right access to the
place_classtype table of the given phrase_class and phrase_type.
"""
table_name = 'place_classtype_{}_{}'.format(phrase_class, phrase_type)
- with temp_db_conn.cursor() as temp_db_cursor:
- temp_db_cursor.execute("""
- SELECT * FROM information_schema.role_table_grants
- WHERE table_name='{}'
- AND grantee='{}'
- AND privilege_type='SELECT'""".format(table_name, user))
- return temp_db_cursor.fetchone()
+ temp_db_cursor.execute("""
+ SELECT * FROM information_schema.role_table_grants
+ WHERE table_name='{}'
+ AND grantee='{}'
+ AND privilege_type='SELECT'""".format(table_name, user))
+ return temp_db_cursor.fetchone()
-def check_placeid_and_centroid_indexes(temp_db_conn, phrase_class, phrase_type):
+def check_placeid_and_centroid_indexes(temp_db_cursor, phrase_class, phrase_type):
"""
Check that the place_id index and centroid index exist for the
place_classtype table of the given phrase_class and phrase_type.
"""
+ table_name = 'place_classtype_{}_{}'.format(phrase_class, phrase_type)
index_prefix = 'idx_place_classtype_{}_{}_'.format(phrase_class, phrase_type)
return (
- temp_db_conn.index_exists(index_prefix + 'centroid')
+ temp_db_cursor.index_exists(table_name, index_prefix + 'centroid')
and
- temp_db_conn.index_exists(index_prefix + 'place_id')
+ temp_db_cursor.index_exists(table_name, index_prefix + 'place_id')
)
-# SPDX-License-Identifier: GPL-2.0-only
+# SPDX-License-Identifier: GPL-3.0-or-later
#
# This file is part of Nominatim. (https://nominatim.org)
#
-# Copyright (C) 2022 by the Nominatim developer community.
+# Copyright (C) 2024 by the Nominatim developer community.
# For a full list of authors see the git log.
"""
Tests for migration functions
"""
import pytest
-import psycopg2.extras
-from nominatim.tools import migration
-from nominatim.errors import UsageError
-import nominatim.version
-
-from mock_legacy_word_table import MockLegacyWordTable
+from nominatim_db.tools import migration
+from nominatim_db.errors import UsageError
+from nominatim_db.db.connection import server_version_tuple
+import nominatim_db.version
class DummyTokenizer:
monkeypatch.setattr(migration.tokenizer_factory, 'get_tokenizer_for_db',
lambda *args: DummyTokenizer())
-@pytest.fixture
-def legacy_word_table(temp_db_conn):
- return MockLegacyWordTable(temp_db_conn)
-
-def test_no_migration_old_versions(temp_db_with_extensions, table_factory, def_config):
- table_factory('country_name', 'name HSTORE, country_code TEXT')
+def test_no_migration_old_versions(temp_db_with_extensions, def_config, property_table):
+ property_table.set('database_version', '4.2.99-0')
with pytest.raises(UsageError, match='Migration not possible'):
migration.migrate(def_config, {})
-def test_set_up_migration_for_36(temp_db_with_extensions, temp_db_cursor,
- table_factory, def_config, monkeypatch,
- postprocess_mock):
- psycopg2.extras.register_hstore(temp_db_cursor)
- # don't actually run any migration, except the property table creation
- monkeypatch.setattr(migration, '_MIGRATION_FUNCTIONS',
- [((3, 5, 0, 99), migration.add_nominatim_property_table)])
- # Use a r/o user name that always exists
- monkeypatch.setenv('NOMINATIM_DATABASE_WEBUSER', 'postgres')
-
- table_factory('country_name', 'name HSTORE, country_code TEXT',
- (({str(x): 'a' for x in range(200)}, 'gb'),))
-
- assert not temp_db_cursor.table_exists('nominatim_properties')
-
- assert migration.migrate(def_config, {}) == 0
-
- assert temp_db_cursor.table_exists('nominatim_properties')
-
- assert 1 == temp_db_cursor.scalar(""" SELECT count(*) FROM nominatim_properties
- WHERE property = 'database_version'""")
-
-
-def test_already_at_version(def_config, property_table):
+def test_already_at_version(temp_db_with_extensions, def_config, property_table):
property_table.set('database_version',
- '{0[0]}.{0[1]}.{0[2]}-{0[3]}'.format(nominatim.version.NOMINATIM_VERSION))
+ str(nominatim_db.version.NOMINATIM_VERSION))
assert migration.migrate(def_config, {}) == 0
-def test_no_migrations_necessary(def_config, temp_db_cursor, property_table,
- monkeypatch):
- oldversion = [x for x in nominatim.version.NOMINATIM_VERSION]
- oldversion[0] -= 1
+def test_run_single_migration(temp_db_with_extensions, def_config, temp_db_cursor,
+ property_table, monkeypatch, postprocess_mock):
+ oldversion = [4, 4, 99, 0]
property_table.set('database_version',
- '{0[0]}.{0[1]}.{0[2]}-{0[3]}'.format(oldversion))
-
- oldversion[0] = 0
- monkeypatch.setattr(migration, '_MIGRATION_FUNCTIONS',
- [(tuple(oldversion), lambda **attr: True)])
-
- assert migration.migrate(def_config, {}) == 0
-
-
-def test_run_single_migration(def_config, temp_db_cursor, property_table,
- monkeypatch, postprocess_mock):
- oldversion = [x for x in nominatim.version.NOMINATIM_VERSION]
- oldversion[0] -= 1
- property_table.set('database_version',
- '{0[0]}.{0[1]}.{0[2]}-{0[3]}'.format(oldversion))
+ str(nominatim_db.version.NominatimVersion(*oldversion)))
done = {'old': False, 'new': False}
def _migration(**_):
""" Dummy migration"""
done['old'] = True
- oldversion[0] = 0
+ oldversion[1] = 0
monkeypatch.setattr(migration, '_MIGRATION_FUNCTIONS',
[(tuple(oldversion), _old_migration),
- (nominatim.version.NOMINATIM_VERSION, _migration)])
+ (nominatim_db.version.NOMINATIM_VERSION, _migration)])
assert migration.migrate(def_config, {}) == 0
assert done['new']
assert not done['old']
- assert property_table.get('database_version') == \
- '{0[0]}.{0[1]}.{0[2]}-{0[3]}'.format(nominatim.version.NOMINATIM_VERSION)
+ assert property_table.get('database_version') == str(nominatim_db.version.NOMINATIM_VERSION)
###### Tests for specific migrations
# Each migration should come with two tests:
# 1. Test that migration from old to new state works as expected.
# 2. Test that the migration can be rerun on the new state without side effects.
-
-
-@pytest.mark.parametrize('in_attr', ('', 'with time zone'))
-def test_import_status_timestamp_change(temp_db_conn, temp_db_cursor,
- table_factory, in_attr):
- table_factory('import_status',
- f"""lastimportdate timestamp {in_attr},
- sequence_id integer,
- indexed boolean""")
-
- migration.import_status_timestamp_change(temp_db_conn)
- temp_db_conn.commit()
-
- assert temp_db_cursor.scalar("""SELECT data_type FROM information_schema.columns
- WHERE table_name = 'import_status'
- and column_name = 'lastimportdate'""")\
- == 'timestamp with time zone'
-
-
-def test_add_nominatim_property_table(temp_db_conn, temp_db_cursor,
- def_config, monkeypatch):
- # Use a r/o user name that always exists
- monkeypatch.setenv('NOMINATIM_DATABASE_WEBUSER', 'postgres')
-
- assert not temp_db_cursor.table_exists('nominatim_properties')
-
- migration.add_nominatim_property_table(temp_db_conn, def_config)
- temp_db_conn.commit()
-
- assert temp_db_cursor.table_exists('nominatim_properties')
-
-
-def test_add_nominatim_property_table_repeat(temp_db_conn, temp_db_cursor,
- def_config, property_table):
- assert temp_db_cursor.table_exists('nominatim_properties')
-
- migration.add_nominatim_property_table(temp_db_conn, def_config)
- temp_db_conn.commit()
-
- assert temp_db_cursor.table_exists('nominatim_properties')
-
-
-def test_change_housenumber_transliteration(temp_db_conn, temp_db_cursor,
- legacy_word_table, placex_table):
- placex_table.add(housenumber='3A')
-
- temp_db_cursor.execute("""CREATE OR REPLACE FUNCTION make_standard_name(name TEXT)
- RETURNS TEXT AS $$ SELECT lower(name) $$ LANGUAGE SQL """)
- temp_db_cursor.execute("""CREATE OR REPLACE FUNCTION getorcreate_housenumber_id(lookup_word TEXT)
- RETURNS INTEGER AS $$ SELECT 4325 $$ LANGUAGE SQL """)
-
- migration.change_housenumber_transliteration(temp_db_conn)
- temp_db_conn.commit()
-
- assert temp_db_cursor.scalar('SELECT housenumber from placex') == '3a'
-
- migration.change_housenumber_transliteration(temp_db_conn)
- temp_db_conn.commit()
-
- assert temp_db_cursor.scalar('SELECT housenumber from placex') == '3a'
-
-
-def test_switch_placenode_geometry_index(temp_db_conn, temp_db_cursor, placex_table):
- temp_db_cursor.execute("""CREATE INDEX idx_placex_adminname
- ON placex (place_id)""")
-
- migration.switch_placenode_geometry_index(temp_db_conn)
- temp_db_conn.commit()
-
- assert temp_db_cursor.index_exists('placex', 'idx_placex_geometry_placenode')
- assert not temp_db_cursor.index_exists('placex', 'idx_placex_adminname')
-
-
-def test_switch_placenode_geometry_index_repeat(temp_db_conn, temp_db_cursor, placex_table):
- temp_db_cursor.execute("""CREATE INDEX idx_placex_geometry_placenode
- ON placex (place_id)""")
-
- migration.switch_placenode_geometry_index(temp_db_conn)
- temp_db_conn.commit()
-
- assert temp_db_cursor.index_exists('placex', 'idx_placex_geometry_placenode')
- assert not temp_db_cursor.index_exists('placex', 'idx_placex_adminname')
- assert temp_db_cursor.scalar("""SELECT indexdef from pg_indexes
- WHERE tablename = 'placex'
- and indexname = 'idx_placex_geometry_placenode'
- """).endswith('(place_id)')
-
-
-def test_install_legacy_tokenizer(temp_db_conn, temp_db_cursor, project_env,
- property_table, table_factory, monkeypatch,
- tmp_path):
- table_factory('placex', 'place_id BIGINT')
- table_factory('location_property_osmline', 'place_id BIGINT')
-
- # Setting up the tokenizer is problematic
- class MiniTokenizer:
- def migrate_database(self, config):
- pass
-
- monkeypatch.setattr(migration.tokenizer_factory, 'create_tokenizer',
- lambda cfg, **kwargs: MiniTokenizer())
-
- migration.install_legacy_tokenizer(temp_db_conn, project_env)
- temp_db_conn.commit()
-
-
-
-def test_install_legacy_tokenizer_repeat(temp_db_conn, temp_db_cursor,
- def_config, property_table):
-
- property_table.set('tokenizer', 'dummy')
- migration.install_legacy_tokenizer(temp_db_conn, def_config)
- temp_db_conn.commit()
-
-
-def test_create_tiger_housenumber_index(temp_db_conn, temp_db_cursor, table_factory):
- table_factory('location_property_tiger',
- 'parent_place_id BIGINT, startnumber INT, endnumber INT')
-
- migration.create_tiger_housenumber_index(temp_db_conn)
- temp_db_conn.commit()
-
- if temp_db_conn.server_version_tuple() >= (11, 0, 0):
- assert temp_db_cursor.index_exists('location_property_tiger',
- 'idx_location_property_tiger_housenumber_migrated')
-
- migration.create_tiger_housenumber_index(temp_db_conn)
- temp_db_conn.commit()
-# SPDX-License-Identifier: GPL-2.0-only
+# SPDX-License-Identifier: GPL-3.0-or-later
#
# This file is part of Nominatim. (https://nominatim.org)
#
-# Copyright (C) 2022 by the Nominatim developer community.
+# Copyright (C) 2024 by the Nominatim developer community.
# For a full list of authors see the git log.
"""
Tests for functions to maintain the artificial postcode table.
import pytest
-from nominatim.tools import postcodes
-from nominatim.data import country_info
+from nominatim_db.tools import postcodes
+from nominatim_db.data import country_info
import dummy_tokenizer
class MockPostcodeTable:
country_code, postcode,
geometry)
VALUES (nextval('seq_place'), 1, %s, %s,
- 'SRID=4326;POINT(%s %s)')""",
+ ST_SetSRID(ST_MakePoint(%s, %s), 4326))""",
(country, postcode, x, y))
self.conn.commit()
-# SPDX-License-Identifier: GPL-2.0-only
+# SPDX-License-Identifier: GPL-3.0-or-later
#
# This file is part of Nominatim. (https://nominatim.org)
#
-# Copyright (C) 2022 by the Nominatim developer community.
+# Copyright (C) 2024 by the Nominatim developer community.
# For a full list of authors see the git log.
"""
Test for various refresh functions.
import pytest
-from nominatim.tools import refresh
+from nominatim_db.tools import refresh
+from nominatim_db.db.connection import postgis_version_tuple
def test_refresh_import_wikipedia_not_existing(dsn):
assert refresh.import_wikipedia_articles(dsn, Path('.')) == 1
def test_refresh_import_secondary_importance_testdb(dsn, src_dir, temp_db_conn, temp_db_cursor):
temp_db_cursor.execute('CREATE EXTENSION postgis')
+ temp_db_cursor.execute('CREATE EXTENSION postgis_raster')
+ assert refresh.import_secondary_importance(dsn, src_dir / 'test' / 'testdb') == 0
- if temp_db_conn.postgis_version_tuple()[0] < 3:
- assert refresh.import_secondary_importance(dsn, src_dir / 'test' / 'testdb') > 0
- else:
- temp_db_cursor.execute('CREATE EXTENSION postgis_raster')
- assert refresh.import_secondary_importance(dsn, src_dir / 'test' / 'testdb') == 0
-
- assert temp_db_conn.table_exists('secondary_importance')
+ assert temp_db_cursor.table_exists('secondary_importance')
@pytest.mark.parametrize("replace", (True, False))
def test_refresh_import_wikipedia(dsn, src_dir, table_factory, temp_db_cursor, replace):
if replace:
- table_factory('wikipedia_article')
- table_factory('wikipedia_redirect')
+ table_factory('wikimedia_importance')
# use the small wikipedia file for the API testdb
assert refresh.import_wikipedia_articles(dsn, src_dir / 'test' / 'testdb') == 0
- assert temp_db_cursor.table_rows('wikipedia_article') > 0
- assert temp_db_cursor.table_rows('wikipedia_redirect') > 0
+ assert temp_db_cursor.table_rows('wikimedia_importance') > 0
def test_recompute_importance(placex_table, table_factory, temp_db_conn, temp_db_cursor):
temp_db_cursor.execute("""CREATE OR REPLACE FUNCTION compute_importance(extratags HSTORE,
country_code varchar(2),
- osm_type varchar(1), osm_id BIGINT,
+ rank_search SMALLINT,
centroid GEOMETRY,
OUT importance FLOAT,
OUT wikipedia TEXT)
-# SPDX-License-Identifier: GPL-2.0-only
+# SPDX-License-Identifier: GPL-3.0-or-later
#
# This file is part of Nominatim. (https://nominatim.org)
#
-# Copyright (C) 2022 by the Nominatim developer community.
+# Copyright (C) 2024 by the Nominatim developer community.
# For a full list of authors see the git log.
"""
Tests for function for importing address ranks.
import pytest
-from nominatim.tools.refresh import load_address_levels, load_address_levels_from_config
+from nominatim_db.tools.refresh import load_address_levels, load_address_levels_from_config
def test_load_ranks_def_config(temp_db_conn, temp_db_cursor, def_config):
load_address_levels_from_config(temp_db_conn, def_config)
-# SPDX-License-Identifier: GPL-2.0-only
+# SPDX-License-Identifier: GPL-3.0-or-later
#
# This file is part of Nominatim. (https://nominatim.org)
#
-# Copyright (C) 2022 by the Nominatim developer community.
+# Copyright (C) 2024 by the Nominatim developer community.
# For a full list of authors see the git log.
"""
Tests for creating PL/pgSQL functions for Nominatim.
"""
import pytest
-from nominatim.tools.refresh import create_functions
+from nominatim_db.tools.refresh import create_functions
class TestCreateFunctions:
@pytest.fixture(autouse=True)
+++ /dev/null
-# SPDX-License-Identifier: GPL-2.0-only
-#
-# This file is part of Nominatim. (https://nominatim.org)
-#
-# Copyright (C) 2022 by the Nominatim developer community.
-# For a full list of authors see the git log.
-"""
-Tests for setting up the website scripts.
-"""
-import subprocess
-
-import pytest
-
-from nominatim.tools import refresh
-
-@pytest.fixture
-def test_script(tmp_path):
- (tmp_path / 'php').mkdir()
-
- website_dir = (tmp_path / 'php' / 'website')
- website_dir.mkdir()
-
- def _create_file(code):
- outfile = website_dir / 'reverse-only-search.php'
- outfile.write_text('<?php\n{}\n'.format(code), 'utf-8')
-
- return _create_file
-
-
-@pytest.fixture
-def run_website_script(tmp_path, project_env, temp_db_conn):
- project_env.lib_dir.php = tmp_path / 'php'
-
- def _runner():
- refresh.setup_website(tmp_path, project_env, temp_db_conn)
-
- proc = subprocess.run(['/usr/bin/env', 'php', '-Cq',
- tmp_path / 'search.php'], check=False)
-
- return proc.returncode
-
- return _runner
-
-
-def test_basedir_created(tmp_path, project_env, temp_db_conn):
- webdir = tmp_path / 'website'
-
- assert not webdir.exists()
-
- refresh.setup_website(webdir, project_env, temp_db_conn)
-
- assert webdir.exists()
-
-
-@pytest.mark.parametrize("setting,retval", (('yes', 10), ('no', 20)))
-def test_setup_website_check_bool(monkeypatch, test_script, run_website_script,
- setting, retval):
- monkeypatch.setenv('NOMINATIM_CORS_NOACCESSCONTROL', setting)
-
- test_script('exit(CONST_NoAccessControl ? 10 : 20);')
-
- assert run_website_script() == retval
-
-
-@pytest.mark.parametrize("setting", (0, 10, 99067))
-def test_setup_website_check_int(monkeypatch, test_script, run_website_script, setting):
- monkeypatch.setenv('NOMINATIM_LOOKUP_MAX_COUNT', str(setting))
-
- test_script('exit(CONST_Places_Max_ID_count == {} ? 10 : 20);'.format(setting))
-
- assert run_website_script() == 10
-
-
-def test_setup_website_check_empty_str(monkeypatch, test_script, run_website_script):
- monkeypatch.setenv('NOMINATIM_DEFAULT_LANGUAGE', '')
-
- test_script('exit(CONST_Default_Language === false ? 10 : 20);')
-
- assert run_website_script() == 10
-
-
-def test_setup_website_check_str(monkeypatch, test_script, run_website_script):
- monkeypatch.setenv('NOMINATIM_DEFAULT_LANGUAGE', 'ffde 2')
-
- test_script('exit(CONST_Default_Language === "ffde 2" ? 10 : 20);')
-
- assert run_website_script() == 10
-
-
-def test_relative_log_file(project_env, monkeypatch, test_script, run_website_script):
- monkeypatch.setenv('NOMINATIM_LOG_FILE', 'access.log')
-
- expected_file = str(project_env.project_dir / 'access.log')
- test_script(f'exit(CONST_Log_File === "{expected_file}" ? 10 : 20);')
-
- assert run_website_script() == 10
-
--- /dev/null
+# SPDX-License-Identifier: GPL-3.0-or-later
+#
+# This file is part of Nominatim. (https://nominatim.org)
+#
+# Copyright (C) 2024 by the Nominatim developer community.
+# For a full list of authors see the git log.
+"""
+Tests for correctly assigning wikipedia pages to places.
+"""
+import gzip
+import csv
+
+import pytest
+
+from nominatim_db.tools.refresh import import_wikipedia_articles, recompute_importance, create_functions
+
+@pytest.fixture
+def wiki_csv(tmp_path, sql_preprocessor):
+ def _import(data):
+ with gzip.open(tmp_path / 'wikimedia-importance.csv.gz', mode='wt') as fd:
+ writer = csv.DictWriter(fd, fieldnames=['language', 'type', 'title',
+ 'importance', 'wikidata_id'],
+ delimiter='\t', quotechar='|')
+ writer.writeheader()
+ for lang, title, importance, wd in data:
+ writer.writerow({'language': lang, 'type': 'a',
+ 'title': title, 'importance': str(importance),
+ 'wikidata_id': wd})
+ return tmp_path
+
+ return _import
+
+
+@pytest.mark.parametrize('extra', [{'wikipedia:en': 'Test'},
+ {'wikipedia': 'en:Test'},
+ {'wikidata': 'Q123'}])
+def test_wikipedia(dsn, temp_db_conn, temp_db_cursor, def_config, wiki_csv, placex_table, extra):
+ import_wikipedia_articles(dsn, wiki_csv([('en', 'Test', 0.3, 'Q123')]))
+ create_functions(temp_db_conn, def_config)
+
+ content = temp_db_cursor.row_set(
+ 'SELECT language, title, importance, wikidata FROM wikimedia_importance')
+ assert content == set([('en', 'Test', 0.3, 'Q123')])
+
+ placex_table.add(osm_id=12, extratags=extra)
+
+ recompute_importance(temp_db_conn)
+
+ content = temp_db_cursor.row_set('SELECT wikipedia, importance FROM placex')
+ assert content == set([('en:Test', 0.3)])
+
+
+def test_wikipedia_no_match(dsn, temp_db_conn, temp_db_cursor, def_config, wiki_csv,
+ placex_table):
+ import_wikipedia_articles(dsn, wiki_csv([('de', 'Test', 0.3, 'Q123')]))
+ create_functions(temp_db_conn, def_config)
+
+ placex_table.add(osm_id=12, extratags={'wikipedia': 'en:Test'}, rank_search=10)
+
+ recompute_importance(temp_db_conn)
+
+ content = temp_db_cursor.row_set('SELECT wikipedia, importance FROM placex')
+ assert list(content) == [(None, pytest.approx(0.26667666))]
-# SPDX-License-Identifier: GPL-2.0-only
+# SPDX-License-Identifier: GPL-3.0-or-later
#
# This file is part of Nominatim. (https://nominatim.org)
#
-# Copyright (C) 2022 by the Nominatim developer community.
+# Copyright (C) 2024 by the Nominatim developer community.
# For a full list of authors see the git log.
"""
Tests for replication functionality.
import pytest
from osmium.replication.server import OsmosisState
-import nominatim.tools.replication
-import nominatim.db.status as status
-from nominatim.errors import UsageError
+import nominatim_db.tools.replication
+import nominatim_db.db.status as status
+from nominatim_db.errors import UsageError
OSM_NODE_DATA = """\
<osm version="0.6" generator="OpenStreetMap server" copyright="OpenStreetMap and contributors" attribution="http://www.openstreetmap.org/copyright" license="http://opendatacommons.org/licenses/odbl/1-0/">
def test_init_replication_bad_base_url(monkeypatch, place_row, temp_db_conn):
place_row(osm_type='N', osm_id=100)
- monkeypatch.setattr(nominatim.db.status, "get_url", lambda u: OSM_NODE_DATA)
+ monkeypatch.setattr(status, "get_url", lambda u: OSM_NODE_DATA)
with pytest.raises(UsageError, match="Failed to reach replication service"):
- nominatim.tools.replication.init_replication(temp_db_conn, 'https://test.io')
+ nominatim_db.tools.replication.init_replication(temp_db_conn, 'https://test.io')
def test_init_replication_success(monkeypatch, place_row, temp_db_conn, temp_db_cursor):
place_row(osm_type='N', osm_id=100)
- monkeypatch.setattr(nominatim.db.status, "get_url", lambda u: OSM_NODE_DATA)
- monkeypatch.setattr(nominatim.tools.replication.ReplicationServer,
+ monkeypatch.setattr(status, "get_url", lambda u: OSM_NODE_DATA)
+ monkeypatch.setattr(nominatim_db.tools.replication.ReplicationServer,
"timestamp_to_sequence",
lambda self, date: 234)
- nominatim.tools.replication.init_replication(temp_db_conn, 'https://test.io')
+ nominatim_db.tools.replication.init_replication(temp_db_conn, 'https://test.io')
expected_date = dt.datetime.strptime('2006-01-27T19:09:10', status.ISODATE_FORMAT)\
.replace(tzinfo=dt.timezone.utc)
### checking for updates
def test_check_for_updates_empty_status_table(temp_db_conn):
- assert nominatim.tools.replication.check_for_updates(temp_db_conn, 'https://test.io') == 254
+ assert nominatim_db.tools.replication.check_for_updates(temp_db_conn, 'https://test.io') == 254
def test_check_for_updates_seq_not_set(temp_db_conn):
status.set_status(temp_db_conn, dt.datetime.now(dt.timezone.utc))
- assert nominatim.tools.replication.check_for_updates(temp_db_conn, 'https://test.io') == 254
+ assert nominatim_db.tools.replication.check_for_updates(temp_db_conn, 'https://test.io') == 254
def test_check_for_updates_no_state(monkeypatch, temp_db_conn):
status.set_status(temp_db_conn, dt.datetime.now(dt.timezone.utc), seq=345)
- monkeypatch.setattr(nominatim.tools.replication.ReplicationServer,
+ monkeypatch.setattr(nominatim_db.tools.replication.ReplicationServer,
"get_state_info", lambda self: None)
- assert nominatim.tools.replication.check_for_updates(temp_db_conn, 'https://test.io') == 253
+ assert nominatim_db.tools.replication.check_for_updates(temp_db_conn, 'https://test.io') == 253
@pytest.mark.parametrize("server_sequence,result", [(344, 2), (345, 2), (346, 0)])
date = dt.datetime.now(dt.timezone.utc)
status.set_status(temp_db_conn, date, seq=345)
- monkeypatch.setattr(nominatim.tools.replication.ReplicationServer,
+ monkeypatch.setattr(nominatim_db.tools.replication.ReplicationServer,
"get_state_info",
lambda self: OsmosisState(server_sequence, date))
- assert nominatim.tools.replication.check_for_updates(temp_db_conn, 'https://test.io') == result
+ assert nominatim_db.tools.replication.check_for_updates(temp_db_conn, 'https://test.io') == result
### updating
import_file=tmpdir / 'foo.osm',
max_diff_size=1)
-def test_update_empty_status_table(temp_db_conn):
+def test_update_empty_status_table(dsn):
with pytest.raises(UsageError):
- nominatim.tools.replication.update(temp_db_conn, {})
+ nominatim_db.tools.replication.update(dsn, {})
-def test_update_already_indexed(temp_db_conn):
+def test_update_already_indexed(temp_db_conn, dsn):
status.set_status(temp_db_conn, dt.datetime.now(dt.timezone.utc), seq=34, indexed=False)
- assert nominatim.tools.replication.update(temp_db_conn, dict(indexed_only=True)) \
- == nominatim.tools.replication.UpdateState.MORE_PENDING
+ assert nominatim_db.tools.replication.update(dsn, dict(indexed_only=True)) \
+ == nominatim_db.tools.replication.UpdateState.MORE_PENDING
-def test_update_no_data_no_sleep(monkeypatch, temp_db_conn, update_options):
+def test_update_no_data_no_sleep(monkeypatch, temp_db_conn, dsn, update_options):
date = dt.datetime.now(dt.timezone.utc) - dt.timedelta(days=1)
status.set_status(temp_db_conn, date, seq=34)
- monkeypatch.setattr(nominatim.tools.replication.ReplicationServer,
+ monkeypatch.setattr(nominatim_db.tools.replication.ReplicationServer,
"apply_diffs",
lambda *args, **kwargs: None)
sleeptime = []
monkeypatch.setattr(time, 'sleep', sleeptime.append)
- assert nominatim.tools.replication.update(temp_db_conn, update_options) \
- == nominatim.tools.replication.UpdateState.NO_CHANGES
+ assert nominatim_db.tools.replication.update(dsn, update_options) \
+ == nominatim_db.tools.replication.UpdateState.NO_CHANGES
assert not sleeptime
-def test_update_no_data_sleep(monkeypatch, temp_db_conn, update_options):
+def test_update_no_data_sleep(monkeypatch, temp_db_conn, dsn, update_options):
date = dt.datetime.now(dt.timezone.utc) - dt.timedelta(minutes=30)
status.set_status(temp_db_conn, date, seq=34)
- monkeypatch.setattr(nominatim.tools.replication.ReplicationServer,
+ monkeypatch.setattr(nominatim_db.tools.replication.ReplicationServer,
"apply_diffs",
lambda *args, **kwargs: None)
sleeptime = []
monkeypatch.setattr(time, 'sleep', sleeptime.append)
- assert nominatim.tools.replication.update(temp_db_conn, update_options) \
- == nominatim.tools.replication.UpdateState.NO_CHANGES
+ assert nominatim_db.tools.replication.update(dsn, update_options) \
+ == nominatim_db.tools.replication.UpdateState.NO_CHANGES
assert len(sleeptime) == 1
assert sleeptime[0] < 3600
-# SPDX-License-Identifier: GPL-2.0-only
+# SPDX-License-Identifier: GPL-3.0-or-later
#
# This file is part of Nominatim. (https://nominatim.org)
#
-# Copyright (C) 2022 by the Nominatim developer community.
+# Copyright (C) 2024 by the Nominatim developer community.
# For a full list of authors see the git log.
"""
Tests for methods of the SPCsvLoader class.
"""
import pytest
-from nominatim.errors import UsageError
-from nominatim.tools.special_phrases.sp_csv_loader import SPCsvLoader
-from nominatim.tools.special_phrases.special_phrase import SpecialPhrase
+from nominatim_db.errors import UsageError
+from nominatim_db.tools.special_phrases.sp_csv_loader import SPCsvLoader
+from nominatim_db.tools.special_phrases.special_phrase import SpecialPhrase
@pytest.fixture
def sp_csv_loader(src_dir):
-# SPDX-License-Identifier: GPL-2.0-only
+# SPDX-License-Identifier: GPL-3.0-or-later
#
# This file is part of Nominatim. (https://nominatim.org)
#
-# Copyright (C) 2022 by the Nominatim developer community.
+# Copyright (C) 2024 by the Nominatim developer community.
# For a full list of authors see the git log.
"""
Tests for methods of the SPWikiLoader class.
"""
import pytest
-from nominatim.tools.special_phrases.sp_wiki_loader import SPWikiLoader
+from nominatim_db.tools.special_phrases.sp_wiki_loader import SPWikiLoader
@pytest.fixture
xml_test_content = src_dir / 'test' / 'testdata' / 'special_phrases_test_content.txt'
return xml_test_content.read_text()
- monkeypatch.setattr('nominatim.tools.special_phrases.sp_wiki_loader._get_wiki_content',
+ monkeypatch.setattr('nominatim_db.tools.special_phrases.sp_wiki_loader._get_wiki_content',
_mock_wiki_content)
return loader
-# SPDX-License-Identifier: GPL-2.0-only
+# SPDX-License-Identifier: GPL-3.0-or-later
#
# This file is part of Nominatim. (https://nominatim.org)
#
-# Copyright (C) 2022 by the Nominatim developer community.
+# Copyright (C) 2024 by the Nominatim developer community.
# For a full list of authors see the git log.
"""
Test for tiger data function
from textwrap import dedent
import pytest
+import pytest_asyncio
-from nominatim.tools import tiger_data
-from nominatim.errors import UsageError
+from nominatim_db.db.connection import execute_scalar
+from nominatim_db.tools import tiger_data, freeze
+from nominatim_db.errors import UsageError
class MockTigerTable:
token_info JSONB,
postcode TEXT)""")
+ # We need this table to determine if the database is frozen or not
+ cur.execute("CREATE TABLE place (number INTEGER)")
+
def count(self):
- with self.conn.cursor() as cur:
- return cur.scalar("SELECT count(*) FROM tiger")
+ return execute_scalar(self.conn, "SELECT count(*) FROM tiger")
def row(self):
with self.conn.cursor() as cur:
@pytest.mark.parametrize("threads", (1, 5))
-def test_add_tiger_data(def_config, src_dir, tiger_table, tokenizer_mock, threads):
- tiger_data.add_tiger_data(str(src_dir / 'test' / 'testdb' / 'tiger'),
- def_config, threads, tokenizer_mock())
+@pytest.mark.asyncio
+async def test_add_tiger_data(def_config, src_dir, tiger_table, tokenizer_mock, threads):
+ await tiger_data.add_tiger_data(str(src_dir / 'test' / 'testdb' / 'tiger'),
+ def_config, threads, tokenizer_mock())
assert tiger_table.count() == 6213
-def test_add_tiger_data_no_files(def_config, tiger_table, tokenizer_mock,
+@pytest.mark.asyncio
+async def test_add_tiger_data_database_frozen(def_config, temp_db_conn, tiger_table, tokenizer_mock,
+ tmp_path):
+ freeze.drop_update_tables(temp_db_conn)
+
+ with pytest.raises(UsageError) as excinfo:
+ await tiger_data.add_tiger_data(str(tmp_path), def_config, 1, tokenizer_mock())
+
+ assert "database frozen" in str(excinfo.value)
+
+ assert tiger_table.count() == 0
+
+
+@pytest.mark.asyncio
+async def test_add_tiger_data_no_files(def_config, tiger_table, tokenizer_mock,
tmp_path):
- tiger_data.add_tiger_data(str(tmp_path), def_config, 1, tokenizer_mock())
+ await tiger_data.add_tiger_data(str(tmp_path), def_config, 1, tokenizer_mock())
assert tiger_table.count() == 0
-def test_add_tiger_data_bad_file(def_config, tiger_table, tokenizer_mock,
+@pytest.mark.asyncio
+async def test_add_tiger_data_bad_file(def_config, tiger_table, tokenizer_mock,
tmp_path):
sqlfile = tmp_path / '1010.csv'
sqlfile.write_text("""Random text""")
- tiger_data.add_tiger_data(str(tmp_path), def_config, 1, tokenizer_mock())
+ await tiger_data.add_tiger_data(str(tmp_path), def_config, 1, tokenizer_mock())
assert tiger_table.count() == 0
-def test_add_tiger_data_hnr_nan(def_config, tiger_table, tokenizer_mock,
+@pytest.mark.asyncio
+async def test_add_tiger_data_hnr_nan(def_config, tiger_table, tokenizer_mock,
csv_factory, tmp_path):
csv_factory('file1', hnr_from=99)
csv_factory('file2', hnr_from='L12')
csv_factory('file3', hnr_to='12.4')
- tiger_data.add_tiger_data(str(tmp_path), def_config, 1, tokenizer_mock())
+ await tiger_data.add_tiger_data(str(tmp_path), def_config, 1, tokenizer_mock())
assert tiger_table.count() == 1
- assert tiger_table.row()['start'] == 99
+ assert tiger_table.row().start == 99
@pytest.mark.parametrize("threads", (1, 5))
-def test_add_tiger_data_tarfile(def_config, tiger_table, tokenizer_mock,
+@pytest.mark.asyncio
+async def test_add_tiger_data_tarfile(def_config, tiger_table, tokenizer_mock,
tmp_path, src_dir, threads):
tar = tarfile.open(str(tmp_path / 'sample.tar.gz'), "w:gz")
tar.add(str(src_dir / 'test' / 'testdb' / 'tiger' / '01001.csv'))
tar.close()
- tiger_data.add_tiger_data(str(tmp_path / 'sample.tar.gz'), def_config, threads,
- tokenizer_mock())
+ await tiger_data.add_tiger_data(str(tmp_path / 'sample.tar.gz'), def_config, threads,
+ tokenizer_mock())
assert tiger_table.count() == 6213
-def test_add_tiger_data_bad_tarfile(def_config, tiger_table, tokenizer_mock,
+@pytest.mark.asyncio
+async def test_add_tiger_data_bad_tarfile(def_config, tiger_table, tokenizer_mock,
tmp_path):
tarfile = tmp_path / 'sample.tar.gz'
tarfile.write_text("""Random text""")
with pytest.raises(UsageError):
- tiger_data.add_tiger_data(str(tarfile), def_config, 1, tokenizer_mock())
+ await tiger_data.add_tiger_data(str(tarfile), def_config, 1, tokenizer_mock())
-def test_add_tiger_data_empty_tarfile(def_config, tiger_table, tokenizer_mock,
+@pytest.mark.asyncio
+async def test_add_tiger_data_empty_tarfile(def_config, tiger_table, tokenizer_mock,
tmp_path):
tar = tarfile.open(str(tmp_path / 'sample.tar.gz'), "w:gz")
tar.add(__file__)
tar.close()
- tiger_data.add_tiger_data(str(tmp_path / 'sample.tar.gz'), def_config, 1,
- tokenizer_mock())
+ await tiger_data.add_tiger_data(str(tmp_path / 'sample.tar.gz'), def_config, 1,
+ tokenizer_mock())
assert tiger_table.count() == 0
-# SPDX-License-Identifier: GPL-2.0-only
+# SPDX-License-Identifier: GPL-3.0-or-later
#
# This file is part of Nominatim. (https://nominatim.org)
#
-# Copyright (C) 2022 by the Nominatim developer community.
+# Copyright (C) 2024 by the Nominatim developer community.
# For a full list of authors see the git log.
"""
Tests for centroid computation.
"""
import pytest
-from nominatim.utils.centroid import PointsCentroid
+from nominatim_db.utils.centroid import PointsCentroid
def test_empty_set():
c = PointsCentroid()
--- /dev/null
+# SPDX-License-Identifier: GPL-3.0-or-later
+#
+# This file is part of Nominatim. (https://nominatim.org)
+#
+# Copyright (C) 2024 by the Nominatim developer community.
+# For a full list of authors see the git log.
+"""
+Tests for the streaming JSON writer.
+"""
+import json
+
+import pytest
+
+from nominatim_api.utils.json_writer import JsonWriter
+
+
+@pytest.mark.parametrize("inval,outstr", [(None, 'null'),
+ (True, 'true'), (False, 'false'),
+ (23, '23'), (0, '0'), (-1.3, '-1.3'),
+ ('g\nä', '"g\\nä"'), ('"', '"\\\""'),
+ ({}, '{}'), ([], '[]')])
+def test_simple_value(inval, outstr):
+ writer = JsonWriter()
+ writer.value(inval)
+
+ assert writer() == outstr
+ json.loads(writer())
+
+
+def test_empty_array():
+ writer = JsonWriter().start_array().end_array()
+
+ assert writer() == '[]'
+ json.loads(writer())
+
+
+def test_array_with_single_value():
+ writer = JsonWriter().start_array().value(None).end_array()
+
+ assert writer() == '[null]'
+ json.loads(writer())
+
+
+@pytest.mark.parametrize("invals,outstr", [((1, ), '[1]'),
+ (('a', 'b'), '["a","b"]')])
+def test_array_with_data(invals, outstr):
+ writer = JsonWriter()
+
+ writer.start_array()
+ for ival in invals:
+ writer.value(ival).next()
+ writer.end_array()
+
+ assert writer() == outstr
+ json.loads(writer())
+
+
+def test_empty_object():
+ writer = JsonWriter().start_object().end_object()
+
+ assert writer() == '{}'
+ json.loads(writer())
+
+
+def test_object_single_entry():
+ writer = JsonWriter()\
+ .start_object()\
+ .key('something')\
+ .value(5)\
+ .end_object()
+
+ assert writer() == '{"something":5}'
+ json.loads(writer())
+
+
+def test_object_many_values():
+ writer = JsonWriter()\
+ .start_object()\
+ .keyval('foo', None)\
+ .keyval('bar', {})\
+ .keyval('baz', 'b\taz')\
+ .end_object()
+
+ assert writer() == '{"foo":null,"bar":{},"baz":"b\\taz"}'
+ json.loads(writer())
+
+
+def test_object_many_values_without_none():
+ writer = JsonWriter()\
+ .start_object()\
+ .keyval_not_none('foo', 0)\
+ .keyval_not_none('bar', None)\
+ .keyval_not_none('baz', '')\
+ .keyval_not_none('eve', False,
+                                 transform=lambda v: 'yes' if v else 'no')\
+ .end_object()
+
+ assert writer() == '{"foo":0,"baz":"","eve":"no"}'
+ json.loads(writer())
+
+
+def test_raw_output():
+ writer = JsonWriter()\
+ .start_array()\
+ .raw('{ "nicely": "formatted here" }').next()\
+ .value(1)\
+ .end_array()
+
+ assert writer() == '[{ "nicely": "formatted here" },1]'
+++ /dev/null
-CREATE OR REPLACE FUNCTION test_getorcreate_amenity(lookup_word TEXT, normalized_word TEXT,
- lookup_class text, lookup_type text)
- RETURNS INTEGER
- AS $$
-DECLARE
- lookup_token TEXT;
- return_word_id INTEGER;
-BEGIN
- lookup_token := ' '||trim(lookup_word);
- SELECT min(word_id) FROM word
- WHERE word_token = lookup_token and word = normalized_word
- and class = lookup_class and type = lookup_type
- INTO return_word_id;
- IF return_word_id IS NULL THEN
- return_word_id := nextval('seq_word');
- INSERT INTO word VALUES (return_word_id, lookup_token, normalized_word,
- lookup_class, lookup_type, null, 0);
- END IF;
- RETURN return_word_id;
-END;
-$$
-LANGUAGE plpgsql;
-
-
-CREATE OR REPLACE FUNCTION test_getorcreate_amenityoperator(lookup_word TEXT,
- normalized_word TEXT,
- lookup_class text,
- lookup_type text,
- op text)
- RETURNS INTEGER
- AS $$
-DECLARE
- lookup_token TEXT;
- return_word_id INTEGER;
-BEGIN
- lookup_token := ' '||trim(lookup_word);
- SELECT min(word_id) FROM word
- WHERE word_token = lookup_token and word = normalized_word
- and class = lookup_class and type = lookup_type and operator = op
- INTO return_word_id;
- IF return_word_id IS NULL THEN
- return_word_id := nextval('seq_word');
- INSERT INTO word VALUES (return_word_id, lookup_token, normalized_word,
- lookup_class, lookup_type, null, 0, op);
- END IF;
- RETURN return_word_id;
-END;
-$$
-LANGUAGE plpgsql;
-
-SELECT test_getorcreate_amenity(make_standard_name('Aerodrome'), 'aerodrome', 'aeroway', 'aerodrome');
-SELECT test_getorcreate_amenity(make_standard_name('Aerodromes'), 'aerodromes', 'aeroway', 'aerodrome');
-SELECT test_getorcreate_amenityoperator(make_standard_name('Aerodrome in'), 'aerodrome in', 'aeroway', 'aerodrome', 'in');
-SELECT test_getorcreate_amenityoperator(make_standard_name('Aerodromes in'), 'aerodromes in', 'aeroway', 'aerodrome', 'in');
-SELECT test_getorcreate_amenityoperator(make_standard_name('Aerodrome near'), 'aerodrome near', 'aeroway', 'aerodrome', 'near');
-SELECT test_getorcreate_amenityoperator(make_standard_name('Aerodromes near'), 'aerodromes near', 'aeroway', 'aerodrome', 'near');
-SELECT test_getorcreate_amenity(make_standard_name('Airport'), 'airport', 'aeroway', 'aerodrome');
-SELECT test_getorcreate_amenity(make_standard_name('Airports'), 'airports', 'aeroway', 'aerodrome');
-SELECT test_getorcreate_amenityoperator(make_standard_name('Airport in'), 'airport in', 'aeroway', 'aerodrome', 'in');
-SELECT test_getorcreate_amenityoperator(make_standard_name('Airports in'), 'airports in', 'aeroway', 'aerodrome', 'in');
-SELECT test_getorcreate_amenityoperator(make_standard_name('Airport near'), 'airport near', 'aeroway', 'aerodrome', 'near');
-SELECT test_getorcreate_amenityoperator(make_standard_name('Airports near'), 'airports near', 'aeroway', 'aerodrome', 'near');
-SELECT test_getorcreate_amenity(make_standard_name('Bar'), 'bar', 'amenity', 'bar');
-SELECT test_getorcreate_amenity(make_standard_name('Bars'), 'bars', 'amenity', 'bar');
-SELECT test_getorcreate_amenityoperator(make_standard_name('Bar in'), 'bar in', 'amenity', 'bar', 'in');
-SELECT test_getorcreate_amenityoperator(make_standard_name('Bars in'), 'bars in', 'amenity', 'bar', 'in');
-SELECT test_getorcreate_amenityoperator(make_standard_name('Bar near'), 'bar near', 'amenity', 'bar', 'near');
-SELECT test_getorcreate_amenityoperator(make_standard_name('Bars near'), 'bars near', 'amenity', 'bar', 'near');
-SELECT test_getorcreate_amenity(make_standard_name('Bar'), 'bar', 'amenity', 'pub');
-SELECT test_getorcreate_amenity(make_standard_name('Bars'), 'bars', 'amenity', 'pub');
-SELECT test_getorcreate_amenityoperator(make_standard_name('Bar in'), 'bar in', 'amenity', 'pub', 'in');
-SELECT test_getorcreate_amenityoperator(make_standard_name('Bars in'), 'bars in', 'amenity', 'pub', 'in');
-SELECT test_getorcreate_amenityoperator(make_standard_name('Bar near'), 'bar near', 'amenity', 'pub', 'near');
-SELECT test_getorcreate_amenityoperator(make_standard_name('Bars near'), 'bars near', 'amenity', 'pub', 'near');
-SELECT test_getorcreate_amenity(make_standard_name('Food'), 'food', 'amenity', 'restaurant');
-SELECT test_getorcreate_amenity(make_standard_name('Food'), 'food', 'amenity', 'restaurant');
-SELECT test_getorcreate_amenityoperator(make_standard_name('Food in'), 'food in', 'amenity', 'restaurant', 'in');
-SELECT test_getorcreate_amenityoperator(make_standard_name('Food in'), 'food in', 'amenity', 'restaurant', 'in');
-SELECT test_getorcreate_amenityoperator(make_standard_name('Food near'), 'food near', 'amenity', 'restaurant', 'near');
-SELECT test_getorcreate_amenityoperator(make_standard_name('Food near'), 'food near', 'amenity', 'restaurant', 'near');
-SELECT test_getorcreate_amenity(make_standard_name('Pub'), 'pub', 'amenity', 'bar');
-SELECT test_getorcreate_amenity(make_standard_name('Pubs'), 'pubs', 'amenity', 'bar');
-SELECT test_getorcreate_amenityoperator(make_standard_name('Pub in'), 'pub in', 'amenity', 'bar', 'in');
-SELECT test_getorcreate_amenityoperator(make_standard_name('Pubs in'), 'pubs in', 'amenity', 'bar', 'in');
-SELECT test_getorcreate_amenityoperator(make_standard_name('Pub near'), 'pub near', 'amenity', 'bar', 'near');
-SELECT test_getorcreate_amenityoperator(make_standard_name('Pubs near'), 'pubs near', 'amenity', 'bar', 'near');
-SELECT test_getorcreate_amenity(make_standard_name('Pub'), 'pub', 'amenity', 'pub');
-SELECT test_getorcreate_amenity(make_standard_name('Pubs'), 'pubs', 'amenity', 'pub');
-SELECT test_getorcreate_amenityoperator(make_standard_name('Pub in'), 'pub in', 'amenity', 'pub', 'in');
-SELECT test_getorcreate_amenityoperator(make_standard_name('Pubs in'), 'pubs in', 'amenity', 'pub', 'in');
-SELECT test_getorcreate_amenityoperator(make_standard_name('Pub near'), 'pub near', 'amenity', 'pub', 'near');
-SELECT test_getorcreate_amenityoperator(make_standard_name('Pubs near'), 'pubs near', 'amenity', 'pub', 'near');
-SELECT test_getorcreate_amenity(make_standard_name('Restaurant'), 'restaurant', 'amenity', 'restaurant');
-SELECT test_getorcreate_amenity(make_standard_name('Restaurants'), 'restaurants', 'amenity', 'restaurant');
-SELECT test_getorcreate_amenityoperator(make_standard_name('Restaurant in'), 'restaurant in', 'amenity', 'restaurant', 'in');
-SELECT test_getorcreate_amenityoperator(make_standard_name('Restaurants in'), 'restaurants in', 'amenity', 'restaurant', 'in');
-SELECT test_getorcreate_amenityoperator(make_standard_name('Restaurant near'), 'restaurant near', 'amenity', 'restaurant', 'near');
-SELECT test_getorcreate_amenityoperator(make_standard_name('Restaurants near'), 'restaurants near', 'amenity', 'restaurant', 'near');
-SELECT test_getorcreate_amenity(make_standard_name('Mural'), 'mural', 'artwork_type', 'mural');
-SELECT test_getorcreate_amenity(make_standard_name('Murals'), 'murals', 'artwork_type', 'mural');
-SELECT test_getorcreate_amenityoperator(make_standard_name('Mural in'), 'mural in', 'artwork_type', 'mural', 'in');
-SELECT test_getorcreate_amenityoperator(make_standard_name('Murals in'), 'murals in', 'artwork_type', 'mural', 'in');
-SELECT test_getorcreate_amenityoperator(make_standard_name('Mural near'), 'mural near', 'artwork_type', 'mural', 'near');
-SELECT test_getorcreate_amenityoperator(make_standard_name('Murals near'), 'murals near', 'artwork_type', 'mural', 'near');
-SELECT test_getorcreate_amenity(make_standard_name('Sculpture'), 'sculpture', 'artwork_type', 'sculpture');
-SELECT test_getorcreate_amenity(make_standard_name('Sculptures'), 'sculptures', 'artwork_type', 'sculpture');
-SELECT test_getorcreate_amenityoperator(make_standard_name('Sculpture in'), 'sculpture in', 'artwork_type', 'sculpture', 'in');
-SELECT test_getorcreate_amenityoperator(make_standard_name('Sculptures in'), 'sculptures in', 'artwork_type', 'sculpture', 'in');
-SELECT test_getorcreate_amenityoperator(make_standard_name('Sculpture near'), 'sculpture near', 'artwork_type', 'sculpture', 'near');
-SELECT test_getorcreate_amenityoperator(make_standard_name('Sculptures near'), 'sculptures near', 'artwork_type', 'sculpture', 'near');
-SELECT test_getorcreate_amenity(make_standard_name('Statue'), 'statue', 'artwork_type', 'statue');
-SELECT test_getorcreate_amenity(make_standard_name('Statues'), 'statues', 'artwork_type', 'statue');
-SELECT test_getorcreate_amenityoperator(make_standard_name('Statue in'), 'statue in', 'artwork_type', 'statue', 'in');
-SELECT test_getorcreate_amenityoperator(make_standard_name('Statues in'), 'statues in', 'artwork_type', 'statue', 'in');
-SELECT test_getorcreate_amenityoperator(make_standard_name('Statue near'), 'statue near', 'artwork_type', 'statue', 'near');
-SELECT test_getorcreate_amenityoperator(make_standard_name('Statues near'), 'statues near', 'artwork_type', 'statue', 'near');
-SELECT test_getorcreate_amenity(make_standard_name('ATM'), 'atm', 'atm', 'yes');
-SELECT test_getorcreate_amenity(make_standard_name('ATMs'), 'atms', 'atm', 'yes');
-SELECT test_getorcreate_amenityoperator(make_standard_name('ATM in'), 'atm in', 'atm', 'yes', 'in');
-SELECT test_getorcreate_amenityoperator(make_standard_name('ATMs in'), 'atms in', 'atm', 'yes', 'in');
-SELECT test_getorcreate_amenityoperator(make_standard_name('ATM near'), 'atm near', 'atm', 'yes', 'near');
-SELECT test_getorcreate_amenityoperator(make_standard_name('ATMs near'), 'atms near', 'atm', 'yes', 'near');
-SELECT test_getorcreate_amenity(make_standard_name('National Park'), 'national park', 'boundary', 'national_park');
-SELECT test_getorcreate_amenity(make_standard_name('National Parks'), 'national parks', 'boundary', 'national_park');
-SELECT test_getorcreate_amenityoperator(make_standard_name('National Park in'), 'national park in', 'boundary', 'national_park', 'in');
-SELECT test_getorcreate_amenityoperator(make_standard_name('National Parks in'), 'national parks in', 'boundary', 'national_park', 'in');
-SELECT test_getorcreate_amenityoperator(make_standard_name('National Park near'), 'national park near', 'boundary', 'national_park', 'near');
-SELECT test_getorcreate_amenityoperator(make_standard_name('National Parks near'), 'national parks near', 'boundary', 'national_park', 'near');
-SELECT test_getorcreate_amenity(make_standard_name('Changing table'), 'changing table', 'changing_table', 'yes');
-SELECT test_getorcreate_amenity(make_standard_name('Changing tables'), 'changing tables', 'changing_table', 'yes');
-SELECT test_getorcreate_amenityoperator(make_standard_name('Changing table in'), 'changing table in', 'changing_table', 'yes', 'in');
-SELECT test_getorcreate_amenityoperator(make_standard_name('Changing tables in'), 'changing tables in', 'changing_table', 'yes', 'in');
-SELECT test_getorcreate_amenityoperator(make_standard_name('Changing table near'), 'changing table near', 'changing_table', 'yes', 'near');
-SELECT test_getorcreate_amenityoperator(make_standard_name('Changing tables near'), 'changing tables near', 'changing_table', 'yes', 'near');
-SELECT test_getorcreate_amenity(make_standard_name('Roundabout'), 'roundabout', 'junction', 'roundabout');
-SELECT test_getorcreate_amenity(make_standard_name('Roundabouts'), 'roundabouts', 'junction', 'roundabout');
-SELECT test_getorcreate_amenityoperator(make_standard_name('Roundabout in'), 'roundabout in', 'junction', 'roundabout', 'in');
-SELECT test_getorcreate_amenityoperator(make_standard_name('Roundabouts in'), 'roundabouts in', 'junction', 'roundabout', 'in');
-SELECT test_getorcreate_amenityoperator(make_standard_name('Roundabout near'), 'roundabout near', 'junction', 'roundabout', 'near');
-SELECT test_getorcreate_amenityoperator(make_standard_name('Roundabouts near'), 'roundabouts near', 'junction', 'roundabout', 'near');
-SELECT test_getorcreate_amenity(make_standard_name('Plaque'), 'plaque', 'memorial', 'plaque');
-SELECT test_getorcreate_amenity(make_standard_name('Plaques'), 'plaques', 'memorial', 'plaque');
-SELECT test_getorcreate_amenityoperator(make_standard_name('Plaque in'), 'plaque in', 'memorial', 'plaque', 'in');
-SELECT test_getorcreate_amenityoperator(make_standard_name('Plaques in'), 'plaques in', 'memorial', 'plaque', 'in');
-SELECT test_getorcreate_amenityoperator(make_standard_name('Plaque near'), 'plaque near', 'memorial', 'plaque', 'near');
-SELECT test_getorcreate_amenityoperator(make_standard_name('Plaques near'), 'plaques near', 'memorial', 'plaque', 'near');
-SELECT test_getorcreate_amenity(make_standard_name('Statue'), 'statue', 'memorial', 'statue');
-SELECT test_getorcreate_amenity(make_standard_name('Statues'), 'statues', 'memorial', 'statue');
-SELECT test_getorcreate_amenityoperator(make_standard_name('Statue in'), 'statue in', 'memorial', 'statue', 'in');
-SELECT test_getorcreate_amenityoperator(make_standard_name('Statues in'), 'statues in', 'memorial', 'statue', 'in');
-SELECT test_getorcreate_amenityoperator(make_standard_name('Statue near'), 'statue near', 'memorial', 'statue', 'near');
-SELECT test_getorcreate_amenityoperator(make_standard_name('Statues near'), 'statues near', 'memorial', 'statue', 'near');
-SELECT test_getorcreate_amenity(make_standard_name('Stolperstein'), 'stolperstein', 'memorial', 'stolperstein');
-SELECT test_getorcreate_amenity(make_standard_name('Stolpersteins'), 'stolpersteins', 'memorial', 'stolperstein');
-SELECT test_getorcreate_amenity(make_standard_name('Stolpersteine'), 'stolpersteine', 'memorial', 'stolperstein');
-SELECT test_getorcreate_amenityoperator(make_standard_name('Stolperstein in'), 'stolperstein in', 'memorial', 'stolperstein', 'in');
-SELECT test_getorcreate_amenityoperator(make_standard_name('Stolpersteins in'), 'stolpersteins in', 'memorial', 'stolperstein', 'in');
-SELECT test_getorcreate_amenityoperator(make_standard_name('Stolpersteine in'), 'stolpersteine in', 'memorial', 'stolperstein', 'in');
-SELECT test_getorcreate_amenityoperator(make_standard_name('Stolperstein near'), 'stolperstein near', 'memorial', 'stolperstein', 'near');
-SELECT test_getorcreate_amenityoperator(make_standard_name('Stolpersteins near'), 'stolpersteins near', 'memorial', 'stolperstein', 'near');
-SELECT test_getorcreate_amenityoperator(make_standard_name('Stolpersteine near'), 'stolpersteine near', 'memorial', 'stolperstein', 'near');
-SELECT test_getorcreate_amenity(make_standard_name('War Memorial'), 'war memorial', 'memorial', 'war_memorial');
-SELECT test_getorcreate_amenity(make_standard_name('War Memorials'), 'war memorials', 'memorial', 'war_memorial');
-SELECT test_getorcreate_amenityoperator(make_standard_name('War Memorial in'), 'war memorial in', 'memorial', 'war_memorial', 'in');
-SELECT test_getorcreate_amenityoperator(make_standard_name('War Memorials in'), 'war memorials in', 'memorial', 'war_memorial', 'in');
-SELECT test_getorcreate_amenityoperator(make_standard_name('War Memorial near'), 'war memorial near', 'memorial', 'war_memorial', 'near');
-SELECT test_getorcreate_amenityoperator(make_standard_name('War Memorials near'), 'war memorials near', 'memorial', 'war_memorial', 'near');
-CREATE INDEX idx_placex_classtype ON placex (class, type);CREATE TABLE place_classtype_aeroway_aerodrome AS SELECT place_id AS place_id,st_centroid(geometry) AS centroid FROM placex WHERE class = 'aeroway' AND type = 'aerodrome';
-CREATE INDEX idx_place_classtype_aeroway_aerodrome_centroid ON place_classtype_aeroway_aerodrome USING GIST (centroid);
-CREATE INDEX idx_place_classtype_aeroway_aerodrome_place_id ON place_classtype_aeroway_aerodrome USING btree(place_id);
-GRANT SELECT ON place_classtype_aeroway_aerodrome TO "www-data";
-CREATE TABLE place_classtype_amenity_bar AS SELECT place_id AS place_id,st_centroid(geometry) AS centroid FROM placex WHERE class = 'amenity' AND type = 'bar';
-CREATE INDEX idx_place_classtype_amenity_bar_centroid ON place_classtype_amenity_bar USING GIST (centroid);
-CREATE INDEX idx_place_classtype_amenity_bar_place_id ON place_classtype_amenity_bar USING btree(place_id);
-GRANT SELECT ON place_classtype_amenity_bar TO "www-data";
-CREATE TABLE place_classtype_amenity_pub AS SELECT place_id AS place_id,st_centroid(geometry) AS centroid FROM placex WHERE class = 'amenity' AND type = 'pub';
-CREATE INDEX idx_place_classtype_amenity_pub_centroid ON place_classtype_amenity_pub USING GIST (centroid);
-CREATE INDEX idx_place_classtype_amenity_pub_place_id ON place_classtype_amenity_pub USING btree(place_id);
-GRANT SELECT ON place_classtype_amenity_pub TO "www-data";
-CREATE TABLE place_classtype_amenity_restaurant AS SELECT place_id AS place_id,st_centroid(geometry) AS centroid FROM placex WHERE class = 'amenity' AND type = 'restaurant';
-CREATE INDEX idx_place_classtype_amenity_restaurant_centroid ON place_classtype_amenity_restaurant USING GIST (centroid);
-CREATE INDEX idx_place_classtype_amenity_restaurant_place_id ON place_classtype_amenity_restaurant USING btree(place_id);
-GRANT SELECT ON place_classtype_amenity_restaurant TO "www-data";
-CREATE TABLE place_classtype_artwork_type_mural AS SELECT place_id AS place_id,st_centroid(geometry) AS centroid FROM placex WHERE class = 'artwork_type' AND type = 'mural';
-CREATE INDEX idx_place_classtype_artwork_type_mural_centroid ON place_classtype_artwork_type_mural USING GIST (centroid);
-CREATE INDEX idx_place_classtype_artwork_type_mural_place_id ON place_classtype_artwork_type_mural USING btree(place_id);
-GRANT SELECT ON place_classtype_artwork_type_mural TO "www-data";
-CREATE TABLE place_classtype_artwork_type_sculpture AS SELECT place_id AS place_id,st_centroid(geometry) AS centroid FROM placex WHERE class = 'artwork_type' AND type = 'sculpture';
-CREATE INDEX idx_place_classtype_artwork_type_sculpture_centroid ON place_classtype_artwork_type_sculpture USING GIST (centroid);
-CREATE INDEX idx_place_classtype_artwork_type_sculpture_place_id ON place_classtype_artwork_type_sculpture USING btree(place_id);
-GRANT SELECT ON place_classtype_artwork_type_sculpture TO "www-data";
-CREATE TABLE place_classtype_artwork_type_statue AS SELECT place_id AS place_id,st_centroid(geometry) AS centroid FROM placex WHERE class = 'artwork_type' AND type = 'statue';
-CREATE INDEX idx_place_classtype_artwork_type_statue_centroid ON place_classtype_artwork_type_statue USING GIST (centroid);
-CREATE INDEX idx_place_classtype_artwork_type_statue_place_id ON place_classtype_artwork_type_statue USING btree(place_id);
-GRANT SELECT ON place_classtype_artwork_type_statue TO "www-data";
-CREATE TABLE place_classtype_atm_yes AS SELECT place_id AS place_id,st_centroid(geometry) AS centroid FROM placex WHERE class = 'atm' AND type = 'yes';
-CREATE INDEX idx_place_classtype_atm_yes_centroid ON place_classtype_atm_yes USING GIST (centroid);
-CREATE INDEX idx_place_classtype_atm_yes_place_id ON place_classtype_atm_yes USING btree(place_id);
-GRANT SELECT ON place_classtype_atm_yes TO "www-data";
-CREATE TABLE place_classtype_boundary_national_park AS SELECT place_id AS place_id,st_centroid(geometry) AS centroid FROM placex WHERE class = 'boundary' AND type = 'national_park';
-CREATE INDEX idx_place_classtype_boundary_national_park_centroid ON place_classtype_boundary_national_park USING GIST (centroid);
-CREATE INDEX idx_place_classtype_boundary_national_park_place_id ON place_classtype_boundary_national_park USING btree(place_id);
-GRANT SELECT ON place_classtype_boundary_national_park TO "www-data";
-CREATE TABLE place_classtype_changing_table_yes AS SELECT place_id AS place_id,st_centroid(geometry) AS centroid FROM placex WHERE class = 'changing_table' AND type = 'yes';
-CREATE INDEX idx_place_classtype_changing_table_yes_centroid ON place_classtype_changing_table_yes USING GIST (centroid);
-CREATE INDEX idx_place_classtype_changing_table_yes_place_id ON place_classtype_changing_table_yes USING btree(place_id);
-GRANT SELECT ON place_classtype_changing_table_yes TO "www-data";
-CREATE TABLE place_classtype_junction_roundabout AS SELECT place_id AS place_id,st_centroid(geometry) AS centroid FROM placex WHERE class = 'junction' AND type = 'roundabout';
-CREATE INDEX idx_place_classtype_junction_roundabout_centroid ON place_classtype_junction_roundabout USING GIST (centroid);
-CREATE INDEX idx_place_classtype_junction_roundabout_place_id ON place_classtype_junction_roundabout USING btree(place_id);
-GRANT SELECT ON place_classtype_junction_roundabout TO "www-data";
-CREATE TABLE place_classtype_memorial_plaque AS SELECT place_id AS place_id,st_centroid(geometry) AS centroid FROM placex WHERE class = 'memorial' AND type = 'plaque';
-CREATE INDEX idx_place_classtype_memorial_plaque_centroid ON place_classtype_memorial_plaque USING GIST (centroid);
-CREATE INDEX idx_place_classtype_memorial_plaque_place_id ON place_classtype_memorial_plaque USING btree(place_id);
-GRANT SELECT ON place_classtype_memorial_plaque TO "www-data";
-CREATE TABLE place_classtype_memorial_statue AS SELECT place_id AS place_id,st_centroid(geometry) AS centroid FROM placex WHERE class = 'memorial' AND type = 'statue';
-CREATE INDEX idx_place_classtype_memorial_statue_centroid ON place_classtype_memorial_statue USING GIST (centroid);
-CREATE INDEX idx_place_classtype_memorial_statue_place_id ON place_classtype_memorial_statue USING btree(place_id);
-GRANT SELECT ON place_classtype_memorial_statue TO "www-data";
-CREATE TABLE place_classtype_memorial_stolperstein AS SELECT place_id AS place_id,st_centroid(geometry) AS centroid FROM placex WHERE class = 'memorial' AND type = 'stolperstein';
-CREATE INDEX idx_place_classtype_memorial_stolperstein_centroid ON place_classtype_memorial_stolperstein USING GIST (centroid);
-CREATE INDEX idx_place_classtype_memorial_stolperstein_place_id ON place_classtype_memorial_stolperstein USING btree(place_id);
-GRANT SELECT ON place_classtype_memorial_stolperstein TO "www-data";
-CREATE TABLE place_classtype_memorial_war_memorial AS SELECT place_id AS place_id,st_centroid(geometry) AS centroid FROM placex WHERE class = 'memorial' AND type = 'war_memorial';
-CREATE INDEX idx_place_classtype_memorial_war_memorial_centroid ON place_classtype_memorial_war_memorial USING GIST (centroid);
-CREATE INDEX idx_place_classtype_memorial_war_memorial_place_id ON place_classtype_memorial_war_memorial USING btree(place_id);
-GRANT SELECT ON place_classtype_memorial_war_memorial TO "www-data";
-DROP INDEX idx_placex_classtype;
-
-DROP FUNCTION test_getorcreate_amenity;
-DROP FUNCTION test_getorcreate_amenityoperator;
+++ /dev/null
-#!/bin/bash -e
-#
-# hacks for broken vagrant box #DOCS:
-sudo rm -f /var/lib/dpkg/lock #DOCS:
-export APT_LISTCHANGES_FRONTEND=none #DOCS:
-export DEBIAN_FRONTEND=noninteractive #DOCS:
-
-#
-# *Note:* these installation instructions are also available in executable
-# form for use with vagrant under vagrant/Install-on-Ubuntu-18.sh.
-#
-# Installing the Required Software
-# ================================
-#
-# These instructions expect that you have a freshly installed Ubuntu 18.04.
-#
-# Make sure all packages are up-to-date by running:
-#
-
- sudo apt update -qq
-
-# Now you can install all packages needed for Nominatim:
-
- sudo apt install -y php-cgi
- sudo apt install -y build-essential cmake g++ libboost-dev libboost-system-dev \
- libboost-filesystem-dev libexpat1-dev zlib1g-dev\
- libbz2-dev libpq-dev \
- postgresql-10-postgis-2.4 \
- postgresql-contrib-10 postgresql-10-postgis-scripts \
- php-cli php-pgsql php-intl libicu-dev python3-pip \
- python3-psutil python3-jinja2 python3-yaml python3-icu git
-
-# Some of the Python packages that come with Ubuntu 18.04 are too old, so
-# install the latest version from pip:
-
- pip3 install --user python-dotenv datrie pyyaml psycopg2-binary
-
-#
-# System Configuration
-# ====================
-#
-# The following steps are meant to configure a fresh Ubuntu installation
-# for use with Nominatim. You may skip some of the steps if you have your
-# OS already configured.
-#
-# Creating Dedicated User Accounts
-# --------------------------------
-#
-# Nominatim will run as a global service on your machine. It is therefore
-# best to install it under its own separate user account. In the following
-# we assume this user is called nominatim and the installation will be in
-# /srv/nominatim. To create the user and directory run:
-#
-# sudo useradd -d /srv/nominatim -s /bin/bash -m nominatim
-#
-# You may find a more suitable location if you wish.
-#
-# To be able to copy and paste instructions from this manual, export
-# user name and home directory now like this:
-#
-if [ "x$USERNAME" == "x" ]; then #DOCS:
- export USERNAME=vagrant #DOCS: export USERNAME=nominatim
- export USERHOME=/home/vagrant #DOCS: export USERHOME=/srv/nominatim
-fi #DOCS:
-#
-# **Never, ever run the installation as a root user.** You have been warned.
-#
-# Make sure that system servers can read from the home directory:
-
- chmod a+x $USERHOME
-
-# Setting up PostgreSQL
-# ---------------------
-#
-# Tune the postgresql configuration, which is located in
-# `/etc/postgresql/10/main/postgresql.conf`. See section *Postgres Tuning* in
-# [the installation page](../admin/Installation.md#postgresql-tuning)
-# for the parameters to change.
-#
-# Restart the postgresql service after updating this config file.
-
-if [ "x$NOSYSTEMD" == "xyes" ]; then #DOCS:
- sudo pg_ctlcluster 10 main start #DOCS:
-else #DOCS:
- sudo systemctl restart postgresql
-fi #DOCS:
-
-#
-# Finally, we need to add two postgres users: one for the user that does
-# the import and another for the webserver which should access the database
-# for reading only:
-#
-
- sudo -u postgres createuser -s $USERNAME
- sudo -u postgres createuser www-data
-
-#
-# Installing Nominatim
-# ====================
-#
-# Building and Configuration
-# --------------------------
-#
-# Get the source code from Github and change into the source directory
-#
-if [ "x$1" == "xyes" ]; then #DOCS: :::sh
- cd $USERHOME
- git clone --recursive https://github.com/openstreetmap/Nominatim.git
- cd Nominatim
-else #DOCS:
- cd $USERHOME/Nominatim #DOCS:
-fi #DOCS:
-
-# When installing the latest source from github, you also need to
-# download the country grid:
-
-if [ ! -f data/country_osm_grid.sql.gz ]; then #DOCS: :::sh
- wget -O data/country_osm_grid.sql.gz https://www.nominatim.org/data/country_grid.sql.gz
-fi #DOCS:
-
-# The code must be built in a separate directory. Create this directory,
-# then configure and build Nominatim in there:
-
- mkdir $USERHOME/build
- cd $USERHOME/build
- cmake $USERHOME/Nominatim
- make
- sudo make install
-
-
-# Nominatim is now ready to use. You can continue with
-# [importing a database from OSM data](../admin/Import.md). If you want to set up
-# a webserver first, continue reading.
-#
-# Setting up a webserver
-# ======================
-#
-# The webserver should serve the php scripts from the website directory of your
-# [project directory](../admin/Import.md#creating-the-project-directory).
-# This directory needs to exist when being configured.
-# Therefore set up a project directory and create the website directory:
-
- mkdir $USERHOME/nominatim-project
- mkdir $USERHOME/nominatim-project/website
-
-# The import process will populate the directory later.
-#
-# Option 1: Using Apache
-# ----------------------
-#
-if [ "x$2" == "xinstall-apache" ]; then #DOCS:
-#
-# Apache has a PHP module that can be used to serve Nominatim. To install them
-# run:
-
- sudo apt install -y apache2 libapache2-mod-php
-
-# You need to create an alias to the website directory in your apache
-# configuration. Add a separate nominatim configuration to your webserver:
-
-#DOCS:```sh
-sudo tee /etc/apache2/conf-available/nominatim.conf << EOFAPACHECONF
-<Directory "$USERHOME/nominatim-project/website">
- Options FollowSymLinks MultiViews
- AddType text/html .php
- DirectoryIndex search.php
- Require all granted
-</Directory>
-
-Alias /nominatim $USERHOME/nominatim-project/website
-EOFAPACHECONF
-#DOCS:```
-
-#
-# Then enable the configuration with
-#
-
- sudo a2enconf nominatim
-
-# and restart apache:
-
-if [ "x$NOSYSTEMD" == "xyes" ]; then #DOCS:
- sudo apache2ctl start #DOCS:
-else #DOCS:
- sudo systemctl restart apache2
-fi #DOCS:
-
-# The Nominatim API is now available at `http://localhost/nominatim/`.
-
-fi #DOCS:
-
-#
-# Option 2: Using nginx
-# ---------------------
-#
-if [ "x$2" == "xinstall-nginx" ]; then #DOCS:
-
-# Nginx has no native support for php scripts. You need to set up php-fpm for
-# this purpose. First install nginx and php-fpm:
-
- sudo apt install -y nginx php-fpm
-
-# You need to configure php-fpm to listen on a Unix socket.
-
-#DOCS:```sh
-sudo tee /etc/php/7.2/fpm/pool.d/www.conf << EOF_PHP_FPM_CONF
-[www]
-; Replace the tcp listener and add the unix socket
-listen = /var/run/php7.2-fpm.sock
-
-; Ensure that the daemon runs as the correct user
-listen.owner = www-data
-listen.group = www-data
-listen.mode = 0666
-
-; Unix user of FPM processes
-user = www-data
-group = www-data
-
-; Choose process manager type (static, dynamic, ondemand)
-pm = ondemand
-pm.max_children = 5
-EOF_PHP_FPM_CONF
-#DOCS:```
-
-# Then create a Nginx configuration to forward http requests to that socket.
-
-#DOCS:```sh
-sudo tee /etc/nginx/sites-available/default << EOF_NGINX_CONF
-server {
- listen 80 default_server;
- listen [::]:80 default_server;
-
- root $USERHOME/nominatim-project/website;
- index search.php index.html;
- location / {
- try_files \$uri \$uri/ @php;
- }
-
- location @php {
- fastcgi_param SCRIPT_FILENAME "\$document_root\$uri.php";
- fastcgi_param PATH_TRANSLATED "\$document_root\$uri.php";
- fastcgi_param QUERY_STRING \$args;
- fastcgi_pass unix:/var/run/php7.2-fpm.sock;
- fastcgi_index index.php;
- include fastcgi_params;
- }
-
- location ~ [^/]\.php(/|$) {
- fastcgi_split_path_info ^(.+?\.php)(/.*)$;
- if (!-f \$document_root\$fastcgi_script_name) {
- return 404;
- }
- fastcgi_pass unix:/var/run/php7.2-fpm.sock;
- fastcgi_index search.php;
- include fastcgi.conf;
- }
-}
-EOF_NGINX_CONF
-#DOCS:```
-
-#
-# Enable the configuration and restart Nginx
-#
-
-if [ "x$NOSYSTEMD" == "xyes" ]; then #DOCS:
- sudo /usr/sbin/php-fpm7.2 --nodaemonize --fpm-config /etc/php/7.2/fpm/php-fpm.conf & #DOCS:
- sudo /usr/sbin/nginx & #DOCS:
-else #DOCS:
- sudo systemctl restart php7.2-fpm nginx
-fi #DOCS:
-
-# The Nominatim API is now available at `http://localhost/`.
-
-
-
-fi #DOCS:
+++ /dev/null
-#!/bin/bash -e
-#
-# hacks for broken vagrant box #DOCS:
-sudo rm -f /var/lib/dpkg/lock #DOCS:
-export APT_LISTCHANGES_FRONTEND=none #DOCS:
-export DEBIAN_FRONTEND=noninteractive #DOCS:
-
-# *Note:* these installation instructions are also available in executable
-# form for use with vagrant under vagrant/Install-on-Ubuntu-20.sh.
-#
-# Installing the Required Software
-# ================================
-#
-# These instructions expect that you have a freshly installed Ubuntu 20.04.
-#
-# Make sure all packages are up-to-date by running:
-#
-
- sudo apt update -qq
-
-# Now you can install all packages needed for Nominatim:
-
- sudo apt install -y php-cgi
- sudo apt install -y build-essential cmake g++ libboost-dev libboost-system-dev \
- libboost-filesystem-dev libexpat1-dev zlib1g-dev \
- libbz2-dev libpq-dev \
- postgresql-12-postgis-3 \
- postgresql-contrib-12 postgresql-12-postgis-3-scripts \
- php-cli php-pgsql php-intl libicu-dev python3-dotenv \
- python3-psycopg2 python3-psutil python3-jinja2 \
- python3-icu python3-datrie python3-yaml git
-
-#
-# System Configuration
-# ====================
-#
-# The following steps are meant to configure a fresh Ubuntu installation
-# for use with Nominatim. You may skip some of the steps if you have your
-# OS already configured.
-#
-# Creating Dedicated User Accounts
-# --------------------------------
-#
-# Nominatim will run as a global service on your machine. It is therefore
-# best to install it under its own separate user account. In the following
-# we assume this user is called nominatim and the installation will be in
-# /srv/nominatim. To create the user and directory run:
-#
-# sudo useradd -d /srv/nominatim -s /bin/bash -m nominatim
-#
-# You may find a more suitable location if you wish.
-#
-# To be able to copy and paste instructions from this manual, export
-# user name and home directory now like this:
-#
-if [ "x$USERNAME" == "x" ]; then #DOCS:
- export USERNAME=vagrant #DOCS: export USERNAME=nominatim
- export USERHOME=/home/vagrant #DOCS: export USERHOME=/srv/nominatim
-fi #DOCS:
-#
-# **Never, ever run the installation as a root user.** You have been warned.
-#
-# Make sure that system servers can read from the home directory:
-
- chmod a+x $USERHOME
-
-# Setting up PostgreSQL
-# ---------------------
-#
-# Tune the postgresql configuration, which is located in
-# `/etc/postgresql/12/main/postgresql.conf`. See section *Postgres Tuning* in
-# [the installation page](../admin/Installation.md#postgresql-tuning)
-# for the parameters to change.
-#
-# Restart the postgresql service after updating this config file.
-
-if [ "x$NOSYSTEMD" == "xyes" ]; then #DOCS:
- sudo pg_ctlcluster 12 main start #DOCS:
-else #DOCS:
- sudo systemctl restart postgresql
-fi #DOCS:
-#
-# Finally, we need to add two postgres users: one for the user that does
-# the import and another for the webserver which should access the database
-# for reading only:
-#
-
- sudo -u postgres createuser -s $USERNAME
- sudo -u postgres createuser www-data
-
-#
-# Installing Nominatim
-# ====================
-#
-# Building and Configuration
-# --------------------------
-#
-# Get the source code from Github and change into the source directory
-#
-if [ "x$1" == "xyes" ]; then #DOCS: :::sh
- cd $USERHOME
- git clone --recursive https://github.com/openstreetmap/Nominatim.git
- cd Nominatim
-else #DOCS:
- cd $USERHOME/Nominatim #DOCS:
-fi #DOCS:
-
-# When installing the latest source from github, you also need to
-# download the country grid:
-
-if [ ! -f data/country_osm_grid.sql.gz ]; then #DOCS: :::sh
- wget -O data/country_osm_grid.sql.gz https://www.nominatim.org/data/country_grid.sql.gz
-fi #DOCS:
-
-# The code must be built in a separate directory. Create this directory,
-# then configure and build Nominatim in there:
-
- mkdir $USERHOME/build
- cd $USERHOME/build
- cmake $USERHOME/Nominatim
- make
- sudo make install
-
-# Nominatim is now ready to use. You can continue with
-# [importing a database from OSM data](../admin/Import.md). If you want to set up
-# a webserver first, continue reading.
-#
-# Setting up a webserver
-# ======================
-#
-# The webserver should serve the php scripts from the website directory of your
-# [project directory](../admin/Import.md#creating-the-project-directory).
-# This directory needs to exist when being configured.
-# Therefore set up a project directory and create a website directory:
-
- mkdir $USERHOME/nominatim-project
- mkdir $USERHOME/nominatim-project/website
-
-# The import process will populate the directory later.
-
-#
-# Option 1: Using Apache
-# ----------------------
-#
-if [ "x$2" == "xinstall-apache" ]; then #DOCS:
-#
-# Apache has a PHP module that can be used to serve Nominatim. To install them
-# run:
-
- sudo apt install -y apache2 libapache2-mod-php
-
-# You need to create an alias to the website directory in your apache
-# configuration. Add a separate nominatim configuration to your webserver:
-
-#DOCS:```sh
-sudo tee /etc/apache2/conf-available/nominatim.conf << EOFAPACHECONF
-<Directory "$USERHOME/nominatim-project/website">
- Options FollowSymLinks MultiViews
- AddType text/html .php
- DirectoryIndex search.php
- Require all granted
-</Directory>
-
-Alias /nominatim $USERHOME/nominatim-project/website
-EOFAPACHECONF
-#DOCS:```
-
-#
-# Then enable the configuration and restart apache
-#
-
- sudo a2enconf nominatim
-if [ "x$NOSYSTEMD" == "xyes" ]; then #DOCS:
- sudo apache2ctl start #DOCS:
-else #DOCS:
- sudo systemctl restart apache2
-fi #DOCS:
-
-# The Nominatim API is now available at `http://localhost/nominatim/`.
-
-fi #DOCS:
-
-#
-# Option 2: Using nginx
-# ---------------------
-#
-if [ "x$2" == "xinstall-nginx" ]; then #DOCS:
-
-# Nginx has no native support for php scripts. You need to set up php-fpm for
-# this purpose. First install nginx and php-fpm:
-
- sudo apt install -y nginx php-fpm
-
-# You need to configure php-fpm to listen on a Unix socket.
-
-#DOCS:```sh
-sudo tee /etc/php/7.4/fpm/pool.d/www.conf << EOF_PHP_FPM_CONF
-[www]
-; Replace the tcp listener and add the unix socket
-listen = /var/run/php7.4-fpm.sock
-
-; Ensure that the daemon runs as the correct user
-listen.owner = www-data
-listen.group = www-data
-listen.mode = 0666
-
-; Unix user of FPM processes
-user = www-data
-group = www-data
-
-; Choose process manager type (static, dynamic, ondemand)
-pm = ondemand
-pm.max_children = 5
-EOF_PHP_FPM_CONF
-#DOCS:```
-
-# Then create a Nginx configuration to forward http requests to that socket.
-
-#DOCS:```sh
-sudo tee /etc/nginx/sites-available/default << EOF_NGINX_CONF
-server {
- listen 80 default_server;
- listen [::]:80 default_server;
-
- root $USERHOME/nominatim-project/website;
- index search.php index.html;
- location / {
- try_files \$uri \$uri/ @php;
- }
-
- location @php {
- fastcgi_param SCRIPT_FILENAME "\$document_root\$uri.php";
- fastcgi_param PATH_TRANSLATED "\$document_root\$uri.php";
- fastcgi_param QUERY_STRING \$args;
- fastcgi_pass unix:/var/run/php7.4-fpm.sock;
- fastcgi_index index.php;
- include fastcgi_params;
- }
-
- location ~ [^/]\.php(/|$) {
- fastcgi_split_path_info ^(.+?\.php)(/.*)$;
- if (!-f \$document_root\$fastcgi_script_name) {
- return 404;
- }
- fastcgi_pass unix:/var/run/php7.4-fpm.sock;
- fastcgi_index search.php;
- include fastcgi.conf;
- }
-}
-EOF_NGINX_CONF
-#DOCS:```
-
-# If you have some errors, make sure that php7.4-fpm.sock is well under
-# /var/run/ and not under /var/run/php. Otherwise change the Nginx configuration
-# to /var/run/php/php7.4-fpm.sock.
-#
-# Enable the configuration and restart Nginx
-#
-
-if [ "x$NOSYSTEMD" == "xyes" ]; then #DOCS:
- sudo /usr/sbin/php-fpm7.4 --nodaemonize --fpm-config /etc/php/7.4/fpm/php-fpm.conf & #DOCS:
- sudo /usr/sbin/nginx & #DOCS:
-else #DOCS:
- sudo systemctl restart php7.4-fpm nginx
-fi #DOCS:
-
-# The Nominatim API is now available at `http://localhost/`.
-
-
-
-fi #DOCS:
# Make sure all packages are up-to-date by running:
#
- sudo apt update -qq
+ sudo apt-get update -qq
# Now you can install all packages needed for Nominatim:
- sudo apt install -y php-cgi
- sudo apt install -y build-essential cmake g++ libboost-dev libboost-system-dev \
+ sudo apt-get install -y build-essential cmake g++ libboost-dev libboost-system-dev \
libboost-filesystem-dev libexpat1-dev zlib1g-dev \
- libbz2-dev libpq-dev \
- postgresql-server-dev-14 postgresql-14-postgis-3 \
+ libbz2-dev libpq-dev liblua5.3-dev lua5.3 lua-dkjson \
+ nlohmann-json3-dev postgresql-14-postgis-3 \
postgresql-contrib-14 postgresql-14-postgis-3-scripts \
- php-cli php-pgsql php-intl libicu-dev python3-dotenv \
- python3-psycopg2 python3-psutil python3-jinja2 \
- python3-icu python3-datrie git
+ libicu-dev virtualenv git
#
# System Configuration
#
# You may find a more suitable location if you wish.
#
+# The following instructions assume you are logged in as this user.
+# You can also switch to the user with:
+#
+# sudo -u nominatim bash
+#
# To be able to copy and paste instructions from this manual, export
# user name and home directory now like this:
#
# ---------------------
#
# Tune the postgresql configuration, which is located in
-# `/etc/postgresql/14/main/postgresql.conf`. See section *Postgres Tuning* in
-# [the installation page](../admin/Installation.md#postgresql-tuning)
+# `/etc/postgresql/14/main/postgresql.conf`. See section *Tuning the PostgreSQL database*
+# in [the installation page](../admin/Installation.md#tuning-the-postgresql-database)
# for the parameters to change.
#
# Restart the postgresql service after updating this config file.
#
if [ "x$1" == "xyes" ]; then #DOCS: :::sh
cd $USERHOME
- git clone --recursive https://github.com/openstreetmap/Nominatim.git
+ git clone https://github.com/osm-search/Nominatim.git
cd Nominatim
else #DOCS:
cd $USERHOME/Nominatim #DOCS:
# download the country grid:
if [ ! -f data/country_osm_grid.sql.gz ]; then #DOCS: :::sh
- wget -O data/country_osm_grid.sql.gz https://www.nominatim.org/data/country_grid.sql.gz
+ wget -O data/country_osm_grid.sql.gz https://nominatim.org/data/country_grid.sql.gz
fi #DOCS:
-# The code must be built in a separate directory. Create this directory,
-# then configure and build Nominatim in there:
+# Nominatim needs osm2pgsql >= 1.8. The version that comes with Ubuntu is
+# too old. Download and compile your own:
- mkdir $USERHOME/build
- cd $USERHOME/build
- cmake $USERHOME/Nominatim
+ cd $USERHOME
+ git clone https://github.com/osm2pgsql-dev/osm2pgsql
+ mkdir osm2pgsql-build
+ cd osm2pgsql-build
+ cmake ../osm2pgsql
make
sudo make install
+ cd $USERHOME/Nominatim
+
+# Nominatim should be installed in a separate Python virtual environment.
+# Create the virtual environment:
+
+ virtualenv $USERHOME/nominatim-venv
+
+# We want the faster binary version of psycopg, so install that:
+
+ $USERHOME/nominatim-venv/bin/pip install psycopg[binary]
+
+# Now install Nominatim using pip:
+
+ cd $USERHOME/Nominatim
+ $USERHOME/nominatim-venv/bin/pip install packaging/nominatim-db
# Nominatim is now ready to use. You can continue with
# [importing a database from OSM data](../admin/Import.md). If you want to set up
-# a webserver first, continue reading.
+# the API frontend first, continue reading.
#
+# Setting up the Python frontend
+# ==============================
+#
+# The Python frontend is contained in the nominatim-api package. To run
+# the API as a webservice, you also need falcon with uvicorn to serve the API.
+# It is generally recommended to run falcon/uvicorn on top of gunicorn.
+#
+# To install all packages, run:
+
+#DOCS:```sh
+$USERHOME/nominatim-venv/bin/pip install falcon uvicorn gunicorn
+cd $USERHOME/Nominatim
+$USERHOME/nominatim-venv/bin/pip install packaging/nominatim-api
+#DOCS:```
+
+
+# Next you need to create a systemd job that runs Nominatim on gunicorn.
+# First create a systemd job that manages the socket file:
+
+#DOCS:```sh
+sudo tee /etc/systemd/system/nominatim.socket << EOFSOCKETSYSTEMD
+[Unit]
+Description=Gunicorn socket for Nominatim
+
+[Socket]
+ListenStream=/run/nominatim.sock
+SocketUser=www-data
+
+[Install]
+WantedBy=multi-user.target
+EOFSOCKETSYSTEMD
+#DOCS:```
+
+# Then create the service for Nominatim itself.
+
+#DOCS:```sh
+sudo tee /etc/systemd/system/nominatim.service << EOFNOMINATIMSYSTEMD
+[Unit]
+Description=Nominatim running as a gunicorn application
+After=network.target
+Requires=nominatim.socket
+
+[Service]
+Type=simple
+User=www-data
+Group=www-data
+WorkingDirectory=$USERHOME/nominatim-project
+ExecStart=$USERHOME/nominatim-venv/bin/gunicorn -b unix:/run/nominatim.sock -w 4 -k uvicorn.workers.UvicornWorker "nominatim_api.server.falcon.server:run_wsgi()"
+ExecReload=/bin/kill -s HUP \$MAINPID
+PrivateTmp=true
+TimeoutStopSec=5
+KillMode=mixed
+
+[Install]
+WantedBy=multi-user.target
+EOFNOMINATIMSYSTEMD
+#DOCS:```
+
+# Activate the services:
+
+if [ "x$NOSYSTEMD" != "xyes" ]; then #DOCS:
+ sudo systemctl daemon-reload
+ sudo systemctl enable nominatim.socket
+ sudo systemctl start nominatim.socket
+ sudo systemctl enable nominatim.service
+fi #DOCS:
+
# Setting up a webserver
# ======================
#
-# The webserver should serve the php scripts from the website directory of your
-# [project directory](../admin/Import.md#creating-the-project-directory).
-# This directory needs to exist when being configured.
-# Therefore set up a project directory and create a website directory:
+# The webserver is only needed as a proxy between the public interface
+# and the gunicorn service.
+#
+# The frontend will need configuration information from the project
+# directory, which will be populated later
+# [during the import process](../admin/Import.md#creating-the-project-directory).
+# Already create the project directory itself now:
mkdir $USERHOME/nominatim-project
- mkdir $USERHOME/nominatim-project/website
-
-# The import process will populate the directory later.
#
# Option 1: Using Apache
#
if [ "x$2" == "xinstall-apache" ]; then #DOCS:
#
-# Apache has a PHP module that can be used to serve Nominatim. To install them
-# run:
+# First install apache itself and enable the proxy module:
- sudo apt install -y apache2 libapache2-mod-php
+ sudo apt-get install -y apache2
+ sudo a2enmod proxy_http
-# You need to create an alias to the website directory in your apache
-# configuration. Add a separate nominatim configuration to your webserver:
+#
+# To set up proxying for Apache add the following configuration:
#DOCS:```sh
sudo tee /etc/apache2/conf-available/nominatim.conf << EOFAPACHECONF
-<Directory "$USERHOME/nominatim-project/website">
- Options FollowSymLinks MultiViews
- AddType text/html .php
- DirectoryIndex search.php
- Require all granted
-</Directory>
-
-Alias /nominatim $USERHOME/nominatim-project/website
+
+ProxyPass /nominatim "unix:/run/nominatim.sock|http://localhost/"
EOFAPACHECONF
#DOCS:```
# Then enable the configuration and restart apache
#
- sudo a2enconf nominatim
+#DOCS:```sh
+sudo a2enconf nominatim
+#DOCS:```
+
if [ "x$NOSYSTEMD" == "xyes" ]; then #DOCS:
sudo apache2ctl start #DOCS:
else #DOCS:
sudo systemctl restart apache2
fi #DOCS:
-# The Nominatim API is now available at `http://localhost/nominatim/`.
+# The Nominatim API is now available at `http://localhost/nominatim/`. Point your browser
+# to the status output `http://localhost/nominatim/status` to test if everything is ok.
fi #DOCS:
#
if [ "x$2" == "xinstall-nginx" ]; then #DOCS:
-# Nginx has no native support for php scripts. You need to set up php-fpm for
-# this purpose. First install nginx and php-fpm:
-
- sudo apt install -y nginx php-fpm
+# First install nginx itself:
-# You need to configure php-fpm to listen on a Unix socket.
+ sudo apt-get install -y nginx
-#DOCS:```sh
-sudo tee /etc/php/8.1/fpm/pool.d/www.conf << EOF_PHP_FPM_CONF
-[www]
-; Replace the tcp listener and add the unix socket
-listen = /var/run/php8.1-fpm.sock
-
-; Ensure that the daemon runs as the correct user
-listen.owner = www-data
-listen.group = www-data
-listen.mode = 0666
-
-; Unix user of FPM processes
-user = www-data
-group = www-data
-
-; Choose process manager type (static, dynamic, ondemand)
-pm = ondemand
-pm.max_children = 5
-EOF_PHP_FPM_CONF
-#DOCS:```
# Then create a Nginx configuration to forward http requests to that socket.
listen [::]:80 default_server;
root $USERHOME/nominatim-project/website;
- index search.php index.html;
- location / {
- try_files \$uri \$uri/ @php;
- }
-
- location @php {
- fastcgi_param SCRIPT_FILENAME "\$document_root\$uri.php";
- fastcgi_param PATH_TRANSLATED "\$document_root\$uri.php";
- fastcgi_param QUERY_STRING \$args;
- fastcgi_pass unix:/var/run/php8.1-fpm.sock;
- fastcgi_index index.php;
- include fastcgi_params;
- }
-
- location ~ [^/]\.php(/|$) {
- fastcgi_split_path_info ^(.+?\.php)(/.*)$;
- if (!-f \$document_root\$fastcgi_script_name) {
- return 404;
- }
- fastcgi_pass unix:/var/run/php7.4-fpm.sock;
- fastcgi_index search.php;
- include fastcgi.conf;
+ index /search;
+
+ location /nominatim/ {
+ proxy_set_header Host \$http_host;
+ proxy_set_header X-Forwarded-For \$proxy_add_x_forwarded_for;
+ proxy_set_header X-Forwarded-Proto \$scheme;
+ proxy_redirect off;
+ proxy_pass http://unix:/run/nominatim.sock:/;
}
}
EOF_NGINX_CONF
#DOCS:```
-# If you have some errors, make sure that php8.1-fpm.sock is well under
-# /var/run/ and not under /var/run/php. Otherwise change the Nginx configuration
-# to /var/run/php/php8.1-fpm.sock.
-#
# Enable the configuration and restart Nginx
#
if [ "x$NOSYSTEMD" == "xyes" ]; then #DOCS:
- sudo /usr/sbin/php-fpm8.1 --nodaemonize --fpm-config /etc/php/8.1/fpm/php-fpm.conf & #DOCS:
sudo /usr/sbin/nginx & #DOCS:
else #DOCS:
- sudo systemctl restart php8.1-fpm nginx
+ sudo systemctl restart nginx
fi #DOCS:
-# The Nominatim API is now available at `http://localhost/`.
-
-
+# The Nominatim API is now available at `http://localhost/nominatim/`. Point your browser
+# to the status output `http://localhost/nominatim/status` to test if everything is ok.
fi #DOCS:
--- /dev/null
+#!/bin/bash -e
+#
+# hacks for broken vagrant box #DOCS:
+sudo rm -f /var/lib/dpkg/lock #DOCS:
+export APT_LISTCHANGES_FRONTEND=none #DOCS:
+export DEBIAN_FRONTEND=noninteractive #DOCS:
+
+# *Note:* these installation instructions are also available in executable
+# form for use with vagrant under vagrant/Install-on-Ubuntu-24.sh.
+#
+# Installing the Required Software
+# ================================
+#
+# These instructions expect that you have a freshly installed Ubuntu 24.04.
+#
+# Make sure all packages are up-to-date by running:
+#
+
+ sudo apt-get update -qq
+
+# Now you can install all packages needed for Nominatim:
+
+ sudo apt-get install -y osm2pgsql postgresql-postgis postgresql-postgis-scripts \
+ pkg-config libicu-dev virtualenv git
+
+
+#
+# System Configuration
+# ====================
+#
+# The following steps are meant to configure a fresh Ubuntu installation
+# for use with Nominatim. You may skip some of the steps if you have your
+# OS already configured.
+#
+# Creating Dedicated User Accounts
+# --------------------------------
+#
+# Nominatim will run as a global service on your machine. It is therefore
+# best to install it under its own separate user account. In the following
+# we assume this user is called nominatim and the installation will be in
+# /srv/nominatim. To create the user and directory run:
+#
+# sudo useradd -d /srv/nominatim -s /bin/bash -m nominatim
+#
+# You may find a more suitable location if you wish.
+#
+# The following instructions assume you are logged in as this user.
+# You can also switch to the user with:
+#
+# sudo -u nominatim bash
+#
+# To be able to copy and paste instructions from this manual, export
+# user name and home directory now like this:
+#
+if [ "x$USERNAME" == "x" ]; then #DOCS:
+ export USERNAME=vagrant #DOCS: export USERNAME=nominatim
+ export USERHOME=/home/vagrant #DOCS: export USERHOME=/srv/nominatim
+fi #DOCS:
+#
+# **Never, ever run the installation as a root user.** You have been warned.
+#
+# Make sure that system servers can read from the home directory:
+
+ chmod a+x $USERHOME
+
+# Setting up PostgreSQL
+# ---------------------
+#
+# Tune the postgresql configuration, which is located in
+# `/etc/postgresql/16/main/postgresql.conf`. See section *Tuning the PostgreSQL database*
+# in [the installation page](../admin/Installation.md#tuning-the-postgresql-database)
+# for the parameters to change.
+#
+# Restart the postgresql service after updating this config file.
+
+if [ "x$NOSYSTEMD" == "xyes" ]; then #DOCS:
+ sudo pg_ctlcluster 16 main start #DOCS:
+else #DOCS:
+ sudo systemctl restart postgresql
+fi #DOCS:
+#
+# Finally, we need to add two postgres users: one for the user that does
+# the import and another for the webserver which should access the database
+# for reading only:
+#
+
+ sudo -u postgres createuser -s $USERNAME
+ sudo -u postgres createuser www-data
+
+#
+# Installing Nominatim
+# ====================
+#
+# Building and Configuration
+# --------------------------
+#
+# Get the source code from Github and change into the source directory
+#
+if [ "x$1" == "xyes" ]; then #DOCS: :::sh
+ cd $USERHOME
+ git clone https://github.com/osm-search/Nominatim.git
+ cd Nominatim
+else #DOCS:
+ cd $USERHOME/Nominatim #DOCS:
+fi #DOCS:
+
+# When installing the latest source from github, you also need to
+# download the country grid:
+
+if [ ! -f data/country_osm_grid.sql.gz ]; then #DOCS: :::sh
+ wget -O data/country_osm_grid.sql.gz https://nominatim.org/data/country_grid.sql.gz
+fi #DOCS:
+
+# Nominatim should be installed in a separate Python virtual environment.
+# Create the virtual environment:
+
+ virtualenv $USERHOME/nominatim-venv
+
+# We want the faster binary version of psycopg, so install that:
+
+ $USERHOME/nominatim-venv/bin/pip install psycopg[binary]
+
+# Now install Nominatim using pip:
+
+ cd $USERHOME/Nominatim
+ $USERHOME/nominatim-venv/bin/pip install packaging/nominatim-db
+
+# Nominatim is now ready to use. The nominatim binary is available at
+# `$USERHOME/nominatim-venv/bin/nominatim`. If you want to have 'nominatim' in your
+# path, simply activate the virtual environment:
+#
+#DOCS:```sh
+# . $USERHOME/nominatim-venv/bin/activate
+#DOCS:```
+#
+# You can continue with
+# [importing a database from OSM data](../admin/Import.md). If you want to set up
+# the API frontend first, continue reading.
+#
+# Setting up the Python frontend
+# ==============================
+#
+# The Python frontend is contained in the nominatim-api package. To run
+# the API as a webservice, you also need falcon with uvicorn to serve the API.
+# It is generally recommended to run falcon/uvicorn on top of gunicorn.
+#
+# To install all packages, run:
+
+#DOCS:```sh
+$USERHOME/nominatim-venv/bin/pip install falcon uvicorn gunicorn
+cd $USERHOME/Nominatim
+$USERHOME/nominatim-venv/bin/pip install packaging/nominatim-api
+#DOCS:```
+
+# Next you need to create a systemd job that runs Nominatim on gunicorn.
+# First create a systemd job that manages the socket file:
+
+#DOCS:```sh
+sudo tee /etc/systemd/system/nominatim.socket << EOFSOCKETSYSTEMD
+[Unit]
+Description=Gunicorn socket for Nominatim
+
+[Socket]
+ListenStream=/run/nominatim.sock
+SocketUser=www-data
+
+[Install]
+WantedBy=multi-user.target
+EOFSOCKETSYSTEMD
+#DOCS:```
+
+# Then create the service for Nominatim itself.
+
+#DOCS:```sh
+sudo tee /etc/systemd/system/nominatim.service << EOFNOMINATIMSYSTEMD
+[Unit]
+Description=Nominatim running as a gunicorn application
+After=network.target
+Requires=nominatim.socket
+
+[Service]
+Type=simple
+User=www-data
+Group=www-data
+WorkingDirectory=$USERHOME/nominatim-project
+ExecStart=$USERHOME/nominatim-venv/bin/gunicorn -b unix:/run/nominatim.sock -w 4 -k uvicorn.workers.UvicornWorker "nominatim_api.server.falcon.server:run_wsgi()"
+ExecReload=/bin/kill -s HUP \$MAINPID
+PrivateTmp=true
+TimeoutStopSec=5
+KillMode=mixed
+
+[Install]
+WantedBy=multi-user.target
+EOFNOMINATIMSYSTEMD
+#DOCS:```
+
+# Activate the services:
+
+if [ "x$NOSYSTEMD" != "xyes" ]; then #DOCS:
+ sudo systemctl daemon-reload
+ sudo systemctl enable nominatim.socket
+ sudo systemctl start nominatim.socket
+ sudo systemctl enable nominatim.service
+fi #DOCS:
+
+# Setting up a webserver
+# ======================
+#
+# The webserver is only needed as a proxy between the public interface
+# and the gunicorn service.
+#
+# The frontend will need configuration information from the project
+# directory, which will be populated later
+# [during the import process](../admin/Import.md#creating-the-project-directory).
+# Already create the project directory itself now:
+
+ mkdir $USERHOME/nominatim-project
+
+#
+# Option 1: Using Apache
+# ----------------------
+#
+if [ "x$2" == "xinstall-apache" ]; then #DOCS:
+#
+# First install apache itself and enable the proxy module:
+
+ sudo apt-get install -y apache2
+ sudo a2enmod proxy_http
+
+#
+# To set up proxying for Apache add the following configuration:
+
+#DOCS:```sh
+sudo tee /etc/apache2/conf-available/nominatim.conf << EOFAPACHECONF
+
+ProxyPass /nominatim "unix:/run/nominatim.sock|http://localhost/"
+EOFAPACHECONF
+#DOCS:```
+
+#
+# Then enable the configuration and restart apache
+#
+
+#DOCS:```sh
+sudo a2enconf nominatim
+#DOCS:```
+
+if [ "x$NOSYSTEMD" == "xyes" ]; then #DOCS:
+ sudo apache2ctl start #DOCS:
+else #DOCS:
+ sudo systemctl restart apache2
+fi #DOCS:
+
+# The Nominatim API is now available at `http://localhost/nominatim/`. Point your browser
+# to the status output `http://localhost/nominatim/status` to test if everything is ok.
+
+fi #DOCS:
+
+#
+# Option 2: Using nginx
+# ---------------------
+#
+if [ "x$2" == "xinstall-nginx" ]; then #DOCS:
+
+# First install nginx itself:
+
+ sudo apt-get install -y nginx
+
+
+# Then create a Nginx configuration to forward http requests to that socket.
+
+#DOCS:```sh
+sudo tee /etc/nginx/sites-available/default << EOF_NGINX_CONF
+server {
+ listen 80 default_server;
+ listen [::]:80 default_server;
+
+ root $USERHOME/nominatim-project/website;
+ index /search;
+
+ location /nominatim/ {
+ proxy_set_header Host \$http_host;
+ proxy_set_header X-Forwarded-For \$proxy_add_x_forwarded_for;
+ proxy_set_header X-Forwarded-Proto \$scheme;
+ proxy_redirect off;
+ proxy_pass http://unix:/run/nominatim.sock:/;
+ }
+}
+EOF_NGINX_CONF
+#DOCS:```
+
+# Enable the configuration and restart Nginx
+#
+
+if [ "x$NOSYSTEMD" == "xyes" ]; then #DOCS:
+ sudo /usr/sbin/nginx & #DOCS:
+else #DOCS:
+ sudo systemctl restart nginx
+fi #DOCS:
+
+# The Nominatim API is now available at `http://localhost/nominatim/`. Point your browser
+# to the status output `http://localhost/nominatim/status` to test if everything is ok.
+
+fi #DOCS: