From 86d90bc46005c00f2367ad759804f528adc4c6a5 Mon Sep 17 00:00:00 2001 From: Sarah Hoffmann Date: Sun, 7 Sep 2014 17:02:10 +0200 Subject: [PATCH] add functional tests These are the tests that were formerly located at https://github.com/lonvia/test-nominatim --- Makefile.am | 21 + tests/README.md | 94 ++++ tests/features/api/details.feature | 14 + tests/features/api/language.feature | 100 ++++ tests/features/api/regression.feature | 179 +++++++ tests/features/api/reverse.feature | 13 + tests/features/api/reverse_simple.feature | 52 ++ tests/features/api/search.feature | 74 +++ tests/features/api/search_order.feature | 33 ++ tests/features/api/search_params.feature | 172 +++++++ tests/features/api/search_simple.feature | 227 +++++++++ tests/features/api/search_structured.feature | 41 ++ tests/features/db/import/linking.feature | 98 ++++ tests/features/db/import/naming.feature | 202 ++++++++ tests/features/db/import/parenting.feature | 458 ++++++++++++++++++ tests/features/db/import/placex.feature | 383 +++++++++++++++ tests/features/db/import/search_terms.feature | 28 ++ tests/features/db/import/simple.feature | 17 + .../features/db/update/linked_places.feature | 92 ++++ tests/features/db/update/naming.feature | 39 ++ tests/features/db/update/simple.feature | 55 +++ .../osm2pgsql/import/relation.feature | 13 + .../features/osm2pgsql/import/simple.feature | 49 ++ .../osm2pgsql/update/relation.feature | 30 ++ .../features/osm2pgsql/update/simple.feature | 22 + tests/scenes/bin/make_scenes.sh | 31 ++ tests/scenes/bin/osm2wkt.cc | 101 ++++ .../scenes/data/building-on-street-corner.wkt | 7 + tests/scenes/data/country.sql | 1 + tests/scenes/data/country.wkt | 250 ++++++++++ tests/scenes/data/points-on-roads.wkt | 8 + tests/scenes/data/poly-area.wkt | 11 + tests/scenes/data/poly-areas.osm | 168 +++++++ tests/scenes/data/road-with-alley.wkt | 6 + tests/scenes/data/roads-with-pois.wkt | 6 + tests/scenes/data/roads.osm | 300 ++++++++++++ tests/scenes/data/split-road.wkt | 6 + tests/steps/api_result.py | 232 +++++++++ tests/steps/api_setup.py | 114 +++++ tests/steps/db_results.py | 124 +++++ tests/steps/db_setup.py | 272 +++++++++++ tests/steps/osm2pgsql_setup.py | 212 ++++++++ tests/steps/terrain.py | 246 ++++++++++ 43 files changed, 4601 insertions(+) create mode 100644 tests/README.md create mode 100644 tests/features/api/details.feature create mode 100644 tests/features/api/language.feature create mode 100644 tests/features/api/regression.feature create mode 100644 tests/features/api/reverse.feature create mode 100644 tests/features/api/reverse_simple.feature create mode 100644 tests/features/api/search.feature create mode 100644 tests/features/api/search_order.feature create mode 100644 tests/features/api/search_params.feature create mode 100644 tests/features/api/search_simple.feature create mode 100644 tests/features/api/search_structured.feature create mode 100644 tests/features/db/import/linking.feature create mode 100644 tests/features/db/import/naming.feature create mode 100644 tests/features/db/import/parenting.feature create mode 100644 tests/features/db/import/placex.feature create mode 100644 tests/features/db/import/search_terms.feature create mode 100644 tests/features/db/import/simple.feature create mode 100644 tests/features/db/update/linked_places.feature create mode 100644 tests/features/db/update/naming.feature create mode 100644 tests/features/db/update/simple.feature create mode 100644 tests/features/osm2pgsql/import/relation.feature create mode 100644 
tests/features/osm2pgsql/import/simple.feature create mode 100644 tests/features/osm2pgsql/update/relation.feature create mode 100644 tests/features/osm2pgsql/update/simple.feature create mode 100755 tests/scenes/bin/make_scenes.sh create mode 100644 tests/scenes/bin/osm2wkt.cc create mode 100644 tests/scenes/data/building-on-street-corner.wkt create mode 100644 tests/scenes/data/country.sql create mode 100644 tests/scenes/data/country.wkt create mode 100644 tests/scenes/data/points-on-roads.wkt create mode 100644 tests/scenes/data/poly-area.wkt create mode 100644 tests/scenes/data/poly-areas.osm create mode 100644 tests/scenes/data/road-with-alley.wkt create mode 100644 tests/scenes/data/roads-with-pois.wkt create mode 100644 tests/scenes/data/roads.osm create mode 100644 tests/scenes/data/split-road.wkt create mode 100644 tests/steps/api_result.py create mode 100644 tests/steps/api_setup.py create mode 100644 tests/steps/db_results.py create mode 100644 tests/steps/db_setup.py create mode 100644 tests/steps/osm2pgsql_setup.py create mode 100644 tests/steps/terrain.py diff --git a/Makefile.am b/Makefile.am index 910ebea7..3f57590c 100644 --- a/Makefile.am +++ b/Makefile.am @@ -1,6 +1,27 @@ ACLOCAL_AMFLAGS = -I osm2pgsql/m4 +AUTOMAKE_OPTIONS = -Wno-portability SUBDIRS = osm2pgsql module nominatim +NOMINATIM_SERVER ?= $(shell echo a | php -F lib/init.php -E 'echo CONST_Website_BaseURL."\n";') +NOMINATIM_DATABASE ?= $(shell echo a | php -F lib/init.php -E 'echo DB::parseDSN(CONST_Database_DSN)["database"];') + install: @echo Nominatim needs to be executed directly from this directory. No install necessary. + +test: + cd tests; NOMINATIM_SERVER=${NOMINATIM_SERVER} NOMINATIM_DIR=.. lettuce -t -Fail -t -poldi-only + +test-fast: + cd tests; NOMINATIM_SERVER=${NOMINATIM_SERVER} NOMINATIM_REUSE_TEMPLATE=1 NOMINATIM_DIR=.. lettuce -t -Fail -t -poldi-only + +test-db: + cd tests; NOMINATIM_SERVER=${NOMINATIM_SERVER} NOMINATIM_DIR=.. lettuce -t -Fail -t -poldi-only -t DB + +test-db-fast: + cd tests; NOMINATIM_SERVER=${NOMINATIM_SERVER} NOMINATIM_REUSE_TEMPLATE=1 NOMINATIM_DIR=.. lettuce -t -Fail -t -poldi-only -t DB + +test-api: + cd tests; NOMINATIM_SERVER=${NOMINATIM_SERVER} NOMINATIM_DIR=.. lettuce -t -Fail -t -poldi-only features/api + +.PHONY: test test-fast test-db test-db-fast test-api diff --git a/tests/README.md b/tests/README.md new file mode 100644 index 00000000..2e8cfa22 --- /dev/null +++ b/tests/README.md @@ -0,0 +1,94 @@ +This directory contains functional tests for the Nominatim API, +for the import/update from osm files and for indexing. + +The tests use the lettuce framework (http://lettuce.it/) and +nose (https://nose.readthedocs.org). API tests are meant to be run +against a Nominatim installation with a complete planet-wide +setup based on a fairly recent planet. If you only have an +excerpt, some of the API tests may fail. Database tests can be +run without having a database installed. 
+ +Prerequisites +============= + + * lettuce framework (http://lettuce.it/) + * nose (https://nose.readthedocs.org) + * pytidylib (http://countergram.com/open-source/pytidylib) + * haversine (https://github.com/mapado/haversine) + +Usage +===== + + * install the prerequisites + + [sudo] pip install lettuce nose pytidylib haversine + + * run the tests + + NOMINATIM_SERVER=http://your.nominatim.instance/ lettuce features + +The tests can be configured with a set of environment variables: + + * `NOMINATIM_SERVER` - URL of the Nominatim instance (API tests) + * `NOMINATIM_DIR` - source directory of Nominatim (import tests) + * `TEMPLATE_DB` - name of the template database used as a skeleton for + the test databases (db tests) + * `TEST_DB` - name of the test database (db tests) + * `NOMINATIM_SETTINGS` - file to write temporary Nominatim settings to (db tests) + * `NOMINATIM_REUSE_TEMPLATE` - if defined, the template database will not be + deleted after the test run and will be reused in + the next run. This speeds up tests considerably + but may lead to spurious errors after changes + to the database layout. + * `LOGLEVEL` - set to 'debug' to get more verbose output (only works properly + when output to a logfile is configured) + * `LOGFILE` - sends debug output to the given file + +Writing Tests +============= + +The following explanations assume that the reader is familiar with the lettuce +concepts of features, scenarios and steps. + +All possible steps can be found in the `steps` directory and should ideally +be documented. + + +API Tests (`features/api`) +-------------------------- + +These tests exercise the different API calls and their parameters. + +There are two kinds of steps defined for these tests: +request setup steps (see `steps/api_setup.py`) +and steps for checking results (see `steps/api_result.py`). + +Each scenario follows this simple sequence of steps: + + 1. One or more steps to define the parameters and HTTP headers of the request. + These are cumulative, so you can use multiple steps. + 2. A single step to call the API. This sends an HTTP request to the configured + server and collects the answer. The cached parameters are deleted afterwards, + to ensure that the setup works properly with scenario outlines. + 3. As many result checks as necessary. The result remains cached, so that + multiple checks can be added here. + +Indexing Tests (`features/db`) +------------------------------ + +These tests check the import and update of the Nominatim database. They do not +test the correctness of osm2pgsql. Each test writes some data into the `place` +table (and optionally the `planet_osm_*` tables if required) and then runs +Nominatim's processing functions on that. + +These tests need to create their own test databases. By default they will be +called `test_template_nominatim` and `test_nominatim`. The names can be changed with +the environment variables `TEMPLATE_DB` and `TEST_DB`. The user running the tests +needs superuser rights for PostgreSQL. + + +Import Tests (`features/osm2pgsql`) +----------------------------------- + +These tests check that data is imported correctly into the `place` table. They +use the same template database as the indexing tests, so the same remarks apply.
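For illustration, a minimal API scenario following the three-step sequence described above could look as follows. This is only a sketch: it reuses step phrasings that appear in the feature files of this patch ("Given the request parameters", "When sending json search query", "Then results contain"), but the query "Hamburg" and the expected result are assumptions for the example and not part of the test suite.

    Scenario: Search with a fixed result language
        # 1. set up request parameters (cumulative)
        Given the request parameters
          | accept-language
          | de
        # 2. call the API and collect the answer
        When sending json search query "Hamburg"
        # 3. check the cached result (regex match on display_name)
        Then results contain
          | ID | display_name
          | 0  | Hamburg.*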
diff --git a/tests/features/api/details.feature b/tests/features/api/details.feature new file mode 100644 index 00000000..fd0b0e01 --- /dev/null +++ b/tests/features/api/details.feature @@ -0,0 +1,14 @@ +Feature: Object details + Check details page for correctness + + Scenario Outline: Details via OSM id + When looking up details for <object> + Then the result is valid + + Examples: + | object + | 1758375 + | N158845944 + | W72493656 + | R62422 + diff --git a/tests/features/api/language.feature b/tests/features/api/language.feature new file mode 100644 index 00000000..529dc021 --- /dev/null +++ b/tests/features/api/language.feature @@ -0,0 +1,100 @@ +Feature: Localization of search results + + Scenario: Search - default language + When sending json search query "Germany" + Then results contain + | ID | display_name + | 0 | Deutschland.* + + Scenario: Search - accept-language first + Given the request parameters + | accept-language + | en,de + When sending json search query "Deutschland" + Then results contain + | ID | display_name + | 0 | Germany.* + + Scenario: Search - accept-language missing + Given the request parameters + | accept-language + | xx,fr,en,de + When sending json search query "Deutschland" + Then results contain + | ID | display_name + | 0 | Allemagne.* + + Scenario: Search - http accept language header first + Given the HTTP header + | accept-language + | fr-ca,fr;q=0.8,en-ca;q=0.5,en;q=0.3 + When sending json search query "Deutschland" + Then results contain + | ID | display_name + | 0 | Allemagne.* + + Scenario: Search - http accept language header and accept-language + Given the request parameters + | accept-language + | de,en + Given the HTTP header + | accept-language + | fr-ca,fr;q=0.8,en-ca;q=0.5,en;q=0.3 + When sending json search query "Deutschland" + Then results contain + | ID | display_name + | 0 | Deutschland.* + + Scenario: Search - http accept language header fallback + Given the HTTP header + | accept-language + | fr-ca,en-ca;q=0.5 + When sending json search query "Deutschland" + Then results contain + | ID | display_name + | 0 | Allemagne.* + + Scenario: Search - http accept language header fallback (upper case) + Given the HTTP header + | accept-language + | fr-FR;q=0.8,en-ca;q=0.5 + When sending json search query "Deutschland" + Then results contain + | ID | display_name + | 0 | Allemagne.* + + Scenario: Reverse - default language + When looking up coordinates 48.13921,11.57328 + Then result addresses contain + | ID | city + | 0 | München + + Scenario: Reverse - accept-language parameter + Given the request parameters + | accept-language + | en,fr + When looking up coordinates 48.13921,11.57328 + Then result addresses contain + | ID | city + | 0 | Munich + + Scenario: Reverse - HTTP accept language header + Given the HTTP header + | accept-language + | fr-ca,fr;q=0.8,en-ca;q=0.5,en;q=0.3 + When looking up coordinates 48.13921,11.57328 + Then result addresses contain + | ID | city + | 0 | Munich + + Scenario: Reverse - accept-language parameter and HTTP header + Given the request parameters + | accept-language + | it + Given the HTTP header + | accept-language + | fr-ca,fr;q=0.8,en-ca;q=0.5,en;q=0.3 + When looking up coordinates 48.13921,11.57328 + Then result addresses contain + | ID | city + | 0 | Monaco di Baviera diff --git a/tests/features/api/regression.feature b/tests/features/api/regression.feature new file mode 100644 index 00000000..3e409fe8 --- /dev/null +++ b/tests/features/api/regression.feature @@ -0,0 +1,179 @@ +Feature: API regression tests + 
Tests error cases reported in tickets. + + @poldi-only + Scenario Outline: github #36 + When sending json search query "<query>" with address + Then result addresses contain + | ID | road | city + | 0 | Seegasse | Gemeinde Wieselburg-Land + + Examples: + | query + | Seegasse, Gemeinde Wieselburg-Land + | Seegasse, Wieselburg-Land + | Seegasse, Wieselburg + + Scenario: trac #2430 + When sending json search query "89 River Avenue, Hoddesdon, Hertfordshire, EN11 0JT" + Then at least 1 result is returned + + Scenario: trac #2440 + When sending json search query "East Harvard Avenue, Denver" + Then more than 2 results are returned + + Scenario: trac #2456 + When sending xml search query "Borlänge Kommun" + Then results contain + | ID | place_rank + | 0 | 19 + + Scenario: trac #2530 + When sending json search query "Lange Straße, Bamberg" with address + Then result addresses contain + | ID | town + | 0 | Bamberg + + Scenario: trac #2541 + When sending json search query "pad, germany" + Then results contain + | ID | class | display_name + | 0 | aeroway | Paderborn/Lippstadt,.* + + Scenario: trac #2579 + When sending json search query "Johnsons Close, hackbridge" with address + Then result addresses contain + | ID | postcode + | 0 | SM5 2LU + + @Fail + Scenario Outline: trac #2586 + When sending json search query "<query>" with address + Then result addresses contain + | ID | country_code + | 0 | uk + + Examples: + | query + | DL7 0SN + | DL70SN + + Scenario: trac #2628 (1) + When sending json search query "Adam Kraft Str" with address + Then result addresses contain + | ID | road + | 0 | Adam-Kraft-Straße + + Scenario: trac #2628 (2) + When sending json search query "Maxfeldstr. 5, Nürnberg" with address + Then result addresses contain + | ID | house_number | road | city + | 0 | 5 | Maxfeldstraße | Nürnberg + + Scenario: trac #2638 + When sending json search query "Nöthnitzer Str. 40, 01187 Dresden" with address + Then result addresses contain + | ID | house_number | road | city + | 0 | 40 | Nöthnitzer Straße | Dresden + + Scenario Outline: trac #2667 + When sending json search query "<query>" with address + Then result addresses contain + | ID | house_number + | 0 | <number> + + Examples: + | number | query + | 16 | 16 Woodpecker Way, Cambourne + | 14906 | 14906, 114 Street Northwest, Edmonton, Alberta, Canada + | 14904 | 14904, 114 Street Northwest, Edmonton, Alberta, Canada + | 15022 | 15022, 114 Street Northwest, Edmonton, Alberta, Canada + | 15024 | 15024, 114 Street Northwest, Edmonton, Alberta, Canada + + Scenario: trac #2681 + When sending json search query "kirchstraße troisdorf Germany" + Then results contain + | ID | display_name + | 0 | .*, Troisdorf, .* + + Scenario: trac #2758 + When sending json search query "6а, полуботка, чернигов" with address + Then result addresses contain + | ID | house_number + | 0 | 6а + + Scenario: trac #2790 + When looking up coordinates 49.0942079697809,8.27565898861822 + Then result addresses contain + | ID | road | village | country + | 0 | Daimlerstraße | Jockgrim | Deutschland + + Scenario: trac #2794 + When sending json search query "4008" + Then results contain + | ID | class | type + | 0 | place | postcode + + Scenario: trac #2797 + When sending json search query "Philippstr.4, 52349 Düren" with address + Then result addresses contain + | ID | road | town + | 0 | Philippstraße | Düren + + Scenario: trac #2830 + When sending json search query "528, Merkley Drive, K4A 1N5,CA" with address + Then result addresses contain + | ID | house_number | road | postcode | country + | 0 | 528 | Merkley Drive | K4A 1N5 | Canada + + Scenario: trac #2830 + When sending json search query "K4A 1N5,CA" + Then results contain + | ID | class | type | display_name + | 0 | place | postcode | .*, Canada + + Scenario: trac #2845 + When sending json search query "Leliestraat 31, Zwolle" with address + Then result addresses contain + | ID | city + | 0 | Zwolle + + Scenario: trac #2852 + When sending json search query "berlinerstrasse, leipzig" with address + Then result addresses contain + | ID | road + | 0 | Berliner Straße + + Scenario: trac #2871 + When looking up coordinates -33.906895553,150.99609375 + Then result addresses contain + | ID | city | postcode | country + | 0 | [^0-9]* | 2197 | Australia + + Scenario: trac #2974 + When sending json search query "Azadi Square, Faruj" with address + Then result addresses contain + | ID | road | city + | 0 | ميدان آزادي | فاروج + And results contain + | ID | latlon + | 0 | 37.2323,58.2193 +-1km + + Scenario: trac #2981 + When sending json search query "Ohmstraße 7, Berlin" with address + Then at least 2 results are returned + And result addresses contain + | house_number | road | state + | 7 | Ohmstraße | Berlin + + Scenario: trac #3049 + When sending json search query "Soccer City" + Then results contain + | ID | class | type | latlon + | 0 | leisure | stadium | -26.2347261,27.982645 +-50m + + Scenario: trac #3130 + When sending json search query "Old Way, Frinton" + Then results contain + | ID | class | latlon + | 0 | highway | 51.8324206,1.2447352 +-100m diff --git a/tests/features/api/reverse.feature b/tests/features/api/reverse.feature new file mode 100644 index 00000000..b46ab2cd --- /dev/null +++ b/tests/features/api/reverse.feature @@ -0,0 +1,13 @@ +Feature: Reverse geocoding + Testing the reverse function + + # Make sure country is not overwritten by the postcode + Scenario: Country is returned + Given
the request parameters + | accept-language + | de + When looking up coordinates 53.9788769,13.0830313 + Then result addresses contain + | ID | country + | 0 | Deutschland + diff --git a/tests/features/api/reverse_simple.feature b/tests/features/api/reverse_simple.feature new file mode 100644 index 00000000..832b2011 --- /dev/null +++ b/tests/features/api/reverse_simple.feature @@ -0,0 +1,52 @@ +Feature: Simple Reverse Tests + Simple tests for internal server errors and response format. + These tests should pass on any Nominatim installation. + + Scenario Outline: Simple reverse-geocoding + When looking up xml coordinates <lat>,<lon> + Then the result is valid xml + When looking up json coordinates <lat>,<lon> + Then the result is valid json + When looking up jsonv2 coordinates <lat>,<lon> + Then the result is valid json + + Examples: + | lat | lon + | 0.0 | 0.0 + | 45.3 | 3.5 + | -79.34 | 23.5 + | 0.23 | -178.555 + + Scenario Outline: Wrapping of legal jsonp requests + Given the request parameters + | json_callback + | foo + When looking up <format> coordinates 67.3245,0.456 + Then the result is valid json + + Examples: + | format + | json + | jsonv2 + + Scenario: Reverse-geocoding without address + Given the request parameters + | addressdetails + | 0 + When looking up xml coordinates 36.791966,127.171726 + Then the result is valid xml + When looking up json coordinates 36.791966,127.171726 + Then the result is valid json + When looking up jsonv2 coordinates 36.791966,127.171726 + Then the result is valid json + + Scenario: Reverse-geocoding with zoom + Given the request parameters + | zoom + | 10 + When looking up xml coordinates 36.791966,127.171726 + Then the result is valid xml + When looking up json coordinates 36.791966,127.171726 + Then the result is valid json + When looking up jsonv2 coordinates 36.791966,127.171726 + Then the result is valid json diff --git a/tests/features/api/search.feature b/tests/features/api/search.feature new file mode 100644 index 00000000..9b861a7b --- /dev/null +++ b/tests/features/api/search.feature @@ -0,0 +1,74 @@ +Feature: Search queries + Testing correctness of results + + Scenario: UK House number search + When sending json search query "27 Thoresby Road, Broxtowe" with address + Then address of result 0 contains + | type | value + | house_number | 27 + | road | Thoresby Road + | city | Broxtowe + | state | England + | country | United Kingdom + | country_code | gb + + + Scenario: House number search for non-street address + Given the request parameters + | accept-language + | en + When sending json search query "4 Pomocnia, Poland" with address + Then address of result 0 is + | type | value + | house_number | 4 + | suburb | Pomocnia + | county | gmina Pokrzywnica + | state | Masovian Voivodeship + | postcode | 06-121 + | country | Poland + | country_code | pl + + Scenario: House number interpolation even + Given the request parameters + | accept-language + | en + When sending json search query "140 rue Don Bosco, Saguenay" with address + Then address of result 0 contains + | type | value + | house_number | 140 + | road | rue Don Bosco + | city | Saguenay + | state | Quebec + | country | Canada + | country_code | ca + + Scenario: House number interpolation odd + Given the request parameters + | accept-language + | en + When sending json search query "141 rue Don Bosco, Saguenay" with address + Then address of result 0 contains + | type | value + | house_number | 141 + | road | rue Don Bosco + | city | Saguenay + | state | Quebec + | country | Canada + | country_code | ca + + Scenario: 
TIGER house number + When sending json search query "3 West Victory Way, Craig" + Then result 0 has not attributes osm_id,osm_type + + Scenario: TIGER house number (road fallback) + When sending json search query "3030 West Victory Way, Craig" + Then result 0 has attributes osm_id,osm_type + + Scenario: Expansion of Illinois + Given the request parameters + | accept-language + | en + When sending json search query "il, us" + Then results contain + | ID | display_name + | 0 | Illinois.* diff --git a/tests/features/api/search_order.feature b/tests/features/api/search_order.feature new file mode 100644 index 00000000..11db163f --- /dev/null +++ b/tests/features/api/search_order.feature @@ -0,0 +1,33 @@ +Feature: Result order for Geocoding + Testing that importance ordering returns sensible results + + Scenario Outline: city order in street search + When sending json search query "<street>, <city>" with address + Then address of result 0 contains + | type | value + | <type> | <city> + + Examples: + | type | city | street + | city | Zürich | Rigistr + | city | Karlsruhe | Sophienstr + | city | München | Karlstr + | city | Praha | Dlouhá + + Scenario Outline: use more important city in street search + When sending json search query "<street>, <city>" with address + Then result addresses contain + | ID | country_code + | 0 | <country> + + Examples: + | country | city | street + | gb | London | Main St + | gb | Manchester | Central Street + + # https://trac.openstreetmap.org/ticket/5094 + Scenario: housenumbers are ordered by complete match first + When sending json search query "4 Докукина Москва" with address + Then result addresses contain + | ID | house_number + | 0 | 4 diff --git a/tests/features/api/search_params.feature b/tests/features/api/search_params.feature new file mode 100644 index 00000000..5b765f12 --- /dev/null +++ b/tests/features/api/search_params.feature @@ -0,0 +1,172 @@ +Feature: Search queries + Testing different queries and parameters + + Scenario: Simple XML search + When sending xml search query "Schaan" + Then result 0 has attributes place_id,osm_type,osm_id + And result 0 has attributes place_rank,boundingbox + And result 0 has attributes lat,lon,display_name + And result 0 has attributes class,type,importance,icon + And result 0 has not attributes address + + Scenario: Simple JSON search + When sending json search query "Vaduz" + And result 0 has attributes place_id,licence,icon,class,type + And result 0 has attributes osm_type,osm_id,boundingbox + And result 0 has attributes lat,lon,display_name,importance + And result 0 has not attributes address + + Scenario: JSON search with addressdetails + When sending json search query "Montevideo" with address + Then address of result 0 is + | type | value + | city | Montevideo + | state | Montevideo + | country | Uruguay + | country_code | uy + + Scenario: XML search with addressdetails + When sending xml search query "Inuvik" with address + Then address of result 0 is + | type | value + | town | Inuvik + | state | Northwest Territories + | country | Canada + | country_code | ca + + Scenario: Address details with unknown class types + When sending json search query "foobar, Essen" with address + Then results contain + | ID | class | type + | 0 | leisure | hackerspace + And result addresses contain + | ID | address29 + | 0 | foobar + And address of result 0 does not contain leisure,hackerspace + + Scenario: Disabling deduplication + When sending json search query "Oxford Street, London" + Then there are no duplicates + Given the request parameters + | dedupe + | 0 + When sending
json search query "Oxford Street, London" + Then there are duplicates + + Scenario: Search with bounded viewbox in right area + Given the request parameters + | bounded | viewbox + | 1 | -87.7,41.9,-87.57,41.85 + When sending json search query "restaurant" with address + Then result addresses contain + | ID | city + | 0 | Chicago + + Scenario: Search with bounded viewboxlbrt in right area + Given the request parameters + | bounded | viewboxlbrt + | 1 | -87.7,41.85,-87.57,41.9 + When sending json search query "restaurant" with address + Then result addresses contain + | ID | city + | 0 | Chicago + + Scenario: No POI search with unbounded viewbox + Given the request parameters + | viewbox + | -87.7,41.9,-87.57,41.85 + When sending json search query "restaurant" + Then results contain + | display_name + | [^,]*(?i)restaurant.* + + Scenario: bounded search remains within viewbox, even with no results + Given the request parameters + | bounded | viewbox + | 1 | -5.662003,43.54285,-5.6563282,43.5403125 + When sending json search query "restaurant" + Then less than 1 result is returned + + Scenario: bounded search remains within viewbox with results + Given the request parameters + | bounded | viewbox + | 1 | -5.662003,43.55,-5.6563282,43.5403125 + When sending json search query "restaurant" + | lon | lat + | >= -5.662003 | >= 43.5403125 + | <= -5.6563282| <= 43.55 + + Scenario: Prefer results within viewbox + Given the request parameters + | accept-language + | en + When sending json search query "royan" with address + Then result addresses contain + | ID | country + | 0 | France + Given the request parameters + | accept-language | viewbox + | en | 51.94,36.59,51.99,36.56 + When sending json search query "royan" with address + Then result addresses contain + | ID | country + | 0 | Iran + + Scenario: Overly large limit number for search results + Given the request parameters + | limit + | 1000 + When sending json search query "Neustadt" + Then at most 50 results are returned + + Scenario: Limit number of search results + Given the request parameters + | limit + | 4 + When sending json search query "Neustadt" + Then exactly 4 results are returned + + Scenario: Restrict to feature type country + Given the request parameters + | featureType + | country + When sending xml search query "Monaco" + Then results contain + | place_rank + | 4 + + Scenario: Restrict to feature type state + When sending xml search query "Berlin" + Then results contain + | ID | place_rank + | 0 | 1[56] + Given the request parameters + | featureType + | state + When sending xml search query "Berlin" + Then results contain + | place_rank + | [78] + + Scenario: Restrict to feature type city + Given the request parameters + | featureType + | city + When sending xml search query "Monaco" + Then results contain + | place_rank + | 1[56789] + + + Scenario: Restrict to feature type settlement + When sending json search query "Everest" + Then results contain + | ID | display_name + | 0 | Mount Everest.* + Given the request parameters + | featureType + | settlement + When sending json search query "Everest" + Then results contain + | ID | display_name + | 0 | Everest.* diff --git a/tests/features/api/search_simple.feature b/tests/features/api/search_simple.feature new file mode 100644 index 00000000..9c796e99 --- /dev/null +++ b/tests/features/api/search_simple.feature @@ -0,0 +1,227 @@ +Feature: Simple Tests + Simple tests for internal server errors and response format. + These tests should pass on any Nominatim installation. 
+ + Scenario Outline: Testing different parameters + Given the request parameters + | <parameter> + | <value> + When sending search query "Manchester" + Then the result is valid html + Given the request parameters + | <parameter> + | <value> + When sending html search query "Manchester" + Then the result is valid html + Given the request parameters + | <parameter> + | <value> + When sending xml search query "Manchester" + Then the result is valid xml + Given the request parameters + | <parameter> + | <value> + When sending json search query "Manchester" + Then the result is valid json + Given the request parameters + | <parameter> + | <value> + When sending jsonv2 search query "Manchester" + Then the result is valid json + + Examples: + | parameter | value + | addressdetails | 1 + | addressdetails | 0 + | polygon | 1 + | polygon | 0 + | polygon_text | 1 + | polygon_text | 0 + | polygon_kml | 1 + | polygon_kml | 0 + | polygon_geojson | 1 + | polygon_geojson | 0 + | polygon_svg | 1 + | polygon_svg | 0 + | accept-language | de,en + | countrycodes | uk,ir + | bounded | 1 + | bounded | 0 + | exclude_place_ids| 385252,1234515 + | limit | 1000 + | dedupe | 1 + | dedupe | 0 + + Scenario: Search with invalid output format + Given the request parameters + | format + | fd$# + When sending search query "Berlin" + Then the result is valid html + + Scenario Outline: Simple Searches + When sending search query "<query>" + Then the result is valid html + When sending html search query "<query>" + Then the result is valid html + When sending xml search query "<query>" + Then the result is valid xml + When sending json search query "<query>" + Then the result is valid json + When sending jsonv2 search query "<query>" + Then the result is valid json + + Examples: + | query + | New York, New York + | France + | 12, Main Street, Houston + | München + | 東京都 + | hotels in nantes + | xywxkrf + | gh; foo() + | %#$@*&l;der#$! 
+ | 234 + | 47.4,8.3 + + Scenario: Empty XML search + When sending xml search query "xnznxvcx" + Then result header contains + | attr | value + | querystring | xnznxvcx + | polygon | false + | more_url | .*format=xml.*q=xnznxvcx.* + + Scenario: Empty XML search with special XML characters + When sending xml search query "xfdghn&zxn"xvbyxcssdex" + Then result header contains + | attr | value + | querystring | xfdghn&zxn"xvbyxcssdex + | polygon | false + | more_url | .*format=xml.*q=xfdghn&zxn"xvbyxcssdex.* + + Scenario: Empty XML search with viewbox + Given the request parameters + | viewbox + | 12,45.13,77,33 + When sending xml search query "xnznxvcx" + Then result header contains + | attr | value + | querystring | xnznxvcx + | polygon | false + | viewbox | 12,45.13,77,33 + + Scenario: Empty XML search with viewboxlbrt + Given the request parameters + | viewboxlbrt + | 12,34.13,77,45 + When sending xml search query "xnznxvcx" + Then result header contains + | attr | value + | querystring | xnznxvcx + | polygon | false + | viewbox | 12,45.13,77,33 + + Scenario: Empty XML search with viewboxlbrt and viewbox + Given the request parameters + | viewbox | viewboxblrt + | 12,45.13,77,33 | 1,2,3,4 + When sending xml search query "pub" + Then result header contains + | attr | value + | querystring | pub + | polygon | false + | viewbox | 12,45.13,77,33 + + + Scenario Outline: Empty XML search with polygon values + Given the request parameters + | polygon + | <polyval> + When sending xml search query "xnznxvcx" + Then result header contains + | attr | value + | polygon | <result> + + Examples: + | result | polyval + | false | 0 + | true | 1 + | true | True + | true | true + | true | false + | true | FALSE + | true | yes + | true | no + | true | '; delete from foobar; select ' + + + Scenario: Empty XML search with excluded place ids + Given the request parameters + | exclude_place_ids + | 123,76,342565 + When sending xml search query "jghrleoxsbwjer" + Then result header contains + | attr | value + | exclude_place_ids | 123,76,342565 + + Scenario Outline: Wrapping of legal jsonp search requests + Given the request parameters + | json_callback + | <data> + When sending json search query "Tokyo" + Then there is a json wrapper "<data>" + + Examples: + | data + | foo + | FOO + | __world + | $me + | m1[4] + | d_r[$d] + + Scenario Outline: Wrapping of illegal jsonp search requests + Given the request parameters + | json_callback + | <data> + When sending json search query "Tokyo" + Then a HTTP 400 is returned + + Examples: + | data + | 1asd + | bar(foo) + | XXX['bad'] + | foo; evil + + Scenario Outline: Ignore jsonp parameter for anything but json + Given the request parameters + | json_callback + | 234 + When sending json search query "Malibu" + Then a HTTP 400 is returned + Given the request parameters + | json_callback + | 234 + When sending xml search query "Malibu" + Then the result is valid xml + Given the request parameters + | json_callback + | 234 + When sending html search query "Malibu" + Then the result is valid html + + Scenario: Empty JSON search + When sending json search query "YHlERzzx" + Then exactly 0 results are returned + + Scenario: Empty JSONv2 search + When sending jsonv2 search query "Flubb XdfESSaZx" + Then exactly 0 results are returned + + Scenario: Search for non-existing coordinates + When sending json search query "-21.0,-33.0" + Then exactly 0 results are returned + diff --git a/tests/features/api/search_structured.feature b/tests/features/api/search_structured.feature new file mode 100644 index 00000000..27e5d344
--- /dev/null +++ b/tests/features/api/search_structured.feature @@ -0,0 +1,41 @@ +Feature: Structured search queries + Testing correctness of results with + structured queries + + Scenario: Country only + When sending json structured query with address + | country + | Canada + Then address of result 0 is + | type | value + | country | Canada + | country_code | ca + + Scenario: Postcode only + When sending json structured query with address + | postalcode + | 22547 + Then at least 1 result is returned + And results contain + | type + | post(al_)?code + And result addresses contain + | postcode + | 22547 + + + Scenario: Street, postcode and country + When sending xml structured query with address + | street | postalcode | country + | Old Palace Road | GU2 7UP | United Kingdom + Then at least 1 result is returned + Then result header contains + | attr | value + | querystring | Old Palace Road, GU2 7UP, United Kingdom + + + Scenario: gihub #176 + When sending json structured query with address + | city + | Washington + Then at least 1 result is returned diff --git a/tests/features/db/import/linking.feature b/tests/features/db/import/linking.feature new file mode 100644 index 00000000..11602b7d --- /dev/null +++ b/tests/features/db/import/linking.feature @@ -0,0 +1,98 @@ +@DB +Feature: Linking of places + Tests for correctly determining linked places + + Scenario: Waterways are linked when in waterway relations + Given the scene split-road + And the place ways + | osm_type | osm_id | class | type | name | geometry + | W | 1 | waterway | river | Rhein | :w-2 + | W | 2 | waterway | river | Rhein | :w-3 + | R | 13 | waterway | river | Rhein | :w-1 + :w-2 + :w-3 + | R | 23 | waterway | river | Limmat| :w-4a + And the relations + | id | members | tags + | 13 | R23:tributary,W1,W2:main_stream | 'type' : 'waterway' + When importing + Then table placex contains + | object | linked_place_id + | W1 | R13 + | W2 | R13 + | R13 | None + | R23 | None + When sending query "rhein" + Then results contain + | osm_type + | R + + Scenario: Relations are not linked when in waterway relations + Given the scene split-road + And the place ways + | osm_type | osm_id | class | type | name | geometry + | W | 1 | waterway | river | Rhein | :w-2 + | W | 2 | waterway | river | Rhein | :w-3 + | R | 1 | waterway | river | Rhein | :w-1 + :w-2 + :w-3 + | R | 2 | waterway | river | Limmat| :w-4a + And the relations + | id | members | tags + | 1 | R2 | 'type' : 'waterway' + When importing + Then table placex contains + | object | linked_place_id + | W1 | None + | W2 | None + | R1 | None + | R2 | None + + Scenario: Empty waterway relations are handled correctly + Given the scene split-road + And the place ways + | osm_type | osm_id | class | type | name | geometry + | R | 1 | waterway | river | Rhein | :w-1 + :w-2 + :w-3 + And the relations + | id | members | tags + | 1 | | 'type' : 'waterway' + When importing + Then table placex contains + | object | linked_place_id + | R1 | None + + Scenario: Waterways are not linked when waterway types don't match + Given the scene split-road + And the place ways + | osm_type | osm_id | class | type | name | geometry + | W | 1 | waterway | drain | Rhein | :w-2 + | R | 1 | waterway | river | Rhein | :w-1 + :w-2 + :w-3 + And the relations + | id | members | tags + | 1 | N23,N34,W1,R45 | 'type' : 'multipolygon' + When importing + Then table placex contains + | object | linked_place_id + | W1 | None + | R1 | None + When sending query "rhein" + Then results contain + | ID | osm_type + | 0 | R + | 1 
| W + + Scenario: Side streams are linked only when they have the same name + Given the scene split-road + And the place ways + | osm_type | osm_id | class | type | name | geometry + | W | 1 | waterway | river | Rhein2 | :w-2 + | W | 2 | waterway | river | Rhein | :w-3 + | R | 1 | waterway | river | Rhein | :w-1 + :w-2 + :w-3 + And the relations + | id | members | tags + | 1 | W1:side_stream,W2:side_stream | 'type' : 'waterway' + When importing + Then table placex contains + | object | linked_place_id + | W1 | None + | W2 | R1 + When sending query "rhein2" + Then results contain + | osm_type + | W diff --git a/tests/features/db/import/naming.feature b/tests/features/db/import/naming.feature new file mode 100644 index 00000000..8081c491 --- /dev/null +++ b/tests/features/db/import/naming.feature @@ -0,0 +1,202 @@ +@DB +Feature: Import and search of names + Tests all naming related issues: normalisation, + abbreviations, internationalisation, etc. + + + Scenario: Case-insensitivity of search + Given the place nodes + | osm_id | class | type | name + | 1 | place | locality | 'name' : 'FooBar' + When importing + Then table placex contains + | object | class | type | name + | N1 | place | locality | 'name' : 'FooBar' + When sending query "FooBar" + Then results contain + | ID | osm_type | osm_id + | 0 | N | 1 + When sending query "foobar" + Then results contain + | ID | osm_type | osm_id + | 0 | N | 1 + When sending query "fOObar" + Then results contain + | ID | osm_type | osm_id + | 0 | N | 1 + When sending query "FOOBAR" + Then results contain + | ID | osm_type | osm_id + | 0 | N | 1 + + Scenario: Multiple spaces in name + Given the place nodes + | osm_id | class | type | name + | 1 | place | locality | 'name' : 'one two three' + When importing + When sending query "one two three" + Then results contain + | ID | osm_type | osm_id + | 0 | N | 1 + When sending query "one two three" + Then results contain + | ID | osm_type | osm_id + | 0 | N | 1 + When sending query "one two three" + Then results contain + | ID | osm_type | osm_id + | 0 | N | 1 + When sending query " one two three" + Then results contain + | ID | osm_type | osm_id + | 0 | N | 1 + + Scenario: Special characters in name + Given the place nodes + | osm_id | class | type | name + | 1 | place | locality | 'name' : 'Jim-Knopf-Str' + | 2 | place | locality | 'name' : 'Smith/Weston' + | 3 | place | locality | 'name' : 'space mountain' + | 4 | place | locality | 'name' : 'space' + | 5 | place | locality | 'name' : 'mountain' + When importing + When sending query "Jim-Knopf-Str" + Then results contain + | ID | osm_type | osm_id + | 0 | N | 1 + When sending query "Jim Knopf-Str" + Then results contain + | ID | osm_type | osm_id + | 0 | N | 1 + When sending query "Jim Knopf Str" + Then results contain + | ID | osm_type | osm_id + | 0 | N | 1 + When sending query "Jim/Knopf-Str" + Then results contain + | ID | osm_type | osm_id + | 0 | N | 1 + When sending query "Jim-Knopfstr" + Then results contain + | ID | osm_type | osm_id + | 0 | N | 1 + When sending query "Smith/Weston" + Then results contain + | ID | osm_type | osm_id + | 0 | N | 2 + When sending query "Smith Weston" + Then results contain + | ID | osm_type | osm_id + | 0 | N | 2 + When sending query "Smith-Weston" + Then results contain + | ID | osm_type | osm_id + | 0 | N | 2 + When sending query "space mountain" + Then results contain + | ID | osm_type | osm_id + | 0 | N | 3 + When sending query "space-mountain" + Then results contain + | ID | osm_type | osm_id + | 0 | N | 3 + When 
sending query "space/mountain" + Then results contain + | ID | osm_type | osm_id + | 0 | N | 3 + When sending query "space\mountain" + Then results contain + | ID | osm_type | osm_id + | 0 | N | 3 + When sending query "space(mountain)" + Then results contain + | ID | osm_type | osm_id + | 0 | N | 3 + + Scenario: No copying name tag if only one name + Given the place nodes + | osm_id | class | type | name | geometry + | 1 | place | locality | 'name' : 'german' | country:de + When importing + Then table placex contains + | object | calculated_country_code | + | N1 | de + And table placex contains as names for N1 + | object | k | v + | N1 | name | german + + Scenario: Copying name tag to default language if it does not exist + Given the place nodes + | osm_id | class | type | name | geometry + | 1 | place | locality | 'name' : 'german', 'name:fi' : 'finnish' | country:de + When importing + Then table placex contains + | object | calculated_country_code | + | N1 | de + And table placex contains as names for N1 + | k | v + | name | german + | name:fi | finnish + | name:de | german + + Scenario: Copying default language name tag to name if it does not exist + Given the place nodes + | osm_id | class | type | name | geometry + | 1 | place | locality | 'name:de' : 'german', 'name:fi' : 'finnish' | country:de + When importing + Then table placex contains + | object | calculated_country_code | + | N1 | de + And table placex contains as names for N1 + | k | v + | name | german + | name:fi | finnish + | name:de | german + + Scenario: Do not overwrite default language with name tag + Given the place nodes + | osm_id | class | type | name | geometry + | 1 | place | locality | 'name' : 'german', 'name:fi' : 'finnish', 'name:de' : 'local' | country:de + When importing + Then table placex contains + | object | calculated_country_code | + | N1 | de + And table placex contains as names for N1 + | k | v + | name | german + | name:fi | finnish + | name:de | local + + Scenario: Landuse without name are ignored + Given the place areas + | osm_type | osm_id | class | type | geometry + | R | 1 | natural | meadow | (0 0, 1 0, 1 1, 0 1, 0 0) + | R | 2 | landuse | industrial | (0 0, -1 0, -1 -1, 0 -1, 0 0) + When importing + Then table placex has no entry for R1 + And table placex has no entry for R2 + + Scenario: Landuse with name are found + Given the place areas + | osm_type | osm_id | class | type | name | geometry + | R | 1 | natural | meadow | 'name' : 'landuse1' | (0 0, 1 0, 1 1, 0 1, 0 0) + | R | 2 | landuse | industrial | 'name' : 'landuse2' | (0 0, -1 0, -1 -1, 0 -1, 0 0) + When importing + When sending query "landuse1" + Then results contain + | ID | osm_type | osm_id + | 0 | R | 1 + When sending query "landuse2" + Then results contain + | ID | osm_type | osm_id + | 0 | R | 2 + + Scenario: Postcode boundaries without ref + Given the place areas + | osm_type | osm_id | class | type | postcode | geometry + | R | 1 | boundary | postal_code | 12345 | (0 0, 1 0, 1 1, 0 1, 0 0) + When importing + When sending query "12345" + Then results contain + | ID | osm_type | osm_id + | 0 | R | 1 diff --git a/tests/features/db/import/parenting.feature b/tests/features/db/import/parenting.feature new file mode 100644 index 00000000..dd155c45 --- /dev/null +++ b/tests/features/db/import/parenting.feature @@ -0,0 +1,458 @@ +@DB +Feature: Parenting of objects + Tests that the correct parent is choosen + + Scenario: Address inherits postcode from its street unless it has a postcode + Given the scene roads-with-pois + And the 
place nodes + | osm_id | class | type | housenumber | geometry + | 1 | place | house | 4 | :p-N1 + And the place nodes + | osm_id | class | type | housenumber | postcode | geometry + | 2 | place | house | 5 | 99999 | :p-N1 + And the place ways + | osm_id | class | type | name | postcode | geometry + | 1 | highway | residential | galoo | 12345 | :w-north + When importing + Then table placex contains + | object | parent_place_id + | N1 | W1 + | N2 | W1 + When sending query "4 galoo" + Then results contain + | ID | osm_type | osm_id | langaddress + | 0 | N | 1 | 4, galoo, 12345 + When sending query "5 galoo" + Then results contain + | ID | osm_type | osm_id | langaddress + | 0 | N | 2 | 5, galoo, 99999 + + + Scenario: Address without tags, closest street + Given the scene roads-with-pois + And the place nodes + | osm_id | class | type | geometry + | 1 | place | house | :p-N1 + | 2 | place | house | :p-N2 + | 3 | place | house | :p-S1 + | 4 | place | house | :p-S2 + And the named place ways + | osm_id | class | type | geometry + | 1 | highway | residential | :w-north + | 2 | highway | residential | :w-south + When importing + Then table placex contains + | object | parent_place_id + | N1 | W1 + | N2 | W1 + | N3 | W2 + | N4 | W2 + + Scenario: Address without tags avoids unnamed streets + Given the scene roads-with-pois + And the place nodes + | osm_id | class | type | geometry + | 1 | place | house | :p-N1 + | 2 | place | house | :p-N2 + | 3 | place | house | :p-S1 + | 4 | place | house | :p-S2 + And the place ways + | osm_id | class | type | geometry + | 1 | highway | residential | :w-north + And the named place ways + | osm_id | class | type | geometry + | 2 | highway | residential | :w-south + When importing + Then table placex contains + | object | parent_place_id + | N1 | W2 + | N2 | W2 + | N3 | W2 + | N4 | W2 + + Scenario: addr:street tag parents to appropriately named street + Given the scene roads-with-pois + And the place nodes + | osm_id | class | type | street| geometry + | 1 | place | house | south | :p-N1 + | 2 | place | house | north | :p-N2 + | 3 | place | house | south | :p-S1 + | 4 | place | house | north | :p-S2 + And the place ways + | osm_id | class | type | name | geometry + | 1 | highway | residential | north | :w-north + | 2 | highway | residential | south | :w-south + When importing + Then table placex contains + | object | parent_place_id + | N1 | W2 + | N2 | W1 + | N3 | W2 + | N4 | W1 + + Scenario: addr:street tag parents to next named street + Given the scene roads-with-pois + And the place nodes + | osm_id | class | type | street | geometry + | 1 | place | house | abcdef | :p-N1 + | 2 | place | house | abcdef | :p-N2 + | 3 | place | house | abcdef | :p-S1 + | 4 | place | house | abcdef | :p-S2 + And the place ways + | osm_id | class | type | name | geometry + | 1 | highway | residential | abcdef | :w-north + | 2 | highway | residential | abcdef | :w-south + When importing + Then table placex contains + | object | parent_place_id + | N1 | W1 + | N2 | W1 + | N3 | W2 + | N4 | W2 + + Scenario: addr:street tag without appropriately named street + Given the scene roads-with-pois + And the place nodes + | osm_id | class | type | street | geometry + | 1 | place | house | abcdef | :p-N1 + | 2 | place | house | abcdef | :p-N2 + | 3 | place | house | abcdef | :p-S1 + | 4 | place | house | abcdef | :p-S2 + And the place ways + | osm_id | class | type | name | geometry + | 1 | highway | residential | abcde | :w-north + | 2 | highway | residential | abcde | :w-south + When 
importing + Then table placex contains + | object | parent_place_id + | N1 | W1 + | N2 | W1 + | N3 | W2 + | N4 | W2 + + Scenario: addr:place address + Given the scene road-with-alley + And the place nodes + | osm_id | class | type | addr_place | geometry + | 1 | place | house | myhamlet | :n-alley + And the place nodes + | osm_id | class | type | name | geometry + | 2 | place | hamlet | myhamlet | :n-main-west + And the place ways + | osm_id | class | type | name | geometry + | 1 | highway | residential | myhamlet | :w-main + When importing + Then table placex contains + | object | parent_place_id + | N1 | N2 + + Scenario: addr:street is preferred over addr:place + Given the scene road-with-alley + And the place nodes + | osm_id | class | type | addr_place | street | geometry + | 1 | place | house | myhamlet | mystreet| :n-alley + And the place nodes + | osm_id | class | type | name | geometry + | 2 | place | hamlet | myhamlet | :n-main-west + And the place ways + | osm_id | class | type | name | geometry + | 1 | highway | residential | mystreet | :w-main + When importing + Then table placex contains + | object | parent_place_id + | N1 | W1 + + Scenario: Untagged address in simple associated street relation + Given the scene road-with-alley + And the place nodes + | osm_id | class | type | geometry + | 1 | place | house | :n-alley + | 2 | place | house | :n-corner + | 3 | place | house | :n-main-west + And the place ways + | osm_id | class | type | name | geometry + | 1 | highway | residential | foo | :w-main + | 2 | highway | service | bar | :w-alley + And the relations + | id | members | tags + | 1 | W1:street,N1,N2,N3 | 'type' : 'associatedStreet' + When importing + Then table placex contains + | object | parent_place_id + | N1 | W1 + | N2 | W1 + | N3 | W1 + + Scenario: Avoid unnamed streets in simple associated street relation + Given the scene road-with-alley + And the place nodes + | osm_id | class | type | geometry + | 1 | place | house | :n-alley + | 2 | place | house | :n-corner + | 3 | place | house | :n-main-west + And the named place ways + | osm_id | class | type | geometry + | 1 | highway | residential | :w-main + And the place ways + | osm_id | class | type | geometry + | 2 | highway | residential | :w-alley + And the relations + | id | members | tags + | 1 | N1,N2,N3,W2:street,W1:street | 'type' : 'associatedStreet' + When importing + Then table placex contains + | object | parent_place_id + | N1 | W1 + | N2 | W1 + | N3 | W1 + + ### Scenario 10 + Scenario: Associated street relation overrides addr:street + Given the scene road-with-alley + And the place nodes + | osm_id | class | type | street | geometry + | 1 | place | house | bar | :n-alley + And the place ways + | osm_id | class | type | name | geometry + | 1 | highway | residential | foo | :w-main + | 2 | highway | residential | bar | :w-alley + And the relations + | id | members | tags + | 1 | W1:street,N1,N2,N3 | 'type' : 'associatedStreet' + When importing + Then table placex contains + | object | parent_place_id + | N1 | W1 + + Scenario: Building without tags, closest street from center point + Given the scene building-on-street-corner + And the named place ways + | osm_id | class | type | geometry + | 1 | building | yes | :w-building + | 2 | highway | primary | :w-WE + | 3 | highway | residential | :w-NS + When importing + Then table placex contains + | object | parent_place_id + | W1 | W3 + + Scenario: Building with addr:street tags + Given the scene building-on-street-corner + And the named place ways + | osm_id 
| class | type | street | geometry + | 1 | building | yes | bar | :w-building + And the place ways + | osm_id | class | type | name | geometry + | 2 | highway | primary | bar | :w-WE + | 3 | highway | residential | foo | :w-NS + When importing + Then table placex contains + | object | parent_place_id + | W1 | W2 + + Scenario: Building with addr:place tags + Given the scene building-on-street-corner + And the place nodes + | osm_id | class | type | name | geometry + | 1 | place | village | bar | :n-outer + And the named place ways + | osm_id | class | type | addr_place | geometry + | 1 | building | yes | bar | :w-building + And the place ways + | osm_id | class | type | name | geometry + | 2 | highway | primary | bar | :w-WE + | 3 | highway | residential | foo | :w-NS + When importing + Then table placex contains + | object | parent_place_id + | W1 | N1 + + Scenario: Building in associated street relation + Given the scene building-on-street-corner + And the named place ways + | osm_id | class | type | geometry + | 1 | building | yes | :w-building + And the place ways + | osm_id | class | type | name | geometry + | 2 | highway | primary | bar | :w-WE + | 3 | highway | residential | foo | :w-NS + And the relations + | id | members | tags + | 1 | W1:house,W2:street | 'type' : 'associatedStreet' + When importing + Then table placex contains + | object | parent_place_id + | W1 | W2 + + Scenario: Building in associated street relation overrides addr:street + Given the scene building-on-street-corner + And the named place ways + | osm_id | class | type | street | geometry + | 1 | building | yes | foo | :w-building + And the place ways + | osm_id | class | type | name | geometry + | 2 | highway | primary | bar | :w-WE + | 3 | highway | residential | foo | :w-NS + And the relations + | id | members | tags + | 1 | W1:house,W2:street | 'type' : 'associatedStreet' + When importing + Then table placex contains + | object | parent_place_id + | W1 | W2 + + Scenario: Wrong member in associated street relation is ignored + Given the scene building-on-street-corner + And the named place nodes + | osm_id | class | type | geometry + | 1 | place | house | :n-outer + And the named place ways + | osm_id | class | type | street | geometry + | 1 | building | yes | foo | :w-building + And the place ways + | osm_id | class | type | name | geometry + | 2 | highway | primary | bar | :w-WE + | 3 | highway | residential | foo | :w-NS + And the relations + | id | members | tags + | 1 | N1:house,W1:street,W3:street | 'type' : 'associatedStreet' + When importing + Then table placex contains + | object | parent_place_id + | N1 | W3 + + Scenario: POIs in building inherit address + Given the scene building-on-street-corner + And the named place nodes + | osm_id | class | type | geometry + | 1 | amenity | bank | :n-inner + | 2 | shop | bakery | :n-edge-NS + | 3 | shop | supermarket| :n-edge-WE + And the place ways + | osm_id | class | type | street | addr_place | housenumber | geometry + | 1 | building | yes | foo | nowhere | 3 | :w-building + And the place ways + | osm_id | class | type | name | geometry + | 2 | highway | primary | bar | :w-WE + | 3 | highway | residential | foo | :w-NS + When importing + Then table placex contains + | object | parent_place_id | street | addr_place | housenumber + | W1 | W3 | foo | nowhere | 3 + | N1 | W3 | foo | nowhere | 3 + | N2 | W3 | foo | nowhere | 3 + | N3 | W3 | foo | nowhere | 3 + + Scenario: POIs don't inherit from streets + Given the scene building-on-street-corner + And the named 
place nodes + | osm_id | class | type | geometry + | 1 | amenity | bank | :n-inner + And the place ways + | osm_id | class | type | street | addr_place | housenumber | geometry + | 1 | highway | path | foo | nowhere | 3 | :w-building + And the place ways + | osm_id | class | type | name | geometry + | 3 | highway | residential | foo | :w-NS + When importing + Then table placex contains + | object | parent_place_id | street | addr_place | housenumber + | N1 | W3 | None | None | None + + Scenario: POIs with own address do not inherit building address + Given the scene building-on-street-corner + And the named place nodes + | osm_id | class | type | street | geometry + | 1 | amenity | bank | bar | :n-inner + And the named place nodes + | osm_id | class | type | housenumber | geometry + | 2 | shop | bakery | 4 | :n-edge-NS + And the named place nodes + | osm_id | class | type | addr_place | geometry + | 3 | shop | supermarket| nowhere | :n-edge-WE + And the place nodes + | osm_id | class | type | name | geometry + | 4 | place | isolated_dwelling | theplace | :n-outer + And the place ways + | osm_id | class | type | addr_place | housenumber | geometry + | 1 | building | yes | theplace | 3 | :w-building + And the place ways + | osm_id | class | type | name | geometry + | 2 | highway | primary | bar | :w-WE + | 3 | highway | residential | foo | :w-NS + When importing + Then table placex contains + | object | parent_place_id | street | addr_place | housenumber + | W1 | N4 | None | theplace | 3 + | N1 | W2 | bar | None | None + | N2 | W3 | None | None | 4 + | N3 | W2 | None | nowhere | None + + ### Scenario 20 + Scenario: POIs parent a road if and only if they are attached to it + Given the scene points-on-roads + And the named place nodes + | osm_id | class | type | street | geometry + | 1 | highway | bus_stop | North St | :n-SE + | 2 | highway | bus_stop | South St | :n-NW + | 3 | highway | bus_stop | North St | :n-S-unglued + | 4 | highway | bus_stop | South St | :n-N-unglued + And the place ways + | osm_id | class | type | name | geometry + | 1 | highway | secondary | North St | :w-north + | 2 | highway | unclassified | South St | :w-south + And the ways + | id | nodes + | 1 | 100,101,2,103,104 + | 2 | 200,201,1,202,203 + When importing + Then table placex contains + | object | parent_place_id + | N1 | W2 + | N2 | W1 + | N3 | W1 + | N4 | W2 + + Scenario: POIs do not parent non-roads they are attached to + Given the scene points-on-roads + And the named place nodes + | osm_id | class | type | street | geometry + | 1 | highway | bus_stop | North St | :n-SE + | 2 | highway | bus_stop | South St | :n-NW + And the place ways + | osm_id | class | type | name | geometry + | 1 | landuse | residential | North St | :w-north + | 2 | waterway| river | South St | :w-south + And the ways + | id | nodes + | 1 | 100,101,2,103,104 + | 2 | 200,201,1,202,203 + When importing + Then table placex contains + | object | parent_place_id + | N1 | 0 + | N2 | 0 + + Scenario: POIs on building outlines inherit associated street relation + Given the scene building-on-street-corner + And the named place nodes + | osm_id | class | type | geometry + | 1 | place | house | :n-edge-NS + And the named place ways + | osm_id | class | type | geometry + | 1 | building | yes | :w-building + And the place ways + | osm_id | class | type | name | geometry + | 2 | highway | primary | bar | :w-WE + | 3 | highway | residential | foo | :w-NS + And the relations + | id | members | tags + | 1 | W1:house,W2:street | 'type' : 'associatedStreet' 
+ And the ways + | id | nodes + | 1 | 100,1,101,102,100 + When importing + Then table placex contains + | object | parent_place_id + | N1 | W2 + diff --git a/tests/features/db/import/placex.feature b/tests/features/db/import/placex.feature new file mode 100644 index 00000000..a429202a --- /dev/null +++ b/tests/features/db/import/placex.feature @@ -0,0 +1,383 @@ +@DB +Feature: Import into placex + Tests that data in placex is completed correctly. + + Scenario: No country code tag is available + Given the place nodes + | osm_id | class | type | name | geometry + | 1 | highway | primary | 'name' : 'A1' | country:us + When importing + Then table placex contains + | object | country_code | calculated_country_code | + | N1 | None | us | + + Scenario: Location overwrites country code tag + Given the scene country + And the place nodes + | osm_id | class | type | name | country_code | geometry + | 1 | highway | primary | 'name' : 'A1' | de | :us + When importing + Then table placex contains + | object | country_code | calculated_country_code | + | N1 | de | us | + + Scenario: Country code tag overwrites location for countries + Given the place areas + | osm_type | osm_id | class | type | admin_level | name | country_code | geometry + | R | 1 | boundary | administrative | 2 | 'name' : 'foo' | de | (-100 40, -101 40, -101 41, -100 41, -100 40) + When importing + Then table placex contains + | object | country_code | calculated_country_code | + | R1 | de | de | + + Scenario: Illegal country code tag for countries is ignored + And the place areas + | osm_type | osm_id | class | type | admin_level | name | country_code | geometry + | R | 1 | boundary | administrative | 2 | 'name' : 'foo' | xx | (-100 40, -101 40, -101 41, -100 41, -100 40) + When importing + Then table placex contains + | object | country_code | calculated_country_code | + | R1 | xx | us | + + Scenario: admin level is copied over + Given the place nodes + | osm_id | class | type | admin_level | name + | 1 | place | state | 3 | 'name' : 'foo' + When importing + Then table placex contains + | object | admin_level | + | N1 | 3 | + + Scenario: admin level is default 15 + Given the place nodes + | osm_id | class | type | name + | 1 | amenity | prison | 'name' : 'foo' + When importing + Then table placex contains + | object | admin_level | + | N1 | 15 | + + Scenario: admin level is never larger than 15 + Given the place nodes + | osm_id | class | type | name | admin_level + | 1 | amenity | prison | 'name' : 'foo' | 16 + When importing + Then table placex contains + | object | admin_level | + | N1 | 15 | + + + Scenario: postcode node without postcode is dropped + Given the place nodes + | osm_id | class | type + | 1 | place | postcode + When importing + Then table placex has no entry for N1 + + Scenario: postcode boundary without postcode is dropped + Given the place areas + | osm_type | osm_id | class | type | geometry + | R | 1 | boundary | postal_code | poly-area:0.1 + When importing + Then table placex has no entry for R1 + + Scenario: search and address ranks for GB post codes correctly assigned + Given the place nodes + | osm_id | class | type | postcode | geometry + | 1 | place | postcode | E45 2CD | country:gb + | 2 | place | postcode | E45 2 | country:gb + | 3 | place | postcode | Y45 | country:gb + When importing + Then table placex contains + | object | postcode | calculated_country_code | rank_search | rank_address + | N1 | E45 2CD | gb | 25 | 5 + | N2 | E45 2 | gb | 23 | 5 + | N3 | Y45 | gb | 21 | 5 + + Scenario: wrongly 
formatted GB postcodes are down-ranked + Given the place nodes + | osm_id | class | type | postcode | geometry + | 1 | place | postcode | EA452CD | country:gb + | 2 | place | postcode | E45 23 | country:gb + | 3 | place | postcode | y45 | country:gb + When importing + Then table placex contains + | object | calculated_country_code | rank_search | rank_address + | N1 | gb | 30 | 30 + | N2 | gb | 30 | 30 + | N3 | gb | 30 | 30 + + Scenario: search and address rank for DE postcodes correctly assigned + Given the place nodes + | osm_id | class | type | postcode | geometry + | 1 | place | postcode | 56427 | country:de + | 2 | place | postcode | 5642 | country:de + | 3 | place | postcode | 5642A | country:de + | 4 | place | postcode | 564276 | country:de + When importing + Then table placex contains + | object | calculated_country_code | rank_search | rank_address + | N1 | de | 21 | 11 + | N2 | de | 30 | 30 + | N3 | de | 30 | 30 + | N4 | de | 30 | 30 + + Scenario: search and address rank for other postcodes are correctly assigned + Given the place nodes + | osm_id | class | type | postcode | geometry + | 1 | place | postcode | 1 | country:ca + | 2 | place | postcode | X3 | country:ca + | 3 | place | postcode | 543 | country:ca + | 4 | place | postcode | 54dc | country:ca + | 5 | place | postcode | 12345 | country:ca + | 6 | place | postcode | 55TT667 | country:ca + | 7 | place | postcode | 123-65 | country:ca + | 8 | place | postcode | 12 445 4 | country:ca + | 9 | place | postcode | A1:bc10 | country:ca + When importing + Then table placex contains + | object | calculated_country_code | rank_search | rank_address + | N1 | ca | 21 | 11 + | N2 | ca | 21 | 11 + | N3 | ca | 21 | 11 + | N4 | ca | 21 | 11 + | N5 | ca | 21 | 11 + | N6 | ca | 21 | 11 + | N7 | ca | 25 | 11 + | N8 | ca | 25 | 11 + | N9 | ca | 25 | 11 + + + Scenario: search and address ranks for places are correctly assigned + Given the named place nodes + | osm_id | class | type | + | 1 | foo | bar | + | 11 | place | Continent | + | 12 | place | continent | + | 13 | place | sea | + | 14 | place | country | + | 15 | place | state | + | 16 | place | region | + | 17 | place | county | + | 18 | place | city | + | 19 | place | island | + | 20 | place | town | + | 21 | place | village | + | 22 | place | hamlet | + | 23 | place | municipality | + | 24 | place | district | + | 25 | place | unincorporated_area | + | 26 | place | borough | + | 27 | place | suburb | + | 28 | place | croft | + | 29 | place | subdivision | + | 30 | place | isolated_dwelling | + | 31 | place | farm | + | 32 | place | locality | + | 33 | place | islet | + | 34 | place | mountain_pass | + | 35 | place | neighbourhood | + | 36 | place | house | + | 37 | place | building | + | 38 | place | houses | + And the named place nodes + | osm_id | class | type | extratags + | 100 | place | locality | 'locality' : 'townland' + | 101 | place | city | 'capital' : 'yes' + When importing + Then table placex contains + | object | rank_search | rank_address | + | N1 | 30 | 30 | + | N11 | 30 | 30 | + | N12 | 2 | 2 | + | N13 | 2 | 0 | + | N14 | 4 | 4 | + | N15 | 8 | 8 | + | N16 | 18 | 0 | + | N17 | 12 | 12 | + | N18 | 16 | 16 | + | N19 | 17 | 0 | + | N20 | 18 | 16 | + | N21 | 19 | 16 | + | N22 | 19 | 16 | + | N23 | 19 | 16 | + | N24 | 19 | 16 | + | N25 | 19 | 16 | + | N26 | 19 | 16 | + | N27 | 20 | 20 | + | N28 | 20 | 20 | + | N29 | 20 | 20 | + | N30 | 20 | 20 | + | N31 | 20 | 0 | + | N32 | 20 | 0 | + | N33 | 20 | 0 | + | N34 | 20 | 0 | + | N100 | 20 | 20 | + | N101 | 15 | 16 | + | N35 | 22 
| 22 | + | N36 | 30 | 30 | + | N37 | 30 | 30 | + | N38 | 28 | 0 | + + Scenario: search and address ranks for boundaries are correctly assigned + Given the named place nodes + | osm_id | class | type + | 1 | boundary | administrative + And the named place ways + | osm_id | class | type | geometry + | 10 | boundary | administrative | 10 10, 11 11 + And the named place areas + | osm_type | osm_id | class | type | admin_level | geometry + | R | 20 | boundary | administrative | 2 | (1 1, 2 2, 1 2, 1 1) + | R | 21 | boundary | administrative | 32 | (3 3, 4 4, 3 4, 3 3) + | R | 22 | boundary | nature_park | 6 | (0 0, 1 0, 0 1, 0 0) + | R | 23 | boundary | natural_reserve| 10 | (0 0, 1 1, 1 0, 0 0) + When importing + Then table placex has no entry for N1 + And table placex has no entry for W10 + And table placex contains + | object | rank_search | rank_address + | R20 | 4 | 4 + | R21 | 30 | 30 + | R22 | 12 | 0 + | R23 | 20 | 0 + + Scenario Outline: minor highways droped without name, included with + Given the scene roads-with-pois + And a wiped database + And the place ways + | osm_id | class | type | geometry + | 1 | highway | | :w-south + And the named place ways + | osm_id | class | type | geometry + | 2 | highway | | :w-north + When importing + Then table placex has no entry for W1 + And table placex contains + | object | rank_search | rank_address + | W2 | | + + Examples: + | type | rank + | service | 27 + | cycleway | 27 + | path | 27 + | footway | 27 + | steps | 27 + | bridleway | 27 + | track | 26 + | byway | 26 + | motorway_link | 27 + | primary_link | 27 + | trunk_link | 27 + | secondary_link| 27 + | tertiary_link | 27 + + Scenario: search and address ranks for highways correctly assigned + Given the scene roads-with-pois + And the place nodes + | osm_id | class | type + | 1 | highway | bus_stop + And the place ways + | osm_id | class | type | geometry + | 1 | highway | primary | :w-south + | 2 | highway | secondary | :w-south + | 3 | highway | tertiary | :w-south + | 4 | highway | residential | :w-north + | 5 | highway | unclassified | :w-north + | 6 | highway | something | :w-north + When importing + Then table placex contains + | object | rank_search | rank_address + | N1 | 30 | 30 + | W1 | 26 | 26 + | W2 | 26 | 26 + | W3 | 26 | 26 + | W4 | 26 | 26 + | W5 | 26 | 26 + | W6 | 26 | 26 + + Scenario: rank and inclusion of landuses + Given the place nodes + | osm_id | class | type + | 1 | landuse | residential + And the named place nodes + | osm_id | class | type + | 2 | landuse | residential + And the place ways + | osm_id | class | type | geometry + | 1 | landuse | residential | 0 0, 0 1 + And the named place ways + | osm_id | class | type | geometry + | 2 | landuse | residential | 1 1, 1 1.1 + And the place areas + | osm_type | osm_id | class | type | geometry + | W | 3 | landuse | residential | poly-area:0.1 + | R | 1 | landuse | residential | poly-area:0.01 + | R | 10 | landuse | residential | poly-area:0.5 + And the named place areas + | osm_type | osm_id | class | type | geometry + | W | 4 | landuse | residential | poly-area:0.1 + | R | 2 | landuse | residential | poly-area:0.05 + When importing + Then table placex has no entry for N1 + And table placex has no entry for W1 + And table placex has no entry for W3 + And table placex has no entry for R1 + And table placex has no entry for R10 + And table placex contains + | object | rank_search | rank_address + | N2 | 30 | 30 + | W2 | 30 | 30 + | W4 | 22 | 22 + | R2 | 22 | 22 + + Scenario: rank and inclusion of naturals + Given the place 
nodes + | osm_id | class | type + | 1 | natural | peak + | 3 | natural | volcano + And the named place nodes + | osm_id | class | type + | 2 | natural | peak + | 4 | natural | volcano + | 5 | natural | foobar + And the place ways + | osm_id | class | type | geometry + | 1 | natural | mountain_range | 10 10,11 11 + And the named place ways + | osm_id | class | type | geometry + | 2 | natural | mountain_range | 12 12,11 11 + | 3 | natural | foobar | 13 13,13.1 13 + | 4 | natural | coastline | 14 14,14.1 14 + And the place areas + | osm_type | osm_id | class | type | geometry + | R | 1 | natural | volcano | poly-area:0.1 + | R | 2 | natural | volcano | poly-area:1.0 + And the named place areas + | osm_type | osm_id | class | type | geometry + | R | 3 | natural | volcano | poly-area:0.1 + | R | 4 | natural | foobar | poly-area:0.5 + | R | 5 | natural | sea | poly-area:5.0 + | R | 6 | natural | sea | poly-area:0.01 + | R | 7 | natural | coastline | poly-area:1.0 + When importing + Then table placex has no entry for N1 + And table placex has no entry for N3 + And table placex has no entry for W1 + And table placex has no entry for R1 + And table placex has no entry for R2 + And table placex has no entry for R7 + And table placex has no entry for W4 + And table placex contains + | object | rank_search | rank_address + | N2 | 18 | 0 + | N4 | 18 | 0 + | N5 | 30 | 30 + | W2 | 18 | 0 + | R3 | 18 | 0 + | R4 | 22 | 22 + | R5 | 4 | 4 + | R6 | 4 | 4 + | W3 | 30 | 30 + diff --git a/tests/features/db/import/search_terms.feature b/tests/features/db/import/search_terms.feature new file mode 100644 index 00000000..642b3ea8 --- /dev/null +++ b/tests/features/db/import/search_terms.feature @@ -0,0 +1,28 @@ +@DB +Feature: Creation of search terms + Tests that search_name table is filled correctly + + Scenario: POIs without a name have no search entry + Given the scene roads-with-pois + And the place nodes + | osm_id | class | type | geometry + | 1 | place | house | :p-N1 + And the place ways + | osm_id | class | type | geometry + | 1 | highway | residential | :w-north + When importing + Then table search_name has no entry for N1 + + + Scenario: Named POIs inherit address from parent + Given the scene roads-with-pois + And the place nodes + | osm_id | class | type | name | geometry + | 1 | place | house | foo | :p-N1 + And the place ways + | osm_id | class | type | name | geometry + | 1 | highway | residential | the road | :w-north + When importing + Then search_name table contains + | place_id | name_vector | nameaddress_vector + | N1 | foo | the road diff --git a/tests/features/db/import/simple.feature b/tests/features/db/import/simple.feature new file mode 100644 index 00000000..2e2c825a --- /dev/null +++ b/tests/features/db/import/simple.feature @@ -0,0 +1,17 @@ +@DB +Feature: Import of simple objects + Testing simple stuff + + Scenario: Import place node + Given the place nodes: + | osm_id | class | type | name | geometry + | 1 | place | village | 'name' : 'Foo' | 10.0 -10.0 + When importing + Then table placex contains + | object | class | type | name | centroid + | N1 | place | village | 'name' : 'Foo' | 10.0,-10.0 +- 1m + When sending query "Foo" + Then results contain + | ID | osm_type | osm_id + | 0 | N | 1 + diff --git a/tests/features/db/update/linked_places.feature b/tests/features/db/update/linked_places.feature new file mode 100644 index 00000000..6f80f680 --- /dev/null +++ b/tests/features/db/update/linked_places.feature @@ -0,0 +1,92 @@ +@DB +Feature: Updates of linked places + Tests that linked 
places are correctly added and deleted. + + + Scenario: Add linked place when linking relation is renamed + Given the place nodes + | osm_id | class | type | name | geometry + | 1 | place | city | foo | 0 0 + And the place areas + | osm_type | osm_id | class | type | name | admin_level | geometry + | R | 1 | boundary | administrative | foo | 8 | poly-area:0.1 + When importing + And sending query "foo" with dups + Then results contain + | osm_type + | R + When updating place areas + | osm_type | osm_id | class | type | name | admin_level | geometry + | R | 1 | boundary | administrative | foobar | 8 | poly-area:0.1 + Then table placex contains + | object | linked_place_id + | N1 | None + When sending query "foo" with dups + Then results contain + | osm_type + | N + + Scenario: Add linked place when linking relation is removed + Given the place nodes + | osm_id | class | type | name | geometry + | 1 | place | city | foo | 0 0 + And the place areas + | osm_type | osm_id | class | type | name | admin_level | geometry + | R | 1 | boundary | administrative | foo | 8 | poly-area:0.1 + When importing + And sending query "foo" with dups + Then results contain + | osm_type + | R + When marking for delete R1 + Then table placex contains + | object | linked_place_id + | N1 | None + And sending query "foo" with dups + Then results contain + | osm_type + | N + + Scenario: Remove linked place when linking relation is added + Given the place nodes + | osm_id | class | type | name | geometry + | 1 | place | city | foo | 0 0 + When importing + And sending query "foo" with dups + Then results contain + | osm_type + | N + When updating place areas + | osm_type | osm_id | class | type | name | admin_level | geometry + | R | 1 | boundary | administrative | foo | 8 | poly-area:0.1 + Then table placex contains + | object | linked_place_id + | N1 | R1 + When sending query "foo" with dups + Then results contain + | osm_type + | R + + Scenario: Remove linked place when linking relation is renamed + Given the place nodes + | osm_id | class | type | name | geometry + | 1 | place | city | foo | 0 0 + And the place areas + | osm_type | osm_id | class | type | name | admin_level | geometry + | R | 1 | boundary | administrative | foobar | 8 | poly-area:0.1 + When importing + And sending query "foo" with dups + Then results contain + | osm_type + | N + When updating place areas + | osm_type | osm_id | class | type | name | admin_level | geometry + | R | 1 | boundary | administrative | foo | 8 | poly-area:0.1 + Then table placex contains + | object | linked_place_id + | N1 | R1 + When sending query "foo" with dups + Then results contain + | osm_type + | R + diff --git a/tests/features/db/update/naming.feature b/tests/features/db/update/naming.feature new file mode 100644 index 00000000..261f02dc --- /dev/null +++ b/tests/features/db/update/naming.feature @@ -0,0 +1,39 @@ +@DB +Feature: Update of names in place objects + Test all naming related issues in updates + + + Scenario: Updating postcode in postcode boundaries without ref + Given the place areas + | osm_type | osm_id | class | type | postcode | geometry + | R | 1 | boundary | postal_code | 12345 | (0 0, 1 0, 1 1, 0 1, 0 0) + When importing + And sending query "12345" + Then results contain + | ID | osm_type | osm_id + | 0 | R | 1 + When updating place areas + | osm_type | osm_id | class | type | postcode | geometry + | R | 1 | boundary | postal_code | 54321 | (0 0, 1 0, 1 1, 0 1, 0 0) + And sending query "12345" + Then exactly 0 results are returned + When sending 
query "54321" + Then results contain + | ID | osm_type | osm_id + | 0 | R | 1 + + + Scenario: Delete postcode from postcode boundaries without ref + Given the place areas + | osm_type | osm_id | class | type | postcode | geometry + | R | 1 | boundary | postal_code | 12345 | (0 0, 1 0, 1 1, 0 1, 0 0) + When importing + And sending query "12345" + Then results contain + | ID | osm_type | osm_id + | 0 | R | 1 + When updating place areas + | osm_type | osm_id | class | type | geometry + | R | 1 | boundary | postal_code | (0 0, 1 0, 1 1, 0 1, 0 0) + Then table placex has no entry for R1 + diff --git a/tests/features/db/update/simple.feature b/tests/features/db/update/simple.feature new file mode 100644 index 00000000..4493bffb --- /dev/null +++ b/tests/features/db/update/simple.feature @@ -0,0 +1,55 @@ +@DB +Feature: Update of simple objects + Testing simple stuff + + Scenario: Remove name from a landuse object + Given the place nodes + | osm_id | class | type | name + | 1 | landuse | wood | 'name' : 'Foo' + When importing + Then table placex contains + | object | class | type | name + | N1 | landuse| wood | 'name' : 'Foo' + When updating place nodes + | osm_id | class | type + | 1 | landuse | wood + Then table placex has no entry for N1 + + + Scenario: Do delete small boundary features + Given the place areas + | osm_type | osm_id | class | type | admin_level | geometry + | R | 1 | boundary | administrative | 3 | (0 0, 1 0, 1 1, 0 1, 0 0) + When importing + Then table placex contains + | object | rank_search + | R1 | 6 + When marking for delete R1 + Then table placex has no entry for R1 + + Scenario: Do not delete large boundary features + Given the place areas + | osm_type | osm_id | class | type | admin_level | geometry + | R | 1 | boundary | administrative | 3 | (0 0, 2 0, 2 2.1, 0 2, 0 0) + When importing + Then table placex contains + | object | rank_search + | R1 | 6 + When marking for delete R1 + Then table placex contains + | object | rank_search + | R1 | 6 + + Scenario: Do delete large features of low rank + Given the named place areas + | osm_type | osm_id | class | type | geometry + | W | 1 | place | house | (0 0, 2 0, 2 2.1, 0 2, 0 0) + | R | 1 | boundary | national_park | (0 0, 2 0, 2 2.1, 0 2, 0 0) + When importing + Then table placex contains + | object | rank_address + | R1 | 0 + | W1 | 30 + When marking for delete R1,W1 + Then table placex has no entry for W1 + Then table placex has no entry for R1 diff --git a/tests/features/osm2pgsql/import/relation.feature b/tests/features/osm2pgsql/import/relation.feature new file mode 100644 index 00000000..aba99a47 --- /dev/null +++ b/tests/features/osm2pgsql/import/relation.feature @@ -0,0 +1,13 @@ +@DB +Feature: Import of relations by osm2pgsql + Testing specific relation problems related to members. + + Scenario: Don't import empty waterways + Given the osm nodes: + | id | tags + | 1 | 'amenity' : 'prison', 'name' : 'foo' + And the osm relations: + | id | tags | members + | 1 | 'type' : 'waterway', 'waterway' : 'river', 'name' : 'XZ' | N1 + When loading osm data + Then table place has no entry for R1 diff --git a/tests/features/osm2pgsql/import/simple.feature b/tests/features/osm2pgsql/import/simple.feature new file mode 100644 index 00000000..053c853c --- /dev/null +++ b/tests/features/osm2pgsql/import/simple.feature @@ -0,0 +1,49 @@ +@DB +Feature: Import of simple objects by osm2pgsql + Testing basic functions of osm2pgsql. 
+ + Scenario: Import simple objects + Given the osm nodes: + | id | tags + | 1 | 'amenity' : 'prison', 'name' : 'foo' + Given the osm nodes: + | id | geometry + | 100 | 0 0 + | 101 | 0 0.1 + | 102 | 0.1 0.2 + | 200 | 0 0 + | 201 | 0 1 + | 202 | 1 1 + | 203 | 1 0 + Given the osm ways: + | id | tags | nodes + | 1 | 'shop' : 'toys', 'name' : 'tata' | 100 101 102 + | 2 | 'ref' : '45' | 200 201 202 203 200 + Given the osm relations: + | id | tags | members + | 1 | 'type' : 'multipolygon', 'tourism' : 'hotel', 'name' : 'XZ' | N1,W2 + When loading osm data + Then table place contains + | object | class | type | name + | N1 | amenity | prison | 'name' : 'foo' + | W1 | shop | toys | 'name' : 'tata' + | R1 | tourism | hotel | 'name' : 'XZ' + + Scenario: Import object with two main tags + Given the osm nodes: + | id | tags + | 1 | 'tourism' : 'hotel', 'amenity' : 'restaurant', 'name' : 'foo' + When loading osm data + Then table place contains + | object | class | type | name + | N1:tourism | tourism | hotel | 'name' : 'foo' + | N1:amenity | amenity | restaurant | 'name' : 'foo' + + Scenario: Import stand-alone house number with postcode + Given the osm nodes: + | id | tags + | 1 | 'addr:housenumber' : '4', 'addr:postcode' : '3345' + When loading osm data + Then table place contains + | object | class | type + | N1 | place | house diff --git a/tests/features/osm2pgsql/update/relation.feature b/tests/features/osm2pgsql/update/relation.feature new file mode 100644 index 00000000..2e6f9d8a --- /dev/null +++ b/tests/features/osm2pgsql/update/relation.feature @@ -0,0 +1,30 @@ +@DB +Feature: Update of relations by osm2pgsql + Testing relation update by osm2pgsql. + +Scenario: Remove all members of a relation + Given the osm nodes: + | id | tags + | 1 | 'amenity' : 'prison', 'name' : 'foo' + Given the osm nodes: + | id | geometry + | 200 | 0 0 + | 201 | 0 0.0001 + | 202 | 0.0001 0.0001 + | 203 | 0.0001 0 + Given the osm ways: + | id | tags | nodes + | 2 | 'ref' : '45' | 200 201 202 203 200 + Given the osm relations: + | id | tags | members + | 1 | 'type' : 'multipolygon', 'tourism' : 'hotel', 'name' : 'XZ' | W2 + When loading osm data + Then table place contains + | object | class | type | name + | R1 | tourism | hotel | 'name' : 'XZ' + Given the osm relations: + | action | id | tags | members + | M | 1 | 'type' : 'multipolygon', 'tourism' : 'hotel', 'name' : 'XZ' | N1 + When updating osm data + Then table place has no entry for R1 + diff --git a/tests/features/osm2pgsql/update/simple.feature b/tests/features/osm2pgsql/update/simple.feature new file mode 100644 index 00000000..e0c9b005 --- /dev/null +++ b/tests/features/osm2pgsql/update/simple.feature @@ -0,0 +1,22 @@ +@DB +Feature: Update of simple objects by osm2pgsql + Testing basic update functions of osm2pgsql. 
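The osm2pgsql update features (the relation scenario above and the simple updates that follow) reuse the same node/way/relation tables but add an action column; only 'M' for a modified object appears in these scenarios. 'loading osm data' performs the initial import, while 'updating osm data' pushes the changed objects through osm2pgsql a second time. The exact command lines are the business of tests/steps/osm2pgsql_setup.py, which is not part of this excerpt; the sketch below only illustrates the difference between an initial import and an append run, and the file and database names are placeholders:

    import subprocess

    def run_osm2pgsql(osm_file, database, append=False):
        """Import or update a database using osm2pgsql's gazetteer output.

        Simplified sketch: the real harness passes additional options
        (style file, cache size, ...) that are omitted here.
        """
        cmd = ['osm2pgsql', '--output', 'gazetteer', '--database', database]
        # --create wipes and reimports, --append applies changes on top
        cmd.append('--append' if append else '--create')
        cmd.append(osm_file)
        subprocess.check_call(cmd)

    # run_osm2pgsql('import.osm', 'test_nominatim')
    # run_osm2pgsql('update.osc', 'test_nominatim', append=True)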
+ + Scenario: Import object with two main tags + Given the osm nodes: + | id | tags + | 1 | 'tourism' : 'hotel', 'amenity' : 'restaurant', 'name' : 'foo' + When loading osm data + Then table place contains + | object | class | type | name + | N1:tourism | tourism | hotel | 'name' : 'foo' + | N1:amenity | amenity | restaurant | 'name' : 'foo' + Given the osm nodes: + | action | id | tags + | M | 1 | 'tourism' : 'hotel', 'name' : 'foo' + When updating osm data + Then table place has no entry for N1:amenity + And table place contains + | object | class | type | name + | N1:tourism | tourism | hotel | 'name' : 'foo' + diff --git a/tests/scenes/bin/make_scenes.sh b/tests/scenes/bin/make_scenes.sh new file mode 100755 index 00000000..4ad31f31 --- /dev/null +++ b/tests/scenes/bin/make_scenes.sh @@ -0,0 +1,31 @@ +#/bin/bash -e +# +# Regenerates wkts for scenarios. +# + +datadir="$( cd "$( dirname "$0" )" && cd ../data && pwd )" + +if [! -d "$datadir" ]; then + echo "Cannot find data dir."; + exit -1; +fi + +echo "Using datadir $datadir" +pushd $datadir + +# remove old wkts +rm $datadir/*.wkt + +# create wkts from SQL scripts +for fl in *.sql; do + echo "Processing $fl.." + cat $fl | psql -d nominatim -t -o ${fl/.sql/.wkt} +done + +# create wkts from .osm files +for fl in *.osm; do + echo "Processing $fl.." + ../bin/osm2wkt $fl +done + +popd diff --git a/tests/scenes/bin/osm2wkt.cc b/tests/scenes/bin/osm2wkt.cc new file mode 100644 index 00000000..9cf9b578 --- /dev/null +++ b/tests/scenes/bin/osm2wkt.cc @@ -0,0 +1,101 @@ + +// The code in this file is released into the Public Domain. + +#include +#include +#include +#include + +#include +#include +#include +#include +#include +#include +#include +#include +#include + +typedef osmium::index::map::StlMap index_type; + +typedef osmium::handler::NodeLocationsForWays location_handler_type; + + +class ExportToWKTHandler : public osmium::handler::Handler { + + osmium::geom::WKTFactory m_factory; + std::unordered_map m_files; + +public: + + void node(const osmium::Node& node) { + print_geometry(node.tags(), m_factory.create_point(node)); + } + + void way(const osmium::Way& way) { + if (!way.is_closed() || !way.tags().get_value_by_key("area")) + print_geometry(way.tags(), m_factory.create_linestring(way)); + } + + void area(const osmium::Area& area) { + if (!area.from_way() || area.tags().get_value_by_key("area")) + print_geometry(area.tags(), m_factory.create_multipolygon(area)); + } + + void close() { + for (auto& fd : m_files) + fd.second.close(); + } + +private: + + void print_geometry(const osmium::TagList& tags, const std::string& wkt) { + const char* scenario = tags.get_value_by_key("test:section"); + const char* id = tags.get_value_by_key("test:id"); + if (scenario && id) { + auto& fd = m_files[std::string(scenario)]; + if (!fd.is_open()) + fd.open(std::string(scenario) + ".wkt"); + fd << id << " | " << wkt << "\n"; + } + } + +}; // class ExportToWKTHandler + +int main(int argc, char* argv[]) { + if (argc != 2) { + std::cerr << "Usage: " << argv[0] << " OSMFILE\n"; + exit(1); + } + + std::string input_filename {argv[1]}; + + typedef osmium::area::Assembler area_assembler_type; + osmium::area::ProblemReporterException problem_reporter; + area_assembler_type assembler(&problem_reporter); + osmium::area::Collector collector(assembler); + + std::cerr << "Pass 1...\n"; + osmium::io::Reader reader1(input_filename); + collector.read_relations(reader1); + std::cerr << "Pass 1 done\n"; + + index_type index_pos; + index_type index_neg; + location_handler_type 
location_handler(index_pos, index_neg); + + std::cerr << "Pass 2...\n"; + ExportToWKTHandler export_handler; + osmium::io::Reader reader2(input_filename); + osmium::apply(reader2, location_handler, export_handler, collector.handler()); + reader2.close(); + osmium::apply(collector, export_handler); + export_handler.close(); + std::cerr << "Pass 2 done\n"; + + + google::protobuf::ShutdownProtobufLibrary(); + +} + + diff --git a/tests/scenes/data/building-on-street-corner.wkt b/tests/scenes/data/building-on-street-corner.wkt new file mode 100644 index 00000000..4c007e9e --- /dev/null +++ b/tests/scenes/data/building-on-street-corner.wkt @@ -0,0 +1,7 @@ +n-edge-NS | POINT(1.0040019 2.000324) +n-inner | POINT(1.0039385 2.0003548) +n-outer | POINT(1.0039478 2.0004676) +n-edge-WE | POINT(1.0039599 2.0002345) +w-WE | LINESTRING(1.0031759 2.0002316,1.0040361 2.0002211,1.0042735 2.0002264) +w-NS | LINESTRING(1.0040414 2.0001051,1.0040361 2.0002211,1.0040364 2.0006377) +w-building | MULTIPOLYGON(((1.0040019 2.000324,1.0040016 2.0002344,1.0039599 2.0002345,1.0039037 2.0002347,1.0039043 2.0004389,1.0040023 2.0004386,1.0040019 2.000324))) diff --git a/tests/scenes/data/country.sql b/tests/scenes/data/country.sql new file mode 100644 index 00000000..b3b451f7 --- /dev/null +++ b/tests/scenes/data/country.sql @@ -0,0 +1 @@ +select country_code, st_astext(st_pointonsurface(st_collect(geometry))) from country_osm_grid group by country_code order by country_code diff --git a/tests/scenes/data/country.wkt b/tests/scenes/data/country.wkt new file mode 100644 index 00000000..8eded2e7 --- /dev/null +++ b/tests/scenes/data/country.wkt @@ -0,0 +1,250 @@ + ad | POINT(1.58972361752509 42.54241545) + ae | POINT(54.6158905029297 24.8243131637573) + af | POINT(65.9026412963867 34.8470859527588) + ag | POINT(-61.7243069800293 17.069) + ai | POINT(-63.1057155298182 18.2546197) + al | POINT(19.8494176864624 40.2123275624912) + am | POINT(44.6422958374023 40.3782157897949) + ao | POINT(16.2192406654358 -12.7701482772827) + aq | POINT(44.999999975 -75.6569557189941) + ar | POINT(-61.1075973510742 -34.3761558532715) + as | POINT(-170.684700024275 -14.2930755) + at | POINT(14.2574706077576 47.3654232025146) + au | POINT(138.231559753418 -23.7206888198853) + aw | POINT(-69.98255055 12.555) + ax | POINT(19.9183956313477 59.81682435) + az | POINT(48.385555267334 40.6163997650146) + ba | POINT(17.1851491928101 44.2558269500732) + bb | POINT(-59.53342165 13.19) + bd | POINT(89.759895324707 24.3420524597168) + be | POINT(4.90078139305115 50.3468225048828) + bf | POINT(-0.567435041069984 11.9047117233276) + bg | POINT(24.8061628341675 43.0985908508301) + bh | POINT(50.5203291219829 25.94685735) + bi | POINT(29.5456137866089 -2.99057915) + bj | POINT(2.70062518119812 10.0279288291931) + bl | POINT(-62.7934947763772 17.907) + bm | POINT(-64.7740692745195 32.30199165) + bn | POINT(114.521968608887 4.2863885) + bo | POINT(-62.0247344970703 -17.7772369384766) + bq | POINT(-63.1432235610045 17.566) + br | POINT(-45.7706508636475 -9.5868501663208) + bs | POINT(-77.6091675884277 23.8745) + bt | POINT(90.0135078430176 27.281379699707) + bv | POINT(3.35744155625 -54.4215) + bw | POINT(23.5150556564331 -23.4839134216309) + by | POINT(26.7725925445557 53.1588516235352) + bz | POINT(-88.6348991394043 16.3395160487277) + ca | POINT(-107.74817276001 67.1261215209961) + cc | POINT(96.8442066294247 -12.0173443) + cd | POINT(24.0954418182373 -1.67713665962219) + cf | POINT(22.5870132446289 5.98438787460327) + cg | POINT(15.7887516021729 
0.403886616230011) + ch | POINT(7.65705513954163 46.5744686126709) + ci | POINT(-6.31190967559814 6.6278383731842) + ck | POINT(-159.778351359569 -21.23349585) + cl | POINT(-70.4179039001465 -53.7718944549561) + cm | POINT(13.260226726532 5.94519567489624) + cn | POINT(96.4428558349609 38.0426063537598) + co | POINT(-72.5295104980469 2.45174860954285) + cr | POINT(-83.8331413269043 9.935142993927) + cu | POINT(-80.8167381286621 21.8885278701782) + cv | POINT(-24.508106575 14.929) + cw | POINT(-68.9640918594077 12.1845) + cx | POINT(105.624119513558 -10.48417) + cy | POINT(32.959223486499 35.37010195) + cz | POINT(16.3209805488586 49.5069274902344) + de | POINT(9.30716800689697 50.2128944396973) + dj | POINT(42.969040422876 11.41542855) + dk | POINT(9.18490123748779 55.5634002685547) + dm | POINT(-61.0035801928854 15.6547055) + do | POINT(-69.6285591125488 18.5884169089722) + dz | POINT(4.24749487638474 25.797215461731) + ec | POINT(-77.4583168029785 -0.982844322919846) + ee | POINT(23.9428863525391 58.439525604248) + eg | POINT(28.952935218811 28.1771860122681) + eh | POINT(-13.6903142929077 25.0124177932739) + er | POINT(39.0122375488281 14.960337638855) + es | POINT(-2.59110307693481 38.7935485839844) + et | POINT(38.6169757843018 7.71399855613708) + fi | POINT(26.8979873657227 63.5619449615479) + fj | POINT(177.918533325195 -17.7423753738403) + fk | POINT(-60.0855102539062 -51.6555919647217) + fm | POINT(151.9535889125 8.5045) + fo | POINT(-6.60483694084778 62.10000995) + fr | POINT(0.284105718135834 47.5104522705078) + ga | POINT(10.8107047080994 -0.0742915570735931) + gb | POINT(-0.928231082856655 52.0161876678467) + gd | POINT(-61.6452430375 12.191) + ge | POINT(44.1666488647461 42.0038585662842) + gf | POINT(-53.4652481079102 3.56188893318176) + gg | POINT(-2.50580395030125 49.5854381) + gh | POINT(-0.463488027453423 7.16051578521729) + gi | POINT(-5.32053155848457 36.1106663) + gl | POINT(-33.8551120758057 74.6635551452637) + gm | POINT(-16.4096023535368 13.25) + gn | POINT(-13.839409828186 10.9629158973694) + gp | POINT(-61.6871265247053 16.23049055) + gq | POINT(10.2397356033325 1.43119311332703) + gr | POINT(23.1785039901733 39.0620670318604) + gs | POINT(-36.4943086948773 -54.4306784) + gt | POINT(-90.7436828613281 15.2042865753174) + gu | POINT(144.733626445767 13.444138) + gw | POINT(-14.8352527618408 11.9248690605164) + gy | POINT(-58.4516773223877 5.73698806762695) + hk | POINT(114.18577775 22.3492361) + hm | POINT(73.6823082266602 -53.22105985) + hn | POINT(-86.9541435241699 15.2382001876831) + hr | POINT(17.499662399292 45.5268955230713) + ht | POINT(-73.5192565917969 18.3249206691162) + hu | POINT(20.3536291122437 47.5172100067139) + id | POINT(123.345050811768 -0.837919592857361) + ie | POINT(-9.00520038604736 52.8772506713867) + il | POINT(35.4631499949707 32.86165655) + im | POINT(-4.86740773691101 54.023) + in | POINT(88.6762087020508 27.86155515) + io | POINT(71.4274391359073 -6.14349685) + iq | POINT(42.5810985565186 34.2610359191895) + ir | POINT(51.268892288208 34.1931705474854) + is | POINT(-17.5178508758545 64.7168769836426) + it | POINT(10.4263944625854 44.8790493011475) + je | POINT(-2.19261599848299 49.1245833) + jm | POINT(-76.8402003547852 18.3935) + jo | POINT(36.5555210113525 30.7574186325073) + jp | POINT(138.725311279297 35.9209995269775) + ke | POINT(36.9060287475586 1.08512867614627) + kg | POINT(76.1557197570801 41.6649742126465) + kh | POINT(104.319019317627 12.9555516242981) + ki | POINT(173.633537933333 0.139) + km | POINT(44.3147485207764 
-12.241) + kn | POINT(-62.6937987175 17.2555) + kp | POINT(126.655757904053 39.6457576751709) + kr | POINT(127.277404785156 36.4138870239258) + kw | POINT(47.3068407840576 29.6918055) + ky | POINT(-81.0745526670982 19.2994923579778) + kz | POINT(72.008113861084 49.8885555267334) + la | POINT(102.443916320801 19.8160953521729) + lb | POINT(35.4846443715483 33.4176673878926) + lc | POINT(-60.978944125 13.891) + li | POINT(9.54693948514429 47.15934115) + lk | POINT(80.3852043151855 8.41649961471558) + lr | POINT(-11.169605255127 4.04122126102448) + ls | POINT(28.6698419546997 -29.9453849) + lt | POINT(24.5173501968384 55.4929389953613) + lu | POINT(6.08649672997471 49.81533445) + lv | POINT(23.5103368759155 56.6714401245117) + ly | POINT(15.3684158325195 28.1217727661133) + ma | POINT(-4.0306156873703 33.2169628143311) + mc | POINT(7.47743150426578 43.62917385) + md | POINT(29.6172503477783 46.6651745) + me | POINT(19.7229134314941 43.02441345) + mf | POINT(-63.0666651534257 18.0810209) + mg | POINT(45.8637886047363 -20.5024528503418) + mh | POINT(171.949820566667 5.983) + mk | POINT(21.421085357666 41.0898007597656) + ml | POINT(-1.93310506641865 16.4699301719666) + mm | POINT(95.5462455749512 21.0962018966675) + mn | POINT(99.8113822937012 48.1861572265625) + mo | POINT(113.564416766761 22.16209625) + mp | POINT(145.213452483189 14.1490205) + mq | POINT(-60.8112834227783 14.43706925) + mr | POINT(-9.42324566841125 22.5925149917603) + ms | POINT(-62.1945521583333 16.745) + mt | POINT(14.3836306158583 35.9446731) + mu | POINT(57.551211475 -20.41) + mv | POINT(73.3929214477539 4.19375014305115) + mw | POINT(33.9572296142578 -12.2821822166443) + mx | POINT(-105.892219543457 25.8682699203491) + my | POINT(112.711540222168 2.10098683834076) + mz | POINT(37.5868968963623 -15.5801844596863) + na | POINT(16.6856970787048 -21.4657220840454) + nc | POINT(164.953224182129 -20.3888988494873) + ne | POINT(10.060417175293 19.0827360153198) + nf | POINT(167.95718166875 -29.0645) + ng | POINT(10.1778125762939 10.1780409812927) + ni | POINT(-85.8797492980957 13.2171587944031) + nl | POINT(-68.5706209441406 12.041) + no | POINT(23.1155624389648 70.0993499755859) + np | POINT(83.3625984191895 28.1310758590698) + nr | POINT(166.934792270833 -0.5275) + nu | POINT(-169.848737911905 -19.05305275) + nz | POINT(167.972099304199 -45.1305675506592) + om | POINT(56.8605518341064 20.4741315841675) + pa | POINT(-79.4016036987305 8.80656003952026) + pe | POINT(-78.6654052734375 -7.54711985588074) + pf | POINT(-145.057191213086 -16.7086236) + pg | POINT(146.646003723145 -7.37427568435669) + ph | POINT(121.483592987061 15.0996527671814) + pk | POINT(72.1134796142578 31.1462965011597) + pl | POINT(17.8813629150391 52.771821975708) + pm | POINT(-56.1951589074841 46.7832469) + pn | POINT(-130.106425528029 -25.0695595) + pr | POINT(-65.8875553967285 18.3716905) + ps | POINT(35.3980153741943 32.24773475) + pt | POINT(-8.45743942260742 40.1115436553955) + pw | POINT(134.496454875 7.3245) + py | POINT(-59.5178718566895 -22.4128150939941) + qa | POINT(51.4990362304443 24.9981677) + re | POINT(55.7734550547607 -21.3638828) + ro | POINT(26.3763284683228 45.3612003326416) + rs | POINT(20.4037199020386 44.5641384124756) + ru | POINT(116.440608978271 59.0678024291992) + rw | POINT(29.5788261333252 -1.6240443) + sa | POINT(47.7316932678223 22.4379062652588) + sb | POINT(164.638946533203 -10.2360653877258) + sc | POINT(46.3656697 -9.454) + sd | POINT(28.1472072601318 14.5642309188843) + se | POINT(15.6866798400879 60.3556804656982) + sg | 
POINT(103.84187219299 1.304) + sh | POINT(-12.2815573611979 -37.11546755) + si | POINT(14.0473856628607 46.390855) + sj | POINT(15.2755260467529 79.2336540222168) + sk | POINT(20.416033744812 48.869701385498) + sl | POINT(-11.4777312278748 8.78156280517578) + sm | POINT(12.4606268797657 43.9427969) + sn | POINT(-15.3711128234863 14.9947791099548) + so | POINT(46.9338359832764 9.34094429016113) + sr | POINT(-56.4855213165283 4.5773549079895) + ss | POINT(28.1357345581055 8.50933408737183) + st | POINT(6.61025854583333 0.2215) + sv | POINT(-89.3666543301004 13.4307287) + sx | POINT(-63.1539330807882 17.9345) + sy | POINT(38.1551322937012 35.3422107696533) + sz | POINT(31.782634398523 -26.14244365) + tc | POINT(-71.325541342334 21.35) + td | POINT(17.4209251403809 13.4622311592102) + tf | POINT(137.5 -67.5) + tg | POINT(1.0698350071907 7.87677597999573) + th | POINT(102.008777618408 16.4231028556824) + tj | POINT(71.9134941101074 39.0152739312988) + tk | POINT(-171.826039878679 -9.209903) + tl | POINT(126.225208282471 -8.72636747360229) + tm | POINT(57.7160358428955 39.9253444671631) + tn | POINT(9.04958724975586 34.8419933319092) + to | POINT(-176.993202209473 -23.1110429763794) + tr | POINT(32.8200283050537 39.8635063171387) + tt | POINT(-60.70793924375 11.1385) + tv | POINT(178.774993896484 -9.41685771942139) + tw | POINT(120.300746917725 23.1700229644775) + tz | POINT(33.5389289855957 -5.01840615272522) + ua | POINT(33.4433536529541 49.3061904907227) + ug | POINT(32.9652328491211 2.08584922552109) + um | POINT(-169.509930872296 16.74605815) + us | POINT(-116.395355224609 40.7137908935547) + uy | POINT(-56.4650554656982 -33.6265888214111) + uz | POINT(61.3552989959717 42.9610729217529) + va | POINT(12.3319785703086 42.0493197) + vc | POINT(-61.0990541737305 13.316) + ve | POINT(-64.8832321166992 7.69849991798401) + vg | POINT(-64.6247911940199 18.419) + vi | POINT(-64.8895090795187 18.3226325) + vn | POINT(104.201791331787 10.27644235) + vu | POINT(167.319198608398 -15.8868751525879) + wf | POINT(-176.207816222208 -13.28535775) + ws | POINT(-172.109667323427 -13.850938) + ye | POINT(45.945629119873 16.1633830070496) + yt | POINT(44.9377459760742 -12.6088246) + za | POINT(23.1948881149292 -30.4327602386475) + zm | POINT(26.3861808776855 -14.3996663093567) + zw | POINT(30.1241998672485 -19.8690795898438) + diff --git a/tests/scenes/data/points-on-roads.wkt b/tests/scenes/data/points-on-roads.wkt new file mode 100644 index 00000000..17990dce --- /dev/null +++ b/tests/scenes/data/points-on-roads.wkt @@ -0,0 +1,8 @@ +n-N-unglued | POINT(1.004922 2.0005155) +n-S-unglued | POINT(1.0046259 2.0002949) +n-NE | POINT(1.0050661 2.0006118) +n-SE | POINT(1.0051339 2.0003349) +n-NW | POINT(1.0047583 2.0004087) +n-SW | POINT(1.0047275 2.0003564) +w-north | LINESTRING(1.0044996 2.0004302,1.0046259 2.0003841,1.0047583 2.0004087,1.004922 2.0005155,1.0050661 2.0006118,1.0053155 2.0006241) +w-south | LINESTRING(1.0045243 2.0002241,1.0046259 2.0002949,1.0047275 2.0003564,1.004826 2.0002918,1.0049368 2.0002641,1.0051339 2.0003349,1.0053278 2.0003687) diff --git a/tests/scenes/data/poly-area.wkt b/tests/scenes/data/poly-area.wkt new file mode 100644 index 00000000..a8fb045d --- /dev/null +++ b/tests/scenes/data/poly-area.wkt @@ -0,0 +1,11 @@ +0.0001 | MULTIPOLYGON(((0.001 0,0 0,0 0.1,0.001 0.1,0.001 0))) +0.0005 | MULTIPOLYGON(((0.005 0,0 0,0 0.1,0.005 0.1,0.005 0))) +0.001 | MULTIPOLYGON(((0.01 0,0 0,0 0.1,0.01 0.1,0.01 0))) +0.005 | MULTIPOLYGON(((0.05 0,0 0,0 0.1,0.05 0.1,0.05 0))) +0.01 | MULTIPOLYGON(((0.1 
0,0 0,0 0.1,0.1 0.1,0.1 0))) +0.05 | MULTIPOLYGON(((0.5 0,0 0,0 0.1,0.5 0.1,0.5 0))) +0.1 | MULTIPOLYGON(((0.1 0,0 0,0 1,0.1 1,0.1 0))) +0.5 | MULTIPOLYGON(((0.5 0,0 0,0 1,0.5 1,0.5 0))) +1.0 | MULTIPOLYGON(((1 0,0 0,0 1,1 1,1 0))) +2.0 | MULTIPOLYGON(((2 0,0 0,0 1,2 1,2 0))) +5.0 | MULTIPOLYGON(((5 0,0 0,0 1,5 1,5 0))) diff --git a/tests/scenes/data/poly-areas.osm b/tests/scenes/data/poly-areas.osm new file mode 100644 index 00000000..917946b7 --- /dev/null +++ b/tests/scenes/data/poly-areas.osm @@ -0,0 +1,168 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/tests/scenes/data/road-with-alley.wkt b/tests/scenes/data/road-with-alley.wkt new file mode 100644 index 00000000..100b3733 --- /dev/null +++ b/tests/scenes/data/road-with-alley.wkt @@ -0,0 +1,6 @@ +n-main-east | POINT(1.0024481 2.0003542) +n-main-west | POINT(1.001552 2.0002662) +n-alley | POINT(1.0019235 2.0005463) +n-corner | POINT(1.0019235 2.0003542) +w-alley | LINESTRING(1.0019594 2.0003086,1.0019594 2.0005756) +w-main | LINESTRING(1.0013435 2.0003118,1.0016759 2.0003053,1.0019594 2.0003086,1.0021255 2.0003151,1.0023699 2.0003118,1.0026078 2.0002988) diff --git a/tests/scenes/data/roads-with-pois.wkt b/tests/scenes/data/roads-with-pois.wkt new file mode 100644 index 00000000..d4addbb8 --- /dev/null +++ b/tests/scenes/data/roads-with-pois.wkt @@ -0,0 +1,6 @@ +p-N2 | POINT(1.0003904 2.0003399) +p-S1 | POINT(1.0008104 2.0002927) +p-N1 | POINT(1.0005321 2.0005288) +p-S2 | POINT(1.0006398 2.0001064) +w-north | LINESTRING(1.0001174 2.0004055,1.0004298 2.0003976,1.0006608 2.0004579,1.0010624 2.0005419) +w-south | LINESTRING(1.0001384 2.0001903,1.0007212 2.0001982,1.0010677 2.0002192) diff --git a/tests/scenes/data/roads.osm b/tests/scenes/data/roads.osm new file mode 100644 index 00000000..b642fa62 --- /dev/null +++ b/tests/scenes/data/roads.osm @@ -0,0 +1,300 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/tests/scenes/data/split-road.wkt b/tests/scenes/data/split-road.wkt new file mode 100644 index 00000000..4435a131 --- /dev/null +++ b/tests/scenes/data/split-road.wkt @@ -0,0 +1,6 @@ +w-5 | LINESTRING(1.0056855 2.0005616,1.0056087 2.0005669,1.0055106 2.0005245) +w-4a | LINESTRING(1.0062843 2.0005139,1.0061359 2.0004954,1.0060538 2.0005113,1.0059107 2.000506,1.0057358 2.0005007,1.0056855 2.0005616) +w-3 | LINESTRING(1.0061995 2.0003391,1.0062816 2.0002624,1.0063585 2.0002968,1.0063717 2.0004715,1.0062843 2.0005139) +w-2 | LINESTRING(1.0057941 2.0002809,1.0058869 2.0003259,1.0060034 2.0003497,1.0061041 2.0003577,1.0061995 2.0003391) +w-4b | LINESTRING(1.0062843 2.0005139,1.0061306 2.0005324,1.0060511 2.000543,1.0058975 2.000543,1.0057491 2.0005351,1.0056855 2.0005616) +w-1 | 
LINESTRING(1.0054709 2.0003603,1.0056352 2.0002782,1.0057941 2.0002809) diff --git a/tests/steps/api_result.py b/tests/steps/api_result.py new file mode 100644 index 00000000..d83aa83d --- /dev/null +++ b/tests/steps/api_result.py @@ -0,0 +1,232 @@ +""" Steps for checking the results of queries. +""" + +from nose.tools import * +from lettuce import * +from tidylib import tidy_document +from collections import OrderedDict +import json +import logging +import re +from xml.dom.minidom import parseString + +logger = logging.getLogger(__name__) + +def _parse_xml(): + """ Puts the DOM structure into more convenient python + with a similar structure as the json document, so + that the same the semantics can be used. It does not + check if the content is valid (or at least not more than + necessary to transform it into a dict structure). + """ + page = parseString(world.page).documentElement + + # header info + world.result_header = OrderedDict(page.attributes.items()) + logger.debug('Result header: %r' % (world.result_header)) + world.results = [] + + # results + if page.nodeName == 'searchresults': + for node in page.childNodes: + if node.nodeName != "#text": + assert_equals(node.nodeName, 'place', msg="Unexpected element '%s'" % node.nodeName) + newresult = OrderedDict(node.attributes.items()) + assert_not_in('address', newresult) + assert_not_in('geokml', newresult) + address = OrderedDict() + for sub in node.childNodes: + if sub.nodeName == 'geokml': + newresult['geokml'] = sub.childNodes[0].toxml() + elif sub.nodeName == '#text': + pass + else: + address[sub.nodeName] = sub.firstChild.nodeValue.strip() + if address: + newresult['address'] = address + world.results.append(newresult) + elif page.nodeName == 'reversegeocode': + haserror = False + address = {} + for node in page.childNodes: + if node.nodeName == 'result': + assert_equals(len(world.results), 0) + assert (not haserror) + world.results.append(OrderedDict(node.attributes.items())) + assert_not_in('display_name', world.results[0]) + assert_not_in('address', world.results[0]) + world.results[0]['display_name'] = node.firstChild.nodeValue.strip() + elif node.nodeName == 'error': + assert_equals(len(world.results), 0) + haserror = True + elif node.nodeName == 'addressparts': + assert (not haserror) + address = OrderedDict() + for sub in node.childNodes: + address[sub.nodeName] = sub.firstChild.nodeValue.strip() + world.results[0]['address'] = address + elif node.nodeName == "#text": + pass + else: + assert False, "Unknown content '%s' in XML" % node.nodeName + else: + assert False, "Unknown document node name %s in XML" % page.nodeName + + logger.debug("The following was parsed out of XML:") + logger.debug(world.results) + +@step(u'a HTTP (\d+) is returned') +def api_result_http_error(step, error): + assert_equals(world.returncode, int(error)) + +@step(u'the result is valid( \w+)?') +def api_result_is_valid(step, fmt): + assert_equals(world.returncode, 200) + + if world.response_format == 'html': + document, errors = tidy_document(world.page, + options={'char-encoding' : 'utf8'}) + assert(len(errors) == 0), "Errors found in HTML document:\n%s" % errors + world.results = document + elif world.response_format == 'xml': + _parse_xml() + elif world.response_format == 'json': + world.results = json.JSONDecoder(object_pairs_hook=OrderedDict).decode(world.page) + else: + assert False, "Unknown page format: %s" % (world.response_format) + + if fmt: + assert_equals (fmt.strip(), world.response_format) + + +def compare(operator, op1, op2): + if 
operator == 'less than': + return op1 < op2 + elif operator == 'more than': + return op1 > op2 + elif operator == 'exactly': + return op1 == op2 + elif operator == 'at least': + return op1 >= op2 + elif operator == 'at most': + return op1 <= op2 + else: + raise Exception("unknown operator '%s'" % operator) + +@step(u'(less than|more than|exactly|at least|at most) (\d+) results? (?:is|are) returned') +def validate_result_number(step, operator, number): + step.given('the result is valid') + numres = len(world.results) + assert compare(operator, numres, int(number)), \ + "Bad number of results: expected %s %s, got %d." % (operator, number, numres) + +@step(u'result (\d+) has( not)? attributes (\S+)') +def search_check_for_result_attribute(step, num, invalid, attrs): + num = int(num) + step.given('at least %d results are returned' % (num + 1)) + res = world.results[num] + for attr in attrs.split(','): + if invalid: + assert_not_in(attr.strip(), res) + else: + assert_in(attr.strip(),res) + +@step(u'there is a json wrapper "([^"]*)"') +def api_result_check_json_wrapper(step, wrapper): + step.given('the result is valid json') + assert_equals(world.json_callback, wrapper) + +@step(u'result header contains') +def api_result_header_contains(step): + step.given('the result is valid') + for line in step.hashes: + assert_in(line['attr'], world.result_header) + m = re.match("%s$" % (line['value'],), world.result_header[line['attr']]) + +@step(u'results contain$') +def api_result_contains(step): + step.given('at least 1 result is returned') + for line in step.hashes: + if 'ID' in line: + reslist = (world.results[int(line['ID'])],) + else: + reslist = world.results + for k,v in line.iteritems(): + if k == 'latlon': + for curres in reslist: + world.match_geometry((float(curres['lat']), float(curres['lon'])), v) + elif k != 'ID': + for curres in reslist: + assert_in(k, curres) + if v[0] in '<>=': + # mathematical operation + evalexp = '%s %s' % (curres[k], v) + res = eval(evalexp) + logger.debug('Evaluating: %s = %s' % (res, evalexp)) + assert_true(res, "Evaluation failed: %s" % (evalexp, )) + else: + # regex match + m = re.match("%s$" % (v,), curres[k]) + assert_is_not_none(m, msg="field %s does not match: %s$ != %s." % (k, v, curres[k])) + + +@step(u'result addresses contain$') +def api_result_address_contains(step): + step.given('the result is valid') + for line in step.hashes: + if 'ID' in line: + reslist = (world.results[int(line['ID'])],) + else: + reslist = world.results + for k,v in line.iteritems(): + if k != 'ID': + for res in reslist: + curres = res['address'] + assert_in(k, curres) + m = re.match("%s$" % (v,), curres[k]) + assert_is_not_none(m, msg="field %s does not match: %s$ != %s." 
% (k, v, curres[k])) + + +@step(u'address of result (\d+) contains') +def api_result_address_exact(step, resid): + resid = int(resid) + step.given('at least %d results are returned' % (resid + 1)) + addr = world.results[resid]['address'] + for line in step.hashes: + assert_in(line['type'], addr) + assert_equals(line['value'], addr[line['type']]) + +@step(u'address of result (\d+) does not contain (.*)') +def api_result_address_details_missing(step, resid, types): + resid = int(resid) + step.given('at least %d results are returned' % (resid + 1)) + addr = world.results[resid]['address'] + for t in types.split(','): + assert_not_in(t.strip(), addr) + + +@step(u'address of result (\d+) is') +def api_result_address_exact(step, resid): + resid = int(resid) + step.given('at least %d results are returned' % (resid + 1)) + result = world.results[resid] + linenr = 0 + assert_equals(len(step.hashes), len(result['address'])) + for k,v in result['address'].iteritems(): + assert_equals(step.hashes[linenr]['type'], k) + assert_equals(step.hashes[linenr]['value'], v) + linenr += 1 + + +@step('there are( no)? duplicates') +def api_result_check_for_duplicates(step, nodups=None): + step.given('at least 1 result is returned') + resarr = [] + for res in world.results: + resarr.append((res['osm_type'], res['class'], + res['type'], res['display_name'])) + + if nodups is None: + assert len(resarr) > len(set(resarr)) + else: + assert_equal(len(resarr), len(set(resarr))) diff --git a/tests/steps/api_setup.py b/tests/steps/api_setup.py new file mode 100644 index 00000000..9accf420 --- /dev/null +++ b/tests/steps/api_setup.py @@ -0,0 +1,114 @@ +""" Steps for setting up and sending API requests. +""" + +from nose.tools import * +from lettuce import * +import urllib +import urllib2 +import logging + +logger = logging.getLogger(__name__) + +def api_call(requesttype): + world.json_callback = None + data = urllib.urlencode(world.params) + url = "%s/%s?%s" % (world.config.base_url, requesttype, data) + req = urllib2.Request(url=url, headers=world.header) + try: + fd = urllib2.urlopen(req) + world.page = fd.read() + world.returncode = 200 + except urllib2.HTTPError, ex: + world.returncode = ex.code + world.page = None + return + + pageinfo = fd.info() + assert_equal('utf-8', pageinfo.getparam('charset').lower()) + pagetype = pageinfo.gettype() + + fmt = world.params.get('format') + if fmt == 'html': + assert_equals('text/html', pagetype) + world.response_format = fmt + elif fmt == 'xml': + assert_equals('text/xml', pagetype) + world.response_format = fmt + elif fmt in ('json', 'jsonv2'): + if 'json_callback' in world.params: + world.json_callback = world.params['json_callback'] + assert world.page.startswith(world.json_callback + '(') + assert world.page.endswith(')') + world.page = world.page[(len(world.json_callback)+1):-1] + assert_equals('application/javascript', pagetype) + else: + assert_equals('application/json', pagetype) + world.response_format = 'json' + else: + if requesttype == 'reverse': + assert_equals('text/xml', pagetype) + world.response_format = 'xml' + else: + assert_equals('text/html', pagetype) + world.response_format = 'html' + logger.debug("Page received (%s):" % world.response_format) + logger.debug(world.page) + + api_setup_prepare_params(None) + +@before.each_scenario +def api_setup_prepare_params(scenario): + world.results = [] + world.params = {} + world.header = {} + +@step(u'the request parameters$') +def api_setup_parameters(step): + """Define the parameters of the request as a hash. 
+ Resets parameter list. + """ + world.params = step.hashes[0] + +@step(u'the HTTP header$') +def api_setup_parameters(step): + """Define additional HTTP header parameters as a hash. + Resets parameter list. + """ + world.header = step.hashes[0] + + +@step(u'sending( \w+)? search query "([^"]*)"( with address)?') +def api_setup_search(step, fmt, query, doaddr): + world.params['q'] = query.encode('utf8') + if doaddr: + world.params['addressdetails'] = 1 + if fmt: + world.params['format'] = fmt.strip() + api_call('search') + +@step(u'sending( \w+)? structured query( with address)?$') +def api_setup_structured_search(step, fmt, doaddr): + world.params.update(step.hashes[0]) + if doaddr: + world.params['addressdetails'] = 1 + if fmt: + world.params['format'] = fmt.strip() + api_call('search') + +@step(u'looking up (\w+ )?coordinates ([-\d.]+),([-\d.]+)') +def api_setup_reverse(step, fmt, lat, lon): + world.params['lat'] = lat + world.params['lon'] = lon + if fmt and fmt.strip(): + world.params['format'] = fmt.strip() + api_call('reverse') + +@step(u'looking up details for ([NRW]?\d+)') +def api_setup_details(step, obj): + if obj[0] in ('N', 'R', 'W'): + # an osm id + world.params['osmtype'] = obj[0] + world.params['osmid'] = obj[1:] + else: + world.params['place_id'] = obj + api_call('details') diff --git a/tests/steps/db_results.py b/tests/steps/db_results.py new file mode 100644 index 00000000..9da1ad6b --- /dev/null +++ b/tests/steps/db_results.py @@ -0,0 +1,124 @@ +""" Steps for checking the DB after import and update tests. + + There are two groups of test here. The first group tests + the contents of db tables directly, the second checks + query results by using the command line query tool. +""" + +from nose.tools import * +from lettuce import * +import psycopg2 +import psycopg2.extensions +import psycopg2.extras +import os +import subprocess +import random +import json +import re +import logging +from collections import OrderedDict + +logger = logging.getLogger(__name__) + +@step(u'table placex contains as names for (N|R|W)(\d+)') +def check_placex_names(step, osmtyp, osmid): + """ Check for the exact content of the name hstaore in placex. + """ + cur = world.conn.cursor(cursor_factory=psycopg2.extras.DictCursor) + cur.execute('SELECT name FROM placex where osm_type = %s and osm_id =%s', (osmtyp, int(osmid))) + for line in cur: + names = dict(line['name']) + for name in step.hashes: + assert_in(name['k'], names) + assert_equals(names[name['k']], name['v']) + del names[name['k']] + assert_equals(len(names), 0) + + + +@step(u'table ([a-z_]+) contains$') +def check_placex_content(step, tablename): + """ check that the given lines are in the given table + Entries are searched by osm_type/osm_id and then all + given columns are tested. If there is more than one + line for an OSM object, they must match in these columns. 
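+
+        Objects are referred to by OSM type and id, e.g. N1, W45 or R23.
+        Where one OSM object produces several rows (for example a node
+        with two main tags), a class suffix such as N1:tourism selects a
+        single row.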
+ """ + cur = world.conn.cursor(cursor_factory=psycopg2.extras.DictCursor) + for line in step.hashes: + osmtype, osmid, cls = world.split_id(line['object']) + q = 'SELECT *' + if tablename == 'placex': + q = q + ", ST_X(centroid) as clat, ST_Y(centroid) as clon" + q = q + ' FROM %s where osm_type = %%s and osm_id = %%s' % (tablename,) + if cls is None: + params = (osmtype, osmid) + else: + q = q + ' and class = %s' + params = (osmtype, osmid, cls) + cur.execute(q, params) + assert(cur.rowcount > 0) + for res in cur: + for k,v in line.iteritems(): + if not k == 'object': + assert_in(k, res) + if type(res[k]) is dict: + val = world.make_hash(v) + assert_equals(res[k], val) + elif k in ('parent_place_id', 'linked_place_id'): + pid = world.get_placeid(v) + assert_equals(pid, res[k], "Results for '%s'/'%s' differ: '%s' != '%s'" % (line['object'], k, pid, res[k])) + elif k == 'centroid': + world.match_geometry((res['clat'], res['clon']), v) + else: + assert_equals(str(res[k]), v, "Results for '%s'/'%s' differ: '%s' != '%s'" % (line['object'], k, str(res[k]), v)) + +@step(u'table (placex?) has no entry for (N|R|W)(\d+)(:\w+)?') +def check_placex_missing(step, tablename, osmtyp, osmid, placeclass): + cur = world.conn.cursor() + q = 'SELECT count(*) FROM %s where osm_type = %%s and osm_id = %%s' % (tablename, ) + args = [osmtyp, int(osmid)] + if placeclass is not None: + q = q + ' and class = %s' + args.append(placeclass[1:]) + cur.execute(q, args) + numres = cur.fetchone()[0] + assert_equals (numres, 0) + +@step(u'search_name table contains$') +def check_search_name_content(step): + cur = world.conn.cursor(cursor_factory=psycopg2.extras.DictCursor) + for line in step.hashes: + placeid = world.get_placeid(line['place_id']) + cur.execute('SELECT * FROM search_name WHERE place_id = %s', (placeid,)) + assert(cur.rowcount > 0) + for res in cur: + for k,v in line.iteritems(): + if k in ('search_rank', 'address_rank'): + assert_equals(int(v), res[k], "Results for '%s'/'%s' differ: '%s' != '%d'" % (line['place_id'], k, v, res[k])) + elif k in ('importance'): + assert_equals(float(v), res[k], "Results for '%s'/'%s' differ: '%s' != '%d'" % (line['place_id'], k, v, res[k])) + elif k in ('name_vector', 'nameaddress_vector'): + terms = [x.strip().replace('#', ' ') for x in v.split(',')] + cur.execute('SELECT word_id, word_token FROM word, (SELECT unnest(%s) as term) t WHERE word_token = make_standard_name(t.term)', (terms,)) + assert cur.rowcount >= len(terms) + for wid in cur: + assert_in(wid['word_id'], res[k], "Missing term for %s/%s: %s" % (line['place_id'], k, wid['word_token'])) + elif k in ('country_code'): + assert_equals(v, res[k], "Results for '%s'/'%s' differ: '%s' != '%d'" % (line['place_id'], k, v, res[k])) + elif k == 'place_id': + pass + else: + raise Exception("Cannot handle field %s in search_name table" % (k, )) + + +@step(u'table search_name has no entry for (.*)') +def check_placex_missing(step, osmid): + """ Checks if there is an entry in the search index for the + given place object. + """ + cur = world.conn.cursor() + placeid = world.get_placeid(osmid) + cur.execute('SELECT count(*) FROM search_name WHERE place_id =%s', (placeid,)) + numres = cur.fetchone()[0] + assert_equals (numres, 0) + diff --git a/tests/steps/db_setup.py b/tests/steps/db_setup.py new file mode 100644 index 00000000..7d1ea8d3 --- /dev/null +++ b/tests/steps/db_setup.py @@ -0,0 +1,272 @@ +""" Steps for setting up a test database with imports and updates. 
+
+    There are two ways to state geometries for test data: with coordinates
+    and via scenes.
+
+    Coordinates should be given as a wkt without the enclosing type name.
+
+    Scenes are prepared geometries which can be found in the scenes/data/
+    directory. Each scene is saved in a .wkt file with its name, which
+    contains a list of id/wkt pairs. A scene can be set globally for a
+    scenario by using the step `the scene <scene name>`. Then each object
+    should be referred to as `:<object id>`. A geometry can also be
+    referred to without loading the scene by explicitly stating the
+    scene: `<scene name>:<object id>`.
+"""
+
+from nose.tools import *
+from lettuce import *
+import psycopg2
+import psycopg2.extensions
+import psycopg2.extras
+import os
+import subprocess
+import random
+import base64
+
+psycopg2.extensions.register_type(psycopg2.extensions.UNICODE)
+
+@before.each_scenario
+def setup_test_database(scenario):
+    """ Creates a new test database from the template database
+        that was set up earlier in terrain.py. Will be done only
+        for scenarios whose feature is tagged with 'DB'.
+    """
+    if scenario.feature.tags is not None and 'DB' in scenario.feature.tags:
+        world.db_template_setup()
+        world.write_nominatim_config(world.config.test_db)
+        conn = psycopg2.connect(database=world.config.template_db)
+        conn.set_isolation_level(0)
+        cur = conn.cursor()
+        cur.execute('DROP DATABASE IF EXISTS %s' % (world.config.test_db, ))
+        cur.execute('CREATE DATABASE %s TEMPLATE = %s' % (world.config.test_db, world.config.template_db))
+        conn.close()
+        world.conn = psycopg2.connect(database=world.config.test_db)
+        psycopg2.extras.register_hstore(world.conn, globally=False, unicode=True)
+
+@step('a wiped database')
+def db_setup_wipe_db(step):
+    """ Explicit DB scenario setup, only needed to work around a bug
+        where scenario outlines don't call before_each_scenario correctly.
+    """
+    if hasattr(world, 'conn'):
+        world.conn.close()
+    conn = psycopg2.connect(database=world.config.template_db)
+    conn.set_isolation_level(0)
+    cur = conn.cursor()
+    cur.execute('DROP DATABASE IF EXISTS %s' % (world.config.test_db, ))
+    cur.execute('CREATE DATABASE %s TEMPLATE = %s' % (world.config.test_db, world.config.template_db))
+    conn.close()
+    world.conn = psycopg2.connect(database=world.config.test_db)
+    psycopg2.extras.register_hstore(world.conn, globally=False, unicode=True)
+
+
+@after.each_scenario
+def tear_down_test_database(scenario):
+    """ Drops any previously created test database.
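+        The test database is kept when NOMINATIM_KEEP_SCENARIO_DB is set
+        in the environment.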
+ """ + if hasattr(world, 'conn'): + world.conn.close() + if scenario.feature.tags is not None and 'DB' in scenario.feature.tags and not world.config.keep_scenario_db: + conn = psycopg2.connect(database=world.config.template_db) + conn.set_isolation_level(0) + cur = conn.cursor() + cur.execute('DROP DATABASE %s' % (world.config.test_db,)) + conn.close() + + +def _format_placex_cols(cols, geomtype, force_name): + if 'name' in cols: + if cols['name'].startswith("'"): + cols['name'] = world.make_hash(cols['name']) + else: + cols['name'] = { 'name' : cols['name'] } + elif force_name: + cols['name'] = { 'name' : base64.urlsafe_b64encode(os.urandom(int(random.random()*30))) } + if 'extratags' in cols: + cols['extratags'] = world.make_hash(cols['extratags']) + if 'admin_level' not in cols: + cols['admin_level'] = 100 + if 'geometry' in cols: + coords = world.get_scene_geometry(cols['geometry']) + if coords is None: + coords = "'%s(%s)'::geometry" % (geomtype, cols['geometry']) + else: + coords = "'%s'::geometry" % coords.wkt + cols['geometry'] = coords + + +def _insert_place_table_nodes(places, force_name): + cur = world.conn.cursor() + for line in places: + cols = dict(line) + cols['osm_type'] = 'N' + _format_placex_cols(cols, 'POINT', force_name) + if 'geometry' in cols: + coords = cols.pop('geometry') + else: + coords = "ST_Point(%f, %f)" % (random.random()*360 - 180, random.random()*180 - 90) + + query = 'INSERT INTO place (%s,geometry) values(%s, ST_SetSRID(%s, 4326))' % ( + ','.join(cols.iterkeys()), + ','.join(['%s' for x in range(len(cols))]), + coords + ) + cur.execute(query, cols.values()) + world.conn.commit() + + +def _insert_place_table_objects(places, geomtype, force_name): + cur = world.conn.cursor() + for line in places: + cols = dict(line) + if 'osm_type' not in cols: + cols['osm_type'] = 'W' + _format_placex_cols(cols, geomtype, force_name) + coords = cols.pop('geometry') + + query = 'INSERT INTO place (%s, geometry) values(%s, ST_SetSRID(%s, 4326))' % ( + ','.join(cols.iterkeys()), + ','.join(['%s' for x in range(len(cols))]), + coords + ) + cur.execute(query, cols.values()) + world.conn.commit() + +@step(u'the scene (.*)') +def import_set_scene(step, scene): + world.load_scene(scene) + +@step(u'the (named )?place (node|way|area)s') +def import_place_table_nodes(step, named, osmtype): + """Insert a list of nodes into the placex table. + Expects a table where columns are named in the same way as placex. + """ + cur = world.conn.cursor() + cur.execute('ALTER TABLE place DISABLE TRIGGER place_before_insert') + if osmtype == 'node': + _insert_place_table_nodes(step.hashes, named is not None) + elif osmtype == 'way' : + _insert_place_table_objects(step.hashes, 'LINESTRING', named is not None) + elif osmtype == 'area' : + _insert_place_table_objects(step.hashes, 'POLYGON', named is not None) + cur.execute('ALTER TABLE place ENABLE TRIGGER place_before_insert') + cur.close() + world.conn.commit() + + +@step(u'the relations') +def import_fill_planet_osm_rels(step): + """Adds a raw relation to the osm2pgsql table. + Three columns need to be suplied: id, tags, members. 
+ """ + cur = world.conn.cursor() + for line in step.hashes: + members = [] + parts = { 'n' : [], 'w' : [], 'r' : [] } + if line['members'].strip(): + for mem in line['members'].split(','): + memparts = mem.strip().split(':', 2) + memid = memparts[0].lower() + parts[memid[0]].append(int(memid[1:])) + members.append(memid) + if len(memparts) == 2: + members.append(memparts[1]) + else: + members.append('') + tags = [] + for k,v in world.make_hash(line['tags']).iteritems(): + tags.extend((k,v)) + if not members: + members = None + + cur.execute("""INSERT INTO planet_osm_rels + (id, way_off, rel_off, parts, members, tags, pending) + VALUES (%s, %s, %s, %s, %s, %s, false)""", + (line['id'], len(parts['n']), len(parts['n']) + len(parts['w']), + parts['n'] + parts['w'] + parts['r'], members, tags)) + world.conn.commit() + + +@step(u'the ways') +def import_fill_planet_osm_ways(step): + cur = world.conn.cursor() + for line in step.hashes: + if 'tags' in line: + tags = world.make_hash(line['tags']) + else: + tags = None + nodes = [int(x.strip()) for x in line['nodes'].split(',')] + + cur.execute("""INSERT INTO planet_osm_ways + (id, nodes, tags, pending) + VALUES (%s, %s, %s, false)""", + (line['id'], nodes, tags)) + world.conn.commit() + +############### import and update steps ####################################### + +@step(u'importing') +def import_database(step): + """ Runs the actual indexing. """ + world.run_nominatim_script('setup', 'create-functions', 'create-partition-functions') + cur = world.conn.cursor() + cur.execute("""insert into placex (osm_type, osm_id, class, type, name, admin_level, + housenumber, street, addr_place, isin, postcode, country_code, extratags, + geometry) select * from place""") + world.conn.commit() + world.run_nominatim_script('setup', 'index', 'index-noanalyse') + #world.db_dump_table('placex') + + +@step(u'updating place (node|way|area)s') +def update_place_table_nodes(step, osmtype): + """ Replace a geometry in place by reinsertion and reindex database. + """ + world.run_nominatim_script('setup', 'create-functions', 'create-partition-functions', 'enable-diff-updates') + if osmtype == 'node': + _insert_place_table_nodes(step.hashes, False) + elif osmtype == 'way': + _insert_place_table_objects(step.hashes, 'LINESTRING', False) + elif osmtype == 'area': + _insert_place_table_objects(step.hashes, 'POLYGON', False) + world.run_nominatim_script('update', 'index') + +@step(u'marking for delete (.*)') +def update_delete_places(step, places): + """ Remove an entry from place and reindex database. + """ + world.run_nominatim_script('setup', 'create-functions', 'create-partition-functions', 'enable-diff-updates') + cur = world.conn.cursor() + for place in places.split(','): + osmtype, osmid, cls = world.split_id(place) + if cls is None: + q = "delete from place where osm_type = %s and osm_id = %s" + params = (osmtype, osmid) + else: + q = "delete from place where osm_type = %s and osm_id = %s and class = %s" + params = (osmtype, osmid, cls) + cur.execute(q, params) + world.conn.commit() + #world.db_dump_table('placex') + world.run_nominatim_script('update', 'index') + + + +@step(u'sending query "(.*)"( with dups)?$') +def query_cmd(step, query, with_dups): + """ Results in standard query output. The same tests as for API queries + can be used. 
+ """ + cmd = [os.path.join(world.config.source_dir, 'utils', 'query.php'), + '--search', query] + if with_dups is not None: + cmd.append('--nodedupe') + proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE) + (outp, err) = proc.communicate() + assert (proc.returncode == 0), "query.php failed with message: %s" % err + world.page = outp + world.response_format = 'json' + world.returncode = 200 + diff --git a/tests/steps/osm2pgsql_setup.py b/tests/steps/osm2pgsql_setup.py new file mode 100644 index 00000000..40a6b39e --- /dev/null +++ b/tests/steps/osm2pgsql_setup.py @@ -0,0 +1,212 @@ +""" Steps for setting up a test database for osm2pgsql import. + + Note that osm2pgsql features need a database and therefore need + to be tagged with @DB. +""" + +from nose.tools import * +from lettuce import * + +import logging +import random +import tempfile +import os +import subprocess + +logger = logging.getLogger(__name__) + +@before.each_scenario +def osm2pgsql_setup_test(scenario): + world.osm2pgsql = [] + +@step(u'the osm nodes:') +def osm2pgsql_import_nodes(step): + """ Define a list of OSM nodes to be imported, given as a table. + Each line describes one node with all its attributes. + 'id' is mendatory, all other fields are filled with random values + when not given. If 'tags' is missing an empty tag list is assumed. + For updates, a mandatory 'action' column needs to contain 'A' (add), + 'M' (modify), 'D' (delete). + """ + for line in step.hashes: + node = { 'type' : 'N', 'version' : '1', 'timestamp': "2012-05-01T15:06:20Z", + 'changeset' : "11470653", 'uid' : "122294", 'user' : "foo" + } + node.update(line) + node['id'] = int(node['id']) + if 'geometry' in node: + lat, lon = node['geometry'].split(' ') + node['lat'] = float(lat) + node['lon'] = float(lon) + else: + node['lon'] = random.random()*360 - 180 + node['lat'] = random.random()*180 - 90 + if 'tags' in node: + node['tags'] = world.make_hash(line['tags']) + else: + node['tags'] = {} + + world.osm2pgsql.append(node) + + +@step(u'the osm ways:') +def osm2pgsql_import_ways(step): + """ Define a list of OSM ways to be imported. + """ + for line in step.hashes: + way = { 'type' : 'W', 'version' : '1', 'timestamp': "2012-05-01T15:06:20Z", + 'changeset' : "11470653", 'uid' : "122294", 'user' : "foo" + } + way.update(line) + + way['id'] = int(way['id']) + if 'tags' in way: + way['tags'] = world.make_hash(line['tags']) + else: + way['tags'] = None + way['nodes'] = way['nodes'].strip().split() + + world.osm2pgsql.append(way) + +membertype = { 'N' : 'node', 'W' : 'way', 'R' : 'relation' } + +@step(u'the osm relations:') +def osm2pgsql_import_rels(step): + """ Define a list of OSM relation to be imported. 
+ """ + for line in step.hashes: + rel = { 'type' : 'R', 'version' : '1', 'timestamp': "2012-05-01T15:06:20Z", + 'changeset' : "11470653", 'uid' : "122294", 'user' : "foo" + } + rel.update(line) + + rel['id'] = int(rel['id']) + if 'tags' in rel: + rel['tags'] = world.make_hash(line['tags']) + else: + rel['tags'] = {} + members = [] + if rel['members'].strip(): + for mem in line['members'].split(','): + memparts = mem.strip().split(':', 2) + memid = memparts[0].upper() + members.append((membertype[memid[0]], + memid[1:], + memparts[1] if len(memparts) == 2 else '' + )) + rel['members'] = members + + world.osm2pgsql.append(rel) + + + +def _sort_xml_entries(x, y): + if x['type'] == y['type']: + return cmp(x['id'], y['id']) + else: + return cmp('NWR'.find(x['type']), 'NWR'.find(y['type'])) + +def write_osm_obj(fd, obj): + if obj['type'] == 'N': + fd.write('\n') + else: + fd.write('>\n') + for k,v in obj['tags'].iteritems(): + fd.write(' \n' % (k, v)) + fd.write('\n') + elif obj['type'] == 'W': + fd.write('\n' % obj) + for nd in obj['nodes']: + fd.write('\n' % (nd,)) + for k,v in obj['tags'].iteritems(): + fd.write(' \n' % (k, v)) + fd.write('\n') + elif obj['type'] == 'R': + fd.write('\n' % obj) + for mem in obj['members']: + fd.write(' \n' % mem) + for k,v in obj['tags'].iteritems(): + fd.write(' \n' % (k, v)) + fd.write('\n') + +@step(u'loading osm data') +def osm2pgsql_load_place(step): + """Imports the previously defined OSM data into a fresh copy of a + Nominatim test database. + """ + + world.osm2pgsql.sort(cmp=_sort_xml_entries) + + # create a OSM file in /tmp + with tempfile.NamedTemporaryFile(dir='/tmp', delete=False) as fd: + fname = fd.name + fd.write("\n") + fd.write('\n') + fd.write('\t\n') + + for obj in world.osm2pgsql: + write_osm_obj(fd, obj) + + fd.write('\n') + + logger.debug( "Filename: %s" % fname) + + cmd = [os.path.join(world.config.source_dir, 'utils', 'setup.php')] + cmd.extend(['--osm-file', fname, '--import-data']) + proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE) + (outp, outerr) = proc.communicate() + assert (proc.returncode == 0), "OSM data import failed:\n%s\n%s\n" % (outp, outerr) + + ### reintroduce the triggers/indexes we've lost by having osm2pgsql set up place again + cur = world.conn.cursor() + cur.execute("""CREATE TRIGGER place_before_delete BEFORE DELETE ON place + FOR EACH ROW EXECUTE PROCEDURE place_delete()""") + cur.execute("""CREATE TRIGGER place_before_insert BEFORE INSERT ON place + FOR EACH ROW EXECUTE PROCEDURE place_insert()""") + cur.execute("""CREATE UNIQUE INDEX idx_place_osm_unique on place using btree(osm_id,osm_type,class,type)""") + world.conn.commit() + + + os.remove(fname) + world.osm2pgsql = [] + +actiontypes = { 'C' : 'create', 'M' : 'modify', 'D' : 'delete' } + +@step(u'updating osm data') +def osm2pgsql_update_place(step): + """Creates an osc file from the previously defined data and imports it + into the database. 
+ """ + world.run_nominatim_script('setup', 'create-functions', 'create-partition-functions') + cur = world.conn.cursor() + cur.execute("""insert into placex (osm_type, osm_id, class, type, name, admin_level, + housenumber, street, addr_place, isin, postcode, country_code, extratags, + geometry) select * from place""") + world.conn.commit() + world.run_nominatim_script('setup', 'index', 'index-noanalyse') + world.run_nominatim_script('setup', 'create-functions', 'create-partition-functions', 'enable-diff-updates') + + with tempfile.NamedTemporaryFile(dir='/tmp', delete=False) as fd: + fname = fd.name + fd.write("\n") + fd.write('\n') + + for obj in world.osm2pgsql: + fd.write('<%s>\n' % (actiontypes[obj['action']], )) + write_osm_obj(fd, obj) + fd.write('\n' % (actiontypes[obj['action']], )) + + fd.write('\n') + + logger.debug( "Filename: %s" % fname) + + cmd = [os.path.join(world.config.source_dir, 'utils', 'update.php')] + cmd.extend(['--import-diff', fname]) + proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE) + (outp, outerr) = proc.communicate() + assert (proc.returncode == 0), "OSM data update failed:\n%s\n%s\n" % (outp, outerr) + + os.remove(fname) + world.osm2pgsql = [] diff --git a/tests/steps/terrain.py b/tests/steps/terrain.py new file mode 100644 index 00000000..52b7e31e --- /dev/null +++ b/tests/steps/terrain.py @@ -0,0 +1,246 @@ +from lettuce import * +from nose.tools import * +import logging +import os +import subprocess +import psycopg2 +import re +from haversine import haversine +from shapely.wkt import loads as wkt_load +from shapely.ops import linemerge + +logger = logging.getLogger(__name__) + +class NominatimConfig: + + def __init__(self): + # logging setup + loglevel = getattr(logging, os.environ.get('LOGLEVEL','info').upper()) + if 'LOGFILE' in os.environ: + logging.basicConfig(filename=os.environ.get('LOGFILE','run.log'), + level=loglevel) + else: + logging.basicConfig(level=loglevel) + # Nominatim test setup + self.base_url = os.environ.get('NOMINATIM_SERVER', 'http://localhost/nominatim') + self.source_dir = os.path.abspath(os.environ.get('NOMINATIM_DIR', '../Nominatim')) + self.template_db = os.environ.get('TEMPLATE_DB', 'test_template_nominatim') + self.test_db = os.environ.get('TEST_DB', 'test_nominatim') + self.local_settings_file = os.environ.get('NOMINATIM_SETTINGS', '/tmp/nominatim_settings.php') + self.reuse_template = 'NOMINATIM_REUSE_TEMPLATE' in os.environ + self.keep_scenario_db = 'NOMINATIM_KEEP_SCENARIO_DB' in os.environ + os.environ['NOMINATIM_SETTINGS'] = '/tmp/nominatim_settings.php' + + scriptpath = os.path.dirname(os.path.abspath(__file__)) + self.scene_path = os.environ.get('SCENE_PATH', + os.path.join(scriptpath, '..', 'scenes', 'data')) + + + def __str__(self): + return 'Server URL: %s\nSource dir: %s\n' % (self.base_url, self.source_dir) + +world.config = NominatimConfig() + +@world.absorb +def write_nominatim_config(dbname): + f = open(world.config.local_settings_file, 'w') + f.write("[:class]. + """ + oid = oid.strip() + if oid == 'None': + return None, None, None + osmtype = oid[0] + assert_in(osmtype, ('R','N','W')) + if ':' in oid: + osmid, cls = oid[1:].split(':') + return (osmtype, int(osmid), cls) + else: + return (osmtype, int(oid[1:]), None) + +@world.absorb +def get_placeid(oid): + """ Tries to retrive the place_id for a unique identifier. 
""" + if oid[0].isdigit(): + return int(oid) + + osmtype, osmid, cls = world.split_id(oid) + if osmtype is None: + return None + cur = world.conn.cursor() + if cls is None: + q = 'SELECT place_id FROM placex where osm_type = %s and osm_id = %s' + params = (osmtype, osmid) + else: + q = 'SELECT place_id FROM placex where osm_type = %s and osm_id = %s and class = %s' + params = (osmtype, osmid, cls) + cur.execute(q, params) + assert_equals (cur.rowcount, 1) + return cur.fetchone()[0] + + +@world.absorb +def match_geometry(coord, matchstring): + m = re.match(r'([-0-9.]+),\s*([-0-9.]+)\s*(?:\+-([0-9.]+)([a-z]+)?)?', matchstring) + assert_is_not_none(m, "Invalid match string") + + logger.debug("Distmatch: %s/%s %s %s" % (m.group(1), m.group(2), m.group(3), m.group(4) )) + dist = haversine(coord, (float(m.group(1)), float(m.group(2)))) + + if m.group(3) is not None: + expdist = float(m.group(3)) + if m.group(4) is not None: + if m.group(4) == 'm': + expdist = expdist/1000 + elif m.group(4) == 'km': + pass + else: + raise Exception("Unknown unit '%s' in geometry match" % (m.group(4), )) + else: + expdist = 0 + + logger.debug("Distances expected: %f, got: %f" % (expdist, dist)) + assert dist <= expdist, "Geometry too far away, expected: %f, got: %f" % (expdist, dist) + + + +@world.absorb +def db_dump_table(table): + cur = world.conn.cursor() + cur.execute('SELECT * FROM %s' % table) + print '<<<<<<< BEGIN OF TABLE DUMP %s' % table + for res in cur: + print res + print '<<<<<<< END OF TABLE DUMP %s' % table + +@world.absorb +def db_drop_database(name): + conn = psycopg2.connect(database='postgres') + conn.set_isolation_level(0) + cur = conn.cursor() + cur.execute('DROP DATABASE IF EXISTS %s' % (name, )) + conn.close() + + +world.is_template_set_up = False + +@world.absorb +def db_template_setup(): + """ Set up a template database, containing all tables + but not yet any functions. + """ + if world.is_template_set_up: + return + + world.is_template_set_up = True + world.write_nominatim_config(world.config.template_db) + if world.config.reuse_template: + # check that the template is there + conn = psycopg2.connect(database='postgres') + cur = conn.cursor() + cur.execute('select count(*) from pg_database where datname = %s', + (world.config.template_db,)) + if cur.fetchone()[0] == 1: + return + else: + # just in case... make sure a previous table has been dropped + world.db_drop_database(world.config.template_db) + # call the first part of database setup + world.run_nominatim_script('setup', 'create-db', 'setup-db') + # remove external data to speed up indexing for tests + conn = psycopg2.connect(database=world.config.template_db) + psycopg2.extras.register_hstore(conn, globally=False, unicode=True) + cur = conn.cursor() + for table in ('gb_postcode', 'us_postcode', 'us_state', 'us_statecounty'): + cur.execute('TRUNCATE TABLE %s' % (table,)) + conn.commit() + conn.close() + # execute osm2pgsql on an empty file to get the right tables + osm2pgsql = os.path.join(world.config.source_dir, 'osm2pgsql', 'osm2pgsql') + proc = subprocess.Popen([osm2pgsql, '-lsc', '-O', 'gazetteer', '-d', world.config.template_db, '-'], + stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE) + [outstr, errstr] = proc.communicate(input='') + world.run_nominatim_script('setup', 'create-functions', 'create-tables', 'create-partition-tables', 'create-partition-functions', 'load-data', 'create-search-indices') + + +# Leave the table around so it can be reused again after a non-reuse test round. 
+#@after.all
+def db_template_teardown(total):
+    """ Drops the template database unless it is flagged for reuse
+        and removes the temporary settings file.
+    """
+    if world.is_template_set_up:
+        # remove template DB
+        if not world.config.reuse_template:
+            world.db_drop_database(world.config.template_db)
+        try:
+            os.remove(world.config.local_settings_file)
+        except OSError:
+            pass # ignore missing file
+
+
+##########################################################################
+#
+# Data scene handling
+#
+
+world.scenes = {}
+world.current_scene = None
+
+@world.absorb
+def load_scene(name):
+    if name in world.scenes:
+        world.current_scene = world.scenes[name]
+    else:
+        with open(os.path.join(world.config.scene_path, "%s.wkt" % name), 'r') as fd:
+            scene = {}
+            for line in fd:
+                if line.strip():
+                    obj, wkt = line.split('|', 2)
+                    wkt = wkt.strip()
+                    scene[obj.strip()] = wkt_load(wkt)
+            world.scenes[name] = scene
+            world.current_scene = scene
+
+@world.absorb
+def get_scene_geometry(name):
+    if not ':' in name:
+        # Not a scene description
+        return None
+
+    geoms = []
+    for obj in name.split('+'):
+        oname = obj.strip()
+        if oname.startswith(':'):
+            geoms.append(world.current_scene[oname[1:]])
+        else:
+            scene, obj = oname.split(':', 2)
+            oldscene = world.current_scene
+            world.load_scene(scene)
+            wkt = world.current_scene[obj]
+            world.current_scene = oldscene
+            geoms.append(wkt)
+
+    if len(geoms) == 1:
+        return geoms[0]
+    else:
+        return linemerge(geoms)
-- 
2.45.2
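For illustration only: a minimal sketch of the OSM XML that a helper like
write_osm_obj above is expected to emit for the node/way/relation
dictionaries assembled by the osm steps. The function name, attribute
order and escaping below are assumptions of this sketch, not the code
shipped in the patch.

# Illustrative sketch -- approximates the XML written for the objects
# collected in world.osm2pgsql; attribute names follow the OSM XML format.
from xml.sax.saxutils import quoteattr

def sketch_write_osm_obj(fd, obj):
    # common attributes present in every object dict built by the steps
    attrs = ' '.join('%s=%s' % (k, quoteattr(str(obj[k])))
                     for k in ('id', 'version', 'timestamp', 'changeset', 'uid', 'user'))
    if obj['type'] == 'N':
        fd.write('<node %s lat="%.8f" lon="%.8f">\n' % (attrs, obj['lat'], obj['lon']))
        for k, v in obj['tags'].items():
            fd.write('  <tag k=%s v=%s/>\n' % (quoteattr(k), quoteattr(v)))
        fd.write('</node>\n')
    elif obj['type'] == 'W':
        fd.write('<way %s>\n' % attrs)
        for nd in obj['nodes']:
            fd.write('  <nd ref="%s"/>\n' % nd)
        for k, v in (obj['tags'] or {}).items():
            fd.write('  <tag k=%s v=%s/>\n' % (quoteattr(k), quoteattr(v)))
        fd.write('</way>\n')
    elif obj['type'] == 'R':
        # members are (type name, id, role) tuples as built by the relations step
        fd.write('<relation %s>\n' % attrs)
        for mtype, mref, mrole in obj['members']:
            fd.write('  <member type="%s" ref="%s" role=%s/>\n' % (mtype, mref, quoteattr(mrole)))
        for k, v in obj['tags'].items():
            fd.write('  <tag k=%s v=%s/>\n' % (quoteattr(k), quoteattr(v)))
        fd.write('</relation>\n')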