From: Sarah Hoffmann Date: Wed, 23 Aug 2023 21:41:30 +0000 (+0200) Subject: Merge pull request #3139 from mtmail/update-search-examples X-Git-Tag: v4.3.0~19 X-Git-Url: https://git.openstreetmap.org/nominatim.git/commitdiff_plain/c5836c80905200fb7a857f3704d441c9992b69f6?hp=5683f556467650ef06949187fc2822c3658b97c5 Merge pull request #3139 from mtmail/update-search-examples Search.md - update XML and JSON example output --- diff --git a/.github/actions/build-nominatim/action.yml b/.github/actions/build-nominatim/action.yml index 281b5128..70392d79 100644 --- a/.github/actions/build-nominatim/action.yml +++ b/.github/actions/build-nominatim/action.yml @@ -25,7 +25,7 @@ runs: shell: bash - name: Install${{ matrix.flavour }} prerequisites run: | - sudo apt-get install -y -qq libboost-system-dev libboost-filesystem-dev libexpat1-dev zlib1g-dev libbz2-dev libpq-dev libproj-dev libicu-dev liblua${LUA_VERSION}-dev lua${LUA_VERSION} lua-dkjson + sudo apt-get install -y -qq libboost-system-dev libboost-filesystem-dev libexpat1-dev zlib1g-dev libbz2-dev libpq-dev libproj-dev libicu-dev liblua${LUA_VERSION}-dev lua${LUA_VERSION} lua-dkjson nlohmann-json3-dev if [ "$FLAVOUR" == "oldstuff" ]; then pip3 install MarkupSafe==2.0.1 python-dotenv psycopg2==2.7.7 jinja2==2.8 psutil==5.4.2 pyicu==2.9 osmium PyYAML==5.1 sqlalchemy==1.4.31 datrie asyncpg else diff --git a/docs/admin/Installation.md b/docs/admin/Installation.md index d85359fa..abcd89d8 100644 --- a/docs/admin/Installation.md +++ b/docs/admin/Installation.md @@ -35,6 +35,7 @@ For compiling: * [bzip2](http://www.bzip.org/) * [zlib](https://www.zlib.net/) * [ICU](http://site.icu-project.org/) + * [nlohmann/json](https://json.nlohmann.me/) * [Boost libraries](https://www.boost.org/), including system and filesystem * PostgreSQL client libraries * a recent C++ compiler (gcc 5+ or Clang 3.8+) diff --git a/lib-php/website/details.php b/lib-php/website/details.php index 99307bbd..98fb6ef7 100644 --- a/lib-php/website/details.php +++ b/lib-php/website/details.php @@ -23,7 +23,7 @@ $aLangPrefOrder = $oParams->getPreferredLanguages(); $sPlaceId = $oParams->getString('place_id'); $sOsmType = $oParams->getSet('osmtype', array('N', 'W', 'R')); -$iOsmId = $oParams->getInt('osmid', -1); +$iOsmId = $oParams->getInt('osmid', 0); $sClass = $oParams->getString('class'); $bIncludeKeywords = $oParams->getBool('keywords', false); @@ -38,7 +38,7 @@ $oDB->connect(); $sLanguagePrefArraySQL = $oDB->getArraySQL($oDB->getDBQuotedList($aLangPrefOrder)); -if ($sOsmType && $iOsmId > 0) { +if ($sOsmType && $iOsmId !== 0) { $sSQL = 'SELECT place_id FROM placex WHERE osm_type = :type AND osm_id = :id'; $aSQLParams = array(':type' => $sOsmType, ':id' => $iOsmId); // osm_type and osm_id are not unique enough diff --git a/lib-sql/functions/utils.sql b/lib-sql/functions/utils.sql index f5be7b61..b2771ba1 100644 --- a/lib-sql/functions/utils.sql +++ b/lib-sql/functions/utils.sql @@ -273,8 +273,8 @@ BEGIN END IF; RETURN ST_Envelope(ST_Collect( - ST_Project(geom, radius, 0.785398)::geometry, - ST_Project(geom, radius, 3.9269908)::geometry)); + ST_Project(geom::geography, radius, 0.785398)::geometry, + ST_Project(geom::geography, radius, 3.9269908)::geometry)); END; $$ LANGUAGE plpgsql IMMUTABLE; diff --git a/nominatim/api/connection.py b/nominatim/api/connection.py index 72cabf78..bf217314 100644 --- a/nominatim/api/connection.py +++ b/nominatim/api/connection.py @@ -7,7 +7,8 @@ """ Extended SQLAlchemy connection class that also includes access to the schema. 
""" -from typing import cast, Any, Mapping, Sequence, Union, Dict, Optional, Set +from typing import cast, Any, Mapping, Sequence, Union, Dict, Optional, Set, \ + Awaitable, Callable, TypeVar import sqlalchemy as sa from sqlalchemy.ext.asyncio import AsyncConnection @@ -17,6 +18,8 @@ from nominatim.db.sqlalchemy_schema import SearchTables from nominatim.db.sqlalchemy_types import Geometry from nominatim.api.logging import log +T = TypeVar('T') + class SearchConnection: """ An extended SQLAlchemy connection class, that also contains then table definitions. The underlying asynchronous SQLAlchemy @@ -61,11 +64,10 @@ class SearchConnection: Raises a ValueError if the property does not exist. """ - if name.startswith('DB:'): - raise ValueError(f"Illegal property value '{name}'.") + lookup_name = f'DBPROP:{name}' - if cached and name in self._property_cache: - return cast(str, self._property_cache[name]) + if cached and lookup_name in self._property_cache: + return cast(str, self._property_cache[lookup_name]) sql = sa.select(self.t.properties.c.value)\ .where(self.t.properties.c.property == name) @@ -74,7 +76,7 @@ class SearchConnection: if value is None: raise ValueError(f"Property '{name}' not found in database.") - self._property_cache[name] = cast(str, value) + self._property_cache[lookup_name] = cast(str, value) return cast(str, value) @@ -92,6 +94,29 @@ class SearchConnection: return self._property_cache['DB:server_version'] + async def get_cached_value(self, group: str, name: str, + factory: Callable[[], Awaitable[T]]) -> T: + """ Access the cache for this Nominatim instance. + Each cache value needs to belong to a group and have a name. + This function is for internal API use only. + + `factory` is an async callback function that produces + the value if it is not already cached. + + Returns the cached value or the result of factory (also caching + the result). + """ + full_name = f'{group}:{name}' + + if full_name in self._property_cache: + return cast(T, self._property_cache[full_name]) + + value = await factory() + self._property_cache[full_name] = value + + return value + + async def get_class_table(self, cls: str, typ: str) -> Optional[SaFromClause]: """ Lookup up if there is a classtype table for the given category and return a SQLAlchemy table for it, if it exists. 
diff --git a/nominatim/api/lookup.py b/nominatim/api/lookup.py index 0e1fd9ce..a46cdb69 100644 --- a/nominatim/api/lookup.py +++ b/nominatim/api/lookup.py @@ -207,16 +207,16 @@ async def get_simple_place(conn: SearchConnection, place: ntyp.PlaceRef, out = [] if details.geometry_simplification > 0.0: - col = col.ST_SimplifyPreserveTopology(details.geometry_simplification) + col = sa.func.ST_SimplifyPreserveTopology(col, details.geometry_simplification) if details.geometry_output & ntyp.GeometryFormat.GEOJSON: - out.append(col.ST_AsGeoJSON().label('geometry_geojson')) + out.append(sa.func.ST_AsGeoJSON(col).label('geometry_geojson')) if details.geometry_output & ntyp.GeometryFormat.TEXT: - out.append(col.ST_AsText().label('geometry_text')) + out.append(sa.func.ST_AsText(col).label('geometry_text')) if details.geometry_output & ntyp.GeometryFormat.KML: - out.append(col.ST_AsKML().label('geometry_kml')) + out.append(sa.func.ST_AsKML(col).label('geometry_kml')) if details.geometry_output & ntyp.GeometryFormat.SVG: - out.append(col.ST_AsSVG().label('geometry_svg')) + out.append(sa.func.ST_AsSVG(col).label('geometry_svg')) return sql.add_columns(*out) diff --git a/nominatim/api/reverse.py b/nominatim/api/reverse.py index 4a5f9c03..63836b49 100644 --- a/nominatim/api/reverse.py +++ b/nominatim/api/reverse.py @@ -87,7 +87,7 @@ def _locate_interpolation(table: SaFromClause) -> SaLabel: def _is_address_point(table: SaFromClause) -> SaColumn: return sa.and_(table.c.rank_address == 30, sa.or_(table.c.housenumber != None, - table.c.name.has_key('housename'))) + table.c.name.has_key('addr:housename'))) def _get_closest(*rows: Optional[SaRow]) -> Optional[SaRow]: diff --git a/nominatim/api/search/db_search_builder.py b/nominatim/api/search/db_search_builder.py index 8dd435d0..377c4be7 100644 --- a/nominatim/api/search/db_search_builder.py +++ b/nominatim/api/search/db_search_builder.py @@ -111,9 +111,11 @@ class SearchBuilder: penalty = min(categories.penalties) categories.penalties = [p - penalty for p in categories.penalties] for search in builder: - yield dbs.NearSearch(penalty, categories, search) + yield dbs.NearSearch(penalty + assignment.penalty, categories, search) else: - yield from builder + for search in builder: + search.penalty += assignment.penalty + yield search def build_poi_search(self, sdata: dbf.SearchData) -> Iterator[dbs.AbstractSearch]: @@ -206,12 +208,11 @@ class SearchBuilder: and all(t.is_indexed for t in addr_partials) exp_count = min(t.count for t in name_partials) - if (len(name_partials) > 3 or exp_count < 1000) and partials_indexed: + if (len(name_partials) > 3 or exp_count < 3000) and partials_indexed: yield penalty, exp_count, dbf.lookup_by_names(name_tokens, addr_tokens) return - exp_count = min(exp_count, min(t.count for t in addr_partials)) \ - if addr_partials else exp_count + exp_count = exp_count / (2**len(addr_partials)) if addr_partials else exp_count # Partial term to frequent. Try looking up by rare full names first. 
name_fulls = self.query.get_tokens(name, TokenType.WORD) diff --git a/nominatim/api/search/db_searches.py b/nominatim/api/search/db_searches.py index 3f402436..34a4037a 100644 --- a/nominatim/api/search/db_searches.py +++ b/nominatim/api/search/db_searches.py @@ -111,7 +111,7 @@ def _filter_by_layer(table: SaFromClause, layers: DataLayer) -> SaColumn: orexpr.append(table.c.rank_address.between(1, 29)) orexpr.append(sa.and_(table.c.rank_address == 30, sa.or_(table.c.housenumber != None, - table.c.address.has_key('housename')))) + table.c.address.has_key('addr:housename')))) elif layers & DataLayer.POI: orexpr.append(sa.and_(table.c.rank_address == 30, table.c.class_.not_in(('place', 'building')))) @@ -317,7 +317,7 @@ class PoiSearch(AbstractSearch): """ def __init__(self, sdata: SearchData) -> None: super().__init__(sdata.penalty) - self.categories = sdata.qualifiers + self.qualifiers = sdata.qualifiers self.countries = sdata.countries @@ -339,7 +339,7 @@ class PoiSearch(AbstractSearch): .order_by(t.c.centroid.ST_Distance(NEAR_PARAM)) \ .limit(LIMIT_PARAM) - classtype = self.categories.values + classtype = self.qualifiers.values if len(classtype) == 1: cclass, ctype = classtype[0] sql: SaLambdaSelect = sa.lambda_stmt(lambda: _base_query() @@ -358,7 +358,7 @@ class PoiSearch(AbstractSearch): rows.extend(await conn.execute(sql, bind_params)) else: # use the class type tables - for category in self.categories.values: + for category in self.qualifiers.values: table = await conn.get_class_table(*category) if table is not None: sql = _select_placex(t)\ @@ -384,7 +384,7 @@ class PoiSearch(AbstractSearch): for row in rows: result = nres.create_from_placex_row(row, nres.SearchResult) assert result - result.accuracy = self.penalty + self.categories.get_penalty((row.class_, row.type)) + result.accuracy = self.penalty + self.qualifiers.get_penalty((row.class_, row.type)) result.bbox = Bbox.from_wkb(row.bbox) results.append(result) @@ -427,6 +427,7 @@ class CountrySearch(AbstractSearch): result = nres.create_from_placex_row(row, nres.SearchResult) assert result result.accuracy = self.penalty + self.countries.get_penalty(row.country_code, 5.0) + result.bbox = Bbox.from_wkb(row.bbox) results.append(result) return results or await self.lookup_in_country_table(conn, details) @@ -622,7 +623,10 @@ class PlaceSearch(AbstractSearch): if details.viewbox is not None: if details.bounded_viewbox: - sql = sql.where(tsearch.c.centroid.intersects(VIEWBOX_PARAM)) + if details.viewbox.area < 0.2: + sql = sql.where(tsearch.c.centroid.intersects(VIEWBOX_PARAM)) + else: + sql = sql.where(tsearch.c.centroid.ST_Intersects_no_index(VIEWBOX_PARAM)) else: penalty += sa.case((t.c.geometry.intersects(VIEWBOX_PARAM), 0.0), (t.c.geometry.intersects(VIEWBOX2_PARAM), 1.0), @@ -630,8 +634,12 @@ class PlaceSearch(AbstractSearch): if details.near is not None: if details.near_radius is not None: - sql = sql.where(tsearch.c.centroid.ST_DWithin(NEAR_PARAM, NEAR_RADIUS_PARAM)) - sql = sql.add_columns(-tsearch.c.centroid.ST_Distance(NEAR_PARAM) + if details.near_radius < 0.1: + sql = sql.where(tsearch.c.centroid.ST_DWithin(NEAR_PARAM, NEAR_RADIUS_PARAM)) + else: + sql = sql.where(tsearch.c.centroid.ST_DWithin_no_index(NEAR_PARAM, + NEAR_RADIUS_PARAM)) + sql = sql.add_columns((-tsearch.c.centroid.ST_Distance(NEAR_PARAM)) .label('importance')) sql = sql.order_by(sa.desc(sa.text('importance'))) else: @@ -663,7 +671,7 @@ class PlaceSearch(AbstractSearch): .where(thnr.c.indexed_status == 0) if details.excluded: - place_sql = 
place_sql.where(_exclude_places(thnr)) + place_sql = place_sql.where(thnr.c.place_id.not_in(sa.bindparam('excluded'))) if self.qualifiers: place_sql = place_sql.where(self.qualifiers.sql_restrict(thnr)) diff --git a/nominatim/api/search/geocoder.py b/nominatim/api/search/geocoder.py index d341b6cd..564e3d8d 100644 --- a/nominatim/api/search/geocoder.py +++ b/nominatim/api/search/geocoder.py @@ -152,7 +152,8 @@ class ForwardGeocoder: # pylint: disable=invalid-name,too-many-locals def _dump_searches(searches: List[AbstractSearch], query: QueryStruct, start: int = 0) -> Iterator[Optional[List[Any]]]: - yield ['Penalty', 'Lookups', 'Housenr', 'Postcode', 'Countries', 'Qualifier', 'Rankings'] + yield ['Penalty', 'Lookups', 'Housenr', 'Postcode', 'Countries', + 'Qualifier', 'Catgeory', 'Rankings'] def tk(tl: List[int]) -> str: tstr = [f"{query.find_lookup_word_by_id(t)}({t})" for t in tl] @@ -182,11 +183,18 @@ def _dump_searches(searches: List[AbstractSearch], query: QueryStruct, for search in searches[start:]: fields = ('lookups', 'rankings', 'countries', 'housenumbers', - 'postcodes', 'qualifier') - iters = itertools.zip_longest([f"{search.penalty:.3g}"], - *(getattr(search, attr, []) for attr in fields), - fillvalue= '') - for penalty, lookup, rank, cc, hnr, pc, qual in iters: + 'postcodes', 'qualifiers') + if hasattr(search, 'search'): + iters = itertools.zip_longest([f"{search.penalty:.3g}"], + *(getattr(search.search, attr, []) for attr in fields), + getattr(search, 'categories', []), + fillvalue='') + else: + iters = itertools.zip_longest([f"{search.penalty:.3g}"], + *(getattr(search, attr, []) for attr in fields), + [], + fillvalue='') + for penalty, lookup, rank, cc, hnr, pc, qual, cat in iters: yield [penalty, fmt_lookup(lookup), fmt_cstr(hnr), - fmt_cstr(pc), fmt_cstr(cc), fmt_cstr(qual), fmt_ranking(rank)] + fmt_cstr(pc), fmt_cstr(cc), fmt_cstr(qual), fmt_cstr(cat), fmt_ranking(rank)] yield None diff --git a/nominatim/api/search/icu_tokenizer.py b/nominatim/api/search/icu_tokenizer.py index f259995d..b68e8d10 100644 --- a/nominatim/api/search/icu_tokenizer.py +++ b/nominatim/api/search/icu_tokenizer.py @@ -83,7 +83,7 @@ class ICUToken(qmod.Token): seq = difflib.SequenceMatcher(a=self.lookup_word, b=norm) distance = 0 for tag, afrom, ato, bfrom, bto in seq.get_opcodes(): - if tag == 'delete' and (afrom == 0 or ato == len(self.lookup_word)): + if tag in ('delete', 'insert') and (afrom == 0 or ato == len(self.lookup_word)): distance += 1 elif tag == 'replace': distance += max((ato-afrom), (bto-bfrom)) @@ -133,10 +133,19 @@ class ICUQueryAnalyzer(AbstractQueryAnalyzer): async def setup(self) -> None: """ Set up static data structures needed for the analysis. 
""" - rules = await self.conn.get_property('tokenizer_import_normalisation') - self.normalizer = Transliterator.createFromRules("normalization", rules) - rules = await self.conn.get_property('tokenizer_import_transliteration') - self.transliterator = Transliterator.createFromRules("transliteration", rules) + async def _make_normalizer() -> Any: + rules = await self.conn.get_property('tokenizer_import_normalisation') + return Transliterator.createFromRules("normalization", rules) + + self.normalizer = await self.conn.get_cached_value('ICUTOK', 'normalizer', + _make_normalizer) + + async def _make_transliterator() -> Any: + rules = await self.conn.get_property('tokenizer_import_transliteration') + return Transliterator.createFromRules("transliteration", rules) + + self.transliterator = await self.conn.get_cached_value('ICUTOK', 'transliterator', + _make_transliterator) if 'word' not in self.conn.t.meta.tables: sa.Table('word', self.conn.t.meta, diff --git a/nominatim/api/search/token_assignment.py b/nominatim/api/search/token_assignment.py index 0ae2cd43..3f0e737b 100644 --- a/nominatim/api/search/token_assignment.py +++ b/nominatim/api/search/token_assignment.py @@ -253,6 +253,8 @@ class _TokenSequence: priors = sum(1 for t in self.seq[hnrpos+1:] if t.ttype == qmod.TokenType.PARTIAL) if not self._adapt_penalty_from_priors(priors, 1): return False + if any(t.ttype == qmod.TokenType.CATEGORY for t in self.seq): + self.penalty += 1.0 return True diff --git a/nominatim/api/v1/format.py b/nominatim/api/v1/format.py index 1e37b4c7..53156fdd 100644 --- a/nominatim/api/v1/format.py +++ b/nominatim/api/v1/format.py @@ -141,7 +141,7 @@ def _format_details_json(result: napi.DetailedResult, options: Mapping[str, Any] if result.address_rows is not None: _add_address_rows(out, 'address', result.address_rows, locales) - if result.linked_rows is not None: + if result.linked_rows: _add_address_rows(out, 'linked_places', result.linked_rows, locales) if result.name_keywords is not None or result.address_keywords is not None: diff --git a/nominatim/api/v1/server_glue.py b/nominatim/api/v1/server_glue.py index 80bf38a4..cf9bc3af 100644 --- a/nominatim/api/v1/server_glue.py +++ b/nominatim/api/v1/server_glue.py @@ -302,7 +302,7 @@ async def details_endpoint(api: napi.NominatimAPIAsync, params: ASGIAdaptor) -> result = await api.details(place, address_details=params.get_bool('addressdetails', False), - linked_places=params.get_bool('linkedplaces', False), + linked_places=params.get_bool('linkedplaces', True), parented_places=params.get_bool('hierarchy', False), keywords=params.get_bool('keywords', False), geometry_output = napi.GeometryFormat.GEOJSON diff --git a/nominatim/db/sqlalchemy_types.py b/nominatim/db/sqlalchemy_types.py index 7d3789aa..7b959036 100644 --- a/nominatim/db/sqlalchemy_types.py +++ b/nominatim/db/sqlalchemy_types.py @@ -66,7 +66,16 @@ class Geometry(types.UserDefinedType): # type: ignore[type-arg] def ST_DWithin(self, other: SaColumn, distance: SaColumn) -> SaColumn: - return sa.func.ST_DWithin(self, other, distance, type_=sa.Float) + return sa.func.ST_DWithin(self, other, distance, type_=sa.Boolean) + + + def ST_DWithin_no_index(self, other: SaColumn, distance: SaColumn) -> SaColumn: + return sa.func.ST_DWithin(sa.func.coalesce(sa.null(), self), + other, distance, type_=sa.Boolean) + + + def ST_Intersects_no_index(self, other: SaColumn) -> 'sa.Operators': + return sa.func.coalesce(sa.null(), self).op('&&')(other) def ST_Distance(self, other: SaColumn) -> SaColumn: diff --git a/osm2pgsql 
b/osm2pgsql index 4facd1ae..ea0178e9 160000 --- a/osm2pgsql +++ b/osm2pgsql @@ -1 +1 @@ -Subproject commit 4facd1aea451cea220261c361698b8e5f18a9327 +Subproject commit ea0178e97d5b69a87a8b9c35210c8be4674e60e6 diff --git a/test/python/api/search/test_db_search_builder.py b/test/python/api/search/test_db_search_builder.py index d1ad320b..c93b8ead 100644 --- a/test/python/api/search/test_db_search_builder.py +++ b/test/python/api/search/test_db_search_builder.py @@ -161,7 +161,7 @@ def test_category_only(kwargs): search = searches[0] assert isinstance(search, dbs.PoiSearch) - assert search.categories.values == [('this', 'that')] + assert search.qualifiers.values == [('this', 'that')] @pytest.mark.parametrize('kwargs', [{'viewbox': '0,0,1,1'}, @@ -384,7 +384,7 @@ def test_frequent_partials_in_name_and_address(): def test_too_frequent_partials_in_name_and_address(): - searches = make_counted_searches(10000, 1, 10000, 1) + searches = make_counted_searches(20000, 1, 10000, 1) assert len(searches) == 1 diff --git a/test/python/api/test_api_lookup.py b/test/python/api/test_api_lookup.py index 619bc747..8f5dd17c 100644 --- a/test/python/api/test_api_lookup.py +++ b/test/python/api/test_api_lookup.py @@ -100,3 +100,54 @@ def test_lookup_multiple_places(apiobj): assert len(result) == 2 assert set(r.place_id for r in result) == {332, 4924} + + +@pytest.mark.parametrize('gtype', list(napi.GeometryFormat)) +def test_simple_place_with_geometry(apiobj, gtype): + apiobj.add_placex(place_id=332, osm_type='W', osm_id=4, + class_='highway', type='residential', + name={'name': 'Road'}, address={'city': 'Barrow'}, + extratags={'surface': 'paved'}, + parent_place_id=34, linked_place_id=55, + admin_level=15, country_code='gb', + housenumber='4', + postcode='34425', wikipedia='en:Faa', + rank_search=27, rank_address=26, + importance=0.01, + centroid=(23, 34), + geometry='POLYGON((23 34, 23.1 34, 23.1 34.1, 23 34))') + + result = apiobj.api.lookup([napi.OsmID('W', 4)], + geometry_output=gtype) + + assert len(result) == 1 + assert result[0].place_id == 332 + + if gtype == napi.GeometryFormat.NONE: + assert list(result[0].geometry.keys()) == [] + else: + assert list(result[0].geometry.keys()) == [gtype.name.lower()] + + +def test_simple_place_with_geometry_simplified(apiobj): + apiobj.add_placex(place_id=332, osm_type='W', osm_id=4, + class_='highway', type='residential', + name={'name': 'Road'}, address={'city': 'Barrow'}, + extratags={'surface': 'paved'}, + parent_place_id=34, linked_place_id=55, + admin_level=15, country_code='gb', + housenumber='4', + postcode='34425', wikipedia='en:Faa', + rank_search=27, rank_address=26, + importance=0.01, + centroid=(23, 34), + geometry='POLYGON((23 34, 22.999 34, 23.1 34, 23.1 34.1, 23 34))') + + result = apiobj.api.lookup([napi.OsmID('W', 4)], + geometry_output=napi.GeometryFormat.TEXT, + geometry_simplification=0.1) + + assert len(result) == 1 + assert result[0].place_id == 332 + assert result[0].geometry == {'text': 'POLYGON((23 34,23.1 34,23.1 34.1,23 34))'} + diff --git a/test/python/api/test_api_reverse.py b/test/python/api/test_api_reverse.py index 3296e98f..d9ab2cb8 100644 --- a/test/python/api/test_api_reverse.py +++ b/test/python/api/test_api_reverse.py @@ -60,7 +60,8 @@ def test_reverse_ignore_unindexed(apiobj): (0.7, napi.DataLayer.RAILWAY, 226), (0.7, napi.DataLayer.NATURAL, 227), (0.70003, napi.DataLayer.MANMADE | napi.DataLayer.RAILWAY, 225), - (0.70003, napi.DataLayer.MANMADE | napi.DataLayer.NATURAL, 225)]) + (0.70003, napi.DataLayer.MANMADE | 
napi.DataLayer.NATURAL, 225), + (5, napi.DataLayer.ADDRESS, 229)]) def test_reverse_rank_30_layers(apiobj, y, layer, place_id): apiobj.add_placex(place_id=223, class_='place', type='house', housenumber='1', @@ -83,6 +84,11 @@ def test_reverse_rank_30_layers(apiobj, y, layer, place_id): rank_address=0, rank_search=30, centroid=(1.3, 0.70005)) + apiobj.add_placex(place_id=229, class_='place', type='house', + name={'addr:housename': 'Old Cottage'}, + rank_address=30, + rank_search=30, + centroid=(1.3, 5)) assert apiobj.api.reverse((1.3, y), layers=layer).place_id == place_id diff --git a/vagrant/Install-on-Ubuntu-20.sh b/vagrant/Install-on-Ubuntu-20.sh index 0f664da2..78c42007 100755 --- a/vagrant/Install-on-Ubuntu-20.sh +++ b/vagrant/Install-on-Ubuntu-20.sh @@ -23,7 +23,7 @@ export DEBIAN_FRONTEND=noninteractive #DOCS: sudo apt install -y build-essential cmake g++ libboost-dev libboost-system-dev \ libboost-filesystem-dev libexpat1-dev zlib1g-dev \ libbz2-dev libpq-dev liblua5.3-dev lua5.3 lua-dkjson \ - postgresql-12-postgis-3 \ + nlohmann-json3-dev postgresql-12-postgis-3 \ postgresql-contrib-12 postgresql-12-postgis-3-scripts \ php-cli php-pgsql php-intl libicu-dev python3-dotenv \ python3-psycopg2 python3-psutil python3-jinja2 python3-pip \ diff --git a/vagrant/Install-on-Ubuntu-22.sh b/vagrant/Install-on-Ubuntu-22.sh index b170daad..19e698e0 100755 --- a/vagrant/Install-on-Ubuntu-22.sh +++ b/vagrant/Install-on-Ubuntu-22.sh @@ -23,12 +23,12 @@ export DEBIAN_FRONTEND=noninteractive #DOCS: sudo apt install -y build-essential cmake g++ libboost-dev libboost-system-dev \ libboost-filesystem-dev libexpat1-dev zlib1g-dev \ libbz2-dev libpq-dev liblua5.3-dev lua5.3 lua-dkjson \ - postgresql-server-dev-14 postgresql-14-postgis-3 \ + nlohmann-json3-dev postgresql-14-postgis-3 \ postgresql-contrib-14 postgresql-14-postgis-3-scripts \ php-cli php-pgsql php-intl libicu-dev python3-dotenv \ python3-psycopg2 python3-psutil python3-jinja2 \ python3-icu python3-datrie python3-sqlalchemy \ - python3-asyncpg git + python3-asyncpg python3-yaml git # # System Configuration
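
The nominatim/api/connection.py hunk above introduces a new SearchConnection.get_cached_value() helper. Below is a minimal sketch (not part of the patch) of how it is meant to be called, mirroring the pattern the patch itself applies in nominatim/api/search/icu_tokenizer.py; the group/name strings 'EXAMPLE' and 'setup_rules' are invented for illustration.

from nominatim.api.connection import SearchConnection

async def load_setup_rules(conn: SearchConnection) -> str:
    """ Return the normalisation rules, computed at most once per instance. """

    async def _make_rules() -> str:
        # Executed only when the value is not cached yet; this is the
        # expensive part (a round trip to the properties table).
        return await conn.get_property('tokenizer_import_normalisation')

    # Stored under the key 'EXAMPLE:setup_rules'; later calls on the same
    # Nominatim instance return the cached value without calling _make_rules.
    return await conn.get_cached_value('EXAMPLE', 'setup_rules', _make_rules)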
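
The ST_DWithin_no_index/ST_Intersects_no_index additions in nominatim/db/sqlalchemy_types.py wrap the geometry column in coalesce(NULL, col). Judging from the method names and their use in PlaceSearch for large viewboxes and radii, the intent appears to be hiding the column from the query planner so the spatial index is skipped where a sequential scan is cheaper. A small standalone SQLAlchemy sketch of that compilation difference, using a generic 'centroid' column as a stand-in:

import sqlalchemy as sa

col = sa.column('centroid')            # stands in for placex.centroid
viewbox = sa.bindparam('viewbox')

indexed = sa.func.ST_Intersects(col, viewbox)                   # planner may use the GiST index
no_index = sa.func.coalesce(sa.null(), col).op('&&')(viewbox)   # column is wrapped, index unusable

print(indexed)    # renders roughly as: ST_Intersects(centroid, :viewbox)
print(no_index)   # renders roughly as: coalesce(NULL, centroid) && :viewbox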