1 """
2 Implementation of the 'import' subcommand.
3 """
4 import logging
5 from pathlib import Path
6
7 import psutil
8
9 from nominatim.db.connection import connect
10 from nominatim.db import status, properties
11 from nominatim.version import NOMINATIM_VERSION
12 from nominatim.errors import UsageError
13
14 # Do not repeat documentation of subcommand classes.
15 # pylint: disable=C0111
16 # Using non-top-level imports to avoid eventually unused imports.
17 # pylint: disable=E0012,C0415
18
19 LOG = logging.getLogger()
20
class SetupAll:
    """\
    Create a new Nominatim database from an OSM file.
    """

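    # Overview of the import pipeline implemented in run() below:
    #   1. create the database skeleton and import the OSM file via osm2pgsql
    #   2. create SQL functions and tables, load address levels, create
    #      triggers and partition tables
    #   3. import wikipedia importance data and load the data into placex
    #   4. set up the tokenizer and import postcodes
    #   5. index all places
    #   6. create search indices and default country names, set up the website
    #   7. record the database date and version
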
    @staticmethod
    def add_args(parser):
        required_group = parser.add_argument_group('Required arguments')
        group = required_group.add_mutually_exclusive_group(required=True)
        group.add_argument('--osm-file', metavar='FILE',
                           help='OSM file to be imported.')
        group.add_argument('--continue', dest='continue_at',
                           choices=['load-data', 'indexing', 'db-postprocess'],
                           help='Continue an import that was interrupted.')
        group = parser.add_argument_group('Optional arguments')
        group.add_argument('--osm2pgsql-cache', metavar='SIZE', type=int,
                           help='Size of cache to be used by osm2pgsql (in MB).')
        group.add_argument('--reverse-only', action='store_true',
                           help='Do not create tables and indexes for searching.')
        group.add_argument('--no-partitions', action='store_true',
                           help=("Do not partition search indices "
                                 "(speeds up import of single country extracts)."))
        group.add_argument('--no-updates', action='store_true',
                           help="Do not keep tables that are only needed for "
                                "updating the database later.")
        group = parser.add_argument_group('Expert options')
        group.add_argument('--ignore-errors', action='store_true',
                           help='Continue the import even when errors in SQL are present.')
        group.add_argument('--index-noanalyse', action='store_true',
                           help='Do not perform analyse operations during indexing.')


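    # Example invocations (a sketch; assumes this class is registered as the
    # `import` subcommand of the standard `nominatim` command-line tool):
    #
    #   nominatim import --osm-file monaco.osm.pbf --osm2pgsql-cache 2000
    #   nominatim import --continue indexing
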
    @staticmethod
    def run(args): # pylint: disable=too-many-statements
        from ..tools import database_import
        from ..tools import refresh
        from ..indexer.indexer import Indexer
        from ..tools import postcodes
        from ..tokenizer import factory as tokenizer_factory

        if args.osm_file and not Path(args.osm_file).is_file():
            LOG.fatal("OSM file '%s' does not exist.", args.osm_file)
            raise UsageError('Cannot access file.')

        if args.continue_at is None:
            database_import.setup_database_skeleton(args.config.get_libpq_dsn(),
                                                    args.data_dir,
                                                    args.no_partitions,
                                                    rouser=args.config.DATABASE_WEBUSER)

            LOG.warning('Importing OSM data file')
            database_import.import_osm_data(Path(args.osm_file),
                                            args.osm2pgsql_options(0, 1),
                                            drop=args.no_updates,
                                            ignore_errors=args.ignore_errors)

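            # Note: create_functions() runs three times below. Presumably each
            # re-run is needed because some functions depend on objects that
            # are only created in between the passes (tables, address levels,
            # partition tables); this is an assumption from the call order,
            # not documented here.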
            with connect(args.config.get_libpq_dsn()) as conn:
                LOG.warning('Create functions (1st pass)')
                refresh.create_functions(conn, args.config, False, False)
                LOG.warning('Create tables')
                database_import.create_tables(conn, args.config,
                                              reverse_only=args.reverse_only)
                refresh.load_address_levels_from_file(conn, Path(args.config.ADDRESS_LEVEL_CONFIG))
                LOG.warning('Create functions (2nd pass)')
                refresh.create_functions(conn, args.config, False, False)
                LOG.warning('Create table triggers')
                database_import.create_table_triggers(conn, args.config)
                LOG.warning('Create partition tables')
                database_import.create_partition_tables(conn, args.config)
                LOG.warning('Create functions (3rd pass)')
                refresh.create_functions(conn, args.config, False, False)

            LOG.warning('Importing wikipedia importance data')
            data_path = Path(args.config.WIKIPEDIA_DATA_PATH or args.project_dir)
            if refresh.import_wikipedia_articles(args.config.get_libpq_dsn(),
                                                 data_path) > 0:
                LOG.error('Wikipedia importance dump file not found. '
                          'Will be using default importances.')

        if args.continue_at is None or args.continue_at == 'load-data':
            LOG.warning('Initialise tables')
            with connect(args.config.get_libpq_dsn()) as conn:
                database_import.truncate_data_tables(conn)

            LOG.warning('Load data into placex table')
            # Use all available CPUs unless --threads was given explicitly
            # (cpu_count() may return None, hence the final fallback).
            database_import.load_data(args.config.get_libpq_dsn(),
                                      args.threads or psutil.cpu_count() or 1)

        LOG.warning('Setting up tokenizer')
        if args.continue_at is None or args.continue_at == 'load-data':
            # (re)initialise the tokenizer data
            tokenizer = tokenizer_factory.create_tokenizer(args.config)
        else:
            # just load the tokenizer
            tokenizer = tokenizer_factory.get_tokenizer_for_db(args.config)
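        # The distinction above, judging by the factory function names (an
        # assumption, not verified here): create_tokenizer() sets up a fresh
        # tokenizer and records its configuration, while get_tokenizer_for_db()
        # loads the tokenizer a previous run recorded for this database.
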
        if args.continue_at is None or args.continue_at == 'load-data':
            LOG.warning('Calculate postcodes')
            postcodes.import_postcodes(args.config.get_libpq_dsn(), args.project_dir)

        if args.continue_at is None or args.continue_at in ('load-data', 'indexing'):
            if args.continue_at is not None and args.continue_at != 'load-data':
                # The import was interrupted during indexing: make sure the
                # support index for finding unindexed places exists.
                with connect(args.config.get_libpq_dsn()) as conn:
                    SetupAll._create_pending_index(conn, args.config.TABLESPACE_ADDRESS_INDEX)
            LOG.warning('Indexing places')
            indexer = Indexer(args.config.get_libpq_dsn(),
                              args.threads or psutil.cpu_count() or 1)
            indexer.index_full(analyse=not args.index_noanalyse)

        LOG.warning('Post-process tables')
        with connect(args.config.get_libpq_dsn()) as conn:
            database_import.create_search_indices(conn, args.config,
                                                  drop=args.no_updates)
            LOG.warning('Create search index for default country names')
            database_import.create_country_names(conn, args.config)

        webdir = args.project_dir / 'website'
        LOG.warning('Setup website at %s', webdir)
        refresh.setup_website(webdir, args.config)

        with connect(args.config.get_libpq_dsn()) as conn:
            try:
                dbdate = status.compute_database_date(conn)
                status.set_status(conn, dbdate)
                LOG.info('Database is at %s.', dbdate)
            except Exception as exc: # pylint: disable=broad-except
                LOG.error('Cannot determine date of database: %s', exc)

            # NOMINATIM_VERSION is a 4-part tuple; store it in 'x.y.z-n' form.
            properties.set_property(conn, 'database_version',
                                    '{0[0]}.{0[1]}.{0[2]}-{0[3]}'.format(NOMINATIM_VERSION))

        return 0


    @staticmethod
    def _create_pending_index(conn, tablespace):
        """ Add a supporting index for finding places still to be indexed.

            This index is normally created at the end of the import process
            for later updates. When indexing was only partially done, this
            index can greatly improve the speed of going through the already
            indexed data.
        """
        if conn.index_exists('idx_placex_pendingsector'):
            return

        with conn.cursor() as cur:
            LOG.warning('Creating support index')
            # Guard against a missing tablespace setting, so that format()
            # below never interpolates 'None' into the SQL.
            tablespace = 'TABLESPACE ' + tablespace if tablespace else ''
            cur.execute("""CREATE INDEX idx_placex_pendingsector
                           ON placex USING BTREE (rank_address,geometry_sector)
                           {} WHERE indexed_status > 0
                        """.format(tablespace))
        conn.commit()
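

# A minimal sketch of how this subcommand is wired into an argparse-based CLI
# (hypothetical; the actual registration and dispatch live in the nominatim
# CLI module, which also attaches args.config, args.project_dir etc. before
# calling run()):
#
#   import argparse
#
#   parser = argparse.ArgumentParser(prog='nominatim')
#   subs = parser.add_subparsers()
#   sub = subs.add_parser('import', description=SetupAll.__doc__)
#   SetupAll.add_args(sub)
#   sub.set_defaults(command=SetupAll)
#
#   args = parser.parse_args(['import', '--osm-file', 'monaco.osm.pbf'])
#   args.command.run(args)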