1 """
2 Implementation of the 'import' subcommand.
3 """
4 import logging
5 from pathlib import Path
6
7 import psutil
8
9 from nominatim.db.connection import connect
10 from nominatim.db import status, properties
11 from nominatim.version import NOMINATIM_VERSION
12 from nominatim.errors import UsageError
13
# Do not repeat documentation of subcommand classes.
# pylint: disable=C0111
# Using non-top-level imports to avoid importing modules that may not be needed.
# pylint: disable=E0012,C0415

LOG = logging.getLogger()

class SetupAll:
    """\
    Create a new Nominatim database from an OSM file.
    """

    @staticmethod
    def add_args(parser):
        group_name = parser.add_argument_group('Required arguments')
        group = group_name.add_mutually_exclusive_group(required=True)
        group.add_argument('--osm-file', metavar='FILE',
                           help='OSM file to be imported.')
        group.add_argument('--continue', dest='continue_at',
                           choices=['load-data', 'indexing', 'db-postprocess'],
                           help='Continue an import that was interrupted')
        group = parser.add_argument_group('Optional arguments')
        group.add_argument('--osm2pgsql-cache', metavar='SIZE', type=int,
                           help='Size of cache to be used by osm2pgsql (in MB)')
        group.add_argument('--reverse-only', action='store_true',
                           help='Do not create tables and indexes for searching')
        group.add_argument('--no-partitions', action='store_true',
                           help=("Do not partition search indices "
                                 "(speeds up import of single country extracts)"))
        group.add_argument('--no-updates', action='store_true',
                           help="Do not keep tables that are only needed for "
                                "updating the database later")
        group = parser.add_argument_group('Expert options')
        group.add_argument('--ignore-errors', action='store_true',
                           help='Continue import even when errors in SQL are present')
        group.add_argument('--index-noanalyse', action='store_true',
                           help='Do not perform analyse operations during indexing')


    @staticmethod
    def run(args): # pylint: disable=too-many-statements
        from ..tools import database_import
        from ..tools import refresh
        from ..indexer.indexer import Indexer
        from ..tools import postcodes
        from ..tokenizer import factory as tokenizer_factory

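        # Sanity-check the input file before touching the database.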
        if args.osm_file and not Path(args.osm_file).is_file():
            LOG.fatal("OSM file '%s' does not exist.", args.osm_file)
            raise UsageError('Cannot access file.')

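        # A fresh import (no --continue given) sets everything up from
        # scratch: database skeleton, database module, OSM data, tables
        # and SQL functions.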
        if args.continue_at is None:
            database_import.setup_database_skeleton(args.config.get_libpq_dsn(),
                                                    args.data_dir,
                                                    args.no_partitions,
                                                    rouser=args.config.DATABASE_WEBUSER)

            LOG.warning('Installing database module')
            with connect(args.config.get_libpq_dsn()) as conn:
                database_import.install_module(args.module_dir, args.project_dir,
                                               args.config.DATABASE_MODULE_PATH,
                                               conn=conn)

            LOG.warning('Importing OSM data file')
            database_import.import_osm_data(Path(args.osm_file),
                                            args.osm2pgsql_options(0, 1),
                                            drop=args.no_updates,
                                            ignore_errors=args.ignore_errors)

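            # The SQL functions are (re-)created in multiple passes: each
            # pass can make use of objects set up since the previous one
            # (tables, address levels, partition tables).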
            with connect(args.config.get_libpq_dsn()) as conn:
                LOG.warning('Create functions (1st pass)')
                refresh.create_functions(conn, args.config, False, False)
                LOG.warning('Create tables')
                database_import.create_tables(conn, args.config,
                                              reverse_only=args.reverse_only)
                refresh.load_address_levels_from_file(conn, Path(args.config.ADDRESS_LEVEL_CONFIG))
                LOG.warning('Create functions (2nd pass)')
                refresh.create_functions(conn, args.config, False, False)
                LOG.warning('Create table triggers')
                database_import.create_table_triggers(conn, args.config)
                LOG.warning('Create partition tables')
                database_import.create_partition_tables(conn, args.config)
                LOG.warning('Create functions (3rd pass)')
                refresh.create_functions(conn, args.config, False, False)

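            # Importance data from Wikipedia is optional; when the dump is
            # missing, the import falls back to default importances.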
            LOG.warning('Importing wikipedia importance data')
            data_path = Path(args.config.WIKIPEDIA_DATA_PATH or args.project_dir)
            if refresh.import_wikipedia_articles(args.config.get_libpq_dsn(),
                                                 data_path) > 0:
                LOG.error('Wikipedia importance dump file not found. '
                          'Will be using default importances.')

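        # Data loading runs on a fresh import and when continuing
        # at 'load-data'.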
        if args.continue_at is None or args.continue_at == 'load-data':
            LOG.warning('Initialise tables')
            with connect(args.config.get_libpq_dsn()) as conn:
                database_import.truncate_data_tables(conn, args.config.MAX_WORD_FREQUENCY)

            LOG.warning('Load data into placex table')
            database_import.load_data(args.config.get_libpq_dsn(),
                                      args.data_dir,
                                      args.threads or psutil.cpu_count() or 1)

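        # The tokenizer is needed for all subsequent steps, no matter
        # where the import is continued.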
        LOG.warning("Setting up tokenizer")
        tokenizer = tokenizer_factory.create_tokenizer(args.config)

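        # Postcodes are derived from the data in placex, so this step also
        # runs when continuing at 'load-data'.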
        if args.continue_at is None or args.continue_at == 'load-data':
            LOG.warning('Calculate postcodes')
            postcodes.import_postcodes(args.config.get_libpq_dsn(), args.project_dir)

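        # Indexing runs for every stage except 'db-postprocess'. When an
        # interrupted indexing run is resumed, a helper index speeds up
        # finding the places that still need processing.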
        if args.continue_at is None or args.continue_at in ('load-data', 'indexing'):
            if args.continue_at is not None and args.continue_at != 'load-data':
                with connect(args.config.get_libpq_dsn()) as conn:
                    SetupAll._create_pending_index(conn, args.config.TABLESPACE_ADDRESS_INDEX)
            LOG.warning('Indexing places')
            indexer = Indexer(args.config.get_libpq_dsn(),
                              args.threads or psutil.cpu_count() or 1)
            indexer.index_full(analyse=not args.index_noanalyse)

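        # Post-processing runs unconditionally; '--continue db-postprocess'
        # starts here.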
        LOG.warning('Post-process tables')
        with connect(args.config.get_libpq_dsn()) as conn:
            database_import.create_search_indices(conn, args.config,
                                                  drop=args.no_updates)
            LOG.warning('Create search index for default country names.')
            database_import.create_country_names(conn, args.config)

        webdir = args.project_dir / 'website'
        LOG.warning('Setup website at %s', webdir)
        refresh.setup_website(webdir, args.config)

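        # Remember the age of the imported data: replication updates later
        # use it to find the right starting point.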
        with connect(args.config.get_libpq_dsn()) as conn:
            try:
                dbdate = status.compute_database_date(conn)
                status.set_status(conn, dbdate)
                LOG.info('Database is at %s.', dbdate)
            except Exception as exc: # pylint: disable=broad-except
                LOG.error('Cannot determine date of database: %s', exc)

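            # Record the database layout version so that future invocations
            # can detect when a migration is necessary.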
            properties.set_property(conn, 'database_version',
                                    '{0[0]}.{0[1]}.{0[2]}-{0[3]}'.format(NOMINATIM_VERSION))

        return 0


    @staticmethod
    def _create_pending_index(conn, tablespace):
        """ Add a supporting index for finding places still to be indexed.

            This index is normally created at the end of the import process
            for later updates. When indexing was only partially done, this
            index can greatly speed up skipping over already indexed data.
        """
        if conn.index_exists('idx_placex_pendingsector'):
            return

        with conn.cursor() as cur:
            LOG.warning('Creating support index')
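            # Add the TABLESPACE clause only when a tablespace is configured.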
            if tablespace:
                tablespace = 'TABLESPACE ' + tablespace
            cur.execute("""CREATE INDEX idx_placex_pendingsector
                           ON placex USING BTREE (rank_address,geometry_sector)
                           {} WHERE indexed_status > 0
                        """.format(tablespace))
        conn.commit()