[nominatim.git] / nominatim / clicmd / setup.py (blob 2fc1587b11e2f1b119e3cfd93d7fde8a4a9bb46e)
1 """
2 Implementation of the 'import' subcommand.
3 """
4 import logging
5 from pathlib import Path
6
7 import psutil
8
9 from nominatim.db.connection import connect
10 from nominatim.db import status, properties
11 from nominatim.version import NOMINATIM_VERSION
12 from nominatim.errors import UsageError
13
14 # Do not repeat documentation of subcommand classes.
15 # pylint: disable=C0111
16 # Using non-top-level imports to avoid eventually unused imports.
17 # pylint: disable=E0012,C0415
18
19 LOG = logging.getLogger()
20
class SetupAll:
    """\
    Create a new Nominatim database from an OSM file.
    """

    @staticmethod
    def add_args(parser):
        group_name = parser.add_argument_group('Required arguments')
        group = group_name.add_mutually_exclusive_group(required=True)
        group.add_argument('--osm-file', metavar='FILE', action='append',
                           help='OSM file to be imported'
                                ' (repeat for importing multiple files)')
        group.add_argument('--continue', dest='continue_at',
                           choices=['load-data', 'indexing', 'db-postprocess'],
                           help='Continue an import that was interrupted')
        group = parser.add_argument_group('Optional arguments')
        group.add_argument('--osm2pgsql-cache', metavar='SIZE', type=int,
                           help='Size of cache to be used by osm2pgsql (in MB)')
        group.add_argument('--reverse-only', action='store_true',
                           help='Do not create tables and indexes for searching')
        group.add_argument('--no-partitions', action='store_true',
                           help=("Do not partition search indices "
                                 "(speeds up import of single country extracts)"))
        group.add_argument('--no-updates', action='store_true',
                           help="Do not keep tables that are only needed for "
                                "updating the database later")
        group = parser.add_argument_group('Expert options')
        group.add_argument('--ignore-errors', action='store_true',
                           help='Continue import even when errors in SQL are present')
        group.add_argument('--index-noanalyse', action='store_true',
                           help='Do not perform analyse operations during indexing')


    @staticmethod
    def run(args): # pylint: disable=too-many-statements
        from ..tools import database_import, refresh, postcodes, freeze
        from ..indexer.indexer import Indexer
        from ..tokenizer import factory as tokenizer_factory

        if args.osm_file:
            files = [Path(f) for f in args.osm_file]
            for fname in files:
                if not fname.is_file():
                    LOG.fatal("OSM file '%s' does not exist.", fname)
                    raise UsageError('Cannot access file.')

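        # --osm-file and --continue are mutually exclusive and one of them is
        # required, so whenever continue_at is None below, args.osm_file is
        # set and 'files' has been initialised above.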
        if args.continue_at is None:
            database_import.setup_database_skeleton(args.config.get_libpq_dsn(),
                                                    args.data_dir,
                                                    args.no_partitions,
                                                    rouser=args.config.DATABASE_WEBUSER)

            LOG.warning('Importing OSM data file')
            database_import.import_osm_data(files,
                                            args.osm2pgsql_options(0, 1),
                                            drop=args.no_updates,
                                            ignore_errors=args.ignore_errors)

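            # The SQL functions are created in three passes: tables and
            # triggers depend on the functions, while parts of the generated
            # function code in turn depend on the tables and partitions that
            # only exist after the earlier steps have run.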
            with connect(args.config.get_libpq_dsn()) as conn:
                LOG.warning('Create functions (1st pass)')
                refresh.create_functions(conn, args.config, False, False)
                LOG.warning('Create tables')
                database_import.create_tables(conn, args.config,
                                              reverse_only=args.reverse_only)
                refresh.load_address_levels_from_file(conn, Path(args.config.ADDRESS_LEVEL_CONFIG))
                LOG.warning('Create functions (2nd pass)')
                refresh.create_functions(conn, args.config, False, False)
                LOG.warning('Create table triggers')
                database_import.create_table_triggers(conn, args.config)
                LOG.warning('Create partition tables')
                database_import.create_partition_tables(conn, args.config)
                LOG.warning('Create functions (3rd pass)')
                refresh.create_functions(conn, args.config, False, False)

            LOG.warning('Importing wikipedia importance data')
            data_path = Path(args.config.WIKIPEDIA_DATA_PATH or args.project_dir)
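            # import_wikipedia_articles() returns a non-zero value when no
            # importance dump could be found under data_path; the import then
            # continues with the default importances.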
            if refresh.import_wikipedia_articles(args.config.get_libpq_dsn(),
                                                 data_path) > 0:
                LOG.error('Wikipedia importance dump file not found. '
                          'Will be using default importances.')

        if args.continue_at is None or args.continue_at == 'load-data':
            LOG.warning('Initialise tables')
            with connect(args.config.get_libpq_dsn()) as conn:
                database_import.truncate_data_tables(conn)

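            # Use the configured number of threads if given; otherwise fall
            # back to one job per CPU (psutil.cpu_count() may return None,
            # hence the final fallback to a single thread).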
            LOG.warning('Load data into placex table')
            database_import.load_data(args.config.get_libpq_dsn(),
                                      args.threads or psutil.cpu_count() or 1)

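        # A fresh import (or a restart at 'load-data') needs a newly set up
        # tokenizer; for the later continuation points the tokenizer that was
        # saved with the database during the original import is reused.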
        LOG.warning("Setting up tokenizer")
        if args.continue_at is None or args.continue_at == 'load-data':
            # (re)initialise the tokenizer data
            tokenizer = tokenizer_factory.create_tokenizer(args.config)
        else:
            # just load the tokenizer
            tokenizer = tokenizer_factory.get_tokenizer_for_db(args.config)

        if args.continue_at is None or args.continue_at == 'load-data':
            LOG.warning('Calculate postcodes')
            postcodes.update_postcodes(args.config.get_libpq_dsn(),
                                       args.project_dir, tokenizer)

        if args.continue_at is None or args.continue_at in ('load-data', 'indexing'):
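            # When an interrupted import is resumed at the indexing stage,
            # first create the supporting index for pending places, see
            # _create_pending_index() below.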
            if args.continue_at is not None and args.continue_at != 'load-data':
                with connect(args.config.get_libpq_dsn()) as conn:
                    SetupAll._create_pending_index(conn, args.config.TABLESPACE_ADDRESS_INDEX)
            LOG.warning('Indexing places')
            indexer = Indexer(args.config.get_libpq_dsn(), tokenizer,
                              args.threads or psutil.cpu_count() or 1)
            indexer.index_full(analyse=not args.index_noanalyse)

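        # With --no-updates the database is 'frozen': all tables that are
        # only needed for keeping the data up to date are dropped once the
        # search indices have been created.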
        LOG.warning('Post-process tables')
        with connect(args.config.get_libpq_dsn()) as conn:
            database_import.create_search_indices(conn, args.config,
                                                  drop=args.no_updates)
            LOG.warning('Create search index for default country names.')
            database_import.create_country_names(conn, tokenizer,
                                                 args.config.LANGUAGES)
            conn.commit()
            if args.no_updates:
                freeze.drop_update_tables(conn)
        tokenizer.finalize_import(args.config)

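        # Generate the frontend scripts for serving the API in the project's
        # website directory.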
        webdir = args.project_dir / 'website'
        LOG.warning('Setup website at %s', webdir)
        with connect(args.config.get_libpq_dsn()) as conn:
            refresh.setup_website(webdir, args.config, conn)

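        # Finally record the age of the imported data and the Nominatim
        # version. The data timestamp serves as the starting point for later
        # replication updates; failure to determine it is therefore only
        # logged, not fatal.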
        with connect(args.config.get_libpq_dsn()) as conn:
            try:
                dbdate = status.compute_database_date(conn)
                status.set_status(conn, dbdate)
                LOG.info('Database is at %s.', dbdate)
            except Exception as exc: # pylint: disable=broad-except
                LOG.error('Cannot determine date of database: %s', exc)

            properties.set_property(conn, 'database_version',
                                    '{0[0]}.{0[1]}.{0[2]}-{0[3]}'.format(NOMINATIM_VERSION))

        return 0


    @staticmethod
    def _create_pending_index(conn, tablespace):
        """ Add a supporting index for finding places still to be indexed.

            This index is normally created at the end of the import process
            for later updates. When indexing has been partially completed,
            this index can greatly speed up skipping over already indexed
            data.
        """
        if conn.index_exists('idx_placex_pendingsector'):
            return

        with conn.cursor() as cur:
            LOG.warning('Creating support index')
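            # TABLESPACE_ADDRESS_INDEX is an empty string when no tablespace
            # is configured; the clause then simply disappears from the
            # statement and the index goes into the default tablespace.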
            if tablespace:
                tablespace = 'TABLESPACE ' + tablespace
            cur.execute("""CREATE INDEX idx_placex_pendingsector
                           ON placex USING BTREE (rank_address,geometry_sector)
                           {} WHERE indexed_status > 0
                        """.format(tablespace))
        conn.commit()