1 """
2 Implementation of the 'import' subcommand.
3 """
4 import logging
5 from pathlib import Path
6
7 import psutil
8
9 from nominatim.db.connection import connect
10 from nominatim.db import status, properties
11 from nominatim.version import NOMINATIM_VERSION
12 from nominatim.errors import UsageError
13
# Do not repeat documentation of subcommand classes.
# pylint: disable=C0111
# Using non-top-level imports so that modules are only loaded when the
# subcommand actually needs them.
# pylint: disable=E0012,C0415

LOG = logging.getLogger()
class SetupAll:
    """\
    Create a new Nominatim database from an OSM file.
    """

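    # A typical invocation, assuming the standard 'nominatim' command-line
    # entry point (the file name is illustrative; the flags are defined in
    # add_args() below):
    #
    #   nominatim import --osm-file planet-extract.osm.pbf --osm2pgsql-cache 4096
    #
    # Resuming an interrupted import:
    #
    #   nominatim import --continue indexing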
    @staticmethod
    def add_args(parser):
        required = parser.add_argument_group('Required arguments')
        group = required.add_mutually_exclusive_group(required=True)
        group.add_argument('--osm-file', metavar='FILE',
                           help='OSM file to be imported.')
        group.add_argument('--continue', dest='continue_at',
                           choices=['load-data', 'indexing', 'db-postprocess'],
                           help='Continue an import that was interrupted')
        group = parser.add_argument_group('Optional arguments')
        group.add_argument('--osm2pgsql-cache', metavar='SIZE', type=int,
                           help='Size of cache to be used by osm2pgsql (in MB)')
        group.add_argument('--reverse-only', action='store_true',
                           help='Do not create tables and indexes for searching')
        group.add_argument('--no-partitions', action='store_true',
                           help=("Do not partition search indices "
                                 "(speeds up import of single country extracts)"))
        group.add_argument('--no-updates', action='store_true',
                           help="Do not keep tables that are only needed for "
                                "updating the database later")
        group = parser.add_argument_group('Expert options')
        group.add_argument('--ignore-errors', action='store_true',
                           help='Continue the import even if SQL errors occur')
        group.add_argument('--index-noanalyse', action='store_true',
                           help='Do not perform analyse operations during indexing')


    @staticmethod
    def run(args): # pylint: disable=too-many-statements
        from ..tools import database_import, refresh, postcodes, freeze
        from ..indexer.indexer import Indexer
        from ..tokenizer import factory as tokenizer_factory

        if args.osm_file and not Path(args.osm_file).is_file():
            LOG.fatal("OSM file '%s' does not exist.", args.osm_file)
            raise UsageError('Cannot access file.')

        if args.continue_at is None:
            database_import.setup_database_skeleton(args.config.get_libpq_dsn(),
                                                    args.data_dir,
                                                    args.no_partitions,
                                                    rouser=args.config.DATABASE_WEBUSER)

            LOG.warning('Importing OSM data file')
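            # drop=args.no_updates: when updates are disabled, data that is
            # needed only for updating the database later is not kept
            # (cf. the --no-updates option above).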
            database_import.import_osm_data(Path(args.osm_file),
                                            args.osm2pgsql_options(0, 1),
                                            drop=args.no_updates,
                                            ignore_errors=args.ignore_errors)

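            # Functions are created in three passes interleaved with table
            # creation, presumably so that each pass can pick up the tables,
            # triggers and partitions created since the previous one (an
            # inference from the ordering below).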
            with connect(args.config.get_libpq_dsn()) as conn:
                LOG.warning('Create functions (1st pass)')
                refresh.create_functions(conn, args.config, False, False)
                LOG.warning('Create tables')
                database_import.create_tables(conn, args.config,
                                              reverse_only=args.reverse_only)
                refresh.load_address_levels_from_file(conn, Path(args.config.ADDRESS_LEVEL_CONFIG))
                LOG.warning('Create functions (2nd pass)')
                refresh.create_functions(conn, args.config, False, False)
                LOG.warning('Create table triggers')
                database_import.create_table_triggers(conn, args.config)
                LOG.warning('Create partition tables')
                database_import.create_partition_tables(conn, args.config)
                LOG.warning('Create functions (3rd pass)')
                refresh.create_functions(conn, args.config, False, False)

            LOG.warning('Importing wikipedia importance data')
            data_path = Path(args.config.WIKIPEDIA_DATA_PATH or args.project_dir)
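            # The dump is searched for in WIKIPEDIA_DATA_PATH if set,
            # otherwise in the project directory. A non-zero return value
            # means no dump was found and default importances remain in use.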
            if refresh.import_wikipedia_articles(args.config.get_libpq_dsn(),
                                                 data_path) > 0:
                LOG.error('Wikipedia importance dump file not found. '
                          'Will be using default importances.')

        if args.continue_at is None or args.continue_at == 'load-data':
            LOG.warning('Initialise tables')
            with connect(args.config.get_libpq_dsn()) as conn:
                database_import.truncate_data_tables(conn)

            LOG.warning('Load data into placex table')
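            # Use the requested number of threads, falling back to the number
            # of CPUs or, if that cannot be determined, to a single thread.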
            database_import.load_data(args.config.get_libpq_dsn(),
                                      args.threads or psutil.cpu_count() or 1)

        LOG.warning('Setting up tokenizer')
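        # A fresh import (or one resumed at 'load-data') must initialise the
        # tokenizer from scratch; later continuation points reuse the
        # tokenizer that has already been set up for this database.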
        if args.continue_at is None or args.continue_at == 'load-data':
            # (re)initialise the tokenizer data
            tokenizer = tokenizer_factory.create_tokenizer(args.config)
        else:
            # just load the tokenizer
            tokenizer = tokenizer_factory.get_tokenizer_for_db(args.config)

        if args.continue_at is None or args.continue_at == 'load-data':
            LOG.warning('Calculate postcodes')
            postcodes.update_postcodes(args.config.get_libpq_dsn(),
                                       args.project_dir, tokenizer)

        if args.continue_at is None or args.continue_at in ('load-data', 'indexing'):
            if args.continue_at is not None and args.continue_at != 'load-data':
                with connect(args.config.get_libpq_dsn()) as conn:
                    SetupAll._create_pending_index(conn, args.config.TABLESPACE_ADDRESS_INDEX)
            LOG.warning('Indexing places')
            indexer = Indexer(args.config.get_libpq_dsn(), tokenizer,
                              args.threads or psutil.cpu_count() or 1)
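            # index_full indexes all pending places. Unless --index-noanalyse
            # was given, analyse operations are performed along the way (see
            # the option help above).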
            indexer.index_full(analyse=not args.index_noanalyse)

        LOG.warning('Post-process tables')
        with connect(args.config.get_libpq_dsn()) as conn:
            database_import.create_search_indices(conn, args.config,
                                                  drop=args.no_updates)
            LOG.warning('Create search index for default country names')
            database_import.create_country_names(conn, tokenizer,
                                                 args.config.LANGUAGES)
            conn.commit()
            if args.no_updates:
                freeze.drop_update_tables(conn)
        tokenizer.finalize_import(args.config)

        webdir = args.project_dir / 'website'
        LOG.warning('Setup website at %s', webdir)
        with connect(args.config.get_libpq_dsn()) as conn:
            refresh.setup_website(webdir, args.config, conn)

        with connect(args.config.get_libpq_dsn()) as conn:
            try:
                dbdate = status.compute_database_date(conn)
                status.set_status(conn, dbdate)
                LOG.info('Database is at %s.', dbdate)
            except Exception as exc: # pylint: disable=broad-except
                LOG.error('Cannot determine date of database: %s', exc)

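            # NOMINATIM_VERSION is assumed to be a four-part tuple here,
            # e.g. (3, 7, 0, 0), which is stored as '3.7.0-0'.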
            properties.set_property(conn, 'database_version',
                                    '{0[0]}.{0[1]}.{0[2]}-{0[3]}'.format(NOMINATIM_VERSION))

        return 0


    @staticmethod
    def _create_pending_index(conn, tablespace):
        """ Add a supporting index for finding places that still need indexing.

            This index is normally created at the end of the import process
            for later updates. When an import was interrupted during
            indexing, creating the index early greatly speeds up skipping
            over the data that has already been indexed.
        """
        if conn.index_exists('idx_placex_pendingsector'):
            return

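        # This is a partial index: the WHERE clause restricts it to rows
        # that still await indexing (indexed_status > 0), which keeps the
        # index small.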
        with conn.cursor() as cur:
            LOG.warning('Creating support index')
            if tablespace:
                tablespace = 'TABLESPACE ' + tablespace
            else:
                tablespace = ''  # guard against formatting None into the SQL
            cur.execute("""CREATE INDEX idx_placex_pendingsector
                           ON placex USING BTREE (rank_address,geometry_sector)
                           {} WHERE indexed_status > 0
                        """.format(tablespace))
        conn.commit()