1 """
2 Implementation of the 'transition' subcommand.
3
4 This subcommand provides standins for functions that were available
5 through the PHP scripts but are now no longer directly accessible.
6 This module will be removed as soon as the transition phase is over.
7 """
import logging
from pathlib import Path

from ..db.connection import connect
from ..db import status
from ..errors import UsageError

# Do not repeat documentation of subcommand classes.
# pylint: disable=C0111
# Using non-top-level imports to avoid possibly unused imports.
# pylint: disable=E0012,C0415

LOG = logging.getLogger()

class AdminTransition:
    """\
    Internal functions for code transition. Do not use.
    """

    @staticmethod
    def add_args(parser):
        group = parser.add_argument_group('Sub-functions')
        group.add_argument('--create-db', action='store_true',
                           help='Create nominatim db')
        group.add_argument('--setup-db', action='store_true',
                           help='Build a blank nominatim db')
        group.add_argument('--import-data', action='store_true',
                           help='Import an OSM file')
        group.add_argument('--load-data', action='store_true',
                           help='Copy data to live tables from import table')
        group.add_argument('--create-tables', action='store_true',
                           help='Create main tables')
        group.add_argument('--create-partition-tables', action='store_true',
                           help='Create required partition tables')
        group.add_argument('--index', action='store_true',
                           help='Index the data')
        group.add_argument('--create-search-indices', action='store_true',
                           help='Create additional indices required for search and update')
        group = parser.add_argument_group('Options')
        group.add_argument('--no-partitions', action='store_true',
                           help='Do not partition search indices')
        group.add_argument('--osm-file', metavar='FILE',
                           help='File to import')
        group.add_argument('--drop', action='store_true',
                           help='Drop tables needed for updates, making the database readonly')
        group.add_argument('--osm2pgsql-cache', metavar='SIZE', type=int,
                           help='Size of cache to be used by osm2pgsql (in MB)')
        group.add_argument('--no-analyse', action='store_true',
                           help='Do not perform analyse operations during index')
        group.add_argument('--ignore-errors', action='store_true',
                           help='Ignore certain errors on import')
        group.add_argument('--reverse-only', action='store_true',
                           help='Do not create search tables and indexes')
        group.add_argument('--tiger-data', metavar='FILE',
                           help='TIGER data file to import')

    @staticmethod
    def run(args):
        from ..tools import database_import, tiger_data
        from ..tools import refresh

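        # --create-db: create the empty Nominatim database.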
        if args.create_db:
            LOG.warning('Create DB')
            database_import.create_db(args.config.get_libpq_dsn())

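        # --setup-db: set up database extensions, install the Nominatim
        # module and import the static base data.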
        if args.setup_db:
            LOG.warning('Setup DB')

            with connect(args.config.get_libpq_dsn()) as conn:
                database_import.setup_extensions(conn)
                database_import.install_module(args.module_dir, args.project_dir,
                                               args.config.DATABASE_MODULE_PATH,
                                               conn=conn)

            database_import.import_base_data(args.config.get_libpq_dsn(),
                                             args.data_dir, args.no_partitions)

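        # --import-data: import the given OSM file into the database;
        # requires --osm-file.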
        if args.import_data:
            LOG.warning('Import data')
            if not args.osm_file:
                raise UsageError('Missing required --osm-file argument')
            database_import.import_osm_data(Path(args.osm_file),
                                            args.osm2pgsql_options(0, 1),
                                            drop=args.drop,
                                            ignore_errors=args.ignore_errors)

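        # --create-tables: create the main tables, load the address level
        # configuration and (re)create SQL functions and table triggers.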
        if args.create_tables:
            LOG.warning('Create Tables')
            with connect(args.config.get_libpq_dsn()) as conn:
                database_import.create_tables(conn, args.config, args.sqllib_dir, args.reverse_only)
                refresh.load_address_levels_from_file(conn, Path(args.config.ADDRESS_LEVEL_CONFIG))
                refresh.create_functions(conn, args.config, args.sqllib_dir,
                                         enable_diff_updates=False)
                database_import.create_table_triggers(conn, args.config, args.sqllib_dir)

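        # --create-partition-tables: create the required partition tables
        # from the SQL templates in sqllib_dir.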
        if args.create_partition_tables:
            LOG.warning('Create Partition Tables')
            with connect(args.config.get_libpq_dsn()) as conn:
                database_import.create_partition_tables(conn, args.config, args.sqllib_dir)

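        # --load-data: truncate the existing data tables, copy the freshly
        # imported data into the live tables and record the database date.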
        if args.load_data:
            LOG.warning('Load data')
            with connect(args.config.get_libpq_dsn()) as conn:
                database_import.truncate_data_tables(conn, args.config.MAX_WORD_FREQUENCY)
            database_import.load_data(args.config.get_libpq_dsn(),
                                      args.data_dir,
                                      args.threads or 1)

            with connect(args.config.get_libpq_dsn()) as conn:
                try:
                    status.set_status(conn, status.compute_database_date(conn))
                except Exception as exc: # pylint: disable=broad-except
                    LOG.error('Cannot determine date of database: %s', exc)

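        # --index: run a full indexing pass over all imported places.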
        if args.index:
            LOG.warning('Indexing')
            from ..indexer.indexer import Indexer
            indexer = Indexer(args.config.get_libpq_dsn(), args.threads or 1)
            indexer.index_full()

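        # --create-search-indices: create the additional indices needed
        # for search and updates.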
        if args.create_search_indices:
            LOG.warning('Create Search indices')
            with connect(args.config.get_libpq_dsn()) as conn:
                database_import.create_search_indices(conn, args.config, args.sqllib_dir, args.drop)

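        # --tiger-data: add US TIGER house-number data from the given file.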
        if args.tiger_data:
            LOG.warning('Tiger data')
            tiger_data.add_tiger_data(args.config.get_libpq_dsn(),
                                      args.tiger_data,
                                      args.threads or 1,
                                      args.config,
                                      args.sqllib_dir)