nominatim/clicmd/refresh.py
1 """
2 Implementation of 'refresh' subcommand.
3 """
4 import logging
5 from pathlib import Path
6
7 from nominatim.db.connection import connect
8
9 # Do not repeat documentation of subcommand classes.
10 # pylint: disable=C0111
11 # Using non-top-level imports to avoid eventually unused imports.
12 # pylint: disable=E0012,C0415
13
14 LOG = logging.getLogger()
15
16 class UpdateRefresh:
    """\
    Recompute auxiliary data used by the indexing process.

    These functions must not be run in parallel with other update commands.
    """
    def __init__(self):
        self.tokenizer = None

    @staticmethod
    def add_args(parser):
        group = parser.add_argument_group('Data arguments')
        group.add_argument('--postcodes', action='store_true',
                           help='Update postcode centroid table')
        group.add_argument('--word-counts', action='store_true',
                           help='Compute frequency of full-word search terms')
        group.add_argument('--address-levels', action='store_true',
                           help='Reimport address level configuration')
        group.add_argument('--functions', action='store_true',
                           help='Update the PL/pgSQL functions in the database')
        group.add_argument('--wiki-data', action='store_true',
                           help='Update Wikipedia/Wikidata importance numbers')
        group.add_argument('--importance', action='store_true',
                           help='Recompute place importances (expensive!)')
        group.add_argument('--website', action='store_true',
                           help='Refresh the directory that serves the scripts for the web API')
        group = parser.add_argument_group('Arguments for function refresh')
        group.add_argument('--no-diff-updates', action='store_false', dest='diffs',
                           help='Do not enable code for propagating updates')
        group.add_argument('--enable-debug-statements', action='store_true',
                           help='Enable debug warning statements in functions')

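    # Example invocations, assuming the standard 'nominatim' command-line
    # entry point (adjust to the local setup):
    #
    #   nominatim refresh --postcodes --word-counts
    #   nominatim refresh --functions --enable-debug-statements
    #   nominatim refresh --wiki-data --importance
    #
    # run() executes the selected tasks in turn and returns 0 on success,
    # or 1 when the Wikipedia importance dump cannot be found.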
    def run(self, args):
        from ..tools import refresh, postcodes
        from ..indexer.indexer import Indexer

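        # Postcode centroids can only be recomputed while the 'place' table
        # is still available, i.e. on a database that has not been frozen.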
        if args.postcodes:
            if postcodes.can_compute(args.config.get_libpq_dsn()):
                LOG.warning("Update postcode centroids")
                tokenizer = self._get_tokenizer(args.config)
                postcodes.update_postcodes(args.config.get_libpq_dsn(),
                                           args.project_dir, tokenizer)
                indexer = Indexer(args.config.get_libpq_dsn(), tokenizer,
                                  args.threads or 1)
                indexer.index_postcodes()
            else:
                LOG.error("The place table doesn't exist. "
                          "Postcode updates on a frozen database are not possible.")

        if args.word_counts:
            LOG.warning('Recompute frequency of full-word search terms')
            refresh.recompute_word_counts(args.config.get_libpq_dsn(), args.sqllib_dir)

        if args.address_levels:
            cfg = Path(args.config.ADDRESS_LEVEL_CONFIG)
            LOG.warning('Updating address levels from %s', cfg)
            with connect(args.config.get_libpq_dsn()) as conn:
                refresh.load_address_levels_from_file(conn, cfg)

        if args.functions:
            LOG.warning('Create functions')
            with connect(args.config.get_libpq_dsn()) as conn:
                refresh.create_functions(conn, args.config,
                                         args.diffs, args.enable_debug_statements)
                self._get_tokenizer(args.config).update_sql_functions(args.config)

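        # The Wikipedia importance dump is looked for in WIKIPEDIA_DATA_PATH,
        # falling back to the project directory when that setting is empty.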
        if args.wiki_data:
            data_path = Path(args.config.WIKIPEDIA_DATA_PATH
                             or args.project_dir)
            LOG.warning('Import Wikipedia article importance from %s', data_path)
            if refresh.import_wikipedia_articles(args.config.get_libpq_dsn(),
                                                 data_path) > 0:
                LOG.fatal('FATAL: Wikipedia importance dump file not found')
                return 1

        # Attention: importance MUST come after wiki data import.
        if args.importance:
            LOG.warning('Update importance values for database')
            with connect(args.config.get_libpq_dsn()) as conn:
                refresh.recompute_importance(conn)

        if args.website:
            webdir = args.project_dir / 'website'
            LOG.warning('Setting up website directory at %s', webdir)
            with connect(args.config.get_libpq_dsn()) as conn:
                refresh.setup_website(webdir, args.config, conn)

        return 0

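    # Load the tokenizer for the existing database lazily and cache it, so
    # that it is only instantiated when one of the tasks above needs it.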
    def _get_tokenizer(self, config):
        if self.tokenizer is None:
            from ..tokenizer import factory as tokenizer_factory

            self.tokenizer = tokenizer_factory.get_tokenizer_for_db(config)

        return self.tokenizer