# nominatim/clicmd/replication.py (from nominatim.git; commit: "convert connect() into a context manager")
1 """
2 Implementation of the 'replication' sub-command.
3 """
4 import datetime as dt
5 import logging
6 import socket
7 import time
8
9 from ..db import status
10 from ..db.connection import connect
11 from ..errors import UsageError
12
13 LOG = logging.getLogger()
14
15 # Do not repeat documentation of subcommand classes.
16 # pylint: disable=C0111
17 # Using non-top-level imports to make pyosmium optional for replication only.
18 # pylint: disable=E0012,C0415
19
20 def _osm2pgsql_options_from_args(args, default_cache, default_threads):
21     """ Set up the standard osm2pgsql from the command line arguments.
22     """
23     return dict(osm2pgsql=args.osm2pgsql_path,
24                 osm2pgsql_cache=args.osm2pgsql_cache or default_cache,
25                 osm2pgsql_style=args.config.get_import_style_file(),
26                 threads=args.threads or default_threads,
27                 dsn=args.config.get_libpq_dsn(),
28                 flatnode_file=args.config.FLATNODE_FILE)
29
30
class UpdateReplication:
    """\
    Update the database using an online replication service.
    """

    @staticmethod
    def add_args(parser):
        """ Add the arguments for the replication subcommand to the
            given argparse parser.
        """
        group = parser.add_argument_group('Arguments for initialisation')
        group.add_argument('--init', action='store_true',
                           help='Initialise the update process')
        group.add_argument('--no-update-functions', dest='update_functions',
                           action='store_false',
                           help="""Do not update the trigger function to
                                   support differential updates.""")
        group = parser.add_argument_group('Arguments for updates')
        group.add_argument('--check-for-updates', action='store_true',
                           help='Check if new updates are available and exit')
        group.add_argument('--once', action='store_true',
                           help="""Download and apply updates only once. When
                                   not set, updates are continuously applied""")
        group.add_argument('--no-index', action='store_false', dest='do_index',
                           help="""Do not index the new data. Only applicable
                                   together with --once""")
        group.add_argument('--osm2pgsql-cache', metavar='SIZE', type=int,
                           help='Size of cache to be used by osm2pgsql (in MB)')
        group = parser.add_argument_group('Download parameters')
        group.add_argument('--socket-timeout', dest='socket_timeout', type=int, default=60,
                           help='Set timeout for file downloads.')

    @staticmethod
    def _init_replication(args):
        """ Set up the database for replication updates and, unless
            disabled, recreate the trigger functions with differential
            update support. Returns 0 on success.
        """
        from ..tools import replication, refresh

        LOG.warning("Initialising replication updates")
        with connect(args.config.get_libpq_dsn()) as conn:
            replication.init_replication(conn, base_url=args.config.REPLICATION_URL)
            if args.update_functions:
                LOG.warning("Create functions")
                refresh.create_functions(conn, args.config, args.sqllib_dir,
                                         True, False)
        return 0


    @staticmethod
    def _check_for_updates(args):
        """ Check on the replication server whether new updates are
            available and return the result as the process exit code.
        """
        from ..tools import replication

        with connect(args.config.get_libpq_dsn()) as conn:
            return replication.check_for_updates(conn, base_url=args.config.REPLICATION_URL)

    @staticmethod
    def _report_update(batchdate, start_import, start_index):
        """ Log a summary of the update cycle: import duration, indexing
            duration (when indexing ran, i.e. start_index is not None),
            total duration and the remaining backlog relative to the
            date of the applied batch.
        """
        def round_time(delta):
            # Strip sub-second noise for readable log output.
            return dt.timedelta(seconds=int(delta.total_seconds()))

        end = dt.datetime.now(dt.timezone.utc)
        LOG.warning("Update completed. Import: %s. %sTotal: %s. Remaining backlog: %s.",
                    round_time((start_index or end) - start_import),
                    "Indexing: {} ".format(round_time(end - start_index))
                    if start_index else '',
                    round_time(end - start_import),
                    round_time(end - batchdate))

    @staticmethod
    def _update(args):
        """ Run the replication update. With --once a single update cycle
            is executed; otherwise the function loops forever, sleeping
            between checks when no new data is available.
        """
        from ..tools import replication
        from ..indexer.indexer import Indexer

        params = _osm2pgsql_options_from_args(args, 2000, 1)
        params.update(base_url=args.config.REPLICATION_URL,
                      update_interval=args.config.get_int('REPLICATION_UPDATE_INTERVAL'),
                      import_file=args.project_dir / 'osmosischange.osc',
                      max_diff_size=args.config.get_int('REPLICATION_MAX_DIFF'),
                      indexed_only=not args.once)

        # Sanity check to not overwhelm the Geofabrik servers.
        if 'download.geofabrik.de' in params['base_url']\
           and params['update_interval'] < 86400:
            LOG.fatal("Update interval too low for download.geofabrik.de.\n"
                      "Please check install documentation "
                      "(https://nominatim.org/release-docs/latest/admin/Import-and-Update#"
                      "setting-up-the-update-process).")
            raise UsageError("Invalid replication update interval setting.")

        if not args.once:
            if not args.do_index:
                LOG.fatal("Indexing cannot be disabled when running updates continuously.")
                raise UsageError("Bad argument '--no-index'.")
            # Only needed in continuous mode, for sleeping between checks.
            recheck_interval = args.config.get_int('REPLICATION_RECHECK_INTERVAL')

        while True:
            # Keep the connection open only for the duration of the diff
            # import; indexing below runs on its own connections.
            with connect(args.config.get_libpq_dsn()) as conn:
                start = dt.datetime.now(dt.timezone.utc)
                state = replication.update(conn, params)
                if state is not replication.UpdateState.NO_CHANGES:
                    status.log_status(conn, start, 'import')
                batchdate, _, _ = status.get_status(conn)

            if state is not replication.UpdateState.NO_CHANGES and args.do_index:
                index_start = dt.datetime.now(dt.timezone.utc)
                indexer = Indexer(args.config.get_libpq_dsn(),
                                  args.threads or 1)
                indexer.index_boundaries(0, 30)
                indexer.index_by_rank(0, 30)

                # Mark the database as fully indexed on a fresh connection.
                with connect(args.config.get_libpq_dsn()) as conn:
                    status.set_indexed(conn, True)
                    status.log_status(conn, index_start, 'index')
            else:
                index_start = None

            if LOG.isEnabledFor(logging.WARNING):
                UpdateReplication._report_update(batchdate, start, index_start)

            if args.once:
                break

            if state is replication.UpdateState.NO_CHANGES:
                LOG.warning("No new changes. Sleeping for %d sec.", recheck_interval)
                time.sleep(recheck_interval)


    @staticmethod
    def run(args):
        """ Entry point of the subcommand. Applies the download timeout
            and dispatches to initialisation, update check or the update
            loop depending on the given arguments.
        """
        socket.setdefaulttimeout(args.socket_timeout)

        if args.init:
            return UpdateReplication._init_replication(args)

        if args.check_for_updates:
            return UpdateReplication._check_for_updates(args)

        UpdateReplication._update(args)
        return 0