# Database Migrations
-This page describes database migrations necessary to update existing databases
-to newer versions of Nominatim.
+Since version 3.7.0 Nominatim offers automatic migrations. Please follow
+the following steps:
-SQL statements should be executed from the PostgreSQL commandline. Execute
-`psql nominatim` to enter command line mode.
+* stop any updates that are potentially running
+* update Nominatim to the newer version
+* goto your project directory and run `nominatim admin --migrate`
+* (optionally) restart updates
-## 3.6.0 -> master
-
-### Status table contains now time zone information
+Below you find additional migrations and hints about other structural and
+breaking changes.
-The `import_status` table has been changed to include timezone information
-with the time stamp. You need to alter an existing table before running
-any replication functions with:
+!!! note
+ If you are migrating from a version <3.6, then you still have to follow
+ the manual migration steps up to 3.6.
-```sql
-ALTER TABLE import_status ALTER COLUMN lastimportdate TYPE timestamp with time zone;
-```
+## 3.6.0 -> master
### New location for data files
$aSplitResults = Result::splitResults($aResults);
Debug::printVar('Split results', $aSplitResults);
if ($iGroupLoop <= 4
- && reset($aSplitResults['head'])->iResultRank > 0) {
+ && reset($aSplitResults['head'])->iResultRank > 0
+ && $iGroupedRank !== array_key_last($aGroupedSearches)) {
// Haven't found an exact match for the query yet.
// Therefore add result from the next group level.
$aNextResults = $aSplitResults['head'];
// - increase score for finding it anywhere else (optimisation)
if (!$bLastToken) {
$oSearch->iSearchRank += 5;
+ $oSearch->iNamePhrase = -1;
}
$aNewSearches[] = $oSearch;
}
) {
$oSearch = clone $this;
$oSearch->iSearchRank++;
+ $oSearch->iNamePhrase = -1;
if (strlen($oSearchTerm->sPostcode) < 4) {
$oSearch->iSearchRank += 4 - strlen($oSearchTerm->sPostcode);
}
if (!$this->sHouseNumber && $this->iOperator != Operator::POSTCODE) {
$oSearch = clone $this;
$oSearch->iSearchRank++;
+ $oSearch->iNamePhrase = -1;
$oSearch->sHouseNumber = $oSearchTerm->sToken;
+ if ($this->iOperator != Operator::NONE) {
+ $oSearch->iSearchRank++;
+ }
// sanity check: if the housenumber is not mainly made
// up of numbers, add a penalty
if (preg_match('/\\d/', $oSearch->sHouseNumber) === 0
) {
if ($this->iOperator == Operator::NONE) {
$oSearch = clone $this;
- $oSearch->iSearchRank++;
+ $oSearch->iSearchRank += 2;
+ $oSearch->iNamePhrase = -1;
$iOp = $oSearchTerm->iOperator;
if ($iOp == Operator::NONE) {
$iOp = Operator::NEAR;
}
$oSearch->iSearchRank += 2;
+ } elseif (!$bFirstToken && !$bLastToken) {
+ $oSearch->iSearchRank += 2;
+ }
+ if ($this->sHouseNumber) {
+ $oSearch->iSearchRank++;
}
$oSearch->setPoiSearch(
if (!empty($this->aName) || !($bFirstPhrase || $sPhraseType == '')) {
if (($sPhraseType == '' || !$bFirstPhrase) && !$bHasPartial) {
$oSearch = clone $this;
+ $oSearch->iNamePhrase = -1;
$oSearch->iSearchRank += 3 * $oSearchTerm->iTermCount;
$oSearch->aAddress[$iWordID] = $iWordID;
$aNewSearches[] = $oSearch;
}
- } else {
+ } elseif (empty($this->aNameNonSearch)) {
$oSearch = clone $this;
$oSearch->iSearchRank++;
$oSearch->aName = array($iWordID => $iWordID);
if ((!$bStructuredPhrases || $iPhrase > 0)
&& (!empty($this->aName))
- && strpos($sToken, ' ') === false
) {
+ $oSearch = clone $this;
+ $oSearch->iSearchRank++;
+ if (preg_match('#^[0-9 ]+$#', $sToken)) {
+ $oSearch->iSearchRank++;
+ }
if ($oSearchTerm->iSearchNameCount < CONST_Max_Word_Frequency) {
- $oSearch = clone $this;
- $oSearch->iSearchRank += $oSearchTerm->iTermCount + 1;
- if (empty($this->aName)) {
- $oSearch->iSearchRank++;
- }
- if (preg_match('#^[0-9]+$#', $sToken)) {
- $oSearch->iSearchRank++;
- }
$oSearch->aAddress[$iWordID] = $iWordID;
- $aNewSearches[] = $oSearch;
} else {
- $oSearch = clone $this;
- $oSearch->iSearchRank += $oSearchTerm->iTermCount + 1;
$oSearch->aAddressNonSearch[$iWordID] = $iWordID;
if (!empty($aFullTokens)) {
$oSearch->iSearchRank++;
}
- $aNewSearches[] = $oSearch;
-
- // revert to the token version?
- foreach ($aFullTokens as $oSearchTermToken) {
- if (is_a($oSearchTermToken, '\Nominatim\Token\Word')) {
- $oSearch = clone $this;
- $oSearch->iSearchRank += 3;
- $oSearch->aAddress[$oSearchTermToken->iId]
- = $oSearchTermToken->iId;
- $aNewSearches[] = $oSearch;
- }
- }
}
+ $aNewSearches[] = $oSearch;
}
if ((!$this->sPostcode && !$this->aAddress && !$this->aAddressNonSearch)
- && (empty($this->aName) || $this->iNamePhrase == $iPhrase)
+ && ((empty($this->aName) && empty($this->aNameNonSearch)) || $this->iNamePhrase == $iPhrase)
+ && strpos($sToken, ' ') === false
) {
$oSearch = clone $this;
- $oSearch->iSearchRank += 2;
- if (empty($this->aName)) {
- $oSearch->iSearchRank += 1;
+ $oSearch->iSearchRank++;
+ if (empty($this->aName) && empty($this->aNameNonSearch)) {
+ $oSearch->iSearchRank++;
}
- if (preg_match('#^[0-9]+$#', $sToken)) {
- $oSearch->iSearchRank += 2;
+ if (preg_match('#^[0-9 ]+$#', $sToken)) {
+ $oSearch->iSearchRank++;
}
if ($oSearchTerm->iSearchNameCount < CONST_Max_Word_Frequency) {
if (empty($this->aName)
}
$oSearch->aName[$iWordID] = $iWordID;
} else {
+ if (!empty($aFullTokens)) {
+ $oSearch->iSearchRank++;
+ }
$oSearch->aNameNonSearch[$iWordID] = $iWordID;
}
$oSearch->iNamePhrase = $iPhrase;
if ($aCMDResult['import-tiger-data']) {
$bDidSomething = true;
$sTigerPath = getSetting('TIGER_DATA_PATH', CONST_InstallDir.'/tiger');
- $oSetup->importTigerData($sTigerPath);
+ run((clone($oNominatimCmd))->addParams('transition', '--tiger-data', $sTigerPath));
}
if ($aCMDResult['calculate-postcodes'] || $aCMDResult['all']) {
}
}
- public function importTigerData($sTigerPath)
- {
- info('Import Tiger data');
-
- $aFilenames = glob($sTigerPath.'/*.sql');
- info('Found '.count($aFilenames).' SQL files in path '.$sTigerPath);
- if (empty($aFilenames)) {
- warn('Tiger data import selected but no files found in path '.$sTigerPath);
- return;
- }
- $sTemplate = file_get_contents(CONST_SqlDir.'/tiger_import_start.sql');
- $sTemplate = $this->replaceSqlPatterns($sTemplate);
-
- $this->pgsqlRunScript($sTemplate, false);
-
- $aDBInstances = array();
- for ($i = 0; $i < $this->iInstances; $i++) {
- // https://secure.php.net/manual/en/function.pg-connect.php
- $DSN = getSetting('DATABASE_DSN');
- $DSN = preg_replace('/^pgsql:/', '', $DSN);
- $DSN = preg_replace('/;/', ' ', $DSN);
- $aDBInstances[$i] = pg_connect($DSN, PGSQL_CONNECT_FORCE_NEW | PGSQL_CONNECT_ASYNC);
- pg_ping($aDBInstances[$i]);
- }
-
- foreach ($aFilenames as $sFile) {
- echo $sFile.': ';
- $hFile = fopen($sFile, 'r');
- $sSQL = fgets($hFile, 100000);
- $iLines = 0;
- while (true) {
- for ($i = 0; $i < $this->iInstances; $i++) {
- if (!pg_connection_busy($aDBInstances[$i])) {
- while (pg_get_result($aDBInstances[$i]));
- $sSQL = fgets($hFile, 100000);
- if (!$sSQL) break 2;
- if (!pg_send_query($aDBInstances[$i], $sSQL)) fail(pg_last_error($aDBInstances[$i]));
- $iLines++;
- if ($iLines == 1000) {
- echo '.';
- $iLines = 0;
- }
- }
- }
- usleep(10);
- }
- fclose($hFile);
-
- $bAnyBusy = true;
- while ($bAnyBusy) {
- $bAnyBusy = false;
- for ($i = 0; $i < $this->iInstances; $i++) {
- if (pg_connection_busy($aDBInstances[$i])) $bAnyBusy = true;
- }
- usleep(10);
- }
- echo "\n";
- }
-
- for ($i = 0; $i < $this->iInstances; $i++) {
- pg_close($aDBInstances[$i]);
- }
-
- info('Creating indexes on Tiger data');
- $sTemplate = file_get_contents(CONST_SqlDir.'/tiger_import_finish.sql');
- $sTemplate = $this->replaceSqlPatterns($sTemplate);
-
- $this->pgsqlRunScript($sTemplate, false);
- }
-
public function calculatePostcodes($bCMDResultAll)
{
info('Calculate Postcodes');
--index only on parent_place_id
-CREATE INDEX idx_location_property_tiger_parent_place_id_imp ON location_property_tiger_import (parent_place_id) {ts:aux-index};
-CREATE UNIQUE INDEX idx_location_property_tiger_place_id_imp ON location_property_tiger_import (place_id) {ts:aux-index};
-- Index names must be distinct: the parent_place_id index previously reused
-- the place_id index name, so the second CREATE failed and the later
-- ALTER INDEX ... RENAME of the parent_place_id index found nothing.
CREATE INDEX {{sql.if_index_not_exists}} idx_location_property_tiger_parent_place_id_imp
  ON location_property_tiger_import (parent_place_id) {{db.tablespace.aux_index}};
CREATE UNIQUE INDEX {{sql.if_index_not_exists}} idx_location_property_tiger_place_id_imp
  ON location_property_tiger_import (place_id) {{db.tablespace.aux_index}};
-GRANT SELECT ON location_property_tiger_import TO "{www-user}";
+GRANT SELECT ON location_property_tiger_import TO "{{config.DATABASE_WEBUSER}}";
DROP TABLE IF EXISTS location_property_tiger;
ALTER TABLE location_property_tiger_import RENAME TO location_property_tiger;
-ALTER INDEX idx_location_property_tiger_parent_place_id_imp RENAME TO idx_location_property_tiger_housenumber_parent_place_id;
-ALTER INDEX idx_location_property_tiger_place_id_imp RENAME TO idx_location_property_tiger_place_id;
+ALTER INDEX IF EXISTS idx_location_property_tiger_parent_place_id_imp RENAME TO idx_location_property_tiger_housenumber_parent_place_id;
+ALTER INDEX IF EXISTS idx_location_property_tiger_place_id_imp RENAME TO idx_location_property_tiger_place_id;
DROP FUNCTION tiger_line_import (linegeo geometry, in_startnumber integer, in_endnumber integer, interpolationtype text, in_street text, in_isin text, in_postcode text);
from .errors import UsageError
from . import clicmd
from .clicmd.args import NominatimArgs
+from .tools import tiger_data
LOG = logging.getLogger()
@staticmethod
def run(args):
if args.tiger_data:
- os.environ['NOMINATIM_TIGER_DATA_PATH'] = args.tiger_data
- return run_legacy_script('setup.php', '--import-tiger-data', nominatim_env=args)
+ return tiger_data.add_tiger_data(args.config.get_libpq_dsn(),
+ args.tiger_data,
+ args.threads or 1,
+ args.config,
+ args.sqllib_dir)
params = ['update.php']
if args.file:
@staticmethod
def add_args(parser):
- group = parser.add_argument_group('Admin task arguments')
- group.add_argument('--warm', action='store_true',
- help='Warm database caches for search and reverse queries.')
- group.add_argument('--check-database', action='store_true',
- help='Check that the database is complete and operational.')
- group.add_argument('--analyse-indexing', action='store_true',
- help='Print performance analysis of the indexing process.')
+ group = parser.add_argument_group('Admin tasks')
+ objs = group.add_mutually_exclusive_group(required=True)
+ objs.add_argument('--warm', action='store_true',
+ help='Warm database caches for search and reverse queries.')
+ objs.add_argument('--check-database', action='store_true',
+ help='Check that the database is complete and operational.')
+ objs.add_argument('--migrate', action='store_true',
+ help='Migrate the database to a new software version.')
+ objs.add_argument('--analyse-indexing', action='store_true',
+ help='Print performance analysis of the indexing process.')
group = parser.add_argument_group('Arguments for cache warming')
group.add_argument('--search-only', action='store_const', dest='target',
const='search',
@staticmethod
def run(args):
if args.warm:
- AdminFuncs._warm(args)
+ return AdminFuncs._warm(args)
if args.check_database:
LOG.warning('Checking database')
from ..tools import admin
with connect(args.config.get_libpq_dsn()) as conn:
admin.analyse_indexing(conn, osm_id=args.osm_id, place_id=args.place_id)
+ return 0
- return 0
+ if args.migrate:
+ LOG.warning('Checking for necessary database migrations')
+ from ..tools import migration
+ return migration.migrate(args.config, args)
+
+ return 1
@staticmethod
help="Ignore certain erros on import.")
group.add_argument('--reverse-only', action='store_true',
help='Do not create search tables and indexes')
+ group.add_argument('--tiger-data', metavar='FILE',
+ help='File to import')
@staticmethod
def run(args):
- from ..tools import database_import
+ from ..tools import database_import, tiger_data
from ..tools import refresh
if args.create_db:
LOG.warning('Create Search indices')
with connect(args.config.get_libpq_dsn()) as conn:
database_import.create_search_indices(conn, args.config, args.sqllib_dir, args.drop)
+
+ if args.tiger_data:
+ LOG.warning('Tiger data')
+ tiger_data.add_tiger_data(args.config.get_libpq_dsn(),
+ args.tiger_data,
+ args.threads or 1,
+ args.config,
+ args.sqllib_dir)
--- /dev/null
+"""
+Functions for database migration to newer software versions.
+"""
+import logging
+
+from ..db import properties
+from ..db.connection import connect
+from ..version import NOMINATIM_VERSION
+from . import refresh, database_import
+from ..errors import UsageError
+
+LOG = logging.getLogger()
+
+_MIGRATION_FUNCTIONS = []
+
def migrate(config, paths):
    """ Check for the current database version and execute migrations,
        if necessary.

        `config` provides access to the project configuration (DSN etc.),
        `paths` bundles the relevant directories (must have `sqllib_dir`).

        Returns 0 on success. May raise a UsageError (via _guess_version)
        when the database is too old to be migrated automatically.
    """
    with connect(config.get_libpq_dsn()) as conn:
        if conn.table_exists('nominatim_properties'):
            db_version_str = properties.get_property(conn, 'database_version')
        else:
            db_version_str = None

        if db_version_str is not None:
            # Version string has the form "major.minor.patch-dbpatch".
            parts = db_version_str.split('.')
            db_version = tuple(int(x) for x in parts[:2] + parts[2].split('-'))

            if db_version == NOMINATIM_VERSION:
                LOG.warning("Database already at latest version (%s)", db_version_str)
                return 0

            LOG.info("Detected database version: %s", db_version_str)
        else:
            # No property table yet - derive the version from the schema.
            db_version = _guess_version(conn)

        has_run_migration = False
        for version, func in _MIGRATION_FUNCTIONS:
            if db_version <= version:
                # The first docstring line doubles as the log title.
                LOG.warning("Running: %s (%s)", func.__doc__.split('\n', 1)[0],
                            '{0[0]}.{0[1]}.{0[2]}-{0[3]}'.format(version))
                kwargs = dict(conn=conn, config=config, paths=paths)
                func(**kwargs)
                has_run_migration = True

        if has_run_migration:
            # Migrations may change SQL function signatures; reinstall them.
            LOG.warning('Updating SQL functions.')
            refresh.create_functions(conn, config, paths.sqllib_dir)

        properties.set_property(conn, 'database_version',
                                '{0[0]}.{0[1]}.{0[2]}-{0[3]}'.format(NOMINATIM_VERSION))

        conn.commit()

    return 0
+
+
def _guess_version(conn):
    """ Guess a database version when there is no property table yet.
        Only migrations for 3.6 and later are supported, so bail out
        when the version seems older.
    """
    with conn.cursor() as cur:
        # In version 3.6, the country_name table was updated. Check for that.
        num_variants = cur.scalar("""SELECT count(*) FROM
                             (SELECT svals(name) FROM country_name
                              WHERE country_code = 'gb')x;
                          """)

    if num_variants >= 100:
        # Looks like a 3.6 database - report the version just before 3.6.0.
        return (3, 5, 0, 99)

    LOG.fatal('It looks like your database was imported with a version '
              'prior to 3.6.0. Automatic migration not possible.')
    raise UsageError('Migration not possible.')
+
+
+
def _migration(major, minor, patch=0, dbpatch=0):
    """ Decorator for a single migration step. The parameters describe the
        version after which the migration is applicable, i.e before changing
        from the given version to the next, the migration is required.

        All migrations are run in the order in which they are defined in this
        file. Do not run global SQL scripts for migrations as you cannot be sure
        that these scripts do the same in later versions.

        Functions will always be reimported in full at the end of the migration
        process, so the migration functions may leave a temporary state behind
        there.
    """
    def decorator(func):
        _MIGRATION_FUNCTIONS.append(((major, minor, patch, dbpatch), func))
        # Return the function so that the module-level name keeps pointing
        # at the migration instead of being rebound to None.
        return func

    return decorator
+
+
@_migration(3, 5, 0, 99)
def import_status_timestamp_change(conn, **_):
    """ Add timezone to timestamp in status table.

        The import_status table has been changed to include timezone information
        with the time stamp.
    """
    sql = """ALTER TABLE import_status ALTER COLUMN lastimportdate
                       TYPE timestamp with time zone;"""
    with conn.cursor() as cursor:
        cursor.execute(sql)
+
+
@_migration(3, 5, 0, 99)
def install_database_module_in_project_directory(conn, config, paths, **_):
    """ Install database module in project directory.

        The database module needs to be present in the project directory
        since those were introduced.
    """
    # Delegate to the standard installer; the open connection is reused
    # for its functional check of the module.
    database_import.install_module(paths.module_dir,
                                   paths.project_dir,
                                   config.DATABASE_MODULE_PATH,
                                   conn=conn)
+
+
@_migration(3, 5, 0, 99)
def add_nominatim_property_table(conn, config, **_):
    """ Add nominatim_property table.
    """
    # Nothing to do when the table is already there.
    if conn.table_exists('nominatim_properties'):
        return

    with conn.cursor() as cur:
        cur.execute("""CREATE TABLE nominatim_properties (
                           property TEXT,
                           value TEXT);
                       GRANT SELECT ON TABLE nominatim_properties TO "{}";
                    """.format(config.DATABASE_WEBUSER))
--- /dev/null
+"""
+Functions for importing tiger data and handling tarball and directory files
+"""
+import logging
+import os
+import tarfile
+import selectors
+
+from ..db.connection import connect
+from ..db.async_connection import DBConnection
+from ..db.sql_preprocessor import SQLPreprocessor
+
+
+LOG = logging.getLogger()
+
+
def handle_tarfile_or_directory(data_dir):
    """ Handles tarfile or directory for importing tiger data

        Returns a tuple (sql_files, tar) where `tar` is the open tarfile
        object (None when reading from a plain directory). When no SQL
        files are found, (None, None) is returned.
    """
    tar = None
    if data_dir.endswith('.tar.gz'):
        tar = tarfile.open(data_dir)
        sql_files = [i for i in tar.getmembers() if i.name.endswith('.sql')]
        LOG.warning("Found %d SQL files in tarfile with path %s", len(sql_files), data_dir)
        if not sql_files:
            LOG.warning("Tiger data import selected but no files in tarfile's path %s", data_dir)
            # Close the archive before bailing out so the handle is not leaked.
            tar.close()
            return None, None
    else:
        files = os.listdir(data_dir)
        sql_files = [os.path.join(data_dir, i) for i in files if i.endswith('.sql')]
        LOG.warning("Found %d SQL files in path %s", len(sql_files), data_dir)
        if not sql_files:
            LOG.warning("Tiger data import selected but no files found in path %s", data_dir)
            return None, None

    return sql_files, tar
+
+
def handle_threaded_sql_statements(sel, file):
    """ Handles sql statement with multiplexing
    """
    # Statements handed out since the last progress dot.
    sent = 0
    finished = False
    # Using pool of database connections to execute sql statements
    while not finished:
        for key, _ in sel.select(1):
            worker = key.data
            try:
                if not worker.is_done():
                    continue
                statement = file.readline()
                sent += 1
                if not statement:
                    finished = True
                    break
                worker.perform(statement)
                if sent == 1000:
                    print('. ', end='', flush=True)
                    sent = 0
            except Exception as exc:  # pylint: disable=broad-except
                LOG.info('Wrong SQL statement: %s', exc)
+
+
def add_tiger_data(dsn, data_dir, threads, config, sqllib_dir):
    """ Import tiger data from directory or tar file

        `threads` controls the connection pool used for sending the
        statements; at least one connection is always used. Silently
        returns when no SQL files are found in `data_dir`.
    """
    sql_files, tar = handle_tarfile_or_directory(data_dir)

    if not sql_files:
        return

    with connect(dsn) as conn:
        sql = SQLPreprocessor(conn, config, sqllib_dir)
        sql.run_sql_file(conn, 'tiger_import_start.sql')

    # Reading sql_files and then for each file line handling
    # sql_query in <threads - 1> chunks.
    sel = selectors.DefaultSelector()
    place_threads = max(1, threads - 1)

    # Creates a pool of database connections
    for _ in range(place_threads):
        conn = DBConnection(dsn)
        conn.connect()
        sel.register(conn, selectors.EVENT_WRITE, conn)

    for sql_file in sql_files:
        if not tar:
            file = open(sql_file)
        else:
            file = tar.extractfile(sql_file)

        try:
            handle_threaded_sql_statements(sel, file)
        finally:
            # Close each county file; otherwise one handle leaks per file.
            file.close()

    # Unregistering pool of database connections
    while place_threads > 0:
        for key, _ in sel.select(1):
            conn = key.data
            sel.unregister(conn)
            conn.wait()
            conn.close()
            place_threads -= 1

    if tar:
        tar.close()
    print('\n')
    LOG.warning("Creating indexes on Tiger data")
    with connect(dsn) as conn:
        sql = SQLPreprocessor(conn, config, sqllib_dir)
        sql.run_sql_file(conn, 'tiger_import_finish.sql')
from nominatim.config import Configuration
from nominatim.db import connection
+from nominatim.db.sql_preprocessor import SQLPreprocessor
class _TestingCursor(psycopg2.extras.DictCursor):
""" Extension to the DictCursor class that provides execution
flatnode_file='',
tablespaces=dict(slim_data='', slim_index='',
main_data='', main_index=''))
+
@pytest.fixture
def sql_preprocessor(temp_db_conn, tmp_path, def_config, monkeypatch, table_factory):
    """ Provide a SQLPreprocessor on the test database with the minimal
        environment needed to render the standard SQL templates.
    """
    table_factory('country_name', 'partition INT', (0, 1, 2))
    monkeypatch.setenv('NOMINATIM_DATABASE_MODULE_PATH', '.')
    return SQLPreprocessor(temp_db_conn, def_config, tmp_path)
@pytest.mark.parametrize("command,script", [
(('special-phrases',), 'specialphrases'),
- (('add-data', '--tiger-data', 'tiger'), 'setup'),
(('add-data', '--file', 'foo.osm'), 'update'),
(('export',), 'export')
])
import pytest
-from nominatim.db.sql_preprocessor import SQLPreprocessor
-
@pytest.fixture
def sql_factory(tmp_path):
def _mk_sql(sql_body):
return _mk_sql
-
-@pytest.fixture
-def sql_preprocessor(temp_db_conn, tmp_path, def_config, monkeypatch, table_factory):
- monkeypatch.setenv('NOMINATIM_DATABASE_MODULE_PATH', '.')
- table_factory('country_name', 'partition INT', (0, 1, 2))
- return SQLPreprocessor(temp_db_conn, def_config, tmp_path)
-
@pytest.mark.parametrize("expr,ret", [
("'a'", 'a'),
("'{{db.partitions|join}}'", '012'),
--- /dev/null
+"""
+Test for tiger data function
+"""
+from pathlib import Path
+
+import pytest
+import tarfile
+
+from nominatim.tools import tiger_data, database_import
+
+
@pytest.mark.parametrize("threads", (1, 5))
def test_add_tiger_data(dsn, src_dir, def_config, tmp_path, sql_preprocessor,
                        temp_db_cursor, threads, temp_db):
    # Set up the minimal target schema the import needs.
    for statement in ('CREATE EXTENSION hstore',
                      'CREATE EXTENSION postgis',
                      'CREATE TABLE place (id INT)'):
        temp_db_cursor.execute(statement)
    sql_file = tmp_path / '1010.sql'
    sql_file.write_text("""INSERT INTO place values (1)""")

    tiger_data.add_tiger_data(dsn, str(tmp_path), threads, def_config,
                              src_dir / 'lib-sql')

    assert temp_db_cursor.table_rows('place') == 1
+
@pytest.mark.parametrize("threads", (1, 5))
def test_add_tiger_data_tarfile(dsn, src_dir, def_config, tmp_path,
                                temp_db_cursor, threads, temp_db, sql_preprocessor):
    temp_db_cursor.execute('CREATE EXTENSION hstore')
    temp_db_cursor.execute('CREATE EXTENSION postgis')
    temp_db_cursor.execute('CREATE TABLE place (id INT)')
    sqlfile = tmp_path / '1010.sql'
    sqlfile.write_text("""INSERT INTO place values (1)""")
    # Build the tarball inside tmp_path: the original wrote it to the
    # current working directory but read it from src_dir, which only
    # worked by accident and left a stray file behind.
    tarball = tmp_path / 'sample.tar.gz'
    with tarfile.open(str(tarball), 'w:gz') as tar:
        tar.add(str(sqlfile), arcname='1010.sql')
    tiger_data.add_tiger_data(dsn, str(tarball), threads, def_config,
                              src_dir / 'lib-sql')

    assert temp_db_cursor.table_rows('place') == 1