def create(dsn, data_dir):
    """Factory entry point: return a fresh tokenizer object for this module.

    Mirrors the tokenizer-factory protocol — the caller supplies the database
    DSN and the project data directory and receives a ready instance.
    """
    return DummyTokenizer(dsn, data_dir)
def __init__(self, dsn, data_dir):
    """Set up the dummy tokenizer's recording state.

    Args:
        dsn: database connection string handed in by the factory.
        data_dir: project data directory handed in by the factory.
    """
    # Fix: `dsn` was accepted but never stored in the visible code (a line is
    # missing from this chunk at this point) — keep it so tests can inspect it.
    self.dsn = dsn
    self.data_dir = data_dir
    # Tracks which initialisation path ran: None until one of
    # init_new_db()/init_from_project() sets it to "new"/"loaded".
    self.init_state = None
    # Shared dict handed to every analyzer so tests can observe its calls.
    self.analyser_cache = {}
def init_new_db(self, *args, **kwargs):
    """Mark this tokenizer as having initialised a brand-new database.

    Must be the first (and only) initialisation call on the instance;
    extra positional/keyword arguments are accepted and ignored.
    """
    assert self.init_state is None
    self.init_state = "new"
def init_from_project(self):
    """Mark this tokenizer as having been loaded from an existing project.

    Must be the first (and only) initialisation call on the instance.
    """
    assert self.init_state is None
    self.init_state = "loaded"
def finalize_import(_):
    """Post-import hook; the dummy tokenizer has nothing to finalise.

    NOTE(review): only the signature is visible in this chunk — the body is
    reconstructed as a no-op (the argument is deliberately unnamed/unused).
    Confirm against the full file.
    """
def name_analyzer(self):
    """Hand out an analyzer that records its calls into this tokenizer's
    shared ``analyser_cache`` dict, so tests can inspect what happened."""
    shared_cache = self.analyser_cache
    return DummyNameAnalyzer(shared_cache)
class DummyNameAnalyzer:
    """Name-analyzer test double.

    Instead of doing real analysis it records every call into the dict
    passed to the constructor, so tests can assert what the caller did.
    Usable as a context manager.
    """

    def __init__(self, cache):
        # Keep a reference to the shared cache and seed the country log.
        self.analyser_cache = cache
        cache['countries'] = []

    def __enter__(self):
        # NOTE(review): __enter__ is missing from this chunk; reconstructed as
        # the conventional "return self" so with-blocks work — confirm.
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        # NOTE(review): body missing from this chunk; reconstructed as plain
        # cleanup that never suppresses exceptions — confirm.
        self.close()

    def close(self):
        """No resources to release in the dummy analyzer."""

    @staticmethod
    def normalize_postcode(postcode):
        # NOTE(review): body missing from this chunk; reconstructed as the
        # identity transform, the natural test-double behaviour — confirm.
        return postcode

    @staticmethod
    def update_postcodes_from_db():
        # NOTE(review): body missing from this chunk; reconstructed as a no-op.
        pass

    def update_special_phrases(self, phrases, should_replace):
        # Record the phrases verbatim; `should_replace` is ignored by the dummy.
        self.analyser_cache['special_phrases'] = phrases

    def add_country_names(self, code, names):
        # Append each call as a (code, names) pair for later inspection.
        self.analyser_cache['countries'].append((code, names))

    @staticmethod
    def process_place(place):
        # NOTE(review): body missing from this chunk; reconstructed to return
        # an empty token-info dict — confirm.
        return {}