X-Git-Url: https://git.openstreetmap.org/chef.git/blobdiff_plain/f5586151809fd197cf09e3330df4b3212fd089fc..b174082ec71a9a741054e4c42be6ca8e8dd0264c:/cookbooks/planet/files/default/replication-bin/replicate-changesets

diff --git a/cookbooks/planet/files/default/replication-bin/replicate-changesets b/cookbooks/planet/files/default/replication-bin/replicate-changesets
index 6175691cb..f158ef3da 100644
--- a/cookbooks/planet/files/default/replication-bin/replicate-changesets
+++ b/cookbooks/planet/files/default/replication-bin/replicate-changesets
@@ -18,7 +18,7 @@ GEO_SCALE = 10000000
 ##
 # replace characters which cannot be represented in XML 1.0.
 def xml_sanitize(str)
-  str.gsub(/[\x00-\x08\x0b\x0c\x0e-\x20]/,'?')
+  str.gsub(/[\x00-\x08\x0b\x0c\x0e-\x1f]/, "?")
 end
 
 ##
@@ -117,6 +117,25 @@ class ChangesetBuilder
   end
 end
 
+##
+# sync a file to guarantee it's on disk
+def fsync(f)
+  File.open(f, &:fsync)
+end
+
+##
+# sync a directory to guarantee it's on disk. have to recurse to the root
+# to guarantee sync for newly created directories.
+def fdirsync(d)
+  while d != "/"
+    Dir.open(d) do |dh|
+      io = IO.for_fd(dh.fileno)
+      io.fsync
+    end
+    d = File.dirname(d)
+  end
+end
+
 ##
 # state and connections associated with getting changeset data
 # replicated to a file.
@@ -186,9 +205,11 @@ class Replicator
       fl.flock(File::LOCK_EX)
 
      sequence = (@state.key?("sequence") ? @state["sequence"] + 1 : 0)
-      data_file = @config["data_dir"] + format("/%03d/%03d/%03d.osm.gz", sequence / 1000000, (sequence / 1000) % 1000, (sequence % 1000))
+      data_stem = @config["data_dir"] + format("/%03d/%03d/%03d", sequence / 1000000, (sequence / 1000) % 1000, (sequence % 1000))
+      data_file = data_stem + ".osm.gz"
+      data_state_file = data_stem + ".state.txt"
       tmp_state = @config["state_file"] + ".tmp"
-      tmp_data = "/tmp/changeset_data.osm.tmp"
+      tmp_data = data_file + ".tmp"
       # try and write the files to tmp locations and then
       # move them into place later, to avoid in-progress
       # clashes, or people seeing incomplete files.
@@ -202,15 +223,37 @@ class Replicator
          fh.write(YAML.dump(@state))
        end
 
+        # fsync the files in their old locations.
+        fsync(tmp_data)
+        fsync(tmp_state)
+
+        # sync the directory as well, to ensure that the file is reachable
+        # from the dirent and has been updated to account for any allocations.
+        fdirsync(File.dirname(tmp_data))
+        fdirsync(File.dirname(tmp_state))
+
        # sanity check: the files we're moving into place
        # should be non-empty.
-        fail "Temporary gzip file should exist, but doesn't." unless File.exist?(tmp_data)
-        fail "Temporary state file should exist, but doesn't." unless File.exist?(tmp_state)
-        fail "Temporary gzip file should be non-empty, but isn't." if File.zero?(tmp_data)
-        fail "Temporary state file should be non-empty, but isn't." if File.zero?(tmp_state)
+        raise "Temporary gzip file should exist, but doesn't." unless File.exist?(tmp_data)
+        raise "Temporary state file should exist, but doesn't." unless File.exist?(tmp_state)
+        raise "Temporary gzip file should be non-empty, but isn't." if File.zero?(tmp_data)
+        raise "Temporary state file should be non-empty, but isn't." if File.zero?(tmp_state)
 
        FileUtils.mv(tmp_data, data_file)
-        FileUtils.mv(tmp_state, @config["state_file"])
+        FileUtils.cp(tmp_state, @config["state_file"])
+        FileUtils.mv(tmp_state, data_state_file)
+
+        # fsync the files in their new locations, in case the inodes have
+        # changed in the move / copy.
+        fsync(data_file)
+        fsync(@config["state_file"])
+        fsync(data_state_file)
+
+        # sync the directory as well, to ensure that the file is reachable
+        # from the dirent and has been updated to account for any allocations.
+        fdirsync(File.dirname(data_file))
+        fdirsync(File.dirname(@config["state_file"]))
+
        fl.flock(File::LOCK_UN)
 
      rescue
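
The hunks above amount to a write-temp / fsync / rename / fsync-again sequence around the replication data and state files. Below is a minimal, self-contained Ruby sketch of that pattern under the same assumptions the script makes (Linux fsync semantics, absolute paths, temp file on the same filesystem as the target). The fsync/fdirsync helpers are copied from the diff; write_durably and the path in the usage comment are hypothetical names introduced only for illustration, not part of the cookbook.

require "fileutils"

# sync a file to guarantee it's on disk (helper from the diff above)
def fsync(f)
  File.open(f, &:fsync)
end

# sync a directory to guarantee it's on disk. have to recurse to the root
# to guarantee sync for newly created directories. assumes an absolute path.
def fdirsync(d)
  while d != "/"
    Dir.open(d) do |dh|
      io = IO.for_fd(dh.fileno)
      io.fsync
    end
    d = File.dirname(d)
  end
end

# write_durably is a hypothetical helper showing how the replicator's
# pattern fits together: readers never see a partially written file, and
# a crash after the rename cannot lose either the data or the dirent.
def write_durably(path, contents)
  tmp = path + ".tmp"

  # 1. write to a temporary file next to the final location, so the later
  #    rename stays on one filesystem and replaces the target atomically.
  File.open(tmp, "w") { |fh| fh.write(contents) }

  # 2. flush the file data, then the directory entry, to disk.
  fsync(tmp)
  fdirsync(File.dirname(tmp))

  # 3. move into place, then sync again in case the inode or the
  #    directory changed during the move.
  FileUtils.mv(tmp, path)
  fsync(path)
  fdirsync(File.dirname(path))
end

# usage (hypothetical path):
#   write_durably("/store/replication/000/001/234.state.txt", state_text)

The directory syncs are the non-obvious part: fsync on the file alone does not guarantee that a newly created name (or a rename) has reached disk, so the parent directories have to be synced as well, which is exactly what the comments in the diff describe.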