Class: S3backup::Manager
- Inherits:
-
Object
- Object
- S3backup::Manager
- Defined in:
- lib/s3backup/manager.rb
Constant Summary collapse
- DEFAULT_BUF_READ_SIZE =
1024*1024*32
Instance Method Summary collapse
- #delete_direcory(dir) ⇒ Object
- #differential_copy(dir) ⇒ Object
- #expand_tree(dir, tree_info, output_dir) ⇒ Object
- #from_gz(file) ⇒ Object
- #from_tgz(path, dir) ⇒ Object
- #get_chain(key, path) ⇒ Object
- #get_directories(dirs, prefix, output_dir) ⇒ Object
- #get_directory(dir, out_dir) ⇒ Object
- #get_target_tree(dir) ⇒ Object
-
#initialize(target, config) ⇒ Manager
constructor
A new instance of Manager.
- #restore(dir, output_dir) ⇒ Object
- #set_config(config) ⇒ Object
- #shell_name(str) ⇒ Object
- #store_directory(dir) ⇒ Object
- #to_gz(file, remain = false) ⇒ Object
-
#to_tgz(path, dir) ⇒ Object
Compresses the specified directory into tar-gzip format.
Constructor Details
#initialize(target, config) ⇒ Manager
Returns a new instance of Manager.
19 20 21 22 23 24 25 |
# File 'lib/s3backup/manager.rb', line 19

# Returns a new Manager bound to +target+ (the storage backend used for
# get/post/delete/exists?) and configured from the +config+ hash.
def initialize(target, config)
  @target         = target
  @resume         = false
  @resume_counter = 0
  @temporary      = "/tmp"
  # Applied last: may override @temporary and @resume from the config.
  set_config(config)
end
Instance Method Details
#delete_direcory(dir) ⇒ Object
185 186 187 188 189 190 191 192 193 |
# File 'lib/s3backup/manager.rb', line 185

# Removes every stored chunk for +dir+ from the backend. Chunks are
# keyed "<index>_<key>" starting at 1 (key is the AES-encrypted path
# when encryption is enabled); deletion stops at the first index the
# backend reports as absent.
# (Method name typo is preserved: it is the public interface.)
def delete_direcory(dir)
  key = @aes ? @aes.encrypt(dir) : dir
  index = 1
  loop do
    break unless @target.delete("#{index}_#{key}")
    index += 1
  end
end
#differential_copy(dir) ⇒ Object
194 195 196 197 198 199 200 201 202 203 204 205 206 207 208 209 210 211 212 213 214 215 216 217 218 219 220 221 222 223 224 225 226 227 228 229 230 231 232 233 234 235 236 237 238 239 240 241 242 243 244 245 246 |
# File 'lib/s3backup/manager.rb', line 194 def differential_copy(dir) #現在のファイル・ツリーを比較 tree_info = TreeInfo.new({:format=>:directory,:directory=>dir,:db=>@temporary + "/new_" + Time.now.to_i.to_s + "_" + Process.pid.to_s + ".db"}) target_db_name = dir+".gz" #前回のファイル・ツリーを取得 data = @target.get(target_db_name) old_tree = nil if data db_name = @temporary + "/old_" + Time.now.to_i.to_s + "_" + Process.pid.to_s + ".db" File.open(db_name + ".gz","w") do |f| f.write(data) end from_gz(db_name + ".gz") old_tree = TreeInfo.new({:format=>:database,:db=>db_name}) else target_tree_name = "tree_"+dir+".yml" #以前のフォーマットだった場合は変換 data = @target.get(target_tree_name) if data old_tree = TreeInfo.new({:format=>:yaml,:data=>data,:db=>@temporary + "/old_" + Time.now.to_i.to_s + "_" + Process.pid.to_s + ".db"}) else old_tree = TreeInfo.new({:db=>@temporary + "/old_" + Time.now.to_i.to_s + "_" + Process.pid.to_s + ".db"}) end end data = nil; GC.start #前回と今回のファイル・ツリーを比較 tree_info.modify(old_tree) do |dir_info| S3log.debug("diff_info=#{dir_info[:name]}") #更新されたディレクトリをアップロード store_directory(dir_info[:name]) #前回のファイル・ツリー情報のうち、今回アップデートしたディレクトリ情報ファイル情報を更新 old_dir_map = old_tree.update_dir(dir_info) if @resume_counter >= 10 #更新したファイル・ツリー情報をアップロード(途中で失敗しても、resumeできるようにするため。) to_gz(old_tree.db_name,true) @target.post(target_db_name,File.read(old_tree.db_name + ".gz")) @resume_counter = 0 end GC.start end tree_info.remove(old_tree) do |dir_info| delete_direcory(dir_info[:name]) end #今回のファイル・ツリーをAWS S3に登録 to_gz(tree_info.db_name) @target.post(target_db_name,File.read(tree_info.db_name + ".gz")) tree_info.close(true) old_tree.close(true) end |
#expand_tree(dir, tree_info, output_dir) ⇒ Object
275 276 277 278 279 280 281 282 283 284 285 286 287 288 289 290 291 292 293 294 295 |
# File 'lib/s3backup/manager.rb', line 275

# Recreates the directory hierarchy for +dir+ under +output_dir+:
# downloads every stored directory level by level, then restores each
# directory's mtime from the tree metadata.
#
# NOTE(review): the extracted page lost the method name on the `def`
# line ("def (dir,tree_info,output_dir)"); restored as +expand_tree+
# per the method index and the caller in #restore.
def expand_tree(dir, tree_info, output_dir)
  now = Time.new
  # tree is a list of levels; each level maps absolute dir path => info hash.
  tree = tree_info.hierarchie(dir)
  top = tree[0].keys[0]
  top_dir = File.dirname(top)
  # The escaped parent path gives a unique folder name under output_dir.
  tmp_dir = CGI.escape(top_dir)
  output_dir = output_dir + "/" + tmp_dir
  FileUtils.mkdir_p(output_dir)
  tree.each do |node|
    get_directories(node.keys, top_dir, output_dir)
  end
  top_dir_len = top_dir.length
  # Restore mtimes deepest-first — presumably so later writes cannot
  # disturb already-set child timestamps (order preserved as-is).
  (tree.length - 1).downto(0) { |n|
    tree[n].each do |k, v|
      dir_len = k.length
      relative_path = k.slice(top_dir_len, dir_len - top_dir_len)
      dir = output_dir + relative_path
      # atime := now, mtime := value recorded in the tree metadata.
      File.utime(now, Time.at(v[:mtime]), dir)
    end
  }
end
#from_gz(file) ⇒ Object
72 73 74 75 76 77 78 79 80 |
# File 'lib/s3backup/manager.rb', line 72

# Decompresses the gzip file +file+ in place via the gunzip command:
# "<x>.gz" is replaced by the decompressed "<x>". Exits the process
# (status -1) when the command fails.
def from_gz(file)
  cmd = "(cd #{shell_name(File.dirname(file))};gunzip #{shell_name(file)} > /dev/null 2>&1)"
  S3log.debug(cmd)
  system(cmd)
  unless $?.success?
    # Fixed typo in the log message ("feiled" -> "failed").
    S3log.error("failed #{cmd} execute. #{$?.inspect}")
    exit(-1)
  end
end
#from_tgz(path, dir) ⇒ Object
103 104 105 106 107 108 109 110 111 |
# File 'lib/s3backup/manager.rb', line 103

# Extracts the tar-gzip archive at +path+ into directory +dir+.
# Exits the process (status -1) when the tar command fails.
def from_tgz(path, dir)
  cmd = "tar -xzvf #{shell_name(path)} -C #{shell_name(dir)} > /dev/null 2>&1"
  S3log.info(cmd)
  system(cmd)
  unless $?.success?
    # Fixed typo in the log message ("feiled" -> "failed").
    S3log.error("failed #{cmd} execute. #{$?.inspect}")
    exit(-1)
  end
end
#get_chain(key, path) ⇒ Object
112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 |
# File 'lib/s3backup/manager.rb', line 112

# Downloads the chunk chain stored under +key+ and concatenates it into
# the local file +path+. Chunks are keyed "<index>_<key>" from 1 and the
# chain ends at the first missing index; when encryption is enabled the
# lookup key is encrypted and each chunk is decrypted before writing.
def get_chain(key, path)
  stored_key = @aes ? @aes.encrypt(key) : key
  File.open(path, "w") do |out|
    index = 1
    loop do
      chunk = @target.get("#{index}_#{stored_key}")
      break if chunk.nil?
      chunk = @aes.decrypt(chunk) if @aes
      out.write(chunk)
      index += 1
    end
  end
end
#get_directories(dirs, prefix, output_dir) ⇒ Object
140 141 142 143 144 145 146 147 148 149 |
# File 'lib/s3backup/manager.rb', line 140

# Downloads each directory in +dirs+ into the matching location under
# +output_dir+, where +prefix+ is the common ancestor stripped from each
# directory's parent to obtain its relative placement.
def get_directories(dirs, prefix, output_dir)
  dirs.each do |remote_dir|
    parent = File.dirname(remote_dir)
    # Strip the shared prefix to get the path relative to the tree root.
    relative = parent.slice(prefix.length, parent.length - prefix.length)
    get_directory(remote_dir, output_dir + relative)
  end
end
#get_directory(dir, out_dir) ⇒ Object
133 134 135 136 137 138 139 |
# File 'lib/s3backup/manager.rb', line 133

# Restores a single stored directory +dir+ into +out_dir+: downloads its
# chunk chain into one temporary tgz archive, unpacks it, then removes
# the temporary archive.
def get_directory(dir, out_dir)
  archive = @temporary + "/rs_#{Process.pid}.tgz"
  # Reassemble all remote chunks into one local archive file...
  get_chain(dir, archive)
  # ...and expand that tgz under the destination directory.
  from_tgz(archive, out_dir)
  File.unlink(archive)
end
#get_target_tree(dir) ⇒ Object
247 248 249 250 251 252 253 254 255 256 257 258 259 260 261 262 263 264 265 266 267 268 269 270 271 272 273 274 |
# File 'lib/s3backup/manager.rb', line 247 def get_target_tree(dir) base_dir = dir tree_data = nil before_base="" #バックアップしたディレクトリよりも下位のディレクトリが指定されることがあるため while 1 base = base_dir if base == before_base break end tree_db_name = base+".gz" tree_data = @target.get(tree_db_name) if tree_data break end before_base = base base_dir = File.dirname(base_dir) end unless tree_data return nil end db_name = @temporary + "/" + Time.now.to_i.to_s + "_" + Process.pid.to_s + ".db" File.open(db_name + ".gz","w") do |f| f.write(tree_data) end from_gz(db_name + ".gz") return TreeInfo.new({:format=>:database,:db=>db_name}) end |
#restore(dir, output_dir) ⇒ Object
296 297 298 299 300 301 302 303 304 |
# File 'lib/s3backup/manager.rb', line 296

# Restores the backup of +dir+ into +output_dir+: looks up the stored
# tree that covers dir, expands it, then closes the tree. Logs a warning
# and returns when no stored tree exists.
#
# NOTE(review): the extracted page lost the callee name on the expand
# line ("(dir,tree,output_dir)"); restored as +expand_tree+ per the
# method index.
def restore(dir, output_dir)
  tree = get_target_tree(dir)
  unless tree
    S3log.warn("#{dir} isn't find in AWS S3. ignore")
    return
  end
  expand_tree(dir, tree, output_dir)
  tree.close(true)
end
#set_config(config) ⇒ Object
26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 |
# File 'lib/s3backup/manager.rb', line 26

# Applies the configuration hash:
# * "password" + "salt" — enable AES encryption (salt must be exactly
#   16 hex characters);
# * "buffer_size" — upload chunk size, String or Integer, must be < 5GB;
# * "temporary" — temp directory override;
# * "resume" — enables resume mode when exactly true.
# Exits the process on invalid configuration.
def set_config(config)
  if config["password"] and config["password"] != ""
    unless config["salt"]
      S3log.error("salt doesn't exist in config file.\n")
      exit(-1)
    end
    # Fixed: anchored with \A..\z so the WHOLE value must be 16 hex
    # chars; the old unanchored /[0-9A-Fa-f]{16}/ accepted any string
    # merely containing such a run, contradicting the error message.
    unless config["salt"] =~ /\A[0-9A-Fa-f]{16}\z/
      S3log.error("salt format shoud be HexString and length should be 16.\n")
      exit(-1)
    end
    @aes = Crypt.new(config["password"],config["salt"])
  end
  if config["buffer_size"]
    if config["buffer_size"].is_a?(String)
      @buf_size = config["buffer_size"].to_i
    else
      @buf_size = config["buffer_size"]
    end
    if @buf_size > 1000*1000*1000*5
      S3log.error("buffer_size must be less than 5G\n")
      exit(-1)
    end
  else
    @buf_size = DEFAULT_BUF_READ_SIZE
  end
  if config["temporary"]
    @temporary = config["temporary"]
  end
  if config["resume"] == true
    @resume = true
  end
end
#shell_name(str) ⇒ Object
7 8 9 10 11 12 13 14 15 16 17 18 |
# File 'lib/s3backup/manager.rb', line 7

# Escapes +str+ for safe interpolation into a shell command line:
# shell metacharacters and whitespace get a backslash prefix, and bytes
# >= 0x80 are wrapped in single quotes.
# Fixed: previously used gsub!, which mutated the caller's string in
# place (and raises FrozenError on a frozen argument); now uses the
# non-destructive gsub, leaving the argument untouched.
def shell_name(str)
  escaped = str.gsub(/[!#"$&'()*,:;<=>?\[\]\\^`{|}\s]/, '\\\\\&')
  chars = []
  escaped.each_byte { |byte|
    if byte < 0x80
      chars.push(sprintf("%c", byte))
    else
      # NOTE(review): quoting each non-ASCII byte separately looks
      # questionable for multi-byte UTF-8 input — preserved as-is.
      chars.push("'" + sprintf("%c", byte) + "'")
    end
  }
  return chars.join
end
#store_directory(dir) ⇒ Object
150 151 152 153 154 155 156 157 158 159 160 161 162 163 164 165 166 167 168 169 170 171 172 173 174 175 176 177 178 179 180 181 182 183 184 |
# File 'lib/s3backup/manager.rb', line 150 def store_directory(dir) tmp_file = @temporary + "/bk_#{Process.pid}" #tgzのファイルをtmp.pathに作成 to_tgz(tmp_file,dir) #S3にディレクトリの絶対パスをキーにして、圧縮したデータをストア i=1 key = nil if @aes key = @aes.encrypt(dir) else key = dir end #前回のバックアップデータ削除 cnt = 1 while @target.exists?(cnt.to_s() + "_" + key) @target.delete(cnt.to_s() + "_" + key) cnt+=1 end File.open(tmp_file,"r") do |f| begin while 1 key_name = i.to_s()+"_"+key data = f.readpartial(@buf_size) if @aes data = @aes.encrypt(data) end @target.post(key_name,data) i+=1 @resume_counter += 1 end rescue EOFError end end File.unlink(tmp_file) end |
#to_gz(file, remain = false) ⇒ Object
58 59 60 61 62 63 64 65 66 67 68 69 70 71 |
# File 'lib/s3backup/manager.rb', line 58

# Compresses +file+ with the gzip command. When +remain+ is true the
# original file is kept and "<file>.gz" is written alongside (gzip -c);
# otherwise gzip replaces the file with "<file>.gz". Exits the process
# (status -1) when the command fails.
def to_gz(file, remain = false)
  if remain
    cmd = "(cd #{shell_name(File.dirname(file))};gzip -c #{shell_name(file)} > #{shell_name(file)}.gz " + "2>/dev/null)"
  else
    cmd = "(cd #{shell_name(File.dirname(file))};gzip #{shell_name(file)} > /dev/null 2>&1)"
  end
  S3log.debug(cmd)
  system(cmd)
  unless $?.success?
    # Fixed typo in the log message ("feiled" -> "failed").
    S3log.error("failed #{cmd} execute. #{$?.inspect}")
    exit(-1)
  end
end
#to_tgz(path, dir) ⇒ Object
Compresses the specified directory into tar-gzip format
82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 |
# File 'lib/s3backup/manager.rb', line 82 def to_tgz(path,dir) #サブディレクトリを圧縮の対象外にする。 sub_dir = [] Dir.foreach(dir) do |file| next if /^\.+$/ =~ file sub_dir.push(file) if File.directory?(dir+"/"+file) end exclude = "" if sub_dir.length != 0 exclude = " --exclude=#{shell_name(File.basename(dir))}/" + sub_dir.map{|d| shell_name(d)}.join( " --exclude=#{shell_name(File.basename(dir))}/") end cmd = "(cd #{shell_name(File.dirname(dir))};tar -czvf #{shell_name(path)} #{exclude} -- " + "#{shell_name(File.basename(dir))} > /dev/null 2>&1)" S3log.info(cmd) system(cmd) unless $?.success? S3log.error("feiled #{cmd} execute. #{$?.inspect}") exit(-1) end end |