Fix potential data loss if cleaning old files fails

This commit is contained in:
Jarno Seppänen 2014-05-14 22:45:03 +03:00
parent 079dcec131
commit ec5ba51f98
1 changed file with 2 additions and 2 deletions

View File

@ -191,8 +191,6 @@ class GpgFs(LoggingMixIn, Operations):
yield putx
# commit
write_index(self.store, self.index_path, self.root)
if paths['old']:
self.store.delete(paths['old'])
except:
# rollback
try:
@ -204,6 +202,8 @@ class GpgFs(LoggingMixIn, Operations):
except:
log.exception('rollback failed')
raise
if paths['old']:
self.store.delete(paths['old'])
def chmod(self, path, mode):
# sanitize mode (clear setuid/gid/sticky bits)