backups: keep fewer monthlies
[sql.git] / libexec / daily_afs_backups
index 08b9535916ef74d53a31fe40273a3d8add24cabc..c7c6288a1199442ffbd4321628d7c4d5bfff6e72 100755 (executable)
@@ -1,8 +1,8 @@
 #!/usr/bin/python
 
 import os, sys, time
-from sql import db
-from sql.util import new_cursor, get_dbs, db_backup_pre, db_backup_mkdir
+from mitsql import db
+from mitsql.util import new_cursor, get_dbs, db_backup_pre, db_backup_mkdir
 from Queue import Queue, Empty
 import threading
 import subprocess
@@ -20,7 +20,8 @@ def consumer():
     while True:
         try:
             next = queue.get(timeout=3)
-            print next[0] + ':',
+            print "Consuming", next
+            #print next[0] + ':',
             log = db.Backup.get_by(db=next[0])
             if not log:
                 log = db.Backup(db=next[0])
@@ -29,10 +30,16 @@ def consumer():
             db_backup_mkdir(next[1])
             args = ['mysqldump', next[0]]
             args.extend(MYSQLDUMP_ARGS)
-            p0 = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
-            p1 = subprocess.Popen(['gzip'], stdin=p0.stdout, stdout=file(next[1], 'w+'))
-            p1.wait()
-            err = p0.stderr.read()
+            err = ''
+            try:
+                p0 = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+                p1 = subprocess.Popen(['gzip'], stdin=p0.stdout, stdout=file(next[1], 'w+'))
+                p1.wait()
+                err = p0.stderr.read()
+            except Exception, e:
+                print "Error on", next
+                print e
+                err = str(e)
             if len(err):
                 log.dump_errnum = p0.returncode
                 log.dump_errstr = err
@@ -41,11 +48,14 @@ def consumer():
                 log.dump_errstr = None
             log.save_or_update()
             db.session.flush()
-            print 'Done'
+            #print 'Done'
         except (KeyboardInterrupt, SystemExit):
+            print "Got exit request"
             break
         except Empty:
+            print "No queue."
             if finished:
+                print "Done!"
                 break
 
 t_consumer = threading.Thread(target=consumer)
@@ -53,17 +63,17 @@ t_consumer.start()
 
 def producer():
     c = new_cursor('mysqldump')
-    for db in get_dbs(c):
-        log = db.Backup.get_by(db=db)
+    for dbname in get_dbs(c):
+        log = db.Backup.get_by(db=dbname)
         if not log:
-            log = db.Backup(db=db)
-        elif log.skip_date:
+            log = db.Backup(db=dbname)
+        elif log.skip_date and log.skip_date.timetuple:
             if time.mktime(log.skip_date.timetuple()) + 3600 > time.time():
                 # never recheck a db skipped in the past hour
                 continue
-        d = db_backup_pre(c, db)
+        d = db_backup_pre(c, dbname)
         if d[0]:
-            queue.put((db, d[1]))
+            queue.put((dbname, d[1]))
             log.skip_reason = None
             log.skip_date = None
         else:
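
Note: the consumer changes above follow a common worker-thread pattern: drain a Queue with a short timeout so the thread can periodically check a "finished" flag set by the producer. A minimal, self-contained sketch of that pattern (Python 3; names such as work_queue and handle are illustrative, not the script's exact identifiers):

    import queue
    import threading

    work_queue = queue.Queue()
    finished = False

    def handle(item):
        # Stand-in for the real per-database backup work.
        print("Consuming", item)

    def consumer():
        while True:
            try:
                item = work_queue.get(timeout=3)
                handle(item)
            except queue.Empty:
                # Queue drained; exit only once the producer has finished.
                if finished:
                    break

    t = threading.Thread(target=consumer)
    t.start()
    for i in range(5):
        work_queue.put(i)
    finished = True
    t.join()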
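The block the commit wraps in try/except is a two-process pipeline: mysqldump writes to gzip, gzip writes the compressed dump to the per-database backup file. A minimal sketch of that pipeline, written for Python 3 with hypothetical dbname/outpath arguments (the script itself derives the path via db_backup_pre):

    import subprocess

    def dump_db(dbname, outpath):
        """Dump one database through gzip; return (returncode, stderr text)."""
        with open(outpath, 'wb') as out:
            p0 = subprocess.Popen(['mysqldump', dbname],
                                  stdout=subprocess.PIPE, stderr=subprocess.PIPE)
            p1 = subprocess.Popen(['gzip'], stdin=p0.stdout, stdout=out)
            p0.stdout.close()   # let mysqldump get SIGPIPE if gzip exits early
            p1.wait()
            err = p0.stderr.read()
            p0.wait()
        return p0.returncode, err.decode('utf-8', 'replace')

A non-empty stderr or non-zero return code would then be recorded on the Backup log row, as the diff does with log.dump_errnum and log.dump_errstr.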