andersk Git - sql.git/blobdiff - lib/python/sql/util.py
daily_afs_backups only dumps databases with changes since last dump; moved to /srv
[sql.git] / lib / python / sql / util.py
index 3da175641e8fd0043cc1273c5cfc69fbf7d09969..e4dcc91e8aae6952d16787de2009a4c369876873 100755 (executable)
@@ -1,10 +1,18 @@
 #!/usr/bin/python
 
-def get_backup_exceptions(path='/mit/sql/etc/db_no-daily-backup'):
-    return filter(len, map(str.strip, file(path).read().split('\n')))
+import sys, os, stat, time
+import subprocess
+
# Live MySQL data directory; per-table InnoDB files live at <db>/<table>.ibd.
PATH_MYSQL_DATA = '/srv/mysql'
# Public AFS backup tree: <owner>/<db>.sql.gz, where each <owner> entry is a
# symlink into the volume tree below (see db_backup_mkdir).
PATH_AFS_BACKUP = '/afs/athena.mit.edu/contrib/sql/backup'
# Backing AFS volume tree, sharded by the first character of the owner name.
PATH_AFS_BACKUP_VOL = '/afs/athena.mit.edu/contrib/sql/.backup'
# Databases whose Data_length total reaches this (200 MiB) are not dumped.
BACKUP_MAX_SIZE = 200*1024*1024
 
 import MySQLdb, MySQLdb.cursors
 
def new_cursor(group='client'):
    """Connect to the local MySQL server and return a DictCursor.

    `group` selects which option group of the default my.cnf to read
    credentials from.
    """
    connection = MySQLdb.connect('localhost',
                                 read_default_group=group,
                                 cursorclass=MySQLdb.cursors.DictCursor)
    return connection.cursor()
+
def use_db(cursor, db):
    """Make `db` the session's default database."""
    statement = 'USE `%s`' % db
    cursor.execute(statement)
 
@@ -35,7 +43,9 @@ def get_db_mtime(cursor, db):
     return 0
 
def get_db_size(cursor, db):
    """Sum Data_length over every table in `db`, ignoring entries whose
    Data_length is missing/NULL."""
    total = 0
    for _table, status in get_db_tables_status(cursor, db).items():
        data_length = status['Data_length']
        if data_length:
            total += data_length
    return total
 
def get_db_tables_engines(cursor, db):
    """Return a list of (table_name, engine) pairs for every table in `db`."""
    status_by_table = get_db_tables_status(cursor, db)
    return [(name, info['Engine']) for name, info in status_by_table.items()]
@@ -55,35 +65,66 @@ def repair_tables_from_stdin(cursor):
         print table
         repair_table_quick(cursor, table)
 
-def backup_dbs(cursor):
-    for db in get_dbs(cursor):
-        db_size = get_db_size(cursor, db)
-        if db_size < BACKUP_MAX_SIZE:
-            if len(db.split('+')) <= 1:
-                owner = 'root'
-            else:
-                owner = db.split('+')[0]
-
-            db_mtime = get_db_mtime(cursor, db)
-            try:
-                bk_mtime = os.stat('%s/%s/%s.sql.gz' % (BACKUP_ROOT, owner, db))[stat.ST_MTIME]
-            except:
-                bk_mtime = 0
-            if not db_mtime or \
-                int(time.mktime(db_mtime.timetuple())) > bk_mtime:
-                print db, 'DUMP'
-            else:
-                print db, 'SKIP (mtime: %s < %s)' % (str(db_mtime),
-                                                     time.strftime('%Y-%m-%d %H:%M:%S',
-                                                                   time.localtime(bk_mtime)))
def db_backup_pre(cursor, db):
    """Decide whether database `db` needs a fresh dump to AFS.

    Returns a pair:
      (True, dump_path)  -- dump is needed; dump_path is the target .sql.gz
      (False, reason)    -- skip, with a human-readable reason (too big,
                            empty database, or unchanged since last dump).
    """
    db_size = get_db_size(cursor, db)
    if db_size >= BACKUP_MAX_SIZE:
        return (False, 'size: %.2fM > %dM' % (db_size/1024/1024,
                                              BACKUP_MAX_SIZE/1024/1024))
    # Databases are named "<owner>+<name>"; a bare name belongs to root.
    parts = db.split('+')
    if len(parts) <= 1:
        owner = 'root'
    else:
        owner = parts[0]
    db_mtime = get_db_mtime(cursor, db)
    db_path = '%s/%s/%s.sql.gz' % (PATH_AFS_BACKUP, owner.lower(), db)
    try:
        bk_mtime = os.stat(db_path)[stat.ST_MTIME]
    except OSError:
        # Was a bare `except:`; only a missing/unreachable previous dump
        # should be treated as "never backed up".
        bk_mtime = 0
    if not db_mtime:
        return (False, 'empty database')
    if int(time.mktime(db_mtime.timetuple())) > bk_mtime:
        return (True, db_path)
    return (False, 'mtime: %s < %s' % (str(db_mtime),
                                       time.strftime('%Y-%m-%d %H:%M:%S',
                                                     time.localtime(bk_mtime))))
 
-if __name__ == '__main__':
-    import sys, os, stat, time
-    BACKUP_ROOT = '/afs/athena.mit.edu/contrib/sql/backup'
-    BACKUP_MAX_SIZE = 200*1024*1024
def db_backup_pstrip(path, prefix=None):
    """Strip the backup-root prefix and any single leading '/' from `path`.

    With the default `prefix=None` the module-level PATH_AFS_BACKUP is used,
    preserving the original behavior; `prefix` is parameterized so callers
    (and tests) can strip an arbitrary root.  Note a leading '/' is removed
    even when `path` does not start with the prefix, as before.
    """
    if prefix is None:
        prefix = PATH_AFS_BACKUP
    r = path
    if r.startswith(prefix):
        r = r[len(prefix):]
    if r.startswith('/'):
        r = r[1:]
    return r
 
-    cursor = MySQLdb.connect('localhost', read_default_group='client', cursorclass=MySQLdb.cursors.DictCursor).cursor()
def db_backup_mkdir(path):
    # Ensure the per-owner backup directory for `path` exists.
    #
    # `path` is a dump path under PATH_AFS_BACKUP ("<owner>/<db>.sql.gz"
    # after stripping).  The real directory is created in the
    # PATH_AFS_BACKUP_VOL tree, sharded by the owner's first character,
    # and PATH_AFS_BACKUP/<owner> is a symlink to it.
    p = db_backup_pstrip(path)
    owner = p.split('/')[0]  # first path component == owner name
    p_owner = '%s/%s' % (PATH_AFS_BACKUP, owner)
    p_owner_vol = '%s/%s/%s' % (PATH_AFS_BACKUP_VOL, owner[0], owner)
    if not os.path.islink(p_owner):
        # Symlink may briefly dangle until the mkdir below runs.
        os.symlink(p_owner_vol, p_owner)
    if not os.path.isdir(p_owner_vol):
        os.mkdir(p_owner_vol)
        # Lock down the fresh AFS directory for anonymous/authenticated users.
        subprocess.call(['fs', 'sa', p_owner_vol, 'system:anyuser', 'none'])
        subprocess.call(['fs', 'sa', p_owner_vol, 'system:authuser', 'none'])
        if owner == 'root':
            # NOTE(review): also denies system:sql-backup on root-owned dirs;
            # presumably intentional — confirm against the AFS ACL policy.
            subprocess.call(['fs', 'sa', p_owner_vol, 'system:sql-backup', 'none'])
    #print owner
    #print ' ', p_owner
    #print ' ', p_owner_vol
+    
def tables_in_ibdata(cursor):
    """Yield qualified names ('`db`.`table`') of InnoDB tables that lack a
    per-table .ibd file, i.e. whose data lives in the shared ibdata
    tablespace.

    Bug fix: the original built `innodb_tables` as a list of bare table-name
    strings but then indexed each as `table[0]`/`table[1]` (single
    characters) and applied '%' to the string, which raises TypeError.
    The intended pair is (db, table).
    """
    for db in get_dbs(cursor):
        table_engines = get_db_tables_engines(cursor, db)
        innodb_tables = [name for name, engine in table_engines
                         if engine and engine.lower() == 'innodb']
        for table in innodb_tables:
            path = '%s/%s/%s.ibd' % (PATH_MYSQL_DATA, db, table)
            if not os.path.isfile(path):
                yield '`%s`.`%s`' % (db, table)
 
if __name__ == '__main__':
    # Library module: no command-line behavior (yet).
    pass
 
This page took 0.289385 seconds and 4 git commands to generate.