Mirror of https://github.com/bvn13/PyBackuper.git
PyBackuper — backs up MSSQL databases to Yandex.Disk (YaDisk).
Parent commit: 1adcbddf0c
Commit: bf4b2d6950
|
@ -0,0 +1,3 @@
|
|||
/log/
|
||||
/__pycache__/
|
||||
**/__pycache__/
|
|
@ -0,0 +1,129 @@
|
|||
|
||||
import glob
|
||||
import easywebdav
|
||||
import datetime
|
||||
import ntpath
|
||||
from zipfile import ZipFile as ZipFile
|
||||
from pymssql import _mssql
|
||||
import os
|
||||
|
||||
import pprint
|
||||
|
||||
from settings import settings
|
||||
|
||||
|
||||
# Timestamp of this run, captured once at import time so every step
# (upload folder names, retention check) uses the same date/time.
now = datetime.datetime.now()
nowDate = now.strftime("%Y%m%d")  # e.g. "20160602" — remote day-folder name
nowTime = now.strftime("%H%M%S")  # e.g. "160338" — remote time-folder name
print("now: %s %s" % (nowDate, nowTime))
|
||||
|
||||
|
||||
def getLocalFiles(dbname=""):
    """Return the local backup files for *dbname* matching the configured mask."""
    pattern = "%s%s%s" % (settings['path']['local'], dbname, settings['path']['fmask'])
    return glob.glob(pattern)
|
||||
|
||||
def zipFiles(files):
    """Zip each file in *files*, delete the source file, return the zip paths.

    The explicit ``fzip.close()`` inside the ``with`` block was redundant
    (the context manager already closes the archive) and has been removed.
    """
    newFiles = []
    for fname in files:
        zipFname = "%s.zip" % (fname,)
        print("creating zip: %s" % (zipFname,))
        with ZipFile(zipFname, mode="w") as fzip:
            print(" - adding file: %s" % (fname,))
            fzip.write(fname)
        newFiles.append(zipFname)
        print("deleting source file: %s" % (fname,))
        os.remove(fname)
    return newFiles
|
||||
|
||||
def uploadFiles(webdav, locFiles):
    """Upload *locFiles* to the remote storage under <remote>/<date>/<time>/."""
    global now, nowDate, nowTime

    print("UPLOADING FILES")
    #pprint.pprint(webdav.ls('/'))

    folderDate = "%s%s" % (settings['path']['remote'], nowDate)
    folderTime = "%s%s/%s" % (settings['path']['remote'], nowDate, nowTime)

    # create the day folder first, then the per-run time folder inside it
    for remoteDir in (folderDate, folderTime):
        if not webdav.exists(remoteDir):
            webdav.mkdir(remoteDir)
            print("WEBDAV: dir %s created" % (remoteDir,))

    for localPath in locFiles:
        baseName = ntpath.basename(localPath)
        remoteFname = "%s/%s" % (folderTime, baseName)
        print("uploading file: %s -> %s" % (baseName, remoteFname))
        webdav.upload(localPath, remoteFname)
|
||||
|
||||
def leftRemoteSpaceManagement(webdav):
    """Delete remote day-folders older than the configured retention period."""
    global now, nowDate, nowTime

    print("LEFT SPACE MANAGEMENT")
    print("planned to left %s days" % (settings['space']['left_days'],))

    currDay = int(nowDate)
    keepDays = int(settings['space']['left_days'])

    for entry in webdav.ls(settings['path']['remote']):
        if entry.name == '/':
            continue
        dayName = entry.name.replace('/', '')
        try:
            dayNumber = int(dayName)
        except ValueError:
            # folder name is not a YYYYMMDD date — not ours, leave it alone
            print("unexpected day, skipped: %s, %s" % (entry.name, dayName))
            continue
        if currDay - dayNumber > keepDays:
            print("deleting remote folder: %s" % (entry.name,))
            webdav.rmdir(entry.name)
        else:
            print("skipping remote folder: %s" % (entry.name,))
    return
|
||||
|
||||
def backupDatabase(dbname):
    """Run the configured full-backup stored procedure for *dbname* on MSSQL."""
    print("making backup: %s" % (dbname,))
    query = settings['sql']['command_full']['query']
    command = query.replace("%dbname%", dbname).replace("%path%", settings['path']['local'])
    with _mssql.connect(server="localhost", user=settings['sql']['username'], password=settings['sql']['password'], database=dbname) as db:
        db.execute_non_query(command)
    return
|
||||
|
||||
def deleteOldLocalBackups():
    """Delete leftover local backup files from previous runs.

    Globs both the raw backup mask and, when zipping is enabled, the
    zipped variant (mask + '.zip'). The previous version globbed only the
    raw mask and then blindly appended '.zip' to each match, so leftover
    zip files (whose names no longer match the raw mask) were never found
    and os.remove could target a path that does not exist.
    """
    print("deleting old backups")
    base = "%s%s" % (settings['path']['local'], settings['path']['allmask'])
    patterns = [base]
    if settings['zip']['make'] == 'yes':
        patterns.append("%s.zip" % (base,))
    for pattern in patterns:
        for name in glob.glob(pattern):
            os.remove(name)
            print(" - deleted: %s" % (name,))
    return
|
||||
|
||||
def do():
    """Run one backup cycle: clean up, dump, optionally zip, upload, prune."""
    deleteOldLocalBackups()

    webdav = easywebdav.connect(settings['account']['domain'], username=settings['account']['login'], password=settings['account']['pass'], protocol=settings['account']['protocol'])

    for dbname in settings['sql']['databases']:
        backupDatabase(dbname)
        locFiles = getLocalFiles(dbname)
        if not locFiles:
            print("ERROR: cound not find expected backup of database: %s" % (dbname,))
            continue
        if settings['zip']['make'] == 'yes':
            locFiles = zipFiles(locFiles)
        uploadFiles(webdav, locFiles)

    # prune old remote day-folders last
    leftRemoteSpaceManagement(webdav)
    return
|
||||
|
||||
# Script entry point: run a single complete backup cycle.
if __name__ == '__main__':
    print("starting backup")
    do()
    print("ended")
|
|
@ -0,0 +1,315 @@
|
|||
import os
|
||||
import sys
|
||||
import argparse
|
||||
import glob
|
||||
import ntpath
|
||||
import traceback
|
||||
from zipfile import ZipFile as ZipFile
|
||||
|
||||
import datetime
|
||||
|
||||
from pprint import pprint
|
||||
|
||||
from backuper.storer import BackupStorerYandexDisk
|
||||
from backuper.sqlmanager import SQLManager
|
||||
from backuper.dirwatcher import DirWatcher
|
||||
from backuper.reporter import Reporter
|
||||
|
||||
from sets import settings
|
||||
|
||||
import threading
|
||||
|
||||
import logging
|
||||
|
||||
|
||||
logging.basicConfig(level = logging.INFO, format='%(asctime)s - %(message)s', datefmt='%Y-%m-%d %H:%M:%S', filename=settings['log'])
|
||||
|
||||
def exception_hook(exc_type, exc_value, exc_traceback):
    """Write any uncaught exception to the configured log instead of stderr."""
    logging.error("Uncaught exception", exc_info=(exc_type, exc_value, exc_traceback))


# Route uncaught exceptions through the logger above.
sys.excepthook = exception_hook
|
||||
|
||||
#raise Exception('Boom')
|
||||
|
||||
version = '0.3.0'
|
||||
|
||||
|
||||
class Backuper(object):
    """One-shot backup worker: dumps databases via SQLManager, optionally
    zips the dump files and pushes them to the configured storer."""

    storer = None       # BackupStorer implementation (set by init)
    sqlManager = None   # SQLManager (set by init)

    localPath = ''
    fileMask = '*'
    makeZip = 'no'

    databases = []

    def init(self, settings):
        """Configure paths, the storer backend and the SQL manager from *settings*."""
        self.fileMask = settings['file_mask']
        self.localPath = settings['local_path']
        self.makeZip = settings['zip']['make']

        storerType = settings['storer']
        storerSettings = settings['storers'][storerType]

        if storerType == 'ya_disk':
            self.storer = BackupStorerYandexDisk()
            self.storer.setPath(storerSettings['path'])
            self.storer.setDomain(storerSettings['account']['domain'])
            self.storer.setUsername(storerSettings['account']['username'])
            self.storer.setPassword(storerSettings['account']['password'])
            self.storer.setProtocol(storerSettings['account']['protocol'])
        else:
            raise NotImplementedError("%s storer is not implemented yet" % (storerType,))

        sqlSettings = settings['sql']

        self.sqlManager = SQLManager()
        self.sqlManager.setServer(sqlSettings['server'])
        self.sqlManager.setUsername(sqlSettings['username'])
        self.sqlManager.setPassword(sqlSettings['password'])

        self.databases = sqlSettings['databases']
        return

    def getLocalFiles(self, dirname='', dbname=""):
        """Return dump files under localPath/dirname/dbname matching fileMask.

        *dirname* now defaults to '' so that deleteOldLocalBackups(), which
        calls this with no arguments, no longer raises TypeError.
        """
        print("file search mask: %s%s/%s/%s" % (self.localPath, dirname, dbname, self.fileMask))
        return glob.glob("%s%s/%s/%s" % (self.localPath, dirname, dbname, self.fileMask))

    def deleteOldLocalBackups(self):
        """Delete leftover local dump files (zip names when zipping is on)."""
        logging.info("deleting old backups")
        for fname in self.getLocalFiles():
            name = fname
            if self.makeZip == 'yes':
                name = "%s.zip" % (name,)
            os.remove(name)
            logging.info(" - deleted: %s" % (name,))

    def zipFiles(self, files):
        """Zip every file in *files*, remove the sources, return the zip names.

        Was previously declared without ``self`` and invoked as a bare name
        from backup(), which raised NameError at runtime; it is now a
        proper method and is called via ``self``.
        """
        newFiles = []
        for fname in files:
            zipFname = "%s.zip" % (fname,)
            logging.info("creating zip: %s" % (zipFname,))
            with ZipFile(zipFname, mode="w") as fzip:
                logging.info(" - adding file: %s" % (fname,))
                fzip.write(fname)
            newFiles.append(zipFname)
            logging.info("deleting source file: %s" % (fname,))
            os.remove(fname)
        return newFiles

    def backup(self, type='F', database=None, copyOnly=False):
        """Back up one database (or every configured one when *database* is None).

        type: 'F' full, 'D' diff, 'L' logs — selects the local sub-directory.
        copyOnly: skip the SQL dump and only upload already-present files.
        Raises NotImplementedError for an unknown *type*.
        """
        dirName = ''
        if type.upper() == 'F':
            dirName = 'full'
        elif type.upper() == 'D':
            dirName = 'diff'
        elif type.upper() == 'L':
            dirName = 'logs'
        else:
            raise NotImplementedError("Unknown type of database backups has been specified")

        now = datetime.datetime.now()
        nowDate = now.strftime("%Y%m%d")
        nowTime = now.strftime("%H%M%S")
        logging.info("now: %s %s" % (nowDate, nowTime))

        databases = []
        if not database:
            databases = self.databases
        else:
            databases.append(database)

        for dbname in databases:
            if not copyOnly:
                self.sqlManager.makeBackup(dbname, type, self.localPath)

            locFiles = self.getLocalFiles(dirName, dbname)
            if len(locFiles) == 0:
                logging.info("ERROR: could not find expected backup of database: %s" % (dbname,))
                continue
            if self.makeZip == 'yes':
                locFiles = self.zipFiles(locFiles)

            for file in locFiles:
                self.storer.store(file, nowDate, nowTime, dirName)
|
||||
|
||||
|
||||
|
||||
class Watcher(object):
    """Watches the backup directory and uploads new/changed backup files to
    the configured storer, reporting results by e-mail."""

    storer = None
    watcher = None
    reporter = None

    watchPath = ''
    dirFullBackup = ''
    dirDiffBackup = ''
    fileMask = '*'
    makeZip = 'no'

    databases = []
    # Filenames currently being processed; de-duplicates the double
    # created/modified events the OS fires for one file.
    files = set()

    def init(self, settings):
        """Configure watch paths, the reporter and the storer from *settings*."""
        self.watchPath = settings['watcher']['path']
        self.dirFullBackup = settings['watcher']['dir_full']
        self.dirDiffBackup = settings['watcher']['dir_diff'] if 'dir_diff' in settings['watcher'] else ''
        self.dirLogsBackup = settings['watcher']['dir_logs'] if 'dir_logs' in settings['watcher'] else ''

        self.makeZip = settings['zip']['make']

        storerType = settings['storer']
        storerSettings = settings['storers'][storerType]

        self.reporter = Reporter()
        self.reporter.init(settings)

        if storerType == 'ya_disk':
            self.storer = BackupStorerYandexDisk()
            self.storer.setPath(storerSettings['path'])
            self.storer.setDomain(storerSettings['account']['domain'])
            self.storer.setUsername(storerSettings['account']['username'])
            self.storer.setPassword(storerSettings['account']['password'])
            self.storer.setProtocol(storerSettings['account']['protocol'])
            self.storer.setDaysLeft(storerSettings['left_days'])
            self.storer.setManageEnabled(storerSettings['manage_enabled'] == 'true')
        else:
            raise NotImplementedError("%s storer is not implemented yet" % (storerType,))

        return

    def _getCurrentTime(self):
        """Return (YYYYMMDD, HHMM) with minutes rounded to a 15-minute slot.

        Rounding groups events fired within the same quarter hour into one
        remote time-folder.
        """
        now = datetime.datetime.now()
        nowDate = now.strftime("%Y%m%d")
        nowTime = now.strftime("%H")
        minutes = int(now.strftime("%M"))
        if minutes <= 15:
            nowTime += "00"
        elif minutes <= 30:
            nowTime += "15"
        elif minutes <= 45:
            nowTime += "30"
        else:
            nowTime += "45"

        logging.info("now: %s %s" % (nowDate, nowTime))
        return (nowDate, nowTime)

    def onBackupCreated(self, filename):
        """Handle a created/modified event: classify the file by directory
        (full/diff/logs) and upload it on a background timer thread."""
        self.storer.manageStorage()

        if filename not in self.files:
            self.files.add(filename)
            logging.info("BACKUP: %s" % (filename,))
            full_path = "%s%s" % (self.watchPath, self.dirFullBackup)
            logging.info("FULL PATH: %s" % (full_path,))
            diff_path = "%s%s" % (self.watchPath, self.dirDiffBackup)
            logging.info("DIFF PATH: %s" % (diff_path,))
            logs_path = "%s%s" % (self.watchPath, self.dirLogsBackup)
            logging.info("LOGS PATH: %s" % (logs_path,))

            if filename.find(full_path) >= 0:
                logging.info("FULL BACKUP")
                worker = threading.Timer(0, self._makeBackup, [filename, "FULL"])
                worker.start()
            elif self.dirDiffBackup and filename.find(diff_path) >= 0:
                logging.info("DIFF BACKUP")
                worker = threading.Timer(0, self._makeBackup, [filename, "DIFF"])
                worker.start()
            elif self.dirLogsBackup and filename.find(logs_path) >= 0:
                logging.info("LOGS BACKUP")
                worker = threading.Timer(0, self._makeBackup, [filename, "LOGS"])
                worker.start()
            else:
                logging.warning("IGNORING: %s" % (filename,))
        else:
            logging.warning("SKIP DOUBLE TRIGGER FOR: %s" % (filename,))

        return

    def _makeBackup(self, filename, subfolder):
        """Upload *filename* into *subfolder* and report success or failure."""
        (nowDate, nowTime) = self._getCurrentTime()
        remoteFname = ""
        try:
            remoteFname = self.storer.store(filename, nowDate, nowTime, subfolder)
            self.reporter.report("%s BACKUP SUCCESSFULL" % (subfolder,), "%s BACKUP UPLOADED: %s -> %s" % (subfolder, filename, remoteFname))
            logging.info("%s BACKUP UPLOADED: %s -> %s" % (subfolder, filename, remoteFname))
        except Exception:
            # was a bare 'except:' — narrowed so SystemExit/KeyboardInterrupt propagate
            trace = traceback.format_exc()
            self.reporter.report("%s BACKUP ERROR" % (subfolder,), "%s BACKUP UPLOAD ERROR: %s - %s" % (subfolder, filename, trace))
            logging.error("%s BACKUP UPLOAD ERROR: %s -> %s - %s" % (subfolder, filename, remoteFname, trace))

        # allow future events for this file to be processed again
        self.files.remove(filename)
        return

    def start(self):
        """Begin watching the configured path (blocks until interrupted)."""
        self.watcher = DirWatcher(onFileCreatedHandler=self.onBackupCreated, onFileModifiedHandler=self.onBackupCreated)
        logging.info("Start watching files: %s%s" % (self.watchPath, self.fileMask))
        self.watcher.watch(self.watchPath, self.fileMask)
|
||||
|
||||
|
||||
if __name__ == '__main__':

    # Build the CLI: a 'watcher' sub-command (daemon mode) and a 'worker'
    # sub-command (one-shot backup).
    verstr = '%%(prog) %s' % (version,)
    description = 'Backup maker. %s' % (verstr,)

    parser = argparse.ArgumentParser(add_help=True, description='Backup Maker', prog='backuper')
    parser.add_argument('-v', '--ver', '--version', action='version', version=version)
    subparsers = parser.add_subparsers()

    parser_watcher = subparsers.add_parser('watcher')
    parser_watcher.add_argument('-s', '--start', required=True, dest='watcher', action='store_true')

    parser_work = subparsers.add_parser('worker')
    parser_work.add_argument('-t', '--type', metavar='type', required=True, type=str, choices=['F', 'D', 'L'], default='F', help='type of database backup to make this time: F - full, D - diff, L - logs')
    parser_work.add_argument('-db', '--database', metavar='database', type=str, help='database name to backup. Will make backups for all databases (from settings) if not specified.')
    parser_work.add_argument('-c', '--copy-only', action='store_true', dest='copy', help='Only copy files to storage')

    args = parser.parse_args()

    if 'watcher' not in args and 'type' not in args:
        print("WRONG MODE: choose watcher/worker")
    elif 'watcher' in args and args.watcher:
        print("WATCH MODE")
        watcher = Watcher()
        watcher.init(settings)
        watcher.start()
    else:
        backuper = Backuper()
        backuper.init(settings)
        # BUGFIX: the previous code passed copyOnly='copy' in args, which
        # tests for *presence* of the attribute. Since store_true always
        # sets a default, 'copy' in args was True for every worker run, so
        # copy-only mode was always on and dumps were never made. Use the
        # parsed values instead.
        backuper.backup(type=args.type,
                        database=getattr(args, 'database', None),
                        copyOnly=getattr(args, 'copy', False))
|
|
@ -0,0 +1,111 @@
|
|||
import os
|
||||
import sys
|
||||
import time
|
||||
import logging
|
||||
from watchdog.observers import Observer
|
||||
from watchdog.events import PatternMatchingEventHandler
|
||||
|
||||
import threading
|
||||
|
||||
import logging
|
||||
|
||||
_MAX_CHECKS_BY_TIMER = 5
|
||||
|
||||
|
||||
class FileSystemEventHandler(PatternMatchingEventHandler) :
    # Watchdog event handler that waits until a file is no longer locked by
    # the writing process before invoking the configured callbacks.

    checkAvailability = True   # when True, probe the file for locks before processing
    onCreated = None           # callback(src_path) for 'created' events
    onModified = None          # callback(src_path) for 'modified' events

    def isFileAvailable(self, filename) :
        # Probe whether the file can be opened exclusively: a file still being
        # written cannot be renamed on Windows, so rename-onto-itself acts as
        # a cheap lock test.
        logging.info("CHECK: %s" % (filename,))
        if os.path.exists(filename) :
            try :
                os.rename(filename, filename)
                return True
            except OSError :
                return False
        return False

    def process(self, event, isThread=False) :
        # Dispatch *event* to the onCreated/onModified callback. If the file
        # is still locked, schedule exactly one retry on a 5-second Timer
        # (the retry runs with isThread=True and gives up if the file is
        # still unavailable then).
        # NOTE(review): _MAX_CHECKS_BY_TIMER is declared global here but never
        # read or written — the retry count is effectively hard-coded to one.
        global _MAX_CHECKS_BY_TIMER

        logging.info("PROCESS: %r" % (isThread,))
        doProcess = True

        if self.checkAvailability :
            logging.info("check for file availability: %s" % (event.src_path,))
            if not self.isFileAvailable(event.src_path) :
                logging.info("file is not available: %s" % (event.src_path,))
                if (isThread) :
                    # already the timer-driven retry — give up
                    return None

                t = threading.Timer(5, self.process, [event, True])
                t.start()
                doProcess = False

        if doProcess :
            logging.info("file is available: %s" % (event.src_path,))
            logging.info("event_type: %s, onCreated: %s, onModified: %s" % (event.event_type, self.onCreated, self.onModified))
            if event.event_type == 'created' and self.onCreated :
                logging.info("call onCreated")
                return self.onCreated(event.src_path)
            if event.event_type == 'modified' and self.onModified :
                logging.info("call onModified")
                return self.onModified(event.src_path)

    def on_any_event(self, event) :
        # Entry point called by watchdog for every filesystem event; directory
        # events are ignored, file events go through the availability check.
        logging.info("Any event, type: %s, source: %s, is dir: %r" % (event.event_type, event.src_path, event.is_directory))

        if event.is_directory :
            return None

        return self.process(event)
|
||||
|
||||
|
||||
class DirWatcher(object):
    """Thin wrapper around watchdog's Observer that wires file-created and
    file-modified callbacks into a FileSystemEventHandler and blocks forever."""

    onFileCreated = None
    onFileModified = None

    observer = None
    eventHandler = None

    def __init__(self, onFileCreatedHandler=None, onFileModifiedHandler=None):
        self.onFileCreated = onFileCreatedHandler
        self.onFileModified = onFileModifiedHandler

    def watch(self, path, mask):
        """Watch *path* recursively for files matching *mask* until Ctrl-C."""
        handler = FileSystemEventHandler(patterns=[mask], ignore_patterns=[], ignore_directories=True, case_sensitive=True)
        handler.onCreated = self.onFileCreated
        handler.onModified = self.onFileModified
        self.eventHandler = handler

        self.observer = Observer()
        self.observer.schedule(handler, path, recursive=True)
        self.observer.start()
        try:
            while True:
                time.sleep(1)
        except KeyboardInterrupt:
            self.observer.stop()
            self.observer.join()
|
||||
|
||||
|
||||
|
||||
|
||||
#def onFileCreatedHandler(event) :
|
||||
# logging.info("Created: %s" % (event.src_path,))
|
||||
|
||||
|
||||
#def onFileModifiedHandler(event) :
|
||||
# logging.info("Modified: %s" % (event.src_path,))
|
||||
|
||||
|
||||
if __name__ == '__main__':
    logging.basicConfig(level = logging.INFO, format='%(asctime)s - %(message)s', datefmt='%Y-%m-%d %H:%M:%S')
    path = sys.argv[1] if len(sys.argv) > 1 else '.'

    # BUGFIX: the handlers this script passed to DirWatcher were commented
    # out above, so running the module raised NameError. Define simple
    # logging handlers here instead.
    def onFileCreatedHandler(src_path):
        logging.info("Created: %s" % (src_path,))

    def onFileModifiedHandler(src_path):
        logging.info("Modified: %s" % (src_path,))

    watcher = DirWatcher(onFileCreatedHandler, onFileModifiedHandler)
    watcher.watch(path, "*.bak")
|
||||
|
|
@ -0,0 +1,37 @@
|
|||
|
||||
import sys
|
||||
|
||||
import smtplib
|
||||
from email.mime.text import MIMEText
|
||||
from email.mime.multipart import MIMEMultipart
|
||||
from email import encoders
|
||||
|
||||
import logging
|
||||
|
||||
|
||||
class Reporter:
    """Sends HTML e-mail reports to the configured list of admins."""

    admins = []    # recipient addresses (set by init)
    settings = {}  # SMTP settings: SMTP, PORT, LOGIN, PASS, EMAIL (set by init)

    def init(self, settings):
        """Store the admin list and the 'email' SMTP settings block."""
        self.admins = settings['admins']
        self.settings = settings['email']
        return

    def report(self, subject, message):
        """Send *message* (HTML body) with *subject* to all admins.

        Delivery failures are logged and swallowed so that a broken mail
        server never aborts a backup run.
        """
        try:
            server = smtplib.SMTP_SSL('%s:%i' % (self.settings['SMTP'], self.settings['PORT']))
            server.login(self.settings['LOGIN'], self.settings['PASS'])

            msg = MIMEMultipart()
            msg['From'] = self.settings['EMAIL']
            msg['To'] = ", ".join(self.admins)
            msg['Subject'] = subject
            msg.attach(MIMEText(message, 'html'))

            server.sendmail(self.settings['EMAIL'], self.admins, msg.as_string())
            server.quit()
        except Exception:
            # was a bare 'except:' which also swallowed SystemExit and
            # KeyboardInterrupt; log the full traceback instead of only the type
            logging.exception("ERROR sending email")
|
|
@ -0,0 +1,29 @@
|
|||
|
||||
from pymssql import _mssql
|
||||
|
||||
|
||||
class SQLManager(object):
    """Runs the sp_BackupDatabases stored procedure against an MSSQL server."""

    commandTemplate = "EXEC dbo.sp_BackupDatabases @databaseName='%dbname%',@backupLocation='%path%', @backupType='%type%'"
    server = 'localhost'
    username = ''
    password = ''

    def setServer(self, server):
        self.server = server

    def setUsername(self, username):
        self.username = username

    def setPassword(self, password):
        self.password = password

    def makeBackup(self, database, type, path):
        """Back up *database* of the given *type* ('F'/'D'/'L') into *path*."""
        print("making backup: %s" % (database,))
        command = self.commandTemplate
        for token, value in (("%dbname%", database), ("%type%", type), ("%path%", path)):
            command = command.replace(token, value)
        with _mssql.connect(server=self.server, user=self.username, password=self.password, database=database) as db:
            db.execute_non_query(command)
|
|
@ -0,0 +1,146 @@
|
|||
|
||||
import easywebdav
|
||||
import ntpath
|
||||
|
||||
import datetime
|
||||
|
||||
import logging
|
||||
|
||||
class BackupStorer(object) :
    # Abstract base for backup storage backends; concrete subclasses must
    # implement store() and manageStorage().

    settings = {}  # backend configuration filled in by subclass setters
    path = '/'     # remote base path

    def __init__(self):
        return

    def store(self, fname, date=None, time=None, folder=None) :
        # Upload *fname* to remote storage; must be overridden.
        raise NotImplementedError("Call an abstract method")

    def manageStorage(self) :
        # Apply the retention policy to remote storage; must be overridden.
        raise NotImplementedError("Call an abstract method")
|
||||
|
||||
|
||||
class BackupStorerYandexDisk(BackupStorer):
    """Stores backup files on Yandex.Disk over WebDAV (easywebdav)."""

    webdav = None  # lazily created easywebdav client

    def __init__(self):
        super(BackupStorerYandexDisk, self).__init__()
        # BUGFIX: give every instance its own settings dict. The 'settings'
        # attribute inherited from BackupStorer is a single class-level dict,
        # so the setters below would otherwise leak configuration between all
        # storer instances.
        self.settings = {}

    def setDomain(self, domain):
        self.settings['domain'] = domain

    def setUsername(self, username):
        self.settings['username'] = username

    def setPassword(self, password):
        self.settings['password'] = password

    def setProtocol(self, protocol):
        self.settings['protocol'] = protocol

    def setCertificate(self, cert):
        self.settings['cert'] = cert

    def setDaysLeft(self, left_days):
        self.settings['left_days'] = left_days

    def setManageEnabled(self, enabled):
        self.settings['manage_enabled'] = enabled

    def setPath(self, path):
        # normalize: the remote base path always ends with a slash
        self.path = path
        if self.path[-1] != '/':
            self.path = "%s/" % (self.path,)

    def init(self):
        """Create the WebDAV client (with an optional client certificate)."""
        if 'cert' in self.settings.keys():
            self.webdav = easywebdav.connect(self.settings['domain'],
                                             username=self.settings['username'],
                                             password=self.settings['password'],
                                             protocol=self.settings['protocol'],
                                             cert=self.settings['cert']
                                             )
        else:
            self.webdav = easywebdav.connect(self.settings['domain'],
                                             username=self.settings['username'],
                                             password=self.settings['password'],
                                             protocol=self.settings['protocol']
                                             )

    def store(self, fname, nowDate, nowTime, folder=None):
        """Upload *fname* into <path>/<date>[/<folder>]/<time>/ and return
        the remote file name."""
        if not self.webdav:
            self.init()

        folderDate = "%s%s" % (self.path, nowDate)
        if folder:
            subfolder = "%s%s/%s" % (self.path, nowDate, folder)
            folderTime = "%s%s/%s/%s" % (self.path, nowDate, folder, nowTime)
        else:
            subfolder = "%s%s/%s" % (self.path, nowDate, nowTime)
            folderTime = "%s%s/%s" % (self.path, nowDate, nowTime)

        # create the date folder, the optional subfolder, then the time folder
        if not self.webdav.exists(folderDate):
            self.webdav.mkdir(folderDate)
            logging.info("WEBDAV: dir %s created" % (folderDate,))

        if folder and not self.webdav.exists(subfolder):
            self.webdav.mkdir(subfolder)
            logging.info("WEBDAV: dir %s created" % (subfolder,))

        if not self.webdav.exists(folderTime):
            self.webdav.mkdir(folderTime)
            logging.info("WEBDAV: dir %s created" % (folderTime,))

        localFname = ntpath.basename(fname)
        remoteFname = "%s/%s" % (folderTime, localFname)
        logging.info("uploading file: %s -> %s" % (fname, remoteFname))
        self.webdav.upload(fname, remoteFname)
        logging.info("upload completed: %s -> %s" % (fname, remoteFname))

        return remoteFname

    def manageStorage(self):
        """Delete remote day-folders older than the configured retention."""
        if not self.settings['manage_enabled']:
            return
        if not self.webdav:
            self.init()
        logging.info("LEFT SPACE MANAGEMENT")
        logging.info("planned to left %s days" % (self.settings['left_days'],))
        now = datetime.datetime.now()
        logging.info("now: %s" % (now,))
        nowDate = now.strftime("%Y%m%d")
        logging.info("nowDate: %s" % (nowDate,))
        logging.info("path: %s, webdav: %s" % (self.path, self.webdav))
        days = self.webdav.ls('/')
        logging.info("ls done")
        currDay = datetime.date(int(nowDate[0:4]), int(nowDate[4:6]), int(nowDate[6:8]))
        logging.info("currDay: %s" % (currDay,))

        for file in days:
            if file.name != '/':
                day = file.name.replace('/', '')
                try:
                    dayBup = datetime.date(int(day[0:4]), int(day[4:6]), int(day[6:8]))
                except ValueError:
                    # not a YYYYMMDD folder name — skip it silently
                    continue
                # BUGFIX: age in days taken from timedelta.days. The old code
                # parsed str(currDay - dayBup).split()[0], which for a same-day
                # folder yields "0:00:00", made int() raise, and the bare
                # 'except: pass' hid the failure.
                period = (currDay - dayBup).days
                try:
                    if period > int(self.settings['left_days']):
                        logging.info("deleting remote folder: %s" % (file.name,))
                        self.webdav.rmdir(file.name)
                    else:
                        logging.info("skipping remote folder: %s" % (file.name,))
                except Exception:
                    # best-effort: a failed delete must not abort the run
                    # (was a bare 'except: pass' — now at least logged)
                    logging.exception("failed to manage remote folder %s" % (file.name,))

        logging.info("store management done")
|
|
@ -0,0 +1,130 @@
|
|||
USE [master]
GO
/****** Object:  StoredProcedure [dbo].[sp_BackupDatabases]    Script Date: 02.06.2016 16:03:38 ******/
SET ANSI_NULLS ON
GO
SET QUOTED_IDENTIFIER ON
GO

-- =============================================
-- Author: Microsoft
-- Create date: 2010-02-06
-- Description: Backup Databases for SQLExpress
-- Parameter1: databaseName
-- Parameter2: backupType F=full, D=differential, L=log
-- Parameter3: backup file location
-- =============================================

ALTER PROCEDURE [dbo].[sp_BackupDatabases]
            @databaseName sysname = null,
            @backupType CHAR(1),
            @backupLocation nvarchar(200)
            --,@fileName nvarchar out
AS

       SET NOCOUNT ON;

            DECLARE @DBs TABLE
            (
                  ID int IDENTITY PRIMARY KEY,
                  DBNAME nvarchar(500)
            )

             -- Pick out only databases which are online in case ALL databases are chosen to be backed up
             -- If specific database is chosen to be backed up only pick that out from @DBs
             -- BUGFIX: parentheses added around the name filter. AND binds
             -- tighter than OR, so the original
             --   state=0 AND name=@databaseName OR @databaseName IS NULL
             -- also selected OFFLINE databases whenever @databaseName was NULL.
             INSERT INTO @DBs (DBNAME)
             SELECT Name FROM master.sys.databases
             WHERE state = 0
               AND (name = @databaseName OR @databaseName IS NULL)
             ORDER BY Name

             -- Filter out databases which do not need to backed up
             IF @backupType='F'
                   BEGIN
                   DELETE @DBs where DBNAME IN ('tempdb')
                   END
             ELSE IF @backupType='D'
                   BEGIN
                   DELETE @DBs where DBNAME IN ('tempdb','master','ou_test')
                   END
             ELSE IF @backupType='L'
                   BEGIN
                   DELETE @DBs where DBNAME IN ('tempdb','master','ou_test')
                   END
             ELSE
                   BEGIN
                   RETURN
                   END

             -- Declare variables
             DECLARE @BackupName varchar(100)
             DECLARE @BackupFile varchar(100)
             DECLARE @DBNAME varchar(300)
             DECLARE @sqlCommand NVARCHAR(1000)
             DECLARE @dateTime NVARCHAR(20)
             DECLARE @Loop int
             DECLARE @fileName nvarchar(4000)

             -- Loop through the databases one by one
             SELECT @Loop = min(ID) FROM @DBs

             SET @fileName = ''
             SET @BackupFile = ''

       WHILE @Loop IS NOT NULL
       BEGIN

       -- Database Names have to be in [dbname] format since some have - or _ in their name
             SET @DBNAME = '['+(SELECT DBNAME FROM @DBs WHERE ID = @Loop)+']'

       -- Set the current date and time n yyyyhhmmss format
             SET @dateTime = REPLACE(CONVERT(VARCHAR, GETDATE(),102),'.','') + '_' + REPLACE(CONVERT(VARCHAR, GETDATE(),108),':','')

       -- Create backup filename in path\filename.extension format for full,diff and log backups
             IF @backupType = 'F'
                   SET @BackupFile = @backupLocation+REPLACE(REPLACE(@DBNAME, '[',''),']','')+ '_FULL_'+ @dateTime+ '.bak'
             ELSE IF @backupType = 'D'
                   SET @BackupFile = @backupLocation+REPLACE(REPLACE(@DBNAME, '[',''),']','')+ '_DIFF_'+ @dateTime+ '.bak'
             ELSE IF @backupType = 'L'
                   SET @BackupFile = @backupLocation+REPLACE(REPLACE(@DBNAME, '[',''),']','')+ '_LOG_'+ @dateTime+ '.trn'

             --PRINT @BackupFile
             SET @fileName = @fileName+ ','+ @BackupFile;
             --PRINT @fileName;

       -- Provide the backup a name for storing in the media
             IF @backupType = 'F'
                   SET @BackupName = REPLACE(REPLACE(@DBNAME,'[',''),']','') +' full backup for '+ @dateTime
             IF @backupType = 'D'
                   SET @BackupName = REPLACE(REPLACE(@DBNAME,'[',''),']','') +' differential backup for '+ @dateTime
             IF @backupType = 'L'
                   SET @BackupName = REPLACE(REPLACE(@DBNAME,'[',''),']','') +' log backup for '+ @dateTime

       -- Generate the dynamic SQL command to be executed

             IF @backupType = 'F'
                   BEGIN
                         SET @sqlCommand = 'BACKUP DATABASE ' +@DBNAME+ ' TO DISK = '''+@BackupFile+ ''' WITH INIT, NAME= ''' +@BackupName+''', NOSKIP, NOFORMAT'
                   END
             IF @backupType = 'D'
                   BEGIN
                         SET @sqlCommand = 'BACKUP DATABASE ' +@DBNAME+ ' TO DISK = '''+@BackupFile+ ''' WITH DIFFERENTIAL, INIT, NAME= ''' +@BackupName+''', NOSKIP, NOFORMAT'
                   END
             IF @backupType = 'L'
                   BEGIN
                         SET @sqlCommand = 'BACKUP LOG ' +@DBNAME+ ' TO DISK = '''+@BackupFile+ ''' WITH INIT, NAME= ''' +@BackupName+''', NOSKIP, NOFORMAT'
                   END

       --PRINT @sqlCommand
       -- Execute the generated SQL command
       EXEC(@sqlCommand)

       -- Goto the next database
       SELECT @Loop = min(ID) FROM @DBs where ID>@Loop

       END

       SET @fileName = SUBSTRING(@fileName, 2, LEN(@fileName)-1)
       PRINT 'RESULT:'
       PRINT @fileName;
|
|
@ -0,0 +1,66 @@
|
|||
|
||||
# Central configuration for the backuper (both watcher and worker modes).
settings = {

    # SMTP account used by Reporter for status e-mails.
    'email' : {
        'LOGIN' : "user@yandex.ru",
        'EMAIL' : "user@yandex.ru",
        'SMTP' : "smtp.yandex.ru",
        'PORT' : 465,
        'PASS' : "password",
    },

    # Log file written by logging.basicConfig at startup.
    'log' : 'backuper.log',

    # Recipients of the e-mail reports.
    'admins' : [
        'report@gmail.com',
    ],

    # Available storage backends; selected by the 'storer' key below.
    # NOTE(review): Watcher.init also reads 'manage_enabled' from this block,
    # which is missing here — verify before running in watcher mode.
    'storers' : {
        'ya_disk' : {
            'left_days' : '5',  # remote retention period in days
            'path' : '/',
            'account' : {
                'domain' : 'webdav.yandex.ru',
                'protocol' : 'https',
                'username' : 'user@yandex.ru',
                'password' : 'password',
            },
        },
    },

    'storer' : 'ya_disk',

    # MSSQL connection and the list of databases to back up.
    'sql' : {
        'server' : 'localhost',
        'username' : 'sa',
        'password' : 'pass_for_sa',
        'databases' : ['ou', 'hrm', 'buh', 'buh_fo'],
    },

    #'local_path' : 'E:\\ftp\\bvn13\\1\\',
    'local_path' : 'D:\\dev\\bup2yadisk\\',

    # Directory layout watched in watcher mode (full/diff sub-directories).
    'watcher' : {
        'path' : 'D:\\dev\\bup2yadisk\\',
        'dir_full' : 'full',
        'dir_diff' : 'diff',
    },

    'file_mask' : '*.bak',

    # Whether to zip dumps before uploading ('yes'/'no').
    'zip' : {
        'make' : 'no',
    },

    # Legacy keys kept for the old one-shot script.
    'path' : {
        #'local' : 'E:\\backup_1C\\sql\\', #ends with slash
        'local' : 'E:\\ftp\\bvn13\\1\\',
        'remote' : '/', #ends with slash
        'fmask' : '*.bak',
        'allmask' : '*.bak',
    },

}
|
|
@ -0,0 +1,33 @@
|
|||
|
||||
# Configuration for the legacy one-shot backup script.
settings = {
    # Local/remote paths and file masks.
    'path' : {
        #'local' : 'E:\\backup_1C\\sql\\', #ends with slash
        'local' : 'E:\\ftp\\bvn13\\1\\',
        'remote' : '/', #ends with slash
        'fmask' : '*.bak',
        'allmask' : '*.bak',
    },
    # WebDAV account for Yandex.Disk.
    'account' : {
        'domain' : 'webdav.yandex.ru',
        'protocol' : 'https',
        'login' : 'user@yandex.ru',
        'pass' : 'pass_for_webdav',
    },
    # Remote retention period in days.
    'space' : {
        'left_days' : '5',
    },
    # Whether to zip dumps before uploading ('yes'/'no').
    'zip' : {
        'make' : 'no',
    },
    # MSSQL credentials and the backup stored-procedure call template.
    'sql' : {
        'username' : 'sa',
        'password' : 'pass_for_sa',
        'command_full' : {
            'query' : "EXEC dbo.sp_BackupDatabases @databaseName='%dbname%',@backupLocation='%path%', @backupType='F'",
            'procedure' : "dbo.sp_BackupDatabases",
            #'backupLocation' : 'E:\\backup_1C\\sql\\', #@databaseName='%dbname%',@backupLocation='E:\\backup_1C\\sql\\', @backupType='F'", #two params included!
            'backupType' : 'F',
        },
        'databases' : ['ou', 'hrm', 'buh', 'buh_fo'],
    }
}
|
|
@ -0,0 +1,22 @@
|
|||
import easywebdav
|
||||
import ntpath
|
||||
|
||||
import datetime
|
||||
|
||||
|
||||
if __name__ == '__main__' :
    # Manual smoke test for the WebDAV connection: lists the day-folders at
    # the root of the remote storage.
    print("START TEST")

    # retention period in days (declared but not used further below)
    left_days = 150

    webdav = easywebdav.connect('webdav.yandex.ru',
                                username='user@yandex.ru',
                                password='passworx',
                                protocol='https'
                                )

    now = datetime.datetime.now()
    nowDate = now.strftime("%Y%m%d")
    #webdav.cd('/')
    days = webdav.ls('/')
    print("days dirs: %s" % (days,))
|
Loading…
Reference in New Issue