logrotate

bigeagle 2014-11-15 16:53:51 +08:00
parent 4192183fcf
commit 8be110ebf9
6 changed files with 115 additions and 26 deletions


@@ -30,20 +30,20 @@ name = "arch1"
 provider = "shell"
 command = "sleep 10"
 local_dir = "/mnt/sdb1/mirror/archlinux/current/"
-log_file = "/dev/null"
+# log_file = "/dev/null"
 
 [[mirrors]]
 name = "arch2"
 provider = "shell"
 command = "sleep 20"
 local_dir = "/mnt/sdb1/mirror/archlinux/current/"
-log_file = "/dev/null"
+# log_file = "/dev/null"
 
 [[mirrors]]
 name = "arch4"
 provider = "shell"
 command = "./shell_provider.sh"
-log_file = "/tmp/arch4-{date}.log"
+# log_file = "/tmp/arch4-{date}.log"
 use_btrfs = false
 
 # vim: ft=toml


@@ -17,12 +17,14 @@ class BtrfsHook(JobHook):
         self.working_dir = working_dir
         self.gc_dir = gc_dir
 
-    def before_job(self, *args, **kwargs):
+    def before_job(self, ctx={}, *args, **kwargs):
         self._create_working_snapshot()
+        ctx['current_dir'] = self.working_dir
 
-    def after_job(self, status=None, *args, **kwargs):
+    def after_job(self, status=None, ctx={}, *args, **kwargs):
         if status == "success":
             self._commit_changes()
+        ctx['current_dir'] = self.service_dir
 
     def _ensure_subvolume(self):
         # print(self.service_dir)


@@ -30,9 +30,10 @@ def run_job(sema, child_q, manager_q, provider, **settings):
         status = "syncing"
         manager_q.put(("UPDATE", (provider.name, status)))
 
+        ctx = {}  # put context info in it
         try:
             for hook in provider.hooks:
-                hook.before_job(name=provider.name)
+                hook.before_job(provider=provider, ctx=ctx)
         except Exception:
             import traceback
             traceback.print_exc()
@@ -40,7 +41,7 @@ def run_job(sema, child_q, manager_q, provider, **settings):
         else:
             for retry in range(max_retry):
                 print("start syncing {}, retry: {}".format(provider.name, retry))
-                provider.run()
+                provider.run(ctx=ctx)
                 status = "success"
                 try:
@@ -53,7 +54,7 @@ def run_job(sema, child_q, manager_q, provider, **settings):
         try:
             for hook in provider.hooks[::-1]:
-                hook.after_job(name=provider.name, status=status)
+                hook.after_job(provider=provider, status=status, ctx=ctx)
         except Exception:
             import traceback
             traceback.print_exc()
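For reference, the ctx dict created in run_job above is what ties this commit together: it is built once per job, handed to every hook's before_job, to provider.run(), and finally to after_job, so a value stored early (log_file by LogLimitHook, current_dir by BtrfsHook) is visible to the later stages. A minimal, self-contained sketch of that handshake (EchoHook and EchoProvider are hypothetical stand-ins, not tunasync classes):

# Hypothetical stand-ins; they only show how the per-job ctx dict is
# shared between hooks and the provider, mirroring run_job above.
class EchoHook(object):
    def before_job(self, provider, ctx={}, *args, **kwargs):
        ctx['log_file'] = "/tmp/{0}.log".format(provider.name)  # stash a value

    def after_job(self, provider, status=None, ctx={}, *args, **kwargs):
        print("{0} finished: {1}, log: {2}".format(
            provider.name, status, ctx.get('log_file')))


class EchoProvider(object):
    name = "demo"
    hooks = [EchoHook()]

    def run(self, ctx={}):
        # the provider sees whatever before_job put into ctx
        print("syncing {0}, logging to {1}".format(self.name, ctx['log_file']))


provider = EchoProvider()
ctx = {}  # one dict per job run, exactly as in run_job
for hook in provider.hooks:
    hook.before_job(provider=provider, ctx=ctx)
provider.run(ctx=ctx)
for hook in provider.hooks[::-1]:
    hook.after_job(provider=provider, status="success", ctx=ctx)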

tunasync/loglimit.py (new file, +71 lines)

@@ -0,0 +1,71 @@
+#!/usr/bin/env python2
+# -*- coding:utf-8 -*-
+import sh
+import os
+from .hook import JobHook
+from datetime import datetime
+
+
+class LogLimitHook(JobHook):
+
+    def __init__(self, limit=10):
+        self.limit = limit
+
+    def before_job(self, provider, ctx={}, *args, **kwargs):
+        log_dir = provider.log_dir
+        self.ensure_log_dir(log_dir)
+        log_file = provider.log_file.format(
+            date=datetime.now().strftime("%Y-%m-%d_%H-%M"))
+        ctx['log_file'] = log_file
+        if log_file == "/dev/null":
+            return
+
+        log_link = os.path.join(log_dir, "latest")
+        lfiles = [os.path.join(log_dir, lfile)
+                  for lfile in os.listdir(log_dir)
+                  if lfile.startswith(provider.name)]
+        lfiles_set = set(lfiles)
+
+        # sort to get the newest 10 files
+        lfiles_ts = sorted(
+            [(os.path.getmtime(lfile), lfile) for lfile in lfiles],
+            key=lambda x: x[0],
+            reverse=True)
+        lfiles_keep = set([x[1] for x in lfiles_ts[:self.limit]])
+        lfiles_rm = lfiles_set - lfiles_keep
+
+        # remove old files
+        for lfile in lfiles_rm:
+            try:
+                sh.rm(lfile)
+            except:
+                pass
+
+        # create a soft link
+        if log_link != log_file:
+            if os.path.exists(log_link):
+                try:
+                    sh.rm(log_link)
+                except:
+                    return
+            try:
+                sh.ln('-s', log_file, log_link)
+            except:
+                return
+
+    def after_job(self, status=None, ctx={}, *args, **kwargs):
+        log_file = ctx.get('log_file', None)
+        if log_file == "/dev/null":
+            return
+        if status == "fail":
+            log_file_save = log_file + ".fail"
+            try:
+                sh.mv(log_file, log_file_save)
+            except:
+                pass
+
+    def ensure_log_dir(self, log_dir):
+        if not os.path.exists(log_dir):
+            sh.mkdir("-p", log_dir)
+
+# vim: ts=4 sw=4 sts=4 expandtab
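In short, before_job picks this run's log file name and stores it in ctx, prunes all but the newest `limit` logs whose names start with the mirror name, and repoints a "latest" symlink; after_job renames the log to *.fail when the sync failed. A rough usage sketch against a temporary directory (the namedtuple provider is a hypothetical stub; only name, log_dir and log_file are needed):

import os
import tempfile
from collections import namedtuple
from tunasync.loglimit import LogLimitHook

# hypothetical stub provider; the real one is a MirrorProvider subclass
Provider = namedtuple("Provider", ["name", "log_dir", "log_file"])

tmp = tempfile.mkdtemp()
p = Provider(name="arch",
             log_dir=tmp,
             log_file=os.path.join(tmp, "arch_{date}.log"))

hook = LogLimitHook(limit=10)
ctx = {}
hook.before_job(provider=p, ctx=ctx)    # trims old arch_* logs, links "latest"
open(ctx['log_file'], 'a').close()      # pretend the sync wrote something
hook.after_job(provider=p, status="fail", ctx=ctx)  # renamed to arch_<date>.log.fail
print(os.listdir(tmp))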


@@ -3,6 +3,7 @@
 import os
 from .mirror_provider import RsyncProvider, ShellProvider
 from .btrfs_snapshot import BtrfsHook
+from .loglimit import LogLimitHook
 
 
 class MirrorConfig(object):
@@ -38,10 +39,13 @@ class MirrorConfig(object):
         assert isinstance(self.options["interval"], int)
 
-        log_dir = self._popt["global"]["log_dir"]
+        log_dir = self.options.get(
+            "log_dir", self._popt["global"]["log_dir"])
 
         if "log_file" not in self.options:
             self.options["log_file"] = os.path.join(
-                log_dir, self.name, "{date}.log")
+                log_dir, self.name, self.name + "_{date}.log")
+
+        self.log_dir = os.path.dirname(self.log_file)
 
         if "use_btrfs" not in self.options:
             self.options["use_btrfs"] = self._parent.use_btrfs
@@ -59,6 +63,7 @@ class MirrorConfig(object):
                 self.name,
                 self.upstream,
                 self.local_dir,
+                self.log_dir,
                 self.use_ipv6,
                 self.password,
                 self.exclude_file,
@@ -71,6 +76,7 @@ class MirrorConfig(object):
                 self.name,
                 self.command,
                 self.local_dir,
+                self.log_dir,
                 self.log_file,
                 self.interval,
                 hooks
@@ -105,6 +111,7 @@ class MirrorConfig(object):
             )
             hooks.append(BtrfsHook(service_dir, working_dir, gc_dir))
 
+        hooks.append(LogLimitHook())
         return hooks
 
 # vim: ts=4 sw=4 sts=4 expandtab
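Concretely, a mirror that does not set log_file now gets a per-mirror log directory and a file name prefixed with the mirror name, which is exactly the prefix LogLimitHook matches when pruning old logs. A small worked example (the /var/log/tunasync path and the "archlinux" name are illustrative, not taken from this commit):

import os
from datetime import datetime

log_dir = "/var/log/tunasync"      # example global log_dir
name = "archlinux"                 # example mirror name

log_file = os.path.join(log_dir, name, name + "_{date}.log")
print(log_file)                    # /var/log/tunasync/archlinux/archlinux_{date}.log
print(os.path.dirname(log_file))   # becomes self.log_dir / provider.log_dir

# LogLimitHook (or MirrorProvider.get_log_file) fills in the date:
print(log_file.format(date=datetime.now().strftime("%Y-%m-%d_%H-%M")))
# e.g. /var/log/tunasync/archlinux/archlinux_2014-11-15_16-53.log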


@@ -10,21 +10,32 @@ class MirrorProvider(object):
     Mirror method class, can be `rsync', `debmirror', etc.
     '''
 
-    def __init__(self, name, local_dir, log_file="/dev/null",
+    def __init__(self, name, local_dir, log_dir, log_file="/dev/null",
                  interval=120, hooks=[]):
         self.name = name
         self.local_dir = local_dir
         self.log_file = log_file
+        self.log_dir = log_dir
         self.interval = interval
         self.hooks = hooks
         self.p = None
 
+    # deprecated
     def ensure_log_dir(self):
         log_dir = os.path.dirname(self.log_file)
         if not os.path.exists(log_dir):
             sh.mkdir("-p", log_dir)
 
-    def run(self):
+    def get_log_file(self, ctx={}):
+        if 'log_file' in ctx:
+            log_file = ctx['log_file']
+        else:
+            now = datetime.now().strftime("%Y-%m-%d_%H")
+            log_file = self.log_file.format(date=now)
+            ctx['log_file'] = log_file
+        return log_file
+
+    def run(self, ctx={}):
         raise NotImplementedError("run method should be implemented")
 
     def terminate(self):
@@ -44,10 +55,10 @@ class RsyncProvider(MirrorProvider):
     _default_options = \
         "-aHvh --stats --delete-after --timeout=120 --contimeout=120"
 
-    def __init__(self, name, upstream_url, local_dir, useIPv6=True,
-                 password=None, exclude_file=None, log_file="/dev/null",
-                 interval=120, hooks=[]):
-        super(RsyncProvider, self).__init__(name, local_dir, log_file,
+    def __init__(self, name, upstream_url, local_dir, log_dir,
+                 useIPv6=True, password=None, exclude_file=None,
+                 log_file="/dev/null", interval=120, hooks=[]):
+        super(RsyncProvider, self).__init__(name, local_dir, log_dir, log_file,
                                             interval, hooks)
 
         self.upstream_url = upstream_url
@@ -69,14 +80,12 @@ class RsyncProvider(MirrorProvider):
         return _options
 
-    def run(self):
-        self.ensure_log_dir()
+    def run(self, ctx={}):
         _args = self.options
         _args.append(self.upstream_url)
         _args.append(self.local_dir)
 
-        now = datetime.now().strftime("%Y-%m-%d_%H")
-        log_file = self.log_file.format(date=now)
+        log_file = self.get_log_file(ctx)
 
         new_env = os.environ.copy()
         if self.password is not None:
             new_env["RSYNC_PASSWORD"] = self.password
@@ -87,17 +96,16 @@
 class ShellProvider(MirrorProvider):
 
-    def __init__(self, name, command, local_dir,
+    def __init__(self, name, command, local_dir, log_dir,
                  log_file="/dev/null", interval=120, hooks=[]):
-        super(ShellProvider, self).__init__(name, local_dir, log_file,
+        super(ShellProvider, self).__init__(name, local_dir, log_dir, log_file,
                                             interval, hooks)
 
         self.command = command.split()
 
-    def run(self):
-        self.ensure_log_dir()
-        now = datetime.now().strftime("%Y-%m-%d_%H")
-        log_file = self.log_file.format(date=now)
+    def run(self, ctx={}):
+        log_file = self.get_log_file(ctx)
 
         new_env = os.environ.copy()
         new_env["TUNASYNC_MIRROR_NAME"] = self.name