Mirror of https://github.com/tuna/tunasync-scripts.git, synced 2025-07-01 07:22:45 +00:00
Commit 1859fca56e: imported from tunasync

anaconda.sh (Executable file, 96 lines)
@@ -0,0 +1,96 @@
#!/bin/bash
# requires: wget, lftp, jq
#

set -e
set -o pipefail

CONDA_REPO_BASE=${CONDA_REPO_BASE:-"http://repo.continuum.io"}
LOCAL_DIR_BASE="${TUNASYNC_WORKING_DIR}/pkgs"
TMP_DIR=$(mktemp -d)

CONDA_REPOS=("free" "r" "mro" "pro")
CONDA_ARCHES=("linux-64" "linux-32" "linux-armv6l" "linux-armv7l" "linux-ppc64le" "osx-64" "osx-32" "win-64" "win-32")

function check-and-download() {
    remote_file=$1
    local_file=$2
    wget -q --spider ${remote_file}
    if [ $? -eq 0 ]; then
        echo "downloading ${remote_file}"
        wget -q -N -O ${local_file} ${remote_file}
        return
    fi
    return 1
}

function cleanup () {
    echo "cleaning up"
    [ -d ${TMP_DIR} ] && {
        [ -f ${TMP_DIR}/repodata.json ] && rm ${TMP_DIR}/repodata.json
        [ -f ${TMP_DIR}/repodata.json.bz2 ] && rm ${TMP_DIR}/repodata.json.bz2
        rmdir ${TMP_DIR}
    }
}

trap cleanup EXIT

echo ${TMP_DIR}

for repo in ${CONDA_REPOS[@]}; do
    for arch in ${CONDA_ARCHES[@]}; do
        PKG_REPO_BASE="${CONDA_REPO_BASE}/pkgs/$repo/$arch"
        repodata_url="${PKG_REPO_BASE}/repodata.json"
        bz2_repodata_url="${PKG_REPO_BASE}/repodata.json.bz2"
        LOCAL_DIR="${LOCAL_DIR_BASE}/$repo/$arch"
        [ ! -d ${LOCAL_DIR} ] && mkdir -p ${LOCAL_DIR}
        tmp_repodata="${TMP_DIR}/repodata.json"
        tmp_bz2_repodata="${TMP_DIR}/repodata.json.bz2"

        check-and-download ${repodata_url} ${tmp_repodata}
        check-and-download ${bz2_repodata_url} ${tmp_bz2_repodata}

        jq_cmd='.packages | to_entries[] | [.key, .value.size, .value.md5] | map(tostring) | join(" ")'
        bzip2 -c -d ${tmp_bz2_repodata} | jq -r "${jq_cmd}" | while read line;
        do
            read -a tokens <<< $line
            pkgfile=${tokens[0]}
            pkgsize=${tokens[1]}
            pkgmd5=${tokens[2]}

            pkg_url="${PKG_REPO_BASE}/${pkgfile}"
            dest_file="${LOCAL_DIR}/${pkgfile}"

            declare downloaded=false
            if [ -f ${dest_file} ]; then
                rsize=`stat -c "%s" ${dest_file}`
                if [ ${rsize} -eq ${pkgsize} ]; then
                    downloaded=true
                    echo "Skipping ${pkgfile}, size ${pkgsize}"
                fi
            fi
            while [ $downloaded != true ]; do
                echo "downloading ${pkg_url}"
                wget -q -O ${dest_file} ${pkg_url} && {
                    # two spaces for md5sum check format
                    echo "${pkgmd5}  ${dest_file}" | md5sum -c - && downloaded=true
                }
            done
        done

        mv -f "${TMP_DIR}/repodata.json" "${LOCAL_DIR}/repodata.json"
        mv -f "${TMP_DIR}/repodata.json.bz2" "${LOCAL_DIR}/repodata.json.bz2"
    done
done

function sync_installer() {
    repo_url="$1"
    repo_dir="$2"

    [ ! -d "$repo_dir" ] && mkdir -p "$repo_dir"
    cd $repo_dir
    lftp "${repo_url}/" -e "mirror --verbose -P 5; bye"
}

sync_installer "${CONDA_REPO_BASE}/archive/" "${TUNASYNC_WORKING_DIR}/archive/"
sync_installer "${CONDA_REPO_BASE}/miniconda/" "${TUNASYNC_WORKING_DIR}/miniconda/"
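
To make the jq filter in anaconda.sh concrete: fed a repodata.json, it flattens each package entry into a "name size md5" line, which the `read -a tokens` loop above then splits. A minimal sketch with a made-up, heavily trimmed package entry:

    echo '{"packages": {"zlib-1.2.8-0.tar.bz2": {"size": 101289, "md5": "0123456789abcdef0123456789abcdef"}}}' |
        jq -r '.packages | to_entries[] | [.key, .value.size, .value.md5] | map(tostring) | join(" ")'
    # prints: zlib-1.2.8-0.tar.bz2 101289 0123456789abcdef0123456789abcdef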

aosp.sh (Executable file, 21 lines)
@@ -0,0 +1,21 @@
#!/bin/bash

REPO=${REPO:-"/usr/local/bin/repo"}

function repo_init() {
    mkdir -p $TUNASYNC_WORKING_DIR
    cd $TUNASYNC_WORKING_DIR
    $REPO init -u https://android.googlesource.com/mirror/manifest --mirror
}

function repo_sync() {
    cd $TUNASYNC_WORKING_DIR
    $REPO sync -f
}

if [ ! -d "$TUNASYNC_WORKING_DIR/git-repo.git" ]; then
    echo "Initializing AOSP mirror"
    repo_init
fi

repo_sync
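
A hypothetical manual invocation of aosp.sh, using only the two variables the script reads (tunasync normally supplies TUNASYNC_WORKING_DIR; both paths below are made up):

    TUNASYNC_WORKING_DIR=/data/mirrors/aosp REPO=/usr/local/bin/repo ./aosp.sh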

docker.sh (Executable file, 59 lines)
@@ -0,0 +1,59 @@
#!/bin/bash
# requires: wget, yum-utils

set -e
set -o pipefail

_here=`dirname $(realpath $0)`
. ${_here}/helpers/apt-download
APT_VERSIONS=("debian-wheezy" "debian-jessie" "ubuntu-precise" "ubuntu-trusty" "ubuntu-xenial")

BASE_PATH="${TUNASYNC_WORKING_DIR}"
APT_PATH="${BASE_PATH}/apt/repo"
YUM_PATH="${BASE_PATH}/yum/repo"

mkdir -p ${APT_PATH} ${YUM_PATH}

wget -q -N -O ${BASE_PATH}/yum/gpg https://yum.dockerproject.org/gpg
wget -q -N -O ${BASE_PATH}/apt/gpg https://apt.dockerproject.org/gpg

# YUM mirror
cache_dir="/tmp/yum-docker-cache/"
cfg="/tmp/docker-yum.conf"
cat <<EOF > ${cfg}
[main]
keepcache=0

[centos6]
name=Docker Repository
baseurl=https://yum.dockerproject.org/repo/main/centos/6
enabled=1
gpgcheck=0
gpgkey=https://yum.dockerproject.org/gpg
sslverify=0

[centos7]
name=Docker Repository
baseurl=https://yum.dockerproject.org/repo/main/centos/7
enabled=1
gpgcheck=0
gpgkey=https://yum.dockerproject.org/gpg
sslverify=0
EOF

[ ! -d ${YUM_PATH}/centos6 ] && mkdir -p ${YUM_PATH}/centos6
[ ! -d ${YUM_PATH}/centos7 ] && mkdir -p ${YUM_PATH}/centos7
reposync -c $cfg -d -p ${YUM_PATH} -e $cache_dir
createrepo --update -v -c $cache_dir -o ${YUM_PATH}/centos6 ${YUM_PATH}/centos6
createrepo --update -v -c $cache_dir -o ${YUM_PATH}/centos7 ${YUM_PATH}/centos7
rm $cfg

# APT mirror
base_url="http://apt.dockerproject.org/repo"
for version in ${APT_VERSIONS[@]}; do
    apt-download-binary ${base_url} "$version" "main" "amd64" "${APT_PATH}" || true
    apt-download-binary ${base_url} "$version" "main" "i386" "${APT_PATH}" || true
done

# sync_docker "http://apt.dockerproject.org/" "${TUNASYNC_WORKING_DIR}/apt"
# sync_docker "http://yum.dockerproject.org/" "${TUNASYNC_WORKING_DIR}/yum"
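
For readers hitting this file first: apt-download-binary (defined in helpers/apt-download below) takes five positional arguments: base_url, dist, repo (the component, e.g. "main"), arch, and dest_base_dir. A standalone sketch with a hypothetical destination directory:

    # apt-download-binary <base_url> <dist> <repo/component> <arch> <dest_base_dir>
    apt-download-binary "http://apt.dockerproject.org/repo" "ubuntu-xenial" "main" "amd64" "/tmp/docker-apt-mirror" || true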

excludes/gen_debian_exclude.py (Normal file, 13 lines)
@@ -0,0 +1,13 @@
#!/usr/bin/env python

ARCH_EXCLUDE = ['armel', 'alpha', 'hurd-i386', 'ia64', 'kfreebsd-amd64', 'kfreebsd-i386', 'mips', 'powerpc', 'ppc64el', 's390', 's390x', 'sparc']

CONTENT_EXCLUDE = ['binary-{arch}', 'installer-{arch}', 'Contents-{arch}.gz', 'Contents-udeb-{arch}.gz', 'Contents-{arch}.diff', 'arch-{arch}.files', 'arch-{arch}.list.gz', '*_{arch}.deb', '*_{arch}.udeb', '*_{arch}.changes']

with open("debian-exclude.txt", 'wb') as f:
    f.write(".~tmp~/\n")
    f.write(".*\n")
    for arch in ARCH_EXCLUDE:
        for content in CONTENT_EXCLUDE:
            f.write(content.format(arch=arch))
            f.write('\n')

excludes/gen_kali_exclude.py (Normal file, 13 lines)
@@ -0,0 +1,13 @@
#!/usr/bin/env python

ARCH_EXCLUDE = ['armel', 'armhf']

CONTENT_EXCLUDE = ['binary-{arch}', 'installer-{arch}', 'Contents-{arch}.gz', 'Contents-udeb-{arch}.gz', 'Contents-{arch}.diff', 'arch-{arch}.files', 'arch-{arch}.list.gz', '*_{arch}.deb', '*_{arch}.udeb', '*_{arch}.changes']

with open("kali-exclude.txt", 'wb') as f:
    f.write(".~tmp~/\n")
    f.write(".*\n")
    for arch in ARCH_EXCLUDE:
        for content in CONTENT_EXCLUDE:
            f.write(content.format(arch=arch))
            f.write('\n')

excludes/gen_ubuntu_ports_exclude.py (Normal file, 13 lines)
@@ -0,0 +1,13 @@
#!/usr/bin/env python

ARCH_EXCLUDE = ['powerpc', 'ppc64el', 'ia64', 'sparc', 'armel']

CONTENT_EXCLUDE = ['binary-{arch}', 'installer-{arch}', 'Contents-{arch}.gz', 'Contents-udeb-{arch}.gz', 'Contents-{arch}.diff', 'arch-{arch}.files', 'arch-{arch}.list.gz', '*_{arch}.deb', '*_{arch}.udeb', '*_{arch}.changes']

with open("ubuntu-ports-exclude.txt", 'wb') as f:
    f.write(".~tmp~/\n")
    f.write(".*\n")
    for arch in ARCH_EXCLUDE:
        for content in CONTENT_EXCLUDE:
            f.write(content.format(arch=arch))
            f.write('\n')
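
For illustration, running one of the generators above (under Python 2, which the 'wb' mode plus str writes imply) produces an exclude list, presumably fed to the rsync jobs, whose first entries look like this:

    cd excludes && python gen_debian_exclude.py && head -n 6 debian-exclude.txt
    # .~tmp~/
    # .*
    # binary-armel
    # installer-armel
    # Contents-armel.gz
    # Contents-udeb-armel.gz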

gitlab-ce.sh (Executable file, 65 lines)
@@ -0,0 +1,65 @@
#!/bin/bash
set -e

_here=`dirname $(realpath $0)`
. ${_here}/helpers/apt-download

[ -z "${LOADED_APT_DOWNLOAD}" ] && (echo "failed to load apt-download"; exit 1)

BASE_PATH="${TUNASYNC_WORKING_DIR}"

YUM_PATH="${BASE_PATH}/yum"

UBUNTU_VERSIONS=("trusty" "wily")
DEBIAN_VERSIONS=("wheezy" "jessie" "stretch")
UBUNTU_PATH="${BASE_PATH}/ubuntu/"
DEBIAN_PATH="${BASE_PATH}/debian/"

mkdir -p $UBUNTU_PATH $DEBIAN_PATH $YUM_PATH

cache_dir="/tmp/yum-gitlab-ce-cache/"
cfg="/tmp/gitlab-ce-yum.conf"
cat <<EOF > ${cfg}
[main]
keepcache=0

[el6]
name=el6
baseurl=https://packages.gitlab.com/gitlab/gitlab-ce/el/6/x86_64
repo_gpgcheck=0
gpgcheck=0
enabled=1
gpgkey=https://packages.gitlab.com/gpg.key
sslverify=0

[el7]
name=el7
baseurl=https://packages.gitlab.com/gitlab/gitlab-ce/el/7/x86_64
repo_gpgcheck=0
gpgcheck=0
enabled=1
gpgkey=https://packages.gitlab.com/gpg.key
sslverify=0
EOF

reposync -c $cfg -d -p ${YUM_PATH} -e $cache_dir
createrepo --update -v -c $cache_dir -o ${YUM_PATH}/el6 ${YUM_PATH}/el6
createrepo --update -v -c $cache_dir -o ${YUM_PATH}/el7 ${YUM_PATH}/el7
rm $cfg

base_url="https://packages.gitlab.com/gitlab/gitlab-ce/ubuntu"
for version in ${UBUNTU_VERSIONS[@]}; do
    apt-download-binary ${base_url} "$version" "main" "amd64" "${UBUNTU_PATH}" || true
    apt-download-binary ${base_url} "$version" "main" "i386" "${UBUNTU_PATH}" || true
done
echo "Ubuntu finished"

base_url="https://packages.gitlab.com/gitlab/gitlab-ce/debian"
for version in ${DEBIAN_VERSIONS[@]}; do
    apt-download-binary ${base_url} "$version" "main" "amd64" "${DEBIAN_PATH}" || true
    apt-download-binary ${base_url} "$version" "main" "i386" "${DEBIAN_PATH}" || true
done
echo "Debian finished"


# vim: ts=4 sts=4 sw=4

gitlab-ci-multi-runner.sh (Executable file, 69 lines)
@@ -0,0 +1,69 @@
#!/bin/bash
# requires: wget, yum-utils
set -e
set -o pipefail

_here=`dirname $(realpath $0)`
. ${_here}/helpers/apt-download

[ -z "${LOADED_APT_DOWNLOAD}" ] && (echo "failed to load apt-download"; exit 1)

BASE_PATH="${TUNASYNC_WORKING_DIR}"

YUM_PATH="${BASE_PATH}/yum"

UBUNTU_VERSIONS=("trusty" "xenial")
DEBIAN_VERSIONS=("wheezy" "jessie" "stretch")
UBUNTU_PATH="${BASE_PATH}/ubuntu/"
DEBIAN_PATH="${BASE_PATH}/debian/"

mkdir -p $UBUNTU_PATH $DEBIAN_PATH $YUM_PATH

cache_dir="/tmp/yum-gitlab-runner-cache/"
cfg="/tmp/gitlab-runner-yum.conf"
cat <<EOF > ${cfg}
[main]
keepcache=0

[el6]
name=gitlab-ci-multi-runner-el6
baseurl=https://packages.gitlab.com/runner/gitlab-ci-multi-runner/el/6/x86_64
repo_gpgcheck=0
gpgcheck=0
enabled=1
gpgkey=https://packages.gitlab.com/gpg.key
sslverify=0

[el7]
name=gitlab-ci-multi-runner-el7
baseurl=https://packages.gitlab.com/runner/gitlab-ci-multi-runner/el/7/x86_64
repo_gpgcheck=0
gpgcheck=0
enabled=1
gpgkey=https://packages.gitlab.com/gpg.key
sslverify=0
EOF

reposync -c $cfg -d -p ${YUM_PATH} -e $cache_dir
[ ! -d ${YUM_PATH}/el6 ] && mkdir -p ${YUM_PATH}/el6
[ ! -d ${YUM_PATH}/el7 ] && mkdir -p ${YUM_PATH}/el7
createrepo --update -v -c $cache_dir -o ${YUM_PATH}/el6 ${YUM_PATH}/el6
createrepo --update -v -c $cache_dir -o ${YUM_PATH}/el7 ${YUM_PATH}/el7
rm $cfg

base_url="https://packages.gitlab.com/runner/gitlab-ci-multi-runner/ubuntu"
for version in ${UBUNTU_VERSIONS[@]}; do
    apt-download-binary ${base_url} "$version" "main" "amd64" "${UBUNTU_PATH}" || true
    apt-download-binary ${base_url} "$version" "main" "i386" "${UBUNTU_PATH}" || true
done
echo "Ubuntu finished"

base_url="https://packages.gitlab.com/runner/gitlab-ci-multi-runner/debian"
for version in ${DEBIAN_VERSIONS[@]}; do
    apt-download-binary ${base_url} "$version" "main" "amd64" "${DEBIAN_PATH}" || true
    apt-download-binary ${base_url} "$version" "main" "i386" "${DEBIAN_PATH}" || true
done
echo "Debian finished"


# vim: ts=4 sts=4 sw=4

hackage.sh (Executable file, 92 lines)
@@ -0,0 +1,92 @@
#!/bin/bash
set -e

function remove_broken() {
    interval=$1
    interval_file="/tmp/hackage_lastcheck"
    now=`date +%s`

    if [[ -f ${interval_file} ]]; then
        lastcheck=`cat ${interval_file}`
        between=$(echo "${now}-${lastcheck}" | bc)
        [[ $between -lt $interval ]] && { echo "skip checking"; return 0; }
    fi
    echo "start checking"

    mkdir -p "${TUNASYNC_WORKING_DIR}/package"
    cd "${TUNASYNC_WORKING_DIR}/package"

    ls | while read line; do
        echo -ne "$line\t\t"
        tar -tzf $line >/dev/null && echo "OK" || (echo "FAIL"; rm $line)
    done

    echo `date +%s` > $interval_file
}

function must_download() {
    src=$1
    dst=$2
    while true; do
        echo "downloading: $dst"
        wget "$src" -O "$dst" &>/dev/null || true
        tar -tzf "$dst" >/dev/null && break || rm "$dst"
    done
}

function hackage_mirror() {
    local_pklist="/tmp/hackage_local_pklist_$$.list"
    remote_pklist="/tmp/hackage_remote_pklist_$$.list"

    cd ${TUNASYNC_WORKING_DIR}
    mkdir -p package

    echo "Downloading index..."
    rm index.tar.gz || true
    axel http://hdiff.luite.com/packages/archive/index.tar.gz -o index.tar.gz > /dev/null

    echo "building local package list"
    ls package | sed "s/\.tar\.gz$//" > $local_pklist
    echo "preferred-versions" >> $local_pklist # ignore preferred-versions

    echo "building remote package list"
    tar ztf index.tar.gz | (cut -d/ -f 1,2 2>/dev/null) | sed 's|/|-|' > $remote_pklist

    echo "building download list"
    # subtract local list from remote list
    comm <(sort $remote_pklist) <(sort $local_pklist) -23 | while read pk; do
        # limit concurrency level
        bgcount=`jobs | wc -l`
        while [[ $bgcount -ge 5 ]]; do
            sleep 0.5
            bgcount=`jobs | wc -l`
        done

        name="$pk.tar.gz"
        if [ ! -a package/$name ]; then
            must_download "http://hackage.haskell.org/package/$pk/$name" "package/$name" &
        else
            echo "skip existing: $name"
        fi
    done

    # delete redundant files
    comm <(sort $remote_pklist) <(sort $local_pklist) -13 | while read pk; do
        name="$pk.tar.gz"
        echo "deleting ${name}"
        rm "package/$name"
    done

    cp index.tar.gz 00-index.tar.gz
}

function cleanup () {
    echo "cleaning up"
    [[ ! -z $local_pklist ]] && (rm $local_pklist $remote_pklist ; true)
}

trap cleanup EXIT
remove_broken 86400
hackage_mirror

# vim: ts=4 sts=4 sw=4
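
The two comm invocations in hackage_mirror are set differences over the sorted package lists: -23 keeps lines only in the remote list (packages to download), -13 keeps lines only in the local list (packages to delete). A toy illustration with hypothetical package names:

    printf '%s\n' aeson-0.11.2.0 base-4.8.2.0 | sort > /tmp/remote.list
    printf '%s\n' base-4.8.2.0 removed-pkg-1.0 | sort > /tmp/local.list
    comm -23 /tmp/remote.list /tmp/local.list   # -> aeson-0.11.2.0   (remote only: download)
    comm -13 /tmp/remote.list /tmp/local.list   # -> removed-pkg-1.0  (local only: delete)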

helpers/apt-download (Normal file, 132 lines)
@@ -0,0 +1,132 @@
#!/bin/bash
set -e
LOADED_APT_DOWNLOAD="yes"

function check-and-download() {
    remote_file=$1
    local_file=$2
    wget -q --spider ${remote_file}
    if [ $? -eq 0 ]; then
        echo "downloading ${remote_file}"
        wget -q -N -O ${local_file} ${remote_file}
        return
    fi
    return 0
}


function apt-download-binary() {
    base_url=$1
    dist=$2
    repo=$3
    arch=$4
    dest_base_dir=$5
    if [ -z $dest_base_dir ]; then
        echo "Destination directory is empty, cannot continue"
        return 1
    fi

    dest_dir="${dest_base_dir}/dists/${dist}"
    [ ! -d "$dest_dir" ] && mkdir -p "$dest_dir"
    check-and-download "${base_url}/dists/${dist}/Contents-${arch}.gz" "${dest_dir}/Contents-${arch}.gz" || true
    check-and-download "${base_url}/dists/${dist}/InRelease" "${dest_dir}/InRelease" || true
    check-and-download "${base_url}/dists/${dist}/Release" "${dest_dir}/Release"
    check-and-download "${base_url}/dists/${dist}/Release.gpg" "${dest_dir}/Release.gpg" || true

    # Load Package Index URLs from Release file
    release_file="${dest_dir}/Release"
    dest_dir="${dest_base_dir}/dists/${dist}/${repo}/binary-${arch}"
    [ ! -d "$dest_dir" ] && mkdir -p "$dest_dir"

    declare pkgidx_content=""
    declare cnt_start=false
    declare -i checksum_len
    if (grep -e '^SHA256:$' ${release_file} &>/dev/null); then
        checksum_cmd="sha256sum"; checksum_regex="^SHA256:$"; checksum_len=64
    elif (grep -e '^SHA1:$' ${release_file} &>/dev/null); then
        checksum_cmd="sha1sum"; checksum_regex="^SHA1:$"; checksum_len=40
    elif (grep -e '^MD5Sum:$' ${release_file} &>/dev/null); then
        checksum_cmd="md5sum"; checksum_regex="^MD5Sum:$"; checksum_len=32
    fi

    while read line; do
        if [[ ${cnt_start} = true ]]; then
            read -a tokens <<< $line
            checksum=${tokens[0]}
            if [[ ${#checksum} != ${checksum_len} ]]; then
                break
            fi
            filesize=${tokens[1]}
            filename=${tokens[2]}
            if [[ "$filename" =~ ${repo}/binary-${arch} ]]; then
                # Load package list from Packages file
                pkgidx_file="${dest_base_dir}/dists/${dist}/${filename}"
                dest_dir=`dirname ${pkgidx_file}`
                [ ! -d "$dest_dir" ] && mkdir -p "$dest_dir"
                pkglist_url="${base_url}/dists/${dist}/${filename}"
                check-and-download "${pkglist_url}" ${pkgidx_file} || true
                echo "${checksum}  ${pkgidx_file}" | ${checksum_cmd} -c -
                if [ -z "${pkgidx_content}" -a -f ${pkgidx_file} ]; then
                    echo "getting packages index content"
                    case $filename in
                        *.bz2)
                            pkgidx_content=`bunzip2 -c ${pkgidx_file}`
                            ;;
                        *.gz)
                            pkgidx_content=`gunzip -c ${pkgidx_file}`
                            ;;
                        *)
                            pkgidx_content=`cat ${pkgidx_file}`
                            ;;
                    esac
                fi
            fi
        else
            if [[ "$line" =~ ${checksum_regex} ]]; then
                cnt_start=true
            fi
        fi
    done < ${release_file}

    if [ -z "${pkgidx_content}" ]; then
        echo "index is empty, failed"
        return 1
    fi

    # Set checksum method
    if (echo -e "${pkgidx_content}" | grep -e '^SHA256' &>/dev/null); then
        checksum_cmd="sha256sum"; checksum_regex="^SHA256"
    elif (echo -e "${pkgidx_content}" | grep -e '^SHA1' &>/dev/null); then
        checksum_cmd="sha1sum"; checksum_regex="^SHA1"
    elif (echo -e "${pkgidx_content}" | grep -e '^MD5sum' &>/dev/null); then
        checksum_cmd="md5sum"; checksum_regex="^MD5sum"
    fi

    # Download packages
    (echo -e "${pkgidx_content}" | grep -e '^Filename' -e '^Size' -e ${checksum_regex} | cut -d' ' -f 2) | \
    while read pkg_filename; read pkg_size; read pkg_checksum; do
        dest_filename="${dest_base_dir}/${pkg_filename}"
        dest_dir=`dirname ${dest_filename}`
        [ ! -d "$dest_dir" ] && mkdir -p "$dest_dir"
        pkg_url="${base_url}/${pkg_filename}"
        declare downloaded=false
        if [ -f ${dest_filename} ]; then
            rsize=`stat -c "%s" ${dest_filename}`
            if [ ${rsize} -eq ${pkg_size} ]; then
                downloaded=true
                echo "Skipping ${pkg_filename}, size ${pkg_size}"
            fi
        fi
        while [ $downloaded != true ]; do
            echo "downloading ${pkg_url}"
            wget -q -O ${dest_filename} ${pkg_url} && {
                echo "${pkg_checksum}  ${dest_filename}" | ${checksum_cmd} -c - && downloaded=true # two spaces for md5sum/sha1sum/sha256sum check format
            }
        done
    done

    echo "Mirroring ${base_url} ${dist}, ${repo}, ${arch} done!"

}

# vim: ts=4 sts=4 sw=4
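
For reference, the fragment of a Debian-style Release file that the `while read line` loop above walks has a checksum header followed by one " <checksum> <size> <path>" entry per index file; cnt_start flips on the header, and the loop stops at the first token that is not a checksum of the expected length. Shape only, with made-up checksums and sizes:

    # SHA256:
    #  <64 hex chars> 1298422 main/binary-amd64/Packages
    #  <64 hex chars>  312870 main/binary-amd64/Packages.gz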

homebrew.sh (Executable file, 17 lines)
@@ -0,0 +1,17 @@
#!/bin/bash
if [ ! -d "$TUNASYNC_WORKING_DIR" ]; then
    echo "Directory does not exist, fail"
    exit 1
fi

function update_homebrew_git() {
    repo_dir="$1"
    cd $repo_dir
    echo "==== SYNC $repo_dir START ===="
    /usr/bin/timeout -s INT 3600 git remote -v update
    echo "==== SYNC $repo_dir DONE ===="
}

update_homebrew_git "$TUNASYNC_WORKING_DIR/homebrew.git"
update_homebrew_git "$TUNASYNC_WORKING_DIR/homebrew-python.git"
update_homebrew_git "$TUNASYNC_WORKING_DIR/homebrew-science.git"
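
update_homebrew_git only runs `git remote update`, so each working directory is presumably a bare mirror clone created once beforehand; a one-time setup sketch (the upstream URL and the manual setup step are assumptions, not part of the script):

    cd "$TUNASYNC_WORKING_DIR"
    # assumed upstream repository; adjust to the actual source being mirrored
    git clone --mirror https://github.com/Homebrew/homebrew.git homebrew.git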

linux.sh (Executable file, 12 lines)
@@ -0,0 +1,12 @@
#!/bin/bash
if [ ! -d "$TUNASYNC_WORKING_DIR" ]; then
    echo "Directory does not exist, fail"
    exit 1
fi

function update_linux_git() {
    cd $TUNASYNC_WORKING_DIR
    /usr/bin/timeout -s INT 3600 git remote -v update
}

update_linux_git

lxc-images.sh (Executable file, 16 lines)
@@ -0,0 +1,16 @@
#!/bin/bash

function sync_lxc_images() {
    repo_url="$1"
    repo_dir="$2"

    [ ! -d "$repo_dir" ] && mkdir -p "$repo_dir"
    cd $repo_dir

    # lftp "${repo_url}/" -e "mirror --verbose --log=${TUNASYNC_LOG_FILE} --exclude-glob='*/SRPMS/*' -P 5 --delete --only-newer; bye"
    lftp "${repo_url}/" -e "mirror --verbose -P 5 --delete --only-newer; bye"
}


sync_lxc_images "http://images.linuxcontainers.org/images" "${TUNASYNC_WORKING_DIR}/images"
sync_lxc_images "http://images.linuxcontainers.org/meta" "${TUNASYNC_WORKING_DIR}/meta"

mongodb.sh (Executable file, 88 lines)
@@ -0,0 +1,88 @@
#!/bin/bash
set -e

_here=`dirname $(realpath $0)`
. ${_here}/helpers/apt-download

[ -z "${LOADED_APT_DOWNLOAD}" ] && (echo "failed to load apt-download"; exit 1)

BASE_PATH="${TUNASYNC_WORKING_DIR}"

YUM_PATH="${BASE_PATH}/yum"
APT_PATH="${BASE_PATH}/apt"

UBUNTU_VERSIONS=("trusty" "precise")
DEBIAN_VERSIONS=("wheezy")
MONGO_VERSIONS=("3.2" "3.0")
STABLE_VERSION="3.2"

UBUNTU_PATH="${APT_PATH}/ubuntu"
DEBIAN_PATH="${APT_PATH}/debian"

mkdir -p $UBUNTU_PATH $DEBIAN_PATH $YUM_PATH

cache_dir="/tmp/yum-mongodb-cache/"
cfg="/tmp/mongodb-yum.conf"
cat <<EOF > ${cfg}
[main]
keepcache=0

EOF

for mgver in ${MONGO_VERSIONS[@]}; do
cat <<EOF >> ${cfg}
[el6-${mgver}]
name=el6-${mgver}
baseurl=https://repo.mongodb.org/yum/redhat/6/mongodb-org/${mgver}/x86_64/
repo_gpgcheck=0
gpgcheck=0
enabled=1
sslverify=0

[el7-${mgver}]
name=el7-${mgver}
baseurl=https://repo.mongodb.org/yum/redhat/7/mongodb-org/${mgver}/x86_64/
repo_gpgcheck=0
gpgcheck=0
enabled=1
sslverify=0
EOF
done

reposync -c $cfg -d -p ${YUM_PATH} -e $cache_dir
for mgver in ${MONGO_VERSIONS[@]}; do
    createrepo --update -v -c $cache_dir -o ${YUM_PATH}/el6-$mgver/ ${YUM_PATH}/el6-$mgver/
    createrepo --update -v -c $cache_dir -o ${YUM_PATH}/el7-$mgver/ ${YUM_PATH}/el7-$mgver/
done

[ -e ${YUM_PATH}/el6 ] || (cd ${YUM_PATH}; ln -s el6-${STABLE_VERSION} el6)
[ -e ${YUM_PATH}/el7 ] || (cd ${YUM_PATH}; ln -s el7-${STABLE_VERSION} el7)

rm $cfg

base_url="http://repo.mongodb.org/apt/ubuntu"
for ubver in ${UBUNTU_VERSIONS[@]}; do
    for mgver in ${MONGO_VERSIONS[@]}; do
        version="$ubver/mongodb-org/$mgver"
        apt-download-binary ${base_url} "$version" "multiverse" "amd64" "${UBUNTU_PATH}" || true
        apt-download-binary ${base_url} "$version" "multiverse" "i386" "${UBUNTU_PATH}" || true
    done
    mg_basepath="${UBUNTU_PATH}/dists/$ubver/mongodb-org"
    [ -e ${mg_basepath}/stable ] || (cd ${mg_basepath}; ln -s ${STABLE_VERSION} stable)
done
echo "Ubuntu finished"

base_url="http://repo.mongodb.org/apt/debian"
for dbver in ${DEBIAN_VERSIONS[@]}; do
    for mgver in ${MONGO_VERSIONS[@]}; do
        version="$dbver/mongodb-org/$mgver"
        apt-download-binary ${base_url} "$version" "main" "amd64" "${DEBIAN_PATH}" || true
        apt-download-binary ${base_url} "$version" "main" "i386" "${DEBIAN_PATH}" || true
    done
    mg_basepath="${DEBIAN_PATH}/dists/$dbver/mongodb-org"
    [ -e ${mg_basepath}/stable ] || (cd ${mg_basepath}; ln -s ${STABLE_VERSION} stable)
done
echo "Debian finished"


# vim: ts=4 sts=4 sw=4

nodesource.sh (Executable file, 18 lines)
@@ -0,0 +1,18 @@
#!/bin/bash

function sync_nodesource() {
    repo_url="$1"
    repo_dir="$2"

    [ ! -d "$repo_dir" ] && mkdir -p "$repo_dir"
    cd $repo_dir
    # lftp "${repo_url}/" -e "mirror --verbose --exclude-glob='*/SRPMS/*' -P 5 --delete --only-newer; bye"
    lftp "${repo_url}/" -e "mirror --verbose -P 5 --delete --only-newer; bye"
}

sync_nodesource "https://deb.nodesource.com/node" "${TUNASYNC_WORKING_DIR}/deb"
sync_nodesource "https://deb.nodesource.com/node_0.12" "${TUNASYNC_WORKING_DIR}/deb_0.12"
sync_nodesource "https://deb.nodesource.com/node_4.x" "${TUNASYNC_WORKING_DIR}/deb_4.x"
sync_nodesource "https://rpm.nodesource.com/pub" "${TUNASYNC_WORKING_DIR}/rpm"
sync_nodesource "https://rpm.nodesource.com/pub_0.12" "${TUNASYNC_WORKING_DIR}/rpm_0.12"
sync_nodesource "https://rpm.nodesource.com/pub_4.x" "${TUNASYNC_WORKING_DIR}/rpm_4.x"

openwrt.sh (Executable file, 13 lines)
@@ -0,0 +1,13 @@
#!/bin/bash

function sync_openwrt() {
    repo_url="$1"
    repo_dir="$2"

    [ ! -d "$repo_dir" ] && mkdir -p "$repo_dir"
    cd $repo_dir
    lftp "${repo_url}/" -e "mirror --verbose -P 5 --delete --only-newer; bye"
}

sync_openwrt "http://downloads.openwrt.org/chaos_calmer/15.05/" "${TUNASYNC_WORKING_DIR}/chaos_calmer/15.05"
sync_openwrt "http://downloads.openwrt.org/snapshots/trunk/" "${TUNASYNC_WORKING_DIR}/snapshots/trunk"

pypi.sh (Executable file, 9 lines)
@@ -0,0 +1,9 @@
#!/bin/bash
if [ ! -d "$TUNASYNC_WORKING_DIR" ]; then
    echo "Directory does not exist, fail"
    exit 1
fi

echo "Syncing to $TUNASYNC_WORKING_DIR"

/usr/bin/timeout -s INT 3600 /home/tuna/.virtualenvs/bandersnatch/bin/bandersnatch -c /etc/bandersnatch.conf mirror || exit 1

repo-ck.sh (Executable file, 17 lines)
@@ -0,0 +1,17 @@
#!/bin/bash

function sync_repo_ck() {
    repo_url="$1"
    repo_dir="$2"

    [ ! -d "$repo_dir" ] && mkdir -p "$repo_dir"
    cd $repo_dir
    lftp "${repo_url}/" -e 'mirror -v -P 5 --delete --only-missing --only-newer --no-recursion; bye'
    wget "${repo_url}/repo-ck.db" -O "repo-ck.db"
    wget "${repo_url}/repo-ck.files" -O "repo-ck.files"
}

UPSTREAM="http://repo-ck.com"

sync_repo_ck "${UPSTREAM}/x86_64" "${TUNASYNC_WORKING_DIR}/x86_64"
sync_repo_ck "${UPSTREAM}/i686" "${TUNASYNC_WORKING_DIR}/i686"

termux.sh (Executable file, 16 lines)
@@ -0,0 +1,16 @@
#!/bin/bash
set -e

_here=`dirname $(realpath $0)`
. ${_here}/helpers/apt-download
[ -z "${LOADED_APT_DOWNLOAD}" ] && (echo "failed to load apt-download"; exit 1)

BASE_PATH="${TUNASYNC_WORKING_DIR}"

base_url="http://apt.termux.com"
ARCHES=("aarch64" "all" "arm" "i686")
for arch in ${ARCHES[@]}; do
    echo "start syncing: ${arch}"
    apt-download-binary "${base_url}" "stable" "main" "${arch}" "${BASE_PATH}" || true
done
echo "finished"