Add files via upload
Branch: master, author: Aerya
Parent 988ffa8eb1, commit 80e8d07473

Files added in this commit:
  1. ArchLinux-Full-Backup-AES.sh (23 lines)
  2. Backup Incremental (124 lines)
  3. BitTorrent auto mktorrent and post (66 lines)
  4. BitTorrent autoUP (163 lines)
  5. CloudStation _ find and delete _Conflit_ (4 lines)
  6. Discord notif rClone move (8 lines)
  7. Discord notif rClone remote (7 lines)
  8. Discord notif stockage NUC1 (7 lines)
  9. Discord notif stockage NUC2 (7 lines)
  10. Discord.sh (rClone notif) (438 lines)
  11. File move based on name (4 lines)
  12. IPtables Proxmox SYS1 (83 lines)
  13. Linux Docker get local subnet (1 line)
  14. PlexDrive Proxmox (106 lines)
  15. Series Rename (11 lines)
  16. Transmission post-process file move (97 lines)
  17. Up2Mega (30 lines)
  18. Usenet POST (59 lines)
  19. VPN autoreconnect (network manager) (32 lines)
  20. Wireguard ON OFF raccourcis desktop (22 lines)
  21. crontab NUC1 (19 lines)
  22. crontab NUC2 (17 lines)
  23. fstab Arch (2 lines)
  24. lftp Manuel (20 lines)
  25. lftp MuD Mirror - Unrar - Move - Delete (34 lines)
  26. lftp Python mirror (25 lines)
  27. mkTorrent (138 lines)
  28. nzb to torrent (5 lines)
  29. rClone Move (19 lines)
  30. rClone Service Mount (31 lines)
  31. rClone Service Move (14 lines)
  32. rClone Service Move notif Discord (14 lines)
  33. rClone Service unionfs (15 lines)
  34. rClone Sync + SAkeys (25 lines)
  35. rClone Sync lolo Manuel (6 lines)
  36. rClone bouger un dossier sur le remote directement (1 line)
  37. rClone mount check and Docker restart (51 lines)
  38. rCloneBrowser (2 lines)
  39. rTorrent PushOver (30 lines)

@@ -0,0 +1,23 @@ ArchLinux-Full-Backup-AES.sh
#!/bin/bash
# Script name .... : ArchLinux-Full-Backup-AES.sh
# Date ........... : 03.2016
# Author ......... : Aerya
DIR=/mnt/Datas/Backups/
AES_PWD='*****'
# Script ......... : DO NOT EDIT
# 1 .............. : Variable holding today's date, appended to the archive name
# 2 .............. : Change into the destination directory
# 3 .............. : Compression, excluding system directories
# 4 .............. : Encryption of the archive
# 5 .............. : Rolling deletion of the encrypted archives every X days
DATE=$(date +%Y-%m-%d)
cd "$DIR" || exit 1
tar -jcvpf "$DIR"/FullServerBackup."$DATE".tar.bz2 --directory=/ --exclude=proc --exclude=sys --exclude=dev/pts --exclude=tmp --exclude="$DIR" .
# The encryption step needs an input file: read the freshly created archive
openssl enc -aes-256-cbc -salt -a -in FullServerBackup."$DATE".tar.bz2 -out FullServerBackup."$DATE".tar.bz2.aes -k "$AES_PWD"
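# The header's step 5 (rolling deletion) is not visible in this excerpt; a minimal
# sketch, assuming a hypothetical RETENTION_DAYS value and that only the .aes copies are kept:
#RETENTION_DAYS=7
#find "$DIR" -maxdepth 1 -name 'FullServerBackup.*.tar.bz2.aes' -mtime +"$RETENTION_DAYS" -delete
# To decrypt an archive later (mirrors the options used above):
#openssl enc -d -aes-256-cbc -a -in FullServerBackup.YYYY-MM-DD.tar.bz2.aes -out FullServerBackup.YYYY-MM-DD.tar.bz2 -k "$AES_PWD"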

@@ -0,0 +1,124 @@ Backup Incremental
#!/bin/bash
#https://github.com/pedroetb/rsync-incremental-backup/blob/master/rsync-incremental-backup-local
# Configuration variables (change as you wish)
src="${1:-/home/dan}"
dst="${2:-/mnt/unionfs/backup/PlexServer/homedan}"
backupDepth=${backupDepth:-7}
timeout=${timeout:-1800}
pathBak0="${pathBak0:-last}"
rotationLockFileName="${rotationLockFileName:-.rsync-rotation-lock}"
pathBakN="${pathBakN:-old}"
nameBakN="${nameBakN:-backup}"
exclusionFileName="${exclusionFileName:-exclude.txt}"
dateCmd="${dateCmd:-date}"
logName="${logName:-rsync-incremental-backup_$(${dateCmd} -Id)_$(${dateCmd} +%H-%M-%S).log}"
ownFolderName="${ownFolderName:-.rsync-incremental-backup}"
logFolderName="${logFolderName:-logs}"
# Combinate previously defined variables for use (don't touch this)
ownFolderPath="${HOME}/${ownFolderName}"
tempLogPath="${ownFolderPath}/local_${dst//[\/]/\\}"
exclusionFilePath="${ownFolderPath}/${exclusionFileName}"
bak0="${dst}/${pathBak0}"
rotationLockFilePath="${dst}/${rotationLockFileName}"
logPath="${dst}/${pathBakN}/${logFolderName}"
logFile="${tempLogPath}/${logName}"
# Prepare own folder
mkdir -p "${tempLogPath}"
touch "${logFile}"
touch "${exclusionFilePath}"
writeToLog() {
echo -e "${1}" | tee -a "${logFile}"
}
writeToLog "********************************"
writeToLog "* *"
writeToLog "* rsync-incremental-backup *"
writeToLog "* *"
writeToLog "********************************"
# Prepare backup paths
i=1
while [ "${i}" -le "${backupDepth}" ]
do
export "bak${i}=${dst}/${pathBakN}/${nameBakN}.${i}"
true "$((i = i + 1))"
done
writeToLog "\\n[$(${dateCmd} -Is)] You are going to backup"
writeToLog "\\tfrom: ${src}"
writeToLog "\\tto: ${bak0}"
# Prepare paths at destination
mkdir -p "${dst}" "${logPath}"
writeToLog "\\n[$(${dateCmd} -Is)] Old logs sending begins\\n"
# Send old pending logs to destination
rsync -rhv --remove-source-files --exclude="${logName}" --log-file="${logFile}" \
"${tempLogPath}/" "${logPath}/"
writeToLog "\\n[$(${dateCmd} -Is)] Old logs sending finished"
# Rotate backups if last rsync succeeded ..
if [ ! -e "${rotationLockFilePath}" ]
then
# .. and there is previous data
if [ -d "${bak0}" ]
then
writeToLog "\\n[$(${dateCmd} -Is)] Backups rotation begins"
true "$((i = i - 1))"
# Remove the oldest backup if exists
bak="bak${i}"
rm -rf "${!bak}"
# Rotate the previous backups
while [ "${i}" -gt 0 ]
do
bakNewPath="bak${i}"
true "$((i = i - 1))"
bakOldPath="bak${i}"
if [ -d "${!bakOldPath}" ]
then
mv "${!bakOldPath}" "${!bakNewPath}"
fi
done
writeToLog "[$(${dateCmd} -Is)] Backups rotation finished\\n"
else
writeToLog "\\n[$(${dateCmd} -Is)] No previous data found, there is no backups to be rotated\\n"
fi
else
writeToLog "\\n[$(${dateCmd} -Is)] Last backup failed, backups will not be rotated\\n"
fi
# Set rotation lock file to detect in next run when backup fails
touch "${rotationLockFilePath}"
writeToLog "[$(${dateCmd} -Is)] Backup begins\\n"
# Do the backup
if rsync -achv --progress --timeout="${timeout}" --delete -W --link-dest="${bak1}/" \
--log-file="${logFile}" --exclude="${ownFolderPath}" --chmod=+r \
--exclude-from="${exclusionFilePath}" "${src}/" "${bak0}/"
then
writeToLog "\\n[$(${dateCmd} -Is)] Backup completed successfully\\n"
# Clear unneeded partials and lock file
rm -rf "${rotationLockFilePath}"
rsyncFail=0
else
writeToLog "\\n[$(${dateCmd} -Is)] Backup failed, try again later\\n"
rsyncFail=1
fi
# Send the complete log file to destination
mv "${logFile}" "${logPath}"
exit "${rsyncFail}"

@@ -0,0 +1,66 @@ BitTorrent auto mktorrent and post
#!/bin/bash
WORKINGDIR="/path/to/dir"
INDIR="/path/to/release"
OUTDIR="/path/to/outdir"
AUTOADD="autoadd-dir"
UPLOAD="uploading-dir"
TODAY=$(date +"%Y%m%d")
PYSTRIPPERBIN="${WORKINGDIR}/pystripper/main.py"
UNRARBIN="/usr/bin/unrar"
TORRENTBIN="/usr/bin/transmission-create"
TORRENTCOMMENT="Created by autouploader"
TRACKERURL="http://www.torrentsite.com/announce.php"
UPLOADSITE="http://www.torrentsite.com"
UPLOADUID="uid"
UPLOADPASS="pass"
UPLOADFORM="file"
UPLOADURL="${UPLOADSITE}/upload.php"
# Find all .rar files in ${INDIR} non recursively
for RARFILE in $(find ${INDIR}/ -maxdepth 2 -name '*.rar' -print)
do
RELEASE=`echo "${RARFILE}" | cut -d'/' -f7`
if [[ -n "$(find ${OUTDIR}/${UPLOAD}/ -name ${RELEASE})" ]]; then
echo "${RELEASE} have already been processed..."
else
# We don't have this ${RELEASE} yet. Process it.
# Verify the release for .nfo and .sfv files before processing.
if [[ $(find ${INDIR}/${RELEASE} -type f | egrep '(.nfo|.sfv)' | wc -l) -eq 2 ]]; then
echo "PROCESSING ${RELEASE}..."
# SFV processing
SFV=`find ${INDIR}/${RELEASE} -type f | egrep '.sfv'`
if [[ ! -n $(cksfv -q -g "${SFV}") ]]; then
# SFV check passed now, copy to ${UPLOAD}-dir.
cp -pr ${INDIR}/${RELEASE} ${OUTDIR}/${UPLOAD}/
# Create a torrent-file of ${RELEASE}
# -p (Private) -o (Outfile) -c (Comment) -t (Tracker url)
${TORRENTBIN} -p -c "${TORRENTCOMMENT}" -t "${TRACKERURL}" -o "${OUTDIR}/${RELEASE}.torrent" ${OUTDIR}/${UPLOAD}/${RELEASE}
if [[ -e "${OUTDIR}/${RELEASE}.torrent" ]]; then
# We have successfully created torrent-file.
# Put content of ${RELEASE} .nfo file into variable for curl upload.
NFOFILE=`find ${INDIR}/${RELEASE} -type f | egrep '(.nfo)'`
NFO="`python ${PYSTRIPPERBIN} -i ${NFOFILE}`"
# Upload the torrent to your site with curl.
# Save the output html to temporary file.
# Modify according to your site's upload form.
curl --cookie "uid=${UPLOADUID};pass=${UPLOADPASS}" -F "${UPLOADFORM}=@${OUTDIR}/${RELEASE}.torrent" -F "nfo=${NFO}" -L ${UPLOADURL} > temp
DOWNLOADURL=`cat temp | grep "${RELEASE}.torrent" | grep -oP '(?<=href=")[^"]*(?=")'`
# Get the new .torrentfile with ${DOWNLOADURL}
wget ${UPLOADSITE}/${DOWNLOADURL} -O ${OUTDIR}/${AUTOADD}/${RELEASE}.torrent
# We're done, hopefully we can now seed happily.
# Clean up after yourself...
rm ${WORKINGDIR}/temp
rm ${OUTDIR}/${RELEASE}.torrent
else
echo "Could NOT find ${OUTDIR}/${RELEASE}.torrent"
fi
else
echo "SFV check did NOT pass. Compressed files are baaaad..."
fi
else
echo "${RELEASE} does not contain .nfo and/or .sfv files."
fi
fi
done

@@ -0,0 +1,163 @@ BitTorrent autoUP
#!/bin/bash
SCRIPT=${0}
SCRIPT_NAME=${SCRIPT##*/}
SCRIPT_PATH=$(dirname ${SCRIPT})
MEDIAINFO="/usr/bin/mediainfo"
URLUPLOAD="https://api.*/torrents/"
XAUTHTOKEN="X-AUTH-TOKEN: $AUTHKEY"
ANNOUNCE="https://*/$TORRENTKEY/announce"
# trap ctrl-c and call ctrl_c()
trap ctrl_c INT
function ctrl_c() {
exit 0;
}
function usage() {
cat << EOF
usage: ${SCRIPT_NAME} options
This script uploads one or more files to uspk.
If the NFO does not exist (the NFO must have the same name as the file),
it will be generated with mediainfo.
OPTIONS:
-f File(s). You can use * as well
-t Type: film or tv
EOF
}
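# Example invocation (a sketch; the script filename is arbitrary here, and AUTHKEY /
# TORRENTKEY are assumed to be exported beforehand, since they are used above but never set):
#AUTHKEY=xxxx TORRENTKEY=yyyy ./autoup.sh -f "*.mkv" -t tv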
function check_type() {
case $type in
'film')
category=1
;;
'tv')
category=2
;;
?)
usage
exit 1
;;
esac
}
MY_DIR=$(dirname $0)
input_file=
category=
type=
while getopts "hf:t:" OPTION
do
case $OPTION in
h)
usage
exit 1
;;
f)
input_file=$OPTARG
;;
t)
type=$OPTARG
check_type
;;
?)
usage
exit 1
;;
esac
done
if [[ -z $input_file ]] || [[ -z $category ]]; then
usage
echo "input_file: $input_file"
exit 1
fi
echo "MY_DIR: $MY_DIR"
echo "SCRIPT: $SCRIPT"
echo "SCRIPT_NAME: $SCRIPT_NAME"
echo "SCRIPT_PATH: $SCRIPT_PATH"
echo "input_file: $input_file"
echo "type: $type"
rawlist=$(find -L . -maxdepth 1 -type f -name "$input_file" | sort)
old_IFS=$IFS
IFS=$'\n'
for file in $rawlist; do
echo "file: $file"
filedirname=$(realpath $file)
echo "filedirname: $filedirname"
filename="${file##*/}"
echo "filename: $filename"
name="${filename%.*}"
echo "name: $name"
extension="${filename##*.}"
echo "extension: $extension"
nfo="${filedirname%.*}.nfo"
tmp="${filedirname%.*}.tmp"
torrent="${filedirname%.*}.torrent"
echo "nfo: $nfo"
#generation of NFO if file is mkv or mp4 and if does not exists
if [[ $extension == "mkv" ]] || [[ $extension == "mp4" ]]; then
if [[ ! -e "$nfo" ]]; then
#generation of NFO
echo "NFO needs to be generated"
$MEDIAINFO $file > $tmp
sed "3 cComplete name : $filename" $tmp > $nfo
rm $tmp
fi
fi
if [[ ! -e "$nfo" ]]; then
echo "nfo missing, skip"
continue
fi
#generation of torrent file
echo "generation of the .torrent"
if [[ -e "$torrent" ]]; then
rm "$torrent"
fi
mktorrent -a "${ANNOUNCE}" -l 20 -p -o ${torrent} ${filedirname}
if [[ ! -e "$torrent" ]]; then
echo "error during torrent generation, skip"
continue
fi
while
response=$(curl -i -s -k -m 15 -X POST -H "Content-Type: multipart/form-data" -H "$XAUTHTOKEN" -F torrentFile=@"$torrent" -F nfoFile=@"$nfo" -F cat="$category" -F MAX_FILE_SIZE="3145728" "$URLUPLOAD");
[[ $response == "" ]];
do
echo "in the while";
sleep 10s;
done
regex="\{(.*)\}"
if [[ $(echo $response | grep "200 OK") != "" ]]; then
#get the uploaded torrent
echo "OK"
if [[ "$response" =~ $regex ]]; then
response="{${BASH_REMATCH[1]}}"
torrentTK=$(echo ${response} | jq '.id')
echo "torrentID: $torrentTK"
rm "$torrent"
curl -s -k -H "$XAUTHTOKEN" "$URLUPLOAD$torrentTK/download/" -o "$torrent"
fi
else
echo -n "error during upload: "
if [[ "$response" =~ $regex ]]; then
echo "${BASH_REMATCH[1]}"
fi
fi
sleep 5s
done

@@ -0,0 +1,4 @@ CloudStation _ find and delete _Conflit_
#!/bin/bash
# List the conflict files
find /home/aerya/Documents/CloudStation/Mails/ -type f -print0 | grep -Ez 'Conflict' | xargs -0 -n1

@@ -0,0 +1,8 @@ Discord notif rClone move
#!/bin/bash
WEBHOOK=https://discordapp.com/api/webhooks/*
tail -F /home/aerya/logs/rclone.log | while read line; do
if echo "$line" | grep -q 'Copied (new)'; then
/home/aerya/scripts/notifs/discord.sh --webhook-url="$WEBHOOK" --text "$(tail -n 1 /home/aerya/logs/rclone.log)"
fi
done

@@ -0,0 +1,7 @@ Discord notif rClone remote
#!/bin/bash -e
WEBHOOK=https://discordapp.com/api/webhooks/*
MSG=`rclone --drive-service-account-file=/home/aerya/scripts/rclone/sakeys/aer01mount.json size Aer01Crypt: | grep 'Total size:'`
bash /home/aerya/scripts/notifs/discord.sh \
--webhook-url="$WEBHOOK" \
--text "Aer01 ==> $MSG"

@@ -0,0 +1,7 @@ Discord notif stockage NUC1
#!/bin/bash -e
WEBHOOK=https://discordapp.com/api/webhooks/*
MSG=`df -H | grep '/dev/sda2'`
bash /home/aerya/scripts/notifs/discord.sh \
--webhook-url="$WEBHOOK" \
--text "NUC1 ==> $MSG"

@@ -0,0 +1,7 @@ Discord notif stockage NUC2
#!/bin/bash -e
WEBHOOK=https://discordapp.com/api/webhooks/*
MSG=`df -H | grep '/dev/sda1'`
bash /home/aerya/scripts/notifs/discord.sh \
--webhook-url="$WEBHOOK" \
--text "NUC2 ==> $MSG"

@@ -0,0 +1,438 @@ Discord.sh (rClone notif)
#!/usr/bin/env bash
#
# Discord.sh - Discord on command-line
# by ChaoticWeg and fieu
shopt -s lastpipe # avoid subshell weirdness hopefully
shopt -so pipefail # hopefully correctly get $? in substitution
# check for jq
jq --version >/dev/null 2>&1
jq_ok=$?
[[ "$jq_ok" -eq 127 ]] && \
echo "fatal: jq not installed" && exit 2
[[ "$jq_ok" -ne 0 ]] && \
echo "fatal: unknown error in jq" && exit 2
# jq exists and runs ok
# check for curl
curl --version >/dev/null 2>&1
curl_ok=$?
[[ "$curl_ok" -eq 127 ]] && \
echo "fatal: curl not installed" && exit 2
# curl exists and runs ok
get_ts() { date -u --iso-8601=seconds; };
thisdir="$(cd "$(dirname "$(readlink -f "${BASH_SOURCE[0]}")")" && pwd)"
webhook_file="${thisdir}/.webhook"
help_text="Usage: discord.sh --webhook-url=<url> [OPTIONS]
General options:
--help Display this help and exit
--text <text> Body text of message to send
--tts Send message with text-to-speech enabled
--webhook-url Specify the Discord webhook URL
Identity options:
--username <name> Set username to <name>
--avatar <url> Set avatar to image located at <url>
Embedded content options:
Main:
--title <title> Display embed title as <title>
--description <description> Display embed description as <description>
--url <url> URL of content
--color <color> Set color of bar on left border of embed
Syntax of <color>:
Option 1: 0x<hexadecimal number> (Example: --color 0xFFFFF)
Option 2: <decimal number> (Example: --color 16777215)
--thumbnail <url> Set thumbnail to image located at <url>
Author:
--author <name> Display author name as <name>
--author-icon <url> Display author icon as image located at <url>
--author-url <url> Set author title to go to <url> when clicked
Image:
--image <url> Set image to image located at <url>
--image-height <number> Set image height to <number> pixels
--image-width <number> Set image width to <number> pixels
Fields:
--field <name;value;inline> Add field to embed
Example: --field \"CPU;95%;false\"
Example: --field \"Hostname;localhost\"
Types:
name: string
value: string
inline: boolean (default: true) (optional)
Footer:
--footer <text> Display <text> in footer
--footer-icon <url> Display image located at <url> in footer
--timestamp Display timestamp"
# HELP TEXT PLEASE
[[ "$#" -eq 0 ]] && echo "$help_text" && exit 0
[[ "${1}" == "help" ]] && echo "$help_text" && exit 0
##
# add field to stack
##
add_field() {
local _name
local _value
local _inline
fields="${fields:=}"
# don't add if not embedding
[[ -z "${embedding}" ]] && exit;
[[ "${embedding}" -ne 1 ]] && exit;
_name="$(echo "$1" | cut -d';' -f1)"
_value="$(echo "$1" | cut -d';' -f2)"
_inline="$(echo "$1" | cut -d';' -f3)"
_inline="${_inline:-true}"
fields="${fields}{\"name\": \"${_name}\", \"value\": \"${_value}\", \"inline\": ${_inline}},"
}
build_fields() {
# nothing to build if no fields were added
[[ -z "${fields}" ]] && exit;
echo ", \"fields\": [${fields::-1} ]"
}
# gather arguments
while (( "$#" )); do
case "${1}" in
--help) echo "$help_text" && exit 0;;
-h) echo "$help_text" && exit 0;;
--dry-run) is_dry=1; shift;;
--tts) is_tts=1; shift;;
--webhook-url=*) webhook_url=${1/--webhook-url=/''}; shift;;
--webhook-url*) webhook_url=${2}; shift; shift;;
--username=*) username=${1/--username=/''}; shift;;
--username*) username=${2}; shift; shift;;
--avatar=*) avatar_url=${1/--avatar=/''}; shift;;
--avatar*) avatar_url=${2}; shift; shift;;
--text=*) text=${1/--text=/''}; shift;;
--text*) text=${2}; shift; shift;;
# embed goodies
--title=*) embed_title=${1/--title=/''}; embedding=1; shift;;
--title*) embed_title=${2}; embedding=1; shift; shift;;
--description=*) embed_description=${1/--description=/''}; embedding=1; shift;;
--description*) embed_description=${2}; embedding=1; shift; shift;;
--url=*) embed_url=${1/--url=/''}; embedding=1; shift;;
--url*) embed_url=${2}; embedding=1; shift; shift;;
--color=*) embed_color=${1/--color=/''}; embedding=1; shift;;
--color*) embed_color=${2}; embedding=1; shift; shift;;
--timestamp*) embed_timestamp=1; shift;;
# embed author
--author-url=*) embed_authorurl=${1/--author-url=/''}; embedding=1; shift;;
--author-url*) embed_authorurl=${2}; embedding=1; shift; shift;;
--author-icon=*) embed_authoricon=${1/--author-icon=/''}; embedding=1; shift;;
--author-icon*) embed_authoricon=${2}; embedding=1; shift; shift;;
--author=*) embed_authorname=${1/--author=/''}; embedding=1; shift;;
--author*) embed_authorname=${2}; embedding=1; shift; shift;;
# thumbnail
--thumbnail=*) embed_thumbnail=${1/--thumbnail=/''}; embedding=1; shift;;
--thumbnail*) embed_thumbnail=${2}; embedding=1; shift; shift;;
# image
--image-height=*) embed_imageheight=${1/--image-height=/''}; embedding=1; shift;;
--image-height*) embed_imageheight=${2}; embedding=1; shift; shift;;
--image-width=*) embed_imagewidth=${1/--image-width=/''}; embedding=1; shift;;
--image-width*) embed_imagewidth=${2}; embedding=1; shift; shift;;
--image=*) embed_imageurl=${1/--image=/''}; embedding=1; shift;;
--image*) embed_imageurl=${2}; embedding=1; shift; shift;;
# fields
--field=*) add_field "${1/--field=/''}"; embedding=1; shift;;
--field*) add_field "${2}"; embedding=1; shift; shift;;
# footer
--footer-icon=*) embed_footericon=${1/--footer-icon=/''}; embedding=1; shift;;
--footer-icon*) embed_footericon=${2}; embedding=1; shift; shift;;
--footer=*) embed_footertext=${1/--footer=/''}; embedding=1; shift;;
--footer*) embed_footertext=${2}; embedding=1; shift; shift;;
# file
--file=*) file_path=${1/--file=/''}; has_file=1; shift;;
--file*) file_path=${2}; has_file=1; shift; shift;;
# unknown argument. bail out
*) echo "fatal: unknown argument '${1}'"; exit 1;;
esac
done
# files must be standalone
[[ -n "${embedding}" ]] && [[ -n "${has_file}" ]] && \
echo "fatal: files must be sent on their own (i.e. without text or embeds)" && \
exit 3
# set webhook url (if none exists after argument handling)
[[ -z ${webhook_url} ]] && [[ -n "${DISCORD_WEBHOOK}" ]] && webhook_url=${DISCORD_WEBHOOK}
[[ -z ${webhook_url} ]] && [[ -r "${webhook_file}" ]] && [[ -f "${webhook_file}" ]] && webhook_url=$(cat "${webhook_file}")
# no webhook could be found. bail out
[[ -z ${webhook_url} ]] && echo "fatal: no --webhook-url passed or no .webhook file to read from" && exit 1;
enforce_limits() {
# title <= 256
[[ -n "${embed_title}" ]] && [[ "${#embed_title}" -gt 256 ]] && \
embed_title="${embed_title::256}" && \
echo "warning: embed title limited to ${#embed_title} characters"
# description <= 2048
[[ -n "${embed_description}" ]] && [[ "${#embed_description}" -gt 2048 ]] && \
embed_description="${embed_description::2048}" && \
echo "warning: embed description limited to ${#embed_description} characters"
# footer.text <= 2048
[[ -n "${embed_footertext}" ]] && [[ "${#embed_footertext}" -gt 2048 ]] && \
embed_footertext="${embed_footertext::2048}" && \
echo "warning: embed footer text limited to ${#embed_footertext} characters"
# author.name <= 256
[[ -n "${embed_authorname}" ]] && [[ "${#embed_authorname}" -gt 256 ]] && \
embed_authorname="${embed_authorname::256}" && \
echo "warning: embed author name limited to ${#embed_authorname} characters"
}
##
# build embed author object
##
build_author() {
# don't build if not embedding
[[ -z "${embedding}" ]] && exit;
[[ "${embedding}" -ne 1 ]] && exit;
[[ -n "${embed_authorname}" ]] && local _name=", \"name\": \"${embed_authorname}\""
[[ -n "${embed_authorurl}" ]] && local _url=", \"url\": \"${embed_authorurl}\""
[[ -n "${embed_authoricon}" ]] && local _icon=", \"icon_url\": \"${embed_authoricon}\""
echo ", \"author\": { \"_\": \"_\"${_name}${_url}${_icon} }"
}
##
# build thumbnail object
##
build_thumbnail() {
# don't build if not embedding
[[ -z "${embedding}" ]] && exit;
[[ "${embedding}" -ne 1 ]] && exit;
[[ -n "${embed_thumbnail}" ]] && local _url="\"url\": \"${embed_thumbnail}\""
echo ", \"thumbnail\": { ${_url} }"
}
##
# build footer object
##
build_footer() {
# don't build if not embedding
[[ -z "${embedding}" ]] && exit;
[[ "${embedding}" -ne 1 ]] && exit;
[[ -n "${embed_footertext}" ]] && local _text=", \"text\": \"${embed_footertext}\""
[[ -n "${embed_footericon}" ]] && local _icon=", \"icon_url\": \"${embed_footericon}\""
echo ", \"footer\": { \"_\":\"_\"${_text}${_icon} }"
}
##
# build image object
##
build_image() {
# don't build if not embedding
[[ -z "${embedding}" ]] && exit;
[[ "${embedding}" -ne 1 ]] && exit;
[[ -n "${embed_imageurl}" ]] && local _iurl=", \"url\": \"${embed_imageurl}\""
[[ -n "${embed_imageheight}" ]] && local _height=", \"height\": ${embed_imageheight}"
[[ -n "${embed_imagewidth}" ]] && local _width=", \"width\": ${embed_imagewidth}"
echo ", \"image\": { \"_\": \"_\"${_iurl}${_height}${_width} }"
}
##
# build an embed object
##
build_embed() {
local _ts
local _author
local _thumb
local _image
local _footer
local _fields
# should we embed? if not, bail out without error
[[ -z "${embedding}" ]] && exit;
[[ "${embedding}" -ne 1 ]] && exit;
[[ -n "${embed_title}" ]] && local _title=", \"title\": \"${embed_title}\""
[[ -n "${embed_description}" ]] && local _desc=", \"description\": \"${embed_description}\""
[[ -n "${embed_url}" ]] && local _eurl=", \"url\": \"${embed_url}\""
[[ -n "${embed_color}" ]] && local _color=", \"color\": ${embed_color}"
[[ -n "${embed_timestamp}" ]] && [[ "${embed_timestamp}" -eq 1 ]] && _ts=", \"timestamp\": \"$(get_ts)\""
_author="$(build_author)"
_thumb="$(build_thumbnail)"
_image="$(build_image)"
_fields="$(build_fields)"
_footer="$(build_footer)"
echo ", \"embeds\": [{ \"_\": \"_\"${_title}${_desc}${_eurl}${_color}${_ts}${_author}${_thumb}${_image}${_fields}${_footer} }]"
}
build() {
local _content
local _username
local _avatar
local _embed
# need to have SOMETHING to build
[[ -z "${has_file}" ]] && \
[[ -z "${text}" ]] && \
[[ -z "${embed_title}" ]] && \
[[ -z "${embed_description}" ]] && \
[[ -z "${embed_imageurl}" ]] && \
echo "fatal: nothing to build" && exit 1
# strip 0x prefix and convert hex to dec if necessary
[[ -n "${embed_color}" ]] && [[ "${embed_color}" =~ ^0x[0-9a-fA-F]+$ ]] && embed_color="$(( embed_color ))"
# embed color must be an integer, if given
[[ -n "${embed_color}" ]] && ! [[ "${embed_color}" =~ ^[0-9]+$ ]] && \
echo "fatal: illegal color '${embed_color}'" && exit 1
# let's build, boys
[[ -n "${is_tts}" ]] && _tts=", \"tts\": true"
[[ -n "${text}" ]] && _content=", \"content\": \"${text}\""
[[ -n "${username}" ]] && _username=", \"username\": \"${username}\""
[[ -n "${avatar_url}" ]] && _avatar=", \"avatar_url\": \"${avatar_url}\""
[[ -n "${embedding}" ]] && _embed="$(build_embed)"
local _prefix="\"wait\": true${_tts}${_content}${_username}${_avatar}"
echo "{ ${_prefix}${_embed} }"
}
##
# send something to the text channel
##
send()
{
# gotta send something
[[ -z "${1}" ]] && echo "fatal: give me something to send" && exit 1;
local _sendme="${1}"
# dry run?
[[ -n "${is_dry}" ]] && [[ "${is_dry}" -ne 0 ]] && echo "${1}" && exit 0;
# make the POST request and parse the results
# results should be empty if there's no problem. otherwise, there should be code and message
local _result
_result=$(curl -H "Content-Type: application/json" -H "Expect: application/json" -X POST "${webhook_url}" -d "${_sendme}" 2>/dev/null)
send_ok=$?
[[ "${send_ok}" -ne 0 ]] && echo "fatal: curl failed with code ${send_ok}" && exit $send_ok
_result=$(echo "${_result}" | jq '.')
# if we have a result, there was a problem. echo and exit.
[[ -n "${_result}" ]] && \
echo error! "${_result}" && \
echo attempted to send: "$(echo "${_sendme}" | jq '.')" && \
exit 1
exit 0
}
##
# send a file to the channel
##
send_file() {
# gotta have a file
[[ ( -z "${has_file}" ) || ( -z "${file_path}" ) ]] && echo "fatal: give me a file" && exit 4
local _json
if ! _json=$(build); then echo "${_json}"; exit 1; fi
# dry run
if [[ ( -n "${is_dry}" ) && ( "${is_dry}" -ne 0 ) ]]; then
nc -l -N localhost 8000 &
curl -i \
-F "file=@${file_path}" \
-F "${_json}" \
"localhost:8000"
exit 0
fi
[[ -n "${is_dry}" ]] && [[ "${is_dry}" -ne 0 ]] && \
echo "${_json}" && exit 0
# send with correct Content-Type and url-encoded data
curl -i \
-H "Expect: application/json" \
-F "file=@${file_path}" \
-F "payload_json=${_json}" \
"${webhook_url}" >/dev/null 2>&1
# error checking
sent_ok=$?
[[ "${sent_ok}" -eq 0 ]] && exit 0
echo "fatal: curl exited with code ${sent_ok} when sending file \"${file_path}\""
}
## enforce discord API limits
enforce_limits
## no file? build and send normally
if ! [[ "${has_file}" -eq 1 ]]; then
if target=$(build); then
send "${target}"
exit 0
else
echo "${target}"
exit 1
fi
fi
## has file. send as such
send_file
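# Note: if --webhook-url is omitted, the script falls back to the DISCORD_WEBHOOK
# environment variable, then to a .webhook file next to the script, e.g.:
# echo "https://discordapp.com/api/webhooks/..." > "$(dirname "$0")/.webhook"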

@@ -0,0 +1,4 @@ File move based on name
#!/bin/bash
# EMP
find . -name '\[Empornium\]*' -exec mv {} /mnt/BT/WatchDir_SBMDD/ \;

@@ -0,0 +1,83 @@ IPtables Proxmox SYS1
*filter
# Flush existing IPtables rules
-P INPUT ACCEPT
-P OUTPUT ACCEPT
-P FORWARD ACCEPT
-F
-X
# Base rules
# Allow all outgoing traffic
-P OUTPUT ACCEPT
# Reject all incoming traffic except what the following rules allow
-P INPUT DROP
# Allow already established incoming connections
-A INPUT -m conntrack --ctstate ESTABLISHED,RELATED -j ACCEPT
# Allow internal (loopback) connections
-A INPUT -i lo -j ACCEPT
# Allow incoming service traffic
# SSH / allowed from personal VPN IP 1
-I INPUT -p tcp -s * --dport 22 -j ACCEPT
# SSH / allowed from ProxMox SYS 2 IP
-I INPUT -p tcp -s * --dport 22 -j ACCEPT
# ProxMox web management interface, from VPN1 Vitry_Numericable IP
-I INPUT -p tcp -s * --dport 8006 -j ACCEPT
# ProxMox cluster
-I INPUT -p udp -s * --dport 5404 -j ACCEPT
-I INPUT -p udp -s * --dport 5405 -j ACCEPT
# Ping from the VPN
-I INPUT -p icmp -s * --icmp-type echo-request -j ACCEPT
# Protection rules + logging
# DROP new packets that are not SYN
-A INPUT -p tcp ! --syn -m conntrack --ctstate NEW -j DROP
# DROP invalid packets
-A INPUT -m conntrack --ctstate INVALID -j DROP
# DROP fragmented packets
-A INPUT -f -j DROP
# DROP NULL
-A INPUT -p tcp --tcp-flags ALL NONE -j DROP
# DROP XMAS
-A INPUT -p tcp --tcp-flags ALL ALL -j DROP
# DROP SYNFIN
-A INPUT -p tcp --tcp-flags ALL SYN,FIN -j DROP
# DROP FIN scan
-A INPUT -p tcp --tcp-flags ALL FIN -j DROP
# DROP SYN RST
-A INPUT -p tcp --tcp-flags SYN,RST SYN,RST -j DROP
# DROP NMAP XMAS
-A INPUT -p tcp --tcp-flags ALL URG,PSH,FIN -j DROP
# DROP NMAP
-A INPUT -p tcp --tcp-flags ALL URG,PSH,SYN,FIN -j DROP
# DROP SYN FLOOD
-N SYN-FLOOD
-A SYN-FLOOD -m limit --limit 1/sec --limit-burst 4 -j RETURN
-A SYN-FLOOD -j DROP
# DROP port scans
-N PORT-SCAN
-A INPUT -p tcp --tcp-flags SYN,ACK,FIN,RST RST -j PORT-SCAN
-A PORT-SCAN -m limit --limit 1/s --limit-burst 4 -j RETURN
-A PORT-SCAN -j DROP
# Block attacking IPs for 24h
-A INPUT -m recent --name DUMBASS --rcheck --seconds 86400 -j DROP
-A FORWARD -m recent --name DUMBASS --rcheck --seconds 86400 -j DROP
# Lift the block
-A INPUT -m recent --name DUMBASS --remove
-A FORWARD -m recent --name DUMBASS --remove
# Block any Chinese IP (#NORACISM)
#-A INPUT -p tcp -m set --match-set chine src -j DROP
# Log dropped incoming packets
-N LOGGING
-A INPUT -j LOGGING
-A LOGGING -m limit --limit 3/min -j LOG --log-prefix "IPtables_DROP: " --log-level 7
-A LOGGING -j DROP
COMMIT
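# To load this ruleset (a sketch; the file path is an assumption):
# iptables-restore < /etc/iptables/rules.v4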

@@ -0,0 +1 @@ Linux Docker get local subnet
ip route | awk '!/ (docker0|br-)/ && /src/ {print $1}'

@@ -0,0 +1,106 @@ PlexDrive Proxmox
#!/bin/bash
#LogFiles
plexdriveLOG="/var/lib/vz/dan/Logs/plexdriveLOG.log"
rcloneEncLOG="/var/lib/vz/dan/Logs/rcloneEncLOG.log"
rcloneclearLOG="/var/lib/vz/dan/Logs/rcloneclearLOG.log"
unionLOG="/var/lib/vz/dan/Logs/unionLOG.log"
# PlexDrive
plexdrive mount --uid=1002 --gid=1002 -o allow_other,allow_non_empty_mount -v 3 --refresh-interval=1m /var/lib/vz/dan/plexdrive/ > "$plexdriveLOG" &
#rClone encrypted GDrive
rclone mount Enc_GDRIVE: /var/lib/vz/dan/plexdrive/ --allow-other --allow-non-empty --log-file="$rcloneEncLOG" --config=/var/lib/vz/dan/configs/rclone.conf --no-modtime &
#rClone plain GDrive
sudo rclone mount GDRIVE: /var/lib/vz/dan/GDRIVE/ --allow-other --log-file="$rcloneclearLOG" --config=/var/lib/vz/dan/configs/rclone.conf --no-modtime &
#Unionfs
unionfs-fuse -d -o debug_file="$unionLOG" -o cow,allow_other,nonempty,direct_io,auto_cache,sync_read /var/lib/vz/dan/Pre=RW:/var/lib/vz/dan/plexdrive=RO /var/lib/vz/dan/Union/
exit
Service files
PlexDrive
# /etc/systemd/system/plexdrive.service
[Unit]
Description=Plexdrive
After=syslog.target local-fs.target network.target
[Service]
Type=simple
User=root
ExecStart=/bin/sh -c '/usr/bin/plexdrive mount -o allow_other,read_only,allow_non_empty_mount -v 3 --refresh-interval=1m /home/dan/storage/plexdrive >> /home/dan/Logs/plexdriveLOG.log'
ExecStop=/bin/fusermount -u -z /home/dan/storage/plexdrive
Restart=always
[Install]
WantedBy=default.target
systemctl daemon-reload
systemctl start plexdrive.service
systemctl enable plexdrive.service
rClone plain
# /etc/systemd/system/rcloneclair.service
[Unit]
Description=Google Drive (rclone)
AssertPathIsDirectory=/home/dan/storage/GDRIVE
After=plexdrive.service
[Service]
Type=simple
ExecStart=/usr/bin/rclone mount --allow-other --log-file=/home/dan/Logs/rcloneclearLOG.log --config=/home/dan/configs/rclone.conf --no-modtime GDRIVE: /home/dan/storage/GDRIVE
ExecStop=/bin/fusermount -u /home/dan/storage/GDRIVE
Restart=on-abort
[Install]
WantedBy=default.target
systemctl daemon-reload
systemctl start rcloneclair.service
systemctl enable rcloneclair.service
rClone encrypted
# /etc/systemd/system/rclonecrypt.service
[Unit]
Description=Google Drive Crypted (rclone)
After=syslog.target local-fs.target network.target plexdrive.service
[Service]
Type=simple
User=root
ExecStart=/usr/bin/rclone mount --allow-other --log-file=/home/dan/Logs/rcloneEncLOG.log --config=/home/dan/configs/rclone.conf --no-modtime Enc_GDRIVE: /home/dan/storage/plexdrive
ExecStop=/bin/fusermount -u -z /home/dan/storage/plexdrive
Restart=always
[Install]
WantedBy=default.target
systemctl daemon-reload
systemctl start rclonecrypt.service
systemctl enable rclonecrypt.service
Union
# /etc/systemd/system/union.service
[Unit]
Description=Union
After=syslog.target local-fs.target network.target rclonecrypt.service
[Service]
Type=simple
User=root
ExecStart=/usr/bin/unionfs-fuse -d -o debug_file=/home/dan/Logs/unionLOG.log -o cow,allow_other,nonempty,direct_io,auto_cache,sync_read /home/dan/storage/Pre=RW:/home/dan/storage/plexdrive=RO /home/dan/storage/union
ExecStop=/bin/fusermount -u -z /home/dan/storage/union
Restart=always
[Install]
WantedBy=default.target

@@ -0,0 +1,11 @@ Series Rename
#!/bin/bash
# Capture the first run of digits (season) and the next run of digits (episode)
reg='([0-9][0-9]*)[^0-9]*([0-9][0-9]*)'
for filename in *.mp4 *.mkv; do
name="${filename%.*}"
ext="${filename##*.}"
if [[ $name =~ $reg ]]; then
printf -v newname 'S%02dE%02d.%s' "$((10#${BASH_REMATCH[1]}))" "$((10#${BASH_REMATCH[2]}))" "${ext}"
echo mv "$filename" "$newname"
fi
done
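# Worked example (a sketch): for "Show 2x07 FINAL.mkv" the regex captures "2" and "07",
# so the script prints: mv "Show 2x07 FINAL.mkv" "S02E07.mkv"
# The leading "echo" keeps this a dry run; drop it to actually rename the files.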

@@ -0,0 +1,97 @@ Transmission post-process file move
#!/bin/bash
# Transmission script to move files to post-processing
# and/or specified directories.
# AUTHOR: divreg <https://github.com/divreg>
#################################################################################
# These are inherited from Transmission. #
# Do not declare these. Just use as needed. #
# #
# TR_APP_VERSION #
# TR_TIME_LOCALTIME #
# TR_TORRENT_DIR #
# TR_TORRENT_HASH #
# TR_TORRENT_ID #
# TR_TORRENT_NAME #
# #
#################################################################################
#################################################################################
# CONSTANTS #
# configure directories and filetypes #
#################################################################################
# Use recursive hardlinks (cp -al) only if both Transmission's seed dir and
# the final dir belong to the same filesystem. Set to false to make a
# duplicate copy. Note: true allows you to seed and copy without using up
# twice the storage.
HARDLINKS=true
# The file for logging events from this script
LOGFILE="/home/transmission-complete.log"
# Listening directories
SYNC_DIR="/home/Sync"
# Transmission remote login details. Leave user:pass blank if no authentication
TR_HOST="0.0.0.0"
# Music extensions
#MUSIC_EXTS[0]="flac"
#MUSIC_EXTS[1]="mp3"
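# MUSIC_EXTS is referenced by trans_check below but only sketched in the commented-out
# lines above; a minimal way to enable it (the extensions are just examples):
#MUSIC_EXTS=("flac" "mp3")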
# Path to new content from transmission
TR_DOWNLOADS="$TR_TORRENT_DIR/$TR_TORRENT_NAME"
#################################################################################
# SCRIPT CONTROL #
# edit with caution #
#################################################################################
function edate
{
echo "`date '+%Y-%m-%d %H:%M:%S'` $1" >> "$LOGFILE"
}
function trans_check
{
for directory in $(find "$TR_DOWNLOADS" -type d)
do
cd "$TR_DOWNLOADS" > /dev/null 2>&1
cd $directory > /dev/null 2>&1
files=$(ls *.${MUSIC_EXTS[*]} 2> /dev/null | wc -l)
if [ $files != "0" ]
then
echo "$files"
continue
fi
done
}
edate "Directory is $TR_TORRENT_DIR"
edate "Torrent ID is $TR_TORRENT_ID"
edate "Torrent Hash is $TR_TORRENT_HASH"
edate "Working on the new download $TR_DOWNLOADS"
# Move new music dir and files to the listening location
# Passes through if none of your extension types are found in
# the new music dir
if [ "$(trans_check ${MUSIC_EXTS[*]})" ]
then
edate "File $TR_TORRENT_NAME contains audio files!"
if [ $HARDLINKS == true ]
then
edate "Hardlinking file contents to listening directory. Success!"
cp -al "$TR_DOWNLOADS" "$SYNC_DIR" >> "$LOGFILE"
fi
if [ $HARDLINKS == false ]
then
edate "Duplicating file contents to listening directory. Success!"
cp -R "$TR_DOWNLOADS" "$SYNC_DIR" >> "$LOGFILE"
fi
fi

@@ -0,0 +1,30 @@ Up2Mega
#!/bin/bash
# Script name .... : uptomega.sh
# Date ........... : 05.2016
# Author ......... : Aerya | https://upandclear.org
# Description .... : Upload to MEGA and email the link+key
# Requirements ... : MegaTools | https://github.com/megous/megatools
# Usage .......... : "bash mega.sh file email"
# Notes .......... : If you always want to send the download link to the same email address
# => Fill in the EMAIL='' variable
# => On line 4 of the script, replace "$2" with "$EMAIL"
# Variables ...... : Set them here and do not edit the rest of the script
# MEGA_USER ...... : MEGA login account (email)
# MEGA_PWD ....... : Password for the MEGA login account
# EMAIL .......... : Only fill in if you always send to the same recipient
MEGA_USER='*'
MEGA_PWD='*'
EMAIL=''
# Script ......... : DO NOT EDIT
# 1 .............. : Create a folder on MEGA with the same name
# 2 .............. : Upload to MEGA into the created folder
# 3 .............. : Retrieve the download link (which contains the encryption key)
# 4 .............. : Send the download link by email
#megamkdir -u "$MEGA_USER" -p "$MEGA_PWD" /Root/"$1"
megaput -u "$MEGA_USER" -p "$MEGA_PWD" --path /Root "$1"
LOG=`megals -u "$MEGA_USER" -p "$MEGA_PWD" -e /Root "$1"`
echo "$LOG" | mail -s "$1 est sur MEGA" "$2"

@@ -0,0 +1,59 @@ Usenet POST
#!/bin/bash
# SQL Parameters
#SQLUser=
#SQLpwd=
SQLTable='usenet'
# NameORIG / SQL
NameORIG=`basename "$1"`
# Hash / SQL
Hash=$(echo "$1" | sha256sum | base64 | tail -c 17 | head -c 15)
# Size /SQL
Size=`du -h "$1" | awk '{ print $1 }'`
# Date / SQL
Date=$(date '+%d/%m/%Y')
# Time / SQL
Time=$(date '+%H:%M:%S')
# Category / SQL
Category=$(/usr/bin/basename $(/bin/pwd))
# Fake File
RootDir=/mnt/Faked/"$Category"
# Create an empty placeholder carrying the source file's timestamp
FakeFile="$RootDir/$NameORIG"
mkdir -p "$RootDir" && touch -r "$1" "$FakeFile"
# Encrypt File
#tar -czf "$1".tgz "$1" | bcrypt "$1".tgz
# CryptKey / SQL
# NameCRYPT / SQL
HashDate=$(date +%s | sha256sum | base64 | head -c 15)
NameCRYPT=$(echo ${Hash}${HashDate})
# RAR
# RAR files number / SQL
# PAR2
# RARpwd / SQL
# NZBName / SQL NameORIG_NameCRYPT_Category.nzb
NZBName="$1"_"$NameCRYPT".nzb
# NZB backup
# Get post date + time / SQL
# Get post group / SQL
# Get header check / SQL
# Export SQL
mysql --user=root usenet -e "INSERT INTO "$SQLTable" (NameORIG, Hash, Size, Date, Time, Category, NameCRYPT) VALUES ('$NameORIG', '$Hash', '$Size', '$Date', '$Time', '$Category', '$NameCRYPT')"
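# The INSERT above assumes a matching table already exists; a minimal sketch of a
# compatible schema (column names come from the INSERT, the types are assumptions):
#mysql --user=root usenet -e "CREATE TABLE IF NOT EXISTS usenet (NameORIG TEXT, Hash VARCHAR(64), Size VARCHAR(16), Date VARCHAR(10), Time VARCHAR(8), Category VARCHAR(64), NameCRYPT VARCHAR(64))"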

@@ -0,0 +1,32 @@ VPN autoreconnect (network manager)
#!/bin/bash
# Script name .... : OpenVPNAutoReconnect.sh
# Date ........... : 05.2017
# Author ......... : Aerya | https://upandclear.org
# Description .... : Automatic reconnection of an OpenVPN connection. Works on Ubuntu 17.
# Requirements ... : None
# Usage .......... : Enable at boot/reboot of the machine (depends on the OS/DE)
# Notes .......... : None
# Variables ...... : Set them here and do not edit the rest of the script
# FREQ ........... : How often, in seconds, the VPN connection is checked
FREQ=2
# Script ......... : DO NOT EDIT
# 1-2 / 7-8 ...... : Loop that runs every "FREQ" seconds
# 3 .............. : Fetch the name of the VPN connection
# 4 .............. : If the connection is not active
# 5-6 ............ : Bring it up
while true
do
VPN=`nmcli con show | grep vpn | cut -f1 -d " "`
if [[ "$VPN" != "" ]]; then
nmcli con up "$VPN"
fi
sleep "$FREQ"
done

@@ -0,0 +1,22 @@ Wireguard ON OFF raccourcis desktop
[Desktop Entry]
Version=1.0
Type=Application
Name=Wireguard UP
Comment=
Exec=sudo wg-quick up wg0-client
Icon=
Path=
Terminal=true
StartupNotify=false
[Desktop Entry]
Version=1.0
Type=Application
Name=Wireguard OFF
Comment=
Exec=sudo wg-quick down wg0-client
Icon=
Path=
Terminal=true
StartupNotify=false

@@ -0,0 +1,19 @@ crontab NUC1
# Permissions for the *arr apps
* * * * * chmod -R 777 /mnt/PRE/
# rClone Sync
#0 4 * * * /home/aerya/scripts/rclone/sync.sh
# rClone Sync Lolo
0 4 * * * /home/aerya/scripts/rclone/lolo/synclolo.sh
# Storage
0 */4 * * * /home/aerya/scripts/notifs/stockages/nuc1.sh
#0 */4 * * * /home/aerya/scripts/notifs/stockages/td1.sh
0 */4 * * * /home/aerya/scripts/notifs/stockages/aer01.sh
#0 */4 * * * /home/aerya/scripts/notifs/stockages/aer02.sh
# Storage Lolo
0 */4 * * * /home/aerya/scripts/notifs/lolo/stockages/lolo1.sh
#0 */4 * * * /home/aerya/scripts/notifs/lolo/stockages/lolo2.sh
0 */4 * * * /home/aerya/scripts/notifs/lolo/stockages/lolo3.sh

@@ -0,0 +1,17 @@ crontab NUC2
0 */4 * * * /home/aerya/scripts/notifs/stockages/nuc2.sh
#0 */4 * * * /home/aerya/scripts/notifs/stockages/td1.sh
# rClone Sync
#0 4 * * * /home/aerya/scripts/rclone/sync.sh
# rClone move
#* * * * * /home/aerya/scripts/notifs/rclonemove.sh
# Bot Discord Scene
#@reboot python3 WarezBot.py -b *
# Permissions for the *arr apps
* * * * * chmod -R 777 /mnt/PRE/

@@ -0,0 +1,2 @@ fstab Arch
# DockerLab mount for backups
10.0.4.94:/volume1/Backup /mnt/DockerLab nfs rsize=8192,wsize=8192,timeo=14,intr

@@ -0,0 +1,20 @@ lftp Manuel
FilmsHDFR
lftp -u aerya,12345 -p 47812 *.*.*.* -e "set ftp:passive-mode on;set ftp:ssl-force true;set ftp:ssl-protect-data true;set ssl:verify-certificate false;set ftp:use-fxp true;cd MOViES-FR-HD;cls -ltr"
DocsHDFR
lftp -u aerya,12345 -p 47812 *.*.*.* -e "set ftp:passive-mode on;set ftp:ssl-force true;set ftp:ssl-protect-data true;set ssl:verify-certificate false;set ftp:use-fxp true;cd MOViES-FR-DOCS;cls -ltr"
TvHDFR
lftp -u aerya,12345 -p 47812 *.*.*.* -e "set ftp:passive-mode on;set ftp:ssl-force true;set ftp:ssl-protect-data true;set ssl:verify-certificate false;set ftp:use-fxp true;cd TV-FR-HD;cls -ltr"
lftp -u aerya,12345 -p 47812 -e "cd MOViES-FR-HD && cd Special.Correspondents.2016.FRENCH.720p.WEBRip.x264-SH0W && get special.correspondents.2016.french.720p.webrip.x264-sh0w.rar /home/st/SC; quit" *.*.*.*
lftp -u aerya,12345 -p 47812 -e 'mirror -i "\.(rar)$" MOViES-FR-HD/Special.Correspondents.2016.FRENCH.720p.WEBRip.x264-SH0W /home/st; quit' *.*.*.*
lftp -u aerya,12345 -p 47812 *.*.*.* -e "set ssl:verify-certificate no;cd archives/frxviddocs;cls -l --sort=date"
lftp -u aerya,12345 -p 47812 *.*.*.* -e "set ssl:verify-certificate no;cd archives/frxvidhd;cls -l --sort=date"
lftp -u aerya,12345 -p 47812 *.*.*.* -e "set ssl:verify-certificate no;cd archives/frtvhd;cls -l --sort=date"

@@ -0,0 +1,34 @@ lftp MuD Mirror - Unrar - Move - Delete
#!/bin/bash
# Script name .... : MuD.sh
# Date ........... : 05.2016
# Author ......... : Aerya
# Description .... : lftp - Mirror - Unrar - Move - Delete
# Notes .......... : -
# Usage .......... : "bash MuD.sh Dir1 Dir2" => Dir1 = category, Dir2 = target directory
# Variables ...... : Set them here and do not edit the rest of the script
# FTP_USER ....... : FTP login account
# FTP_PWD ........ : Password for the FTP login account
# FTP_IP ......... : FTP server IP
# FTP_PORT ....... : FTP server port
# TEMP_DIR ....... : Local directory for the mirror
# FINAL_DIR ...... : Local directory for the final file
FTP_USER='*'
FTP_PWD='*'
FTP_IP='*'
FTP_PORT='*'
TEMP_DIR='/home/TMP/'
FINAL_DIR='/home/st/OK/'
# Script ......... : DO NOT EDIT
# 1 .............. : lftp command: connect, navigate and fetch the target directory
# 2 .............. : Extract into the final directory and chmod 777 to avoid permission issues
# 3 .............. : Delete the mirror from the temporary directory
lftp -u "$FTP_USER","$FTP_PWD" -p "$FTP_PORT" -e "cd "$1" && mirror "$2" "$TEMP_DIR"; quit" "$FTP_IP"
unrar e "$TEMP_DIR""$2"/"*.rar" &>/dev/null "$FINAL_DIR" && chmod -R 777 "$FINAL_DIR"
rm -rf "$TEMP_DIR""$2"

@@ -0,0 +1,25 @@ lftp Python mirror
#!/usr/bin/env python3
import sys, os
#config
dir_dl = '/home/st/SC/'
ip_ftp = '*.*.*.*'
name_ftp = 'aerya'
pass_ftp = '12345'
port_ftp = '41269'
dir_lftp = 'lftp'
###########################################################################
def main():
try:
(script,cats,name) = sys.argv
except:
print "Usage: python [script.py] [category] [name]"
sys.exit(1)
os.system(''+dir_lftp+' -u '+name_ftp+','+pass_ftp+' -p '+port_ftp+' -e "cd '+cats+' && mirror '+name+' '+dir_dl+'; quit" '+ip_ftp)
if __name__ == '__main__': main()

@@ -0,0 +1,138 @@ mkTorrent
#!/bin/bash
#
# Script name .... : mktorrent.sh
# Date ........... : 09.2016
# Author ......... : Aerya | https://upandclear.org
# Description .... : Folder/file to .torrent, with automatic or manual piece size
# Requirements ... : mktorrent
# Usage .......... : "./create.sh"
# Variables ...... : Set them here and do not edit the rest of the script
USER=exrat
TRACKER="https://annonce.tracker.bt"
# Directory layout matching the mondedie.fr ruTorrent setup
TORRENT="/home/$USER/torrents"
WATCH="/home/$USER/watch"
####################################
FONCAUTO () {
TAILLE=$(du -s "$TORRENT"/"$FILE" | awk '{ print $1 }')
if [ "$TAILLE" -lt 524288 ]; then
PIECE=18 # 256 bytes
elif [ "$TAILLE" -lt 1048576 ]; then
PIECE=19 # 512 bytes
elif [ "$TAILLE" -lt 2097152 ]; then
PIECE=20 # 1024 bytes
elif [ "$TAILLE" -lt 4194304 ]; then
PIECE=21 # 2048 bytes
elif [ "$TAILLE" -lt 8388608 ]; then
PIECE=22 # 4096 bytes
elif [ "$TAILLE" -lt 16777216 ]; then
PIECE=23 # 8192 bytes
elif [ "$TAILLE" -lt 33554432 ]; then
PIECE=24 # 16384 bytes
else
PIECE=25 # 32768 bytes
fi
}
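# Worked example: du -s reports the size in 1 KiB blocks, so a 3 GiB release gives
# TAILLE of roughly 3145728, which is under the 4194304 threshold => PIECE=21, i.e. 2 MiB pieces.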
FONCCREATE () {
mktorrent -p -l "$PIECE" -a "$TRACKER" "$TORRENT"/"$FILE"
chown "$USER":"$USER" "$FILE".torrent
}
# Check that mktorrent is installed; install it if missing
command -v mktorrent >/dev/null 2>&1
if [ $? = 1 ]; then
apt-get install -y mktorrent
fi
if [ "$1" = "" ]; then
NAME=$(whiptail --title "Nom" --inputbox "Entrez le nom du fichier ou dossier source" 10 60 3>&1 1>&2 2>&3)
exitstatus=$?
if [ $exitstatus = 0 ]; then
FILE=$NAME
if [ -d "$TORRENT/$FILE" ] || [ -f "$TORRENT/$FILE" ]; then
echo
else
whiptail --title "Erreur" --msgbox "Le fichier ou dossier source n'existe pas\nVérifiez le nom exact" 13 60
exit 0
fi
else
echo "Vous avez annulé..."
exit 0
fi
OPTION=$(whiptail --title "Taille" --menu "Choisissez la taille de pièces du .torrent" 15 60 9 \
"1" " Automatique" \
"2" " 256 Ko" \
"3" " 512 Ko" \
"4" " 1 Mo" \
"5" " 2 Mo" \
"6" " 4 Mo" \
"7" " 8 Mo" \
"8" " 16 Mo" \
"9" " 32 Mo" 3>&1 1>&2 2>&3)
if [ "$OPTION" = 1 ]; then
FONCAUTO
elif [ "$OPTION" = 2 ]; then
PIECE=18 # 256 bytes
elif [ "$OPTION" = 3 ]; then
PIECE=19 # 512 bytes
elif [ "$OPTION" = 4 ]; then
PIECE=20 # 1024 bytes
elif [ "$OPTION" = 5 ]; then
PIECE=21 # 2048 bytes
elif [ "$OPTION" = 6 ]; then
PIECE=22 # 4096 bytes
elif [ "$OPTION" = 7 ]; then
PIECE=23 # 8192 bytes
elif [ "$OPTION" = 8 ]; then
PIECE=24 # 16384 bytes
elif [ "$OPTION" = 9 ]; then
PIECE=25 # 32768 bytes
else
echo "Vous avez annulé..."
exit 0
fi
FONCCREATE
SEED=$(whiptail --title "Mise en seed" --menu "Voulez vous mettre le torrent en seed ?" 15 60 2 \
"1" " Oui" \
"2" " Non" 3>&1 1>&2 2>&3)
if [ "$SEED" = 1 ]; then
mv "$FILE".torrent "$WATCH"/"$FILE".torrent
whiptail --title "Ok" --msgbox " Torrent crée en:\n $WATCH/$FILE.torrent\n Source:\n $TORRENT/$FILE" 13 60
elif [ "$SEED" = 2 ]; then
mv "$FILE".torrent /home/"$USER"/"$FILE".torrent
whiptail --title "Ok" --msgbox " Torrent crée en:\n /home/$USER/$FILE.torrent\n Source:\n $TORRENT/$FILE" 13 60
else
rm "$FILE".torrent
echo "Vous avez annulé..."
exit 0
fi
elif [ "$1" = "--auto" ]; then
FILE="$2"
FONCAUTO
FONCCREATE
mv "$FILE".torrent "$WATCH"/"$FILE".torrent
else
FILE="$1"
FONCAUTO
FONCCREATE
echo -n -e "Voulez vous mettre le torrent en seed ? (y/n): "
read -r SEED
if [ "$SEED" = "y" ]; then
mv "$FILE".torrent "$WATCH"/"$FILE".torrent
echo "$FILE.torrent en seed"
else
mv "$FILE".torrent /home/"$USER"/"$FILE".torrent
echo "$FILE.torrent en /home/$USER"
fi
fi

@@ -0,0 +1,5 @@ nzb to torrent
#!/bin/bash
for file in /mnt/BT/WatchDir_SBMDD/Medusa/*.nzb
do
mv "$file" "${file%.nzb}.torrent"
done

@@ -0,0 +1,19 @@ rClone Move
#!/bin/bash
LOG="/home/aerya/logs/rclone.log"
EXC1="/SD/**"
EXC2="/Ratio/**"
EXC3="/PreSeries/**"
EXC4="/PreFilms/**"
EXC5="/PreComics/**"
if pidof -o %PPID -x "$0"; then
exit 1
fi
sleep 30
while true
do
echo "$(date "+%d.%m.%Y %T") RCLONE UPLOAD STARTED" | tee -a "$LOG"
rclone move -c -v --copy-links --exclude='**partial~' --exclude="**_HIDDEN~" --exclude=".unionfs/**" --exclude=".unionfs-fuse/**" --exclude="$EXC1" --exclude="$EXC2" --exclude="$EXC3" --exclude="$EXC4" --exclude="$EXC5" --min-age=5m --user-agent="Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/60.0.3112.113 Safari/537.36" --drive-service-account-file=/home/aerya/scripts/rclone/sakeys/aer01move.json --fast-list --drive-stop-on-upload-limit --transfers=10 --checkers=10 --stats 5s --log-file="$LOG" /mnt/PRE Aer01Crypt:/
sleep 480
echo "$(date "+%d.%m.%Y %T") RCLONE UPLOAD ENDED" | tee -a "$LOG"
done

@@ -0,0 +1,31 @@ rClone Service Mount
[Unit]
Description=RClone Service
Wants=network-online.target
After=network-online.target
[Service]