This commit is contained in:
Anthony Axenov 2023-06-28 22:15:23 +08:00
parent ed013d9321
commit 9cb6aee0c3
Signed by: anthony
GPG Key ID: EA9EC32FF7CCD4EC
18 changed files with 1689 additions and 346 deletions

View File

@ -1,6 +1,6 @@
# Github Gists # Github Gists
Репозиторий предназначен для резервного хранения сниппетов из моего [gist.github.com](https://gist.github.com/anthonyaxenov). Резервный репозиторий публичных сниппетов из [gist.github.com/anthonyaxenov](https://gist.github.com/anthonyaxenov).
Оригинальные сниппеты в первую очередь я изменяю именно в гистах, потом здесь. Оригинальные сниппеты в первую очередь я изменяю именно в гистах, потом здесь.

104
cfg/.bash_aliases Normal file
View File

@ -0,0 +1,104 @@
#!/bin/bash
# Interactive-shell aliases and small helper functions.
# alias bashrc='source ~/.bashrc'
alias zshrc='source ~/.zshrc'
alias realias='source ~/.bash_aliases'
alias reload='exec ${SHELL} -l'
alias sudo='sudo ' # enable aliases to be sudoed
alias g='git'
alias hosts="sudo nano /etc/hosts"
alias shrug="echo '¯\_(ツ)_/¯' | xclip -selection c"
alias ..='cd ..' # zsh builtin
alias ~='cd ~' # zsh builtin
# alias "--"='cd -' # zsh builtin
alias chmod='chmod --preserve-root'
alias chown='chown --preserve-root'
alias free='free -h'
alias duh='du -ha --max-depth=1'
alias sduh='sudo du -ha --max-depth=1'
alias l='ls -pCFh --color=auto'
alias la='ls -pAFh --color=auto'
alias ll='ls -palFh --color=auto'
alias mkdir='mkdir -pv'
alias where='whereis' # zsh builtin
alias ps='ps auxf'
alias psg='ps aux | grep -v grep | grep -i -e VSZ -e'
alias is='type -a'
alias upgrade='sudo apt update && sudo apt upgrade -y && sudo snap refresh'
# NOTE(review): despite the name, -czf *creates* an archive; -xzf extracts — confirm intent
alias untargz='tar -czf'
# mkcd/cl used to be aliases containing "$1": aliases take no positional
# arguments, so "$1" expanded (to nothing) at definition time and the
# commands were broken. Functions are required here.
mkcd() { mkdir -p "$1" && cd "$1" || return; }
cl() { cd "$1" && ll; }
alias myip='curl http://ipecho.net/plain; echo'
alias ports='netstat -tulpan'
alias ssh.pub='cat ~/.ssh/*.pub'
alias gpg.new="gpg --full-generate-key"
# arguments are appended to an alias automatically; "$@" in a double-quoted
# alias expanded at definition time and was useless
alias gpg.pub="gpg --armor --export"
alias gpg.list='gpg --list-keys --keyid-format SHORT'
alias lite-xl="LITE_SCALE=1 lite-xl"
alias wine='LANG=ru_RU.utf8 wine'
alias docker.prune='docker image prune -f; docker network prune -f; docker container prune -f'
# https://obsproject.com/forum/threads/how-to-start-virtual-camera-without-sudo-privileges.139783/
alias obscam="sudo modprobe v4l2loopback video_nr=2 card_label='OBS Virtual Camera'"
# Print curl's timing breakdown for a URL (DNS lookup, TCP connect, TLS
# handshake, redirects, first byte, total).
# Usage: curltime <url> [extra curl options...]
curltime() {
# the format string is fed to `curl -w` via stdin; quoted 'EOF' keeps
# the %{...} placeholders literal for curl to expand
curl -w @- -o /dev/null -s "$@" <<'EOF'
time_namelookup: %{time_namelookup} sec\n
time_connect: %{time_connect} sec\n
time_appconnect: %{time_appconnect} sec\n
time_pretransfer: %{time_pretransfer} sec\n
time_redirect: %{time_redirect} sec\n
time_starttransfer: %{time_starttransfer} sec\n
---------------\n
time_total: %{time_total} sec\n
EOF
}
# Download music from Youtube or Youtube Music
# and save as top quality flac file without video
# Playlist and video/track URLs are supported
# Usage: $ ytm https://www.youtube.com/watch\?v=dQw4w9WgXcQ
# More info: https://github.com/ytdl-org/youtube-dl
ytm() {
    youtube-dl \
        --extract-audio \
        --audio-format flac \
        --audio-quality 0 \
        --format bestaudio \
        --write-info-json \
        --output "${HOME}/Музыка/ytm/%(playlist_title)s/%(channel)s - %(title)s.%(ext)s" \
        "$@" # quoted (was bare $@): URLs with spaces/&/? must stay intact
}
# Show IP addresses of docker containers.
#   docker.ip            - running containers
#   docker.ip -a         - all containers, running or not
#   docker.ip -c         - containers of the current docker-compose project
#   docker.ip <name|id>  - one container's IP(s) plus its port mappings
docker.ip() {
    local fmt_named='{{.Name}}{{range .NetworkSettings.Networks}} {{.IPAddress}}{{end}}'
    local fmt_plain='{{range .NetworkSettings.Networks}}{{.IPAddress}}{{end}}'
    case "$1" in
        -a)
            docker ps -aq \
                | xargs -n 1 docker inspect --format "$fmt_named" \
                | sed -e 's#^/##' \
                | column -t
            ;;
        -c)
            docker-compose ps -q \
                | xargs -n 1 docker inspect --format "$fmt_named" \
                | sed -e 's#^/##' \
                | column -t
            ;;
        '')
            docker ps -q \
                | xargs -n 1 docker inspect --format "$fmt_named" \
                | sed -e 's#^/##' \
                | column -t
            ;;
        *)
            docker inspect --format "$fmt_plain" "$1"
            docker port "$1"
            ;;
    esac
}

View File

@ -1,88 +1,20 @@
# https://gist.github.com/anthonyaxenov/020b25ea53701d82902a7acfb557866c [user]
# ...прочие настройки # ...
[core] # signingkey = <key>
editor = nano # git config user.signingkey ... -- установить ключ
autocrlf = input [commit]
gpgSign = true
[tag]
gpgSign = true
[push]
default = current
[pull] [pull]
default = current default = current
rebase = false rebase = false
[push] [core]
default = current editor = nano
[alias] autocrlf = input
[remote "origin"]
# общее ----------------------------------------------------------------- prune = true
aliases = config --get-regexp '^alias' # показать список доступных алиасов [include]
head = log -1 HEAD # показать последний коммит в текущей ветке path = path/to/git_aliases
# название текущей ветки или тега при detached head:
dehead = "!BR=$(git branch --show-current); if [ -n \"$BR\" ]; then echo $BR; else git describe --contains --all HEAD; fi;"
# ветки -------------------------------------------------------------------
bheads = branch -vv # ветки и их последние коммиты
# br = status -sb # показать название текущей ветки
# branch = branch --list -vv # показать текущую ветку в общем списке локальных веток
#mn = merge --no-ff # слить ветку с принудительным коммитом слияния
brd = branch -D # удалить ветку локально
brod = "!git branch -D "${1}"; git push origin :"${1}";" # удалить ветку локально и на origin
merged = branch --merged # показать список веток, слитых в текущую
ghpr = "!git fetch origin pull/$1/head:pr/$1 && git checkout pr/$1" # github: встать на PR с указанным id
# удалить локальные ветки, слитые в текущую:
trim = "!DEFAULT=master; git branch --merged ${1-$DEFAULT} | grep -v " ${1-$DEFAULT}$" | xargs git branch -d; git remote prune origin;"
# переключение ------------------------------------------------------------
co = checkout # переключиться на ветку/тег/коммит
cob = checkout -b # создание новое ветки
master = "!git checkout master && git pull" # переключиться на ветку master и обновить
dev = "!git checkout dev && git pull" # переключиться на ветку dev и обновить
develop = "!git checkout develop && git pull" # переключиться на ветку develop и обновить
# фиксация изменений ------------------------------------------------------
c = commit # коммит
ca = commit -a # коммит всех файлов
cm = commit -m # коммит с заданным сообщением
cam = commit -am # коммит всех файлов с заданным сообщением
amend = commit --amend --no-edit # прикрепляет все индексированные файлы к последнему коммиту, используя уже существующее сообщение
# amenda = commit --amend --no-edit -a
amendm = commit --amend -m # прикрепляет все индексированные файлы к последнему коммиту, спрашивает новое сообщение к коммиту
cp = cherry-pick # применить коммит поверх текущего HEAD
diffc = diff --cached # показать дельту staged-файла
# управление изменениями, сброс состояний, откат --------------------------
st = status -sb # короткий status
rh = reset --hard # откат коммита с удалением всех изменений на указанный коммит
rhh = reset --hard HEAD # откат коммита с удалением всех изменений на последний коммит
reseth = reset --mixed HEAD # откат коммита с сохранением всех изменений
unstage = reset HEAD # переводит файл staged => unstaged без потери изменений
clear = checkout -- # удаляет изменения в файле
# алиасы для временной фиксации
# на самом деле, для таких дел надо использовать git stash
# save = !git add -A && git commit -m 'SAVEPOINT'
# wip = commit -am "WIP"
# undo = reset HEAD~1 --mixed
# работа с remote-репами --------------------------------------------------
pushf = push --force # отправить ветку принудительно
pusht = push --tags # отправить теги
pushft = push --tags --force # отправить теги принудительно
pullt = pull --tags --force # получить теги
ploh = pull origin HEAD # получить текущую ветку из origin
remotes = remote -v # показать список удалённых репозиториев
#sy = remote update origin --prune #
rso = remote show origin # показать состояние локальных веток относительно удалённых на origin bare
rpo = remote prune origin # удалить все мёртвые ссылки на bare-ветки origin
repush = 'git push origin :$1 && git push origin $1'
# просмотр логов ----------------------------------------------------------
heads = log --graph --decorate --simplify-by-decoration --oneline # коммиты, которыми оканчиваются ветки
tree = log --graph --pretty=format:'%C(yellow)%h%C(cyan)%d%Creset %s %C(white)- %an, %ar%Creset'
hist = log --pretty=format:\"%h | %an (%ad) %s%d\" --graph --date=relative
logfull = log --graph --decorate --all
grog = log --graph --decorate --all --pretty=format:'%C(bold red)%h%C(reset) %C(bold blue)%an%C(reset) %C(green)%cr%C(reset) [%aD]%d%n%B'
# сабмодули ---------------------------------------------------------------
si = submodule init # инициализация сабмодулей
sdi = submodule deinit -f # удаление сабмодуля
sa = submodule add # добавление сабмодуля
sup = submodule update # обновление сабмодуля
sst = submodule status # статус сабмодулей
ss = submodule summary # список сабмодулей

86
cfg/git_aliases Normal file
View File

@ -0,0 +1,86 @@
[alias]
	# general -----------------------------------------------------------------
	init = init -q # quiet init
	aliases = config --get-regexp '^alias' # list all available aliases
	user = config --local --get-regexp '^user' # local git user settings
	guser = config --global --get-regexp '^user' # global git user settings
	user-me = "!git config user.name 'Anthony Axenov'; git config user.email 'anthonyaxenov@gmail.com'; git config user.signingkey 'F7CCD4EC'"
	# branches ----------------------------------------------------------------
	bheads = branch -vv # branches with their latest commits
	branches = branch --list -vv # current branch within the list of local branches
	# br = status -sb # show current branch name
	brd = branch -D # delete a branch locally
	# delete a branch locally and on origin; wrapped in a shell function:
	# git appends alias arguments at the end, so a bare $1 would stay empty
	brod = "!f() { git branch -D \"$1\"; git push origin :\"$1\"; }; f"
	merged = branch --merged # list branches merged into the current one
	#ghpr = "!git fetch origin pull/$1/head:pr/$1 && git checkout pr/$1" # github: check out the PR with the given id
	# delete local branches merged into the given branch (master by default):
	trim = "!f() { DEFAULT=master; git branch --merged \"${1-$DEFAULT}\" | grep -v \" ${1-$DEFAULT}$\" | xargs git branch -d; git remote prune origin; }; f"
	# switching ---------------------------------------------------------------
	co = checkout # switch to a branch/tag/commit
	cob = checkout -b # create a new branch
	master = "!git checkout master && git pull" # switch to master and update it
	dev = "!git checkout dev && git pull" # switch to dev and update it
	develop = "!git checkout develop && git pull" # switch to develop and update it
	# committing --------------------------------------------------------------
	c = commit # commit
	ca = commit -a # commit all files
	cm = commit -m # commit with the given message
	cam = commit -am # commit all files with the given message
	amend = commit --amend --no-edit -a # add everything to the last commit, keeping its message
	#amenda = commit --amend --no-edit
	amendm = commit --amend -m # add everything to the last commit, asking for a new message
	cp = cherry-pick # apply a commit on top of the current HEAD
	diffc = diff --cached # show the delta of a staged file
	# managing changes, resetting state, rolling back -------------------------
	# st = status -sb # short status
	st = status # shortcut
	rh = reset --hard # roll back to the given commit discarding all changes
	rhh = reset --hard HEAD # roll back to the last commit discarding all changes
	rmh = reset --mixed HEAD # roll back a commit keeping all changes
	unstage = reset HEAD # move a file staged => unstaged without losing changes
	clear = checkout -- # discard changes in a file
	# aliases for temporary commits
	# (really, git stash is the right tool for this)
	# save = !git add -A && git commit -m 'SAVEPOINT'
	wip = commit -am "WIP"
	wipa = commit --amend -am "WIP"
	undo = reset --mixed HEAD~
	# working with remotes ----------------------------------------------------
	pushf = push --force # push the branch forcibly
	pusht = push --tags # push tags
	pushft = push --tags --force # push tags forcibly
	pullf = pull --force # pull the branch forcibly
	pullt = pull --tags # pull tags
	pullft = pull --tags --force # pull tags forcibly
	ploh = pull origin HEAD # pull the current branch from origin
	remotes = remote -v # list remote repositories
	#sy = remote update origin --prune #
	rso = remote show origin # show state of local branches against origin bare ones
	rpo = remote prune origin # drop all dead refs to bare branches on origin
	# delete and push the branch again; the old value was single-quoted and
	# lacked the leading "!", so git treated it as (invalid) arguments
	repush = "!f() { git push origin :\"$1\" && git push origin \"$1\"; }; f" # replacement for push --force
	# viewing logs ------------------------------------------------------------
	head = log -1 HEAD # show the last commit of the current branch
	heads = log --graph --decorate --simplify-by-decoration --oneline # commits each branch ends with
	# current branch name, or tag when on a detached head:
	dehead = "!BR=$(git branch --show-current); if [ -n \"$BR\" ]; then echo $BR; else git describe --contains --all HEAD; fi;"
	tree = log --graph --pretty=format:'%C(yellow)%h%C(cyan)%d%Creset %s %C(white)- %an, %ar%Creset'
	hist = log --pretty=format:\"%h | %an (%ad) %s%d\" --graph --date=relative
	logfull = log --graph --decorate --all
	grog = log --graph --decorate --all --pretty=format:'%C(bold red)%h%C(reset) %C(bold blue)%an%C(reset) %C(green)%cr%C(reset) [%aD]%d%n%B'
	# submodules --------------------------------------------------------------
	sub = submodule # shortcut
	# si = submodule init # init submodules
	# sdi = submodule deinit -f # deinit a submodule
	# sa = submodule add # add a submodule
	# sup = submodule update # update a submodule
	# sst = submodule status # submodule status
	# ss = submodule summary # list submodules

35
php/here.php Normal file
View File

@ -0,0 +1,35 @@
<?php
/**
 * Returns the caller's class/file name, function name and line number
 * for the location where here() was called.
 *
 * Potentially doesn't cover all cases, but is simple and pretty handy for use in frameworks.
 *
 * @param bool $as_array result as array or string in this format: `<file|class>:<func>():<line>`
 * @return string|array
 */
function here(bool $as_array = false): string|array
{
    // DEBUG_BACKTRACE_IGNORE_ARGS: no objects, no argument lists; only the
    // two frames needed (here() itself and its caller) are requested.
    // The original expression `!DEBUG_BACKTRACE_PROVIDE_OBJECT |
    // DEBUG_BACKTRACE_IGNORE_ARGS` evaluated to the same value (2),
    // but was needlessly obscure.
    $trace = debug_backtrace(DEBUG_BACKTRACE_IGNORE_ARGS, 2);
    return $as_array
        ? [
            'from' => $trace[1]['class'] ?? $trace[0]['file'],
            'function' => $trace[1]['function'],
            'line' => $trace[0]['line'],
        ]
        : sprintf(
            '%s%s%s():%s',
            $trace[1]['class'] ?? $trace[0]['file'],
            $trace[1]['type'] ?? '::',
            $trace[1]['function'],
            $trace[0]['line']
        );
}
// Usage:
class MyClass {
    public function test(): string {
        return here();
    }
}
echo (new MyClass)->test(); // e.g. MyClass->test():4 (line number of the here() call)

26
shell/dc Normal file
View File

@ -0,0 +1,26 @@
#!/bin/bash
# Thin wrapper around docker-compose for a single project.
# Known subcommands are handled below; anything else is passed
# to `docker exec -ti $CONTAINER ...`.
CONTAINER="my-container" # the name of the container in which to 'exec' something
# path to the compose yml next to this script, following a symlink if any
# ($0 quoted everywhere: the script path may contain spaces)
CONFIG="$(dirname "$([ -L "$0" ] && readlink -f "$0" || echo "$0")")/docker-compose.yml"
CMD="docker-compose -f $CONFIG" # docker-compose command (word-splitting intended)
APP_URL='http://localhost:8000/'

# Open $1 in the default browser, detached from this shell.
# `command -v` replaces the external `which` (not always installed).
open_browser() {
    if command -v xdg-open > /dev/null; then
        xdg-open "$1" </dev/null >/dev/null 2>&1 & disown
    elif command -v gnome-open > /dev/null; then
        gnome-open "$1" </dev/null >/dev/null 2>&1 & disown
    fi
}

case "$1" in
    '' | 'help' ) echo -e "Provide one of operations: \t start, stop, up, down, restart, rebuild, open";
                  echo "Otherwise all args will be passed to 'docker exec -ti $CONTAINER ...'" ;;
    'open' ) open_browser "$APP_URL" ;;
    'up' ) $CMD up -d --build ;; # build and start containers
    'down' ) $CMD down --remove-orphans ;; # stop and remove containers
    'start' ) $CMD start ;; # start containers
    'stop' ) $CMD stop ;; # stop containers
    'restart' ) $CMD stop && $CMD start ;; # restart containers
    'rebuild' ) $CMD down --remove-orphans && $CMD up -d --build ;; # rebuild containers
    * ) docker exec -ti "$CONTAINER" "$@" ;; # exec anything in container
esac

607
shell/helpers.sh Normal file
View File

@ -0,0 +1,607 @@
#########################################################################
# #
# Bunch of helpers for bash scripting #
# #
# This file is compilation from some of my projects. #
# I'm not sure they're all in perfect condition but I use them #
# time to time in my scripts. #
# #
#########################################################################
######################################
# Little handy helpers for scripting
######################################
# True if $1 is available as a command (binary in PATH, builtin or function).
installed() {
    local cmd=$1
    command -v "$cmd" >/dev/null 2>&1
}
# True if dpkg package $1 is installed.
# The old check `dpkg --list | grep -qw "ii $1"` never matched: dpkg
# separates the status column with two spaces, and the pattern could also
# hit substrings of other package names.
installed_pkg() {
    dpkg-query -W -f='${Status}' "$1" 2>/dev/null | grep -q 'ok installed'
}
# Install the given packages via apt, non-interactively, autoremoving
# unused dependencies. "$@" (was "$*"): each package name must remain
# a separate argument, not one space-joined string.
apt_install() {
    sudo apt install -y --autoremove "$@"
}
# Ensure every given command/package is available, installing missing ones
# via apt. Exits 201 when installation fails or is cancelled.
require() {
    local sw=()
    local package
    for package in "$@"; do
        if ! installed "$package" && ! installed_pkg "$package"; then
            sw+=("$package")
        fi
    done
    if [ ${#sw[@]} -gt 0 ]; then
        # printf: plain echo (without -e) printed the \n literally
        printf 'These packages will be installed in your system:\n%s\n' "${sw[*]}"
        # pass packages as separate args (was a single "${sw[*]}" string)
        if ! apt_install "${sw[@]}"; then
            echo "installation cancelled"
            exit 201
        fi
    fi
}
# Ensure every given command/package is available; exit 200 if any is
# missing. Unlike require(), does not attempt installation.
require_pkg() {
    local sw=()
    local package
    for package in "$@"; do
        if ! installed "$package" && ! installed_pkg "$package"; then
            sw+=("$package")
        fi
    done
    if [ ${#sw[@]} -gt 0 ]; then
        # printf: plain echo (without -e) printed the \n literally
        printf 'These packages must be installed in your system:\n%s\n' "${sw[*]}"
        exit 200
    fi
}
# Abort with exit code 1 unless directory $1 exists.
require_dir() {
    is_dir "$1" || die "Directory '$1' does not exist!" 1
}
# Print a framed section title via info().
# With no argument, the title is extracted from a '##makedesc: ...'
# marker line in the calling script (BASH_SOURCE[1]).
title() {
    [ "$1" ] && title="$1" || title="$(grep -m 1 -oP "(?<=^##makedesc:\s).*$" ${BASH_SOURCE[1]})"
    info
    info "==============================================="
    info "$title"
    info "==============================================="
    info
}
# Extract .tar.gz archive $1 into directory $2.
unpak_targz() {
    require tar
    tar -xzf "$1" -C "$2"
}
# Create (or forcibly replace) symlink $2 pointing at target $1.
symlink() {
    local target=$1 link=$2
    ln -sf "$target" "$link"
}
# Download URL $1 to local path $2 via wget.
download() {
    require wget
    wget "$1" -O "$2"
}
# git clone with the given arguments.
# "$@" (was unquoted $*): arguments with spaces must stay intact.
clone() {
    require git
    git clone "$@"
}
# Shallow single-branch git clone — faster for throwaway checkouts.
# "$@" (was unquoted $*): arguments with spaces must stay intact.
clone_quick() {
    require git
    git clone "$@" --depth=1 --single-branch
}
# Expand a leading '~' in $1 to $HOME and resolve to an absolute path.
# realpath's output is printed directly: the old `echo $(realpath ...)`
# word-split the result, collapsing whitespace inside paths.
abspath() {
    realpath -q "${1/#\~/$HOME}"
}
# True if path $1 (after ~ expansion) is writable.
# $1 is now quoted inside the command substitution: unquoted it was
# word-split before reaching abspath.
is_writable() {
    [ -w "$(abspath "$1")" ]
}
# True if path $1 (after ~ expansion) is a directory.
is_dir() {
    [ -d "$(abspath "$1")" ]
}
# True if path $1 (after ~ expansion) is a regular file.
is_file() {
    [ -f "$(abspath "$1")" ]
}
# True if $1 is the name of a declared shell function.
is_function() {
    local fn=$1
    declare -F "$fn" > /dev/null
}
# True if string $1 matches Perl-compatible regex $2.
regex_match() {
    local str=$1 pattern=$2
    printf '%s' "$str" | grep -qP "$pattern"
}
# True if $1 equals one of the remaining arguments.
# Usage: in_array needle item1 item2 ...
in_array() {
    local needle=$1 item
    shift
    for item in "$@"; do
        if [[ "$item" == "$needle" ]]; then
            return 0
        fi
    done
    return 1
}
# Join arguments 2..N with delimiter $1: implode , a b c  ->  a,b,c
# Prints nothing when fewer than two arguments are given.
implode() {
    local sep=${1-}
    local first=${2-}
    shift 2 || return 0
    printf '%s' "$first" "${@/#/$sep}"
}
# Open URL $1 in the user's default browser, detached from the terminal.
# `command -v` replaces the external `which` (not always installed).
open_url() {
    if command -v xdg-open > /dev/null; then
        xdg-open "$1" </dev/null >/dev/null 2>&1 & disown
    elif command -v gnome-open > /dev/null; then
        gnome-open "$1" </dev/null >/dev/null 2>&1 & disown
    fi
}
########################################################
# Desktop notifications
########################################################
# Show a desktop notification via notify-send.
# $1 - title (default "My notification"), $2 - body text,
# $3 - urgency level (critical/low/anything else = normal), $4 - icon name.
# Timeout depends on level: critical = sticky, low = 5s, otherwise 10s.
notify () {
    require "notify-send"
    [ -n "$1" ] && local title="$1" || local title="My notification"
    local text="$2"
    local level="$3"
    local icon="$4"
    case $level in
        "critical") local timeout=0 ;;
        "low") local timeout=5000 ;;
        *) local timeout=10000 ;;
    esac
    notify-send "$title" "$text" -a "MyScript" -u "$level" -i "$icon" -t $timeout
}
# Presets for the common notification levels:
notify_error() {
    notify "Error" "$1" "critical" "dialog-error"
}
notify_warning() {
    notify "Warning" "$1" "normal" "dialog-warning"
}
notify_info() {
    notify "" "$1" "low" "dialog-information"
}
######################################
# Input & output
######################################
# Message prefixes (icons) used by the print helpers below:
IINFO="( i )"
INOTE="( * )"
IWARN="( # )"
IERROR="( ! )"
IFATAL="( @ )"
ISUCCESS="( ! )"
IASK="( ? )"
IDEBUG="(DBG)"
IVRB="( + )"
# ANSI SGR text attributes:
BOLD="\e[1m"
DIM="\e[2m"
NOTBOLD="\e[22m" # sometimes \e[21m
NOTDIM="\e[22m"
NORMAL="\e[20m" # NOTE(review): \e[20m is not a standard SGR attribute reset — confirm
RESET="\e[0m"
# Foreground colors (F*):
FRESET="\e[39m"
FBLACK="\e[30m"
FWHITE="\e[97m"
FRED="\e[31m"
FGREEN="\e[32m"
FYELLOW="\e[33m"
FBLUE="\e[34m"
FLRED="\e[91m"
FLGREEN="\e[92m"
FLYELLOW="\e[93m"
FLBLUE="\e[94m"
# Background colors (B*):
BRESET="\e[49m"
BBLACK="\e[40m"
BWHITE="\e[107m"
BRED="\e[41m"
BGREEN="\e[42m"
BYELLOW="\e[43m"
BBLUE="\e[44m"
BLRED="\e[101m"
BLGREEN="\e[102m"
BLYELLOW="\e[103m"
BLBLUE="\e[104m"
# Current time as "[HH:MM:SS] " — prefix for log-style output.
dt() {
    echo "[$(date +'%H:%M:%S')] "
}
# Prompt with styled question $1; the answer is stored in the variable
# whose name is given in $2.
ask() {
    IFS= read -rp "$(print ${BOLD}${BBLUE}${FWHITE}${IASK}${BRESET}\ ${BOLD}$1 ): " $2
}
# echo -e with a trailing reset of all attributes and colors.
print() {
    echo -e "$*${RESET}"
}
# Dimmed debug output; when a second argument is given, also prints the
# calling function name and line number.
debug() {
    if [ "$2" ]; then
        print "${DIM}${BOLD}${RESET}${DIM} ${FUNCNAME[1]:-?}():${BASH_LINENO:-?}\t$1 "
    else
        print "${DIM}${BOLD}${RESET}${DIM}$1 "
    fi
}
# Print "name = value" for the variable named $1 (indirect expansion).
var_dump() {
    debug "$1 = ${!1}" 0
}
# Verbose-level message.
verbose() {
    print "${BOLD}${IVRB}${RESET}${FYELLOW} $1 "
}
# Informational message.
info() {
    print "${BOLD}${FWHITE}${BLBLUE}${IINFO}${RESET}${FWHITE} $1 "
}
# Low-key note.
note() {
    print "${BOLD}${DIM}${FWHITE}${INOTE}${RESET} $1 "
}
# Success message.
success() {
    print "${BOLD}${BGREEN}${FWHITE}${ISUCCESS}${BRESET}$FGREEN $1 "
}
# Warning message.
warn() {
    print "${BOLD}${BYELLOW}${FBLACK}${IWARN}${BRESET}${FYELLOW} Warning:${RESET} $1 "
}
# Error message (to stderr).
error() {
    print "${BOLD}${BLRED}${FWHITE}${IERROR} Error: ${BRESET}${FLRED} $1 " >&2
}
# Fatal error (to stderr) followed by a stack trace.
fatal() {
    print "${BOLD}${BRED}${FWHITE}${IFATAL} FATAL: $1 " >&2
    print_stacktrace
}
# Print error $1 (default "halted") and exit with code $2 (default 100).
die() {
    error "${1:-halted}"
    exit ${2:-100}
}
# Dump the current bash call stack via debug().
print_stacktrace() {
    STACK=""
    local i
    local stack_size=${#FUNCNAME[@]}
    debug "Callstack:"
    # for (( i=$stack_size-1; i>=1; i-- )); do
    for (( i=1; i<$stack_size; i++ )); do
        local func="${FUNCNAME[$i]}"
        [ x$func = x ] && func=MAIN
        local linen="${BASH_LINENO[$(( i - 1 ))]}"
        local src="${BASH_SOURCE[$i]}"
        [ x"$src" = x ] && src=non_file_source
        debug " at $func $src:$linen"
    done
}
########################################################
# Tests
########################################################
# Execute command $1 silently and report PASSED (exit 0) or FAILED
# together with the command's actual exit code.
# $1 - command to exec
assert_exec() {
    [ "$1" ] || exit 1
    local prefix="$(dt)${BOLD}${FWHITE}[TEST EXEC]"
    local text code
    # run the command directly; the old `$( $1 ... )` also executed
    # whatever the command printed to stdout
    if $1 1>/dev/null 2>&1; then
        code=$?
        text="${BGREEN} PASSED"
    else
        code=$?
        text="${BLRED} FAILED"
    fi
    # the old version read $? only here, after two assignments had
    # already run, so the reported code was always 0
    print "${prefix} ${text} ${BRESET} (${code}):${RESET} $1"
}
# usage:
# func1() {
# return 0
# }
# func2() {
# return 1
# }
# assert_exec "func1" # PASSED
# assert_exec "func2" # FAILED
# assert_exec "whoami" # PASSED
# $1 - command to exec
# $2 - expected output
# Run command $1, capture its combined output and report PASSED when the
# output contains the expected substring $2 (empty by default).
# $1 - command to exec
# $2 - expected output
assert_output() {
    [ "$1" ] || exit 1
    local expected="${2-}"
    local prefix="$(dt)${BOLD}${FWHITE}[TEST OUTP]"
    local output code text
    # declaration split from assignment: `local x=$(cmd)` masks the
    # command's exit status ($? was always 0)
    output=$($1 2>&1)
    code=$?
    if [[ "$output" == *"$expected"* ]]; then
        text="${BGREEN} PASSED"
    else
        text="${BLRED} FAILED"
    fi
    print "${prefix} ${text} ${BRESET} (${code}|${expected}):${RESET} $1"
    # print "\tOutput > $output"
}
# usage:
# func1() {
# echo "some string"
# }
# func2() {
# echo "another string"
# }
# expect_output "func1" "string" # PASSED
# expect_output "func2" "some" # FAILED
# expect_output "func2" "string" # PASSED
# $1 - command to exec
# $2 - expected exit-code
# Run command $1 silently and report PASSED when its exit code equals the
# expected code $2 (default 0).
assert_code() {
    [ "$1" ] || exit 1
    local expected=${2:-0}
    local prefix="$(dt)${BOLD}${FWHITE}[TEST CODE]"
    # run the command directly; the old `$( $1 ... )` also executed
    # whatever the command printed to stdout
    $1 1>/dev/null 2>&1
    local code=$?
    local text
    if [[ $code -eq $expected ]]; then
        text="${BGREEN} PASSED"
    else
        text="${BLRED} FAILED"
    fi
    print "${prefix} ${text} ${BRESET} (${code}|${expected}):${RESET} $1"
}
# usage:
# func1() {
# # exit 0
# return 0
# }
# func2() {
# # exit 1
# return 1
# }
# expect_code "func1" 0 # PASSED
# expect_code "func1" 1 # FAILED
# expect_code "func2" 0 # FAILED
# expect_code "func2" 1 # PASSED
########################################################
# Misc
########################################################
# Print curl's timing breakdown for a URL (DNS lookup, TCP connect, TLS
# handshake, redirects, first byte, total).
# Usage: curltime <url> [extra curl options...]
curltime() {
# the format string is fed to `curl -w` via stdin; quoted 'EOF' keeps
# the %{...} placeholders literal for curl to expand
curl -w @- -o /dev/null -s "$@" <<'EOF'
time_namelookup: %{time_namelookup} sec\n
time_connect: %{time_connect} sec\n
time_appconnect: %{time_appconnect} sec\n
time_pretransfer: %{time_pretransfer} sec\n
time_redirect: %{time_redirect} sec\n
time_starttransfer: %{time_starttransfer} sec\n
---------------\n
time_total: %{time_total} sec\n
EOF
}
# Download audio from Youtube / Youtube Music as best-quality flac
# (no video). Accepts the same URL arguments as youtube-dl.
ytm() {
    youtube-dl \
        --extract-audio \
        --audio-format flac \
        --audio-quality 0 \
        --format bestaudio \
        --write-info-json \
        --output "${HOME}/Downloads/ytm/%(playlist_title)s/%(channel)s - %(title)s.%(ext)s" \
        "$@" # quoted (was bare $*): URLs with spaces/&/? must stay intact
}
# Show IP addresses of docker containers.
# no args: running containers; -a: all containers; -c: containers of the
# current docker-compose project; <name|id>: one container plus its ports.
docker.ip() { # not finished
    if [ "$1" ]; then
        if [ "$1" = "-a" ]; then
            docker ps -aq \
                | xargs -n 1 docker inspect --format '{{.Name}}{{range .NetworkSettings.Networks}} {{.IPAddress}}{{end}}' \
                | sed -e 's#^/##' \
                | column -t
        elif [ "$1" = "-c" ]; then
            docker-compose ps -q \
                | xargs -n 1 docker inspect --format '{{.Name}}{{range .NetworkSettings.Networks}} {{.IPAddress}}{{end}}' \
                | sed -e 's#^/##' \
                | column -t
        else
            docker inspect --format '{{range .NetworkSettings.Networks}}{{.IPAddress}}{{end}}' "$1"
            docker port "$1"
        fi
    else
        docker ps -q \
            | xargs -n 1 docker inspect --format '{{.Name}}{{range .NetworkSettings.Networks}} {{.IPAddress}}{{end}}' \
            | sed -e 's#^/##' \
            | column -t
    fi
}
########################################################
# Working with git
########################################################
# True if directory $1 exists and contains a .git directory.
# Dies with code 101 when no path is given.
git.is_repo() {
    [ "$1" ] || die "Path is not specified" 101
    require_dir "$1/"
    # the original called `check_dir`, which is not defined anywhere in
    # this library; is_dir is the existing helper with these semantics
    is_dir "$1/.git"
}
# Abort with exit code 10 unless $1 is a git repository.
git.require_repo() {
    git.is_repo "$1" || die "'$1' is not git repository!" 10
}
# Get or set a global git config value.
# $1 - key; $2 (optional) - value to set with --replace-all.
# Without $2, prints the current value(s).
git.cfg() {
    [ "$1" ] || die "Key is not specified" 101
    if [[ "$2" ]]; then
        git config --global --replace-all "$1" "$2"
    else
        # print directly: the old `echo $(git config ...)` word-split the
        # value, collapsing whitespace and joining multi-line values
        git config --global --get-all "$1"
    fi
}
# Set user.name ($2) and user.email ($3) in repository $1 (local config).
# The old version passed the repo path to git.cfg as the *key* (setting
# global nonsense entries) and reported undefined $name/$email.
git.set_user() {
    [ "$1" ] || die "git.set_user: Repo is not specified" 100
    git -C "$1" config user.name "$2"
    git -C "$1" config user.email "$3"
    success "User set to '$2 <$3>' in ${FWHITE}$1"
}
# Fetch branch $1 from origin (only if it exists there), or fetch
# everything when no branch is given. Dies/exits with code 12 on failure.
git.fetch() {
    if [ "$1" ]; then
        if git.remote_branch_exists "origin/$1"; then
            git fetch origin "refs/heads/$1:refs/remotes/origin/$1" --progress --prune --quiet 2>&1 || die "Could not fetch $1 from origin" 12
        else
            warn "Tried to fetch branch 'origin/$1' but it does not exist."
        fi
    else
        git fetch origin --progress --prune --quiet 2>&1 || exit 12
    fi
}
# Discard all local changes and untracked files (DESTRUCTIVE).
git.reset() {
    git reset --hard HEAD
    git clean -fd
}
# git clone with the given arguments; stderr merged into stdout.
# "$@" (was unquoted $*): arguments with spaces must stay intact.
git.clone() {
    git clone "$@" 2>&1
}
# git checkout with the given arguments; stderr merged into stdout.
# "$@" (was unquoted $*): arguments with spaces must stay intact.
git.co() {
    git checkout "$@" 2>&1
}
# True if $1 is the name of the currently checked out branch.
git.is_it_current_branch() {
    [ "$1" ] || die "git.is_it_current_branch: Branch is not specified" 19
    [[ "$(git.current_branch)" = "$1" ]]
}
# Pull branch $1 (default: the current branch) and then all tags from
# origin. Exits with code 13 on failure.
git.pull() {
    [ "$1" ] && BRANCH=$1 || BRANCH=$(git.current_branch)
    # note "Updating branch $BRANCH..."
    git pull origin "refs/heads/$BRANCH:refs/remotes/origin/$BRANCH" --prune --force --quiet 2>&1 || exit 13
    git pull origin --tags --force --quiet 2>&1 || exit 13
    # [ "$1" ] || die "git.pull: Branch is not specified" 19
    # if [ "$1" ]; then
    # note "Updating branch $1..."
    # git pull origin "refs/heads/$1:refs/remotes/origin/$1" --prune --force --quiet 2>&1 || exit 13
    # else
    # note "Updating current branch..."
    # git pull
    # fi
}
# Print the name of the current branch; exits 18 on failure.
git.current_branch() {
    git branch --show-current || exit 18
}
# True if a local branch named $1 exists.
git.local_branch_exists() {
    [ -n "$(git for-each-ref --format='%(refname:short)' refs/heads/$1)" ]
}
# Refresh local refs from origin, pruning dead ones; exits 18 on failure.
git.update_refs() {
    info "Updating local refs..."
    git remote update origin --prune 1>/dev/null 2>&1 || exit 18
}
# Delete branch $1 on origin if it exists there; returns 1 otherwise.
git.delete_remote_branch() {
    # the error message previously named the wrong function
    [ "$1" ] || die "git.delete_remote_branch: Branch is not specified" 19
    if git.remote_branch_exists "origin/$1"; then
        git push origin :"$1" # || die "Could not delete the remote $1 in $ORIGIN"
        return 0
    else
        warn "Trying to delete the remote branch $1, but it does not exist in origin"
        return 1
    fi
}
# Return 0 when the worktree is clean, 1 on unstaged changes,
# 2 on staged-but-uncommitted changes. Exits 18 if HEAD is invalid.
git.is_clean_worktree() {
    git rev-parse --verify HEAD >/dev/null || exit 18
    git update-index -q --ignore-submodules --refresh
    git diff-files --quiet --ignore-submodules || return 1
    git diff-index --quiet --ignore-submodules --cached HEAD -- || return 2
    return 0
}
# True if branch $1 is already merged into branch $2
# (i.e. the merge base of the two equals the tip of $1).
git.is_branch_merged_into() {
    # the error messages previously named the wrong function
    [ "$1" ] || die "git.is_branch_merged_into: Branch1 is not specified" 19
    [ "$2" ] || die "git.is_branch_merged_into: Branch2 is not specified" 19
    git.update_refs
    local merge_hash base_hash
    # declaration split from assignment so the commands' statuses
    # are not masked by `local`
    merge_hash=$(git merge-base "$1"^{} "$2"^{})
    base_hash=$(git rev-parse "$1"^{})
    [ "$merge_hash" = "$base_hash" ]
}
# True if remote-tracking branch $1 (e.g. "origin/master") exists
# after refreshing refs from origin.
git.remote_branch_exists() {
    [ "$1" ] || die "git.remote_branch_exists: Branch is not specified" 19
    git.update_refs
    [ -n "$(git for-each-ref --format='%(refname:short)' refs/remotes/$1)" ]
}
# Create branch $1 based on $2; when $2 exists only on origin,
# the new branch is based on origin/$2 instead.
git.new_branch() {
    [ "$1" ] || die "git.new_branch: Branch is not specified" 19
    if [ "$2" ] && ! git.local_branch_exists "$2" && git.remote_branch_exists "origin/$2"; then
        git.co -b "$1" origin/"$2"
    else
        git.co -b "$1" "$2"
    fi
}
# Interactively ensure the worktree is clean before proceeding:
# offers to continue as is, hard-reset, stash (default), or cancel.
# NOTE(review): the stash message references global $branch_task, which is
# expected to be set by the caller — confirm.
git.require_clean_worktree() {
    if ! git.is_clean_worktree; then
        warn "Your working tree is dirty! Look at this:"
        git status -bs
        _T="What should you do now?\n"
        _T="${_T}\t${BOLD}${FWHITE}0.${RESET} try to continue as is\t- errors may occur!\n"
        _T="${_T}\t${BOLD}${FWHITE}1.${RESET} hard reset\t\t\t- clear current changes and new files\n"
        _T="${_T}\t${BOLD}${FWHITE}2.${RESET} stash changes (default)\t- save all changes in safe to apply them later via 'git stash pop'\n"
        _T="${_T}\t${BOLD}${FWHITE}3.${RESET} cancel\n"
        ask "${_T}${BOLD}${FWHITE}Your choice [0-3]" reset_answer
        case $reset_answer in
            1 ) warn "Clearing your work..." && git.reset ;;
            3 ) exit ;;
            * ) git stash -a -u -m "WIP before switch to $branch_task" ;;
        esac
    fi
}
########################################################
# Also
########################################################
# https://gist.github.com/anthonyaxenov/d53c4385b7d1466e0affeb56388b1005
# https://gist.github.com/anthonyaxenov/89c99e09ddb195985707e2b24a57257d
# ...and other my gists with [SHELL] prefix
########################################################
# Sources and articles used
########################################################
# https://github.com/nvie/gitflow/blob/develop/gitflow-common (BSD License)
# https://github.com/petervanderdoes/gitflow-avh/blob/develop/gitflow-common (FreeBSD License)
# https://github.com/vaniacer/bash_color/blob/master/color
# https://misc.flogisoft.com/bash/tip_colors_and_formatting
# https://www-users.york.ac.uk/~mijp1/teaching/2nd_year_Comp_Lab/guides/grep_awk_sed.pdf
# https://www.galago-project.org/specs/notification/
# https://laurvas.ru/bash-trap/
# https://stackoverflow.com/a/52674277
# https://rtfm.co.ua/bash-funkciya-getopts-ispolzuem-opcii-v-skriptax/
# https://gist.github.com/jacknlliu/7c51e0ee8b51881dc8fb2183c481992e
# https://gist.github.com/anthonyaxenov/d53c4385b7d1466e0affeb56388b1005
# https://github.com/nvie/gitflow/blob/develop/gitflow-common
# https://github.com/petervanderdoes/gitflow-avh/blob/develop/gitflow-common
# https://gitlab.com/kyb/autorsync/-/blob/master/
# https://lug.fh-swf.de/vim/vim-bash/StyleGuideShell.en.pdf
# https://www.thegeekstuff.com/2010/06/bash-array-tutorial/
# https://www.distributednetworks.com/linux-network-admin/module4/ephemeral-reserved-portNumbers.php

View File

@ -0,0 +1,19 @@
# Daemon file
# Place or symlink it to /etc/systemd/system/inotifywait-cp.service
# Enable and start: sudo systemctl enable --now inotifywait-cp
# Check it: sudo systemctl status inotifywait-cp
[Unit]
Description=Photosync from android
[Service]
Type=simple
Restart=always
# correct these parameters as needed:
User=user
WorkingDirectory=/home/user
ExecStart=bash /home/user/.local/bin/photosync-a53.sh
[Install]
# NOTE(review): services are usually installed into multi-user.target;
# confirm WantedBy=network.target is intended here
WantedBy=network.target

View File

@ -0,0 +1,59 @@
#!/bin/bash
# My use case:
# syncthing synchronizes ALL changes in DCIM directory on my android to PC.
# I wanted files to be copied somewhere else on my PC to stay forever, so I
# could sort them later and safely free some space on mobile without loss.
# Also I wish to have some stupid log with history of such events.
# inotify-tools package must be installed!
# CHANGE THESE PARAMETERS to the ones you need
dir_src="$HOME/Syncthing/Mobile/Camera" # watched (synced) source directory
dir_dest="$HOME/some/safe/place" # permanent destination
dir_logs="$HOME/inotifywait-cp-logs" # where daily log files are written
# filenames to copy, e.g. 20230628_181818.jpg
regexp="[0-9]{8}_[0-9]{6}.*\.(jpg|mp4|gif)"
# Log $* to stdout and append it to today's logfile, prefixed with a
# HH:MM:SS timestamp. $(...) replaces the legacy backticks.
print() {
    echo -e "[$(date '+%H:%M:%S')] $*" \
        | tee -a "$dir_logs/$(date '+%Y%m%d').log"
}
# Copy file $1 from $dir_src to $dir_dest unless it already exists there;
# logs either COPIED or SKIPPED via print().
copy () {
    mkdir -p "$dir_src" "$dir_dest" "$dir_logs"
    if [ -f "$dir_dest/$1" ]; then
        print "SKIPPED:\t$dir_dest/$1"
    else
        cp "$dir_src/$1" "$dir_dest/$1"
        print "COPIED:\t$dir_src/$1 => $dir_dest/$1"
    fi
}
mkdir -p "$dir_src" "$dir_dest" "$dir_logs"
print "START\t========================="
# First, try to backup files synced since last exec of this script
ls -1 "$dir_src" \
    | grep -E "^$regexp$" \
    | while read filename; do copy "$filename"; done
# Next, run inotifywait against source directory with args:
# --quiet -- print less (only print events)
# --monitor -- don't stop after first event (like infinite loop)
# --event -- first syncthing creates hidden file to write data into
# then renames it according to source file name, so here
# we listen to MOVED_TO event to catch final filename
# --format %f -- print only filename
# --include -- filename regexp to catch event from, ensure your $regexp
# is correct or remove line 56 to catch synced ALL files
inotifywait \
    --quiet \
    --monitor \
    --event moved_to \
    --format %f \
    --include "$regexp" \
    "$dir_src" \
    | while read filename; do copy "$filename"; done
print "FINISH\t========================="

View File

@ -1,124 +0,0 @@
#!/bin/bash
# https://gist.github.com/anthonyaxenov/b17c6fbd7895c6049e1ceddc7c54bb5b
# source: https://misc.flogisoft.com/bash/tip_colors_and_formatting
#
# Console output helpers: ANSI escape codes for styling plus small printing
# functions (print/ask/dbg/info/note/success/warn/error/fatal).
# NOTE(review): the functions below call dt (presumably a timestamp printer),
# which is NOT defined in this file — it must be provided by the sourcing
# script; verify before using standalone.
########################################################
# Icons
########################################################
IINFO="[ i ]"
INOTE="[ * ]"
IWARN="[ # ]"
IERROR="[ ! ]"
IFATAL="[ @ ]"
ISUCCESS="[ ! ]"
IASK="[ ? ]"
########################################################
# Text attributes (formatting)
########################################################
BOLD="\e[1m" # bold
DIM="\e[2m" # dim
# UNDERL="\e[4m" # underlined
# BLINK="\e[5m" # blinking
# INV="\e[7m" # inverted
# HIDDEN="\e[8m" # hidden
_BOLD="\e[21m" # bold off; NOTE(review): SGR 21 is "double underline" on some terminals — confirm
_DIM="\e[22m" # dim off
# _BLINK="\e[25m" # blinking off
# _UNDERL="\e[24m" # underlined off
# _INV="\e[27m" # inverted off
# _HIDDEN="\e[28m" # hidden off
NORMAL="\e[20m" # reset all attributes; NOTE(review): \e[20m is not a standard SGR reset — confirm
RESET="\e[0m" # reset all attributes and colors (everything)
########################################################
# Foreground (text) color
########################################################
FRESET="\e[39m" # reset foreground color
FBLACK="\e[30m"
FWHITE="\e[97m"
FRED="\e[31m"
FGREEN="\e[32m"
FYELLOW="\e[33m"
FBLUE="\e[34m"
FLRED="\e[91m"
FLGREEN="\e[92m"
FLYELLOW="\e[93m"
FLBLUE="\e[94m"
########################################################
# Background color
########################################################
BRESET="\e[49m" # reset background color
BBLACK="\e[40m"
BWHITE="\e[107m"
BRED="\e[41m"
BGREEN="\e[42m"
BYELLOW="\e[43m"
BBLUE="\e[44m"
BLRED="\e[101m"
BLGREEN="\e[102m"
BLYELLOW="\e[103m"
BLBLUE="\e[104m"
########################################################
# Text output functions
########################################################
# Print all arguments followed by a full style/color reset.
print() {
echo -e "$*${RESET}"
}
# Ask the user a question ($1) and store the answer in the variable named by $2.
ask() {
IFS= read -rp "$(dt)$(print ${BOLD}${BBLUE}${FWHITE}${IASK}${BRESET}\ ${BOLD}$1 ): " $2
}
# Print a dimmed debug message.
dbg() {
print "${DIM}$*"
}
# Print an informational message ([ i ] icon) with timestamp.
info() {
print "$(dt)${BOLD}${FWHITE}${IINFO}${RESET}${FWHITE} $1 "
}
# Print a low-key note ([ * ] icon) with timestamp.
note() {
print "$(dt)${BOLD}${DIM}${FWHITE}${INOTE}${RESET} $1 "
}
# Print a success message (green background icon) with timestamp.
success() {
print "$(dt)${BOLD}${BGREEN}${FWHITE}${ISUCCESS}${BRESET}$FGREEN $1 "
}
# Print a warning (yellow) to stderr.
warn() {
print "$(dt)${BOLD}${BYELLOW}${FBLACK}${IWARN}${BRESET}${FYELLOW} Warning:${RESET} $1 " >&2
}
# Print an error (light red) to stderr.
error() {
print "$(dt)${BOLD}${BLRED}${FWHITE}${IERROR} Error: ${BRESET}${FLRED} $1 " >&2
}
# Print a fatal error (red background) to stderr.
fatal() {
print "$(dt)${BOLD}${BRED}${FWHITE}${IFATAL} FATAL: $1 " >&2
}
########################################################
# Testing
########################################################
# print
# print "print test"
# print
# ask "ask test" test
# dbg "debug test: answer is $test"
# info "info test"
# note "note test"
# success "success test"
# warn "warn test"
# error "error test"
# fatal "fatal test"

157
shell/php Normal file
View File

@ -0,0 +1,157 @@
#!/bin/bash
# NetBeans docker wrapper for php: substitutes the host php interpreter with
# a dockerized one to run & debug cli php scripts (see --help text below).
[[ "$1" = '--help' ]] || [[ "$1" = '-h' ]] && cat <<EOF && exit
NetBeans docker wrapper for php
===============================
Anthony Axenov (c) 2023, The MIT License
https://axenov.dev
https://opensource.org/license/mit
Replacement host php interpreter with dockerized one to run & debug cli php scripts.
Usage:
./$(basename "$0") --container=<NAME> [--map=<PATH1>:<PATH2>] [PHP_ARGS] <SCRIPT> [SCRIPT_ARGS]
Arguments:
--container : docker container where your SCRIPT is located. Required.
--map : sources path mapped from the host to container. Not required.
PATH1 is an absolute path to php sources directory on the host.
PATH2 is an absolute path of the same directory inside of container.
Delimiter ':' is required. If PATH1, PATH2 or delimiter is missed
or value is empty then error will be thrown.
PHP_ARGS : arguments you can pass to real php interpreter according to its --help.
Not required.
SCRIPT : a path to script file (.php) to be executed in container. Required.
Note that this file must exist inside or be available from that container.
SCRIPT_ARGS : arguments to call your script with. They will be passed to script as is.
Not required.
Read this article to know how to set this helper as interpreter for NetBeans:
ru: https://axenov.dev/netbeans-php-docker-xdebug-cli
en: https://axenov.dev/en/netbeans-php-docker-xdebug-cli-en
EOF
pwd=$(pwd)          # current working directory
cmdline=("$@")      # BUGFIX: quoted "$@" so arguments containing spaces survive
collect_php_args=1  # should we collect php args or script ones?
quiet=0             # should we print some useful data before executing?
# find a path where this wrapper is located (quoted: the path may have spaces)
wrapper_dir="$(dirname "$0")"
# find a path where project is probably located
project_dir="$(dirname "$wrapper_dir")"
# here we check if this wrapper is global or local
# but if it is set as global from nbproject dir of
# current project then it is not detected as global
# anyway behavior will be correct
nbproject="$(basename "$wrapper_dir")"
[ "$nbproject" = 'nbproject' ] && is_global=0 || is_global=1
# prepare new array to collect php args
declare -a php_cmd=("docker" "exec")
# and another one for script args
declare -a script_args=()
# and one more for directory mapping
declare -a map_arr=()
# iterate over arguments we received from netbeans
for arg in "${cmdline[@]}"; do
    # if this is a container name
    if [ "${arg::11}" = '--container' ]; then
        container="${arg:12}"         # save it
        php_cmd+=("$container" 'php') # add php itself
        continue                      # jump to next iteration
    fi
    # if this is a path map
    if [ "${arg::5}" = '--map' ]; then
        map="${arg:6}" # save it
        # BUGFIX: split on ':' with read -a instead of unquoted word-splitting,
        # so mapped paths containing spaces are not torn apart
        IFS=':' read -r -a map_arr <<< "$map"
        if [ -z "${map_arr[0]}" ] || [ -z "${map_arr[1]}" ]; then
            echo "ERROR: directory map is incorrect!"
            echo "Use $0 --help to get info about how to use this wrapper."
            echo "Exit code 3."
            exit 3
        fi
        continue # jump to next iteration
    fi
    # if this is the quiet flag
    if [ "${arg::7}" = '--quiet' ]; then
        quiet=1
        continue # jump to next iteration
    fi
    # if this is an absolute path to a script file
    if [ -f "$arg" ]; then
        # make its path correct for container
        if [ "$map" ]; then # when paths are mapped
            # remove first part of map from an absolute filepath and append
            # result to second map part (pattern quoted: match it literally)
            filepath="${map_arr[1]}${arg##"${map_arr[0]}"}"
        else # when paths are NOT mapped
            # remove project path from absolute filepath
            filepath="${arg##"$project_dir/"}"
        fi
        php_cmd+=("$filepath") # append php args with filepath
        collect_php_args=0     # now we need to collect script args
        continue               # jump to next iteration
    fi
    if [ "$collect_php_args" = 1 ]; then # if we collect php args
        php_cmd+=("$arg") # add current arg to php args as is
        continue          # jump to next iteration
    fi
    script_args+=("$arg") # otherwise add current arg to script args as is
done
# docker container name is required so we must halt here if there is no one
if [ -z "$container" ]; then
    echo "ERROR: no docker container is specified!" >&2
    echo "Use $0 --help to get info about how to use this wrapper." >&2
    echo "Exit code 1." >&2
    exit 1
fi
# path to php script is also required so we must halt here too if there is no one
if [ -z "$filepath" ]; then
    echo "ERROR: no script filepath is specified!" >&2
    echo "Use $0 --help to get info about how to use this wrapper." >&2
    echo "Exit code 2." >&2
    exit 2
fi
cmdline="${php_cmd[*]} ${script_args[*]}" # flat string, for display only
# print some important data collected above
if [ "$quiet" = 0 ]; then
    echo "NetBeans docker wrapper for php"
    echo "==============================="
    echo -e "Container name: $container"
    echo -e "Script path: $filepath"
    echo -e "Directory mapping: ${map:-(none)}"
    echo -e "Command line:\n$cmdline\n"
fi
# some debug output
# echo "=== some debug output ========="
# cat <<EOF | column -t
# is_global $is_global
# container $container
# pwd $pwd
# wrapper_dir $wrapper_dir
# nbproject $nbproject
# project_dir $project_dir
# map $map
# map_arr[0] ${map_arr[0]}
# map_arr[1] ${map_arr[1]}
# filepath $filepath
# EOF
# echo "==============================="
# BUGFIX: execute the collected argument arrays directly; the original ran the
# word-split $cmdline string, which broke any argument containing spaces
"${php_cmd[@]}" "${script_args[@]}"
# that's folks!

106
shell/quick-backup.sh Normal file
View File

@ -0,0 +1,106 @@
#!/bin/bash
#####################################################################
# #
# Stupidly simple backup script for own projects #
# #
# Author: Anthony Axenov (Антон Аксенов) #
# Version: 1.0 #
# License: WTFPLv2 More info: https://axenov.dev/?p=1423 #
# #
#####################################################################
# database credentials ==============================================
# NOTE: fill these in before use; DBPASS is kept in plain text here.
DBUSER=
DBPASS=
DBNAME=
DBCHARSET="utf8"
# date formats ======================================================
FMT_DT_DIR="%Y.%m.%d" # 2021.03.19
FMT_DT_FILE="%H.%M" # 08.24
FMT_DT_LOG="%H:%M:%S" # 08:24:15.168149413
# local storage =====================================================
# one directory per day, files inside named by time of run
LOCAL_BAK_DIR="/backup/$(date +$FMT_DT_DIR)"
# database backup file
LOCAL_SQL_FILE="$(date +$FMT_DT_FILE)-db.sql.gz"
LOCAL_SQL_PATH="$LOCAL_BAK_DIR/$LOCAL_SQL_FILE"
# project path and backup file
LOCAL_SRC_DIR="/var/www/"
LOCAL_SRC_FILE="$(date +$FMT_DT_FILE)-src.tar.gz"
LOCAL_SRC_PATH="$LOCAL_BAK_DIR/$LOCAL_SRC_FILE"
# log file
LOG_FILE="$(date +$FMT_DT_FILE).log"
LOG_PATH="$LOCAL_BAK_DIR/$LOG_FILE"
# Print a timestamped message to stdout and append it to the log file.
log() {
echo -e "[$(date +$FMT_DT_LOG)] $*" | tee -a "$LOG_PATH"
}
# remote storage ====================================================
REMOTE_HOST="user@example.com"
REMOTE_BAK_DIR="/backup/$(date +$FMT_DT_DIR)"
REMOTE_SQL_PATH="$REMOTE_BAK_DIR/$LOCAL_SQL_FILE"
REMOTE_SRC_PATH="$REMOTE_BAK_DIR/$LOCAL_SRC_FILE"
REMOTE_LOG_PATH="$REMOTE_BAK_DIR/$LOG_FILE"
# start =============================================================
echo
log "Start ----------------------------------------------------------------"
log "Initialized parameters:"
# BUGFIX: these lines referenced undefined $DB_USER/$DB_NAME/$DB_CHARSET;
# the variables defined above are DBUSER/DBNAME/DBCHARSET
log "\tDBUSER\t\t= $DBUSER"
log "\tDBNAME\t\t= $DBNAME"
log "\tDBCHARSET\t= $DBCHARSET"
log "\tLOCAL_SRC_DIR\t= $LOCAL_SRC_DIR"
log "\tLOCAL_SRC_PATH\t= $LOCAL_SRC_PATH"
log "\tLOCAL_SQL_PATH\t= $LOCAL_SQL_PATH"
log "\tLOG_PATH\t= $LOG_PATH"
log "\tREMOTE_HOST\t= $REMOTE_HOST"
log "\tREMOTE_SQL_PATH\t= $REMOTE_SQL_PATH"
log "\tREMOTE_SRC_PATH\t= $REMOTE_SRC_PATH"
log "\tREMOTE_LOG_PATH\t= $REMOTE_LOG_PATH"
mkdir -p "$LOCAL_BAK_DIR"
log "Created local dir: $LOCAL_BAK_DIR"
ssh $REMOTE_HOST mkdir -p "$REMOTE_BAK_DIR"
log "Created remote dir: $REMOTE_BAK_DIR"
log "1/4 Dumping DB: $DBNAME..."
mysqldump \
    --user="$DBUSER" \
    --password="$DBPASS" \
    --default-character-set="$DBCHARSET" \
    --opt \
    --quick \
    "$DBNAME" | gzip > "$LOCAL_SQL_PATH"
# --opt Same as --add-drop-table, --add-locks, --create-options,
# --quick, --extended-insert, --lock-tables, --set-charset,
# and --disable-keys
# BUGFIX: $? after the pipeline is gzip's status, and the old "$?" inside the
# log message was the status of the [ ] test itself; capture mysqldump's real
# status from PIPESTATUS[0] before anything clobbers it
rc=${PIPESTATUS[0]}
[ $rc -gt 0 ] && log "ERROR: failed to create dump. Exit-code: $rc" || log "\t- OK"
log "2/4 Sending database backup to $REMOTE_HOST..."
rsync --progress "$LOCAL_SQL_PATH" "$REMOTE_HOST:$REMOTE_SQL_PATH"
rc=$?
[ $rc -gt 0 ] && log "ERROR: failed to send database backup. Exit-code: $rc" || log "\t- OK"
log "3/4 Compressing project dir: $LOCAL_SRC_DIR..."
tar -zcf "$LOCAL_SRC_PATH" "$LOCAL_SRC_DIR"
rc=$?
[ $rc -gt 0 ] && log "ERROR: failed to compress project. Exit-code: $rc" || log "\t- OK"
log "4/4 Sending project backup to ${REMOTE_HOST}..."
rsync --progress "$LOCAL_SRC_PATH" "$REMOTE_HOST:$REMOTE_SRC_PATH"
rc=$?
[ $rc -gt 0 ] && log "ERROR: failed to send project backup. Exit-code: $rc" || log "\t- OK"
rsync --progress "$LOG_PATH" "$REMOTE_HOST:$REMOTE_LOG_PATH"
log "Finish!"
log "Used space: $(du -h "$LOCAL_BAK_DIR" | tail -n1)"
log "Free space: $(df -h | tail -n1 | awk '{print $4}')"
341
shell/s3-backup.sh Normal file
View File

@ -0,0 +1,341 @@
#!/bin/bash
#####################################################################
# #
# Stupidly simple backup script for own projects #
# #
# Author: Anthony Axenov (Антон Аксенов) #
# Version: 1.2 #
# License: WTFPLv2 More info (RU): https://axenov.dev/?p=1272 #
# #
#####################################################################
# use remote storages ===============================================
# 1 = enabled, anything else = disabled
USE_SSH=1
USE_S3=1
# database credentials ==============================================
# NOTE: fill these in before use; DBPASS is kept in plain text here.
DBUSER=
DBPASS=
DBNAME=
DBCHARSET="utf8"
# dates for file structure ==========================================
TODAY_DIR="$(date +%Y.%m.%d)"
TODAY_FILE="$(date +%H.%M)"
# local storage =====================================================
LOCAL_BAK_DIR="/backup"
LOCAL_BAK_PATH="$LOCAL_BAK_DIR/$TODAY_DIR"
# database backup file
LOCAL_SQL_FILE="$TODAY_FILE-db.sql.gz"
LOCAL_SQL_PATH="$LOCAL_BAK_PATH/$LOCAL_SQL_FILE"
# project path and backup file
LOCAL_SRC_DIR="/var/www/html"
LOCAL_SRC_FILE="$TODAY_FILE-src.tar.gz"
LOCAL_SRC_PATH="$LOCAL_BAK_PATH/$LOCAL_SRC_FILE"
# log file
LOG_FILE="$TODAY_FILE.log"
LOG_PATH="$LOCAL_BAK_PATH/$LOG_FILE"
# remote storages ===================================================
SSH_HOST="user@example.com"
SSH_BAK_DIR="/backup"
SSH_BAK_PATH="$SSH_BAK_DIR/$TODAY_DIR"
SSH_SQL_FILE="$SSH_BAK_PATH/$LOCAL_SQL_FILE"
SSH_SRC_FILE="$SSH_BAK_PATH/$LOCAL_SRC_FILE"
SSH_LOG_FILE="$SSH_BAK_PATH/$LOG_FILE"
S3_BUCKET="s3://my.bucket"
S3_DIR="$S3_BUCKET/$TODAY_DIR"
S3_SQL_FILE="$S3_DIR/$LOCAL_SQL_FILE"
S3_SRC_FILE="$S3_DIR/$LOCAL_SRC_FILE"
S3_LOG_FILE="$S3_DIR/$LOG_FILE"
# autoremove ========================================================
# time to live on different storages (days)
TTL_LOCAL=3
TTL_SSH=7
TTL_S3=60
# autoremove flags (1 = enabled)
CLEAR_SSH=1
CLEAR_S3=1
# notifications =====================================================
# push notifications via ntfy; NTFY_CHANNEL must be set when USE_NTFY=1
USE_NTFY=1
NTFY_TITLE="Backup script"
NTFY_CHANNEL=
#====================================================================
#
# Functions used for the whole backup flow
#
#====================================================================
# Append a timestamped line to the log file and echo it to stdout as well.
log() {
    local line
    line="[$(date +%H:%M:%S)] $*"
    echo -e "$line" | tee -a "$LOG_PATH"
}
# Send an informational (lowest priority) push notification via ntfy.
# Does nothing (and returns 1) when notifications are disabled.
ntfy_info() {
    [ $USE_NTFY == 1 ] && ntfy send \
        --title "$NTFY_TITLE" \
        --message "${1}" \
        --priority 1 \
        "$NTFY_CHANNEL"
}
# Send a warning push notification via ntfy (highest priority, warning tag).
# Does nothing (and returns 1) when notifications are disabled.
ntfy_warn() {
    [ $USE_NTFY == 1 ] && ntfy send \
        --title "$NTFY_TITLE" \
        --tags "warning" \
        --message "${1}" \
        --priority 5 \
        "$NTFY_CHANNEL"
}
# Pretty-print all effective settings into the log as a tree. Lines for a
# disabled storage/notifier are skipped; DBPASS is intentionally not printed.
show_params() {
log "Initialized parameters:"
log "├ [ Remotes ]"
log "│\t├ USE_SSH = $USE_SSH"
[ $USE_SSH == 1 ] && log "│\t├ SSH_HOST = $SSH_HOST"
log "│\t├ USE_S3 = $USE_S3"
[ $USE_S3 == 1 ] && log "│\t├ S3_BUCKET = $S3_BUCKET"
log "├ [ Database ]"
log "│\t├ DBUSER = $DBUSER"
log "│\t├ DBNAME = $DBNAME"
log "│\t├ DBCHARSET = $DBCHARSET"
log "│\t├ LOCAL_SQL_PATH = $LOCAL_SQL_PATH"
[ $USE_SSH == 1 ] && log "│\t├ SSH_SQL_FILE = $SSH_SQL_FILE"
[ $USE_S3 == 1 ] && log "│\t├ S3_SQL_FILE = $S3_SQL_FILE"
log "├ [ Sources ]"
log "│\t├ LOCAL_SRC_DIR = $LOCAL_SRC_DIR"
log "│\t├ LOCAL_SRC_PATH = $LOCAL_SRC_PATH"
[ $USE_SSH == 1 ] && log "│\t├ SSH_SRC_FILE = $SSH_SRC_FILE"
[ $USE_S3 == 1 ] && log "│\t├ S3_SRC_FILE = $S3_SRC_FILE"
log "├ [ Log ]"
log "│\t├ LOG_PATH = $LOG_PATH"
[ $USE_SSH == 1 ] && log "│\t├ SSH_LOG_FILE = $SSH_LOG_FILE"
[ $USE_S3 == 1 ] && log "│\t├ S3_LOG_FILE = $S3_LOG_FILE"
log "├ [ Autoclear ]"
log "│\t├ TTL_LOCAL = $TTL_LOCAL"
[ $USE_SSH == 1 ] && {
log "│\t├ CLEAR_SSH = $CLEAR_SSH"
log "│\t├ TTL_SSH = $TTL_SSH"
}
[ $USE_S3 == 1 ] && {
log "│\t├ CLEAR_S3 = $CLEAR_S3"
log "│\t├ TTL_S3 = $TTL_S3"
}
log "└ [ ntfy ]"
log "\t├ USE_NTFY = $USE_NTFY"
[ $USE_NTFY == 1 ] && log "\t├ NTFY_TITLE = $NTFY_TITLE"
[ $USE_NTFY == 1 ] && log "\t└ NTFY_CHANNEL = $NTFY_CHANNEL"
}
# Create today's local backup directory and, when the SSH storage is
# enabled, the matching directory on the remote host.
init_dirs() {
    if [ ! -d "$LOCAL_BAK_PATH" ]; then
        # BUGFIX: quoted — an unquoted path containing spaces was split
        # into several mkdir arguments
        mkdir -p "$LOCAL_BAK_PATH"
    fi
    [ $USE_SSH == 1 ] && ssh $SSH_HOST "mkdir -p $SSH_BAK_PATH"
}
# Remove local backup directories older than TTL_LOCAL days,
# logging the doomed paths first.
clear_local_backups() {
    log "\tLocal:"
    log $(find "$LOCAL_BAK_DIR" -type d -mtime +"$TTL_LOCAL" | sort)
    # BUGFIX: NUL-delimited pipeline so paths with spaces/newlines are
    # deleted correctly (plain `find | xargs rm -rf` word-splits them)
    find "$LOCAL_BAK_DIR" -type d -mtime +"$TTL_LOCAL" -print0 | xargs -0 rm -rf --
}
# Remove backups older than TTL_SSH days on the remote SSH host.
# Requires both USE_SSH and CLEAR_SSH to be enabled; otherwise just logs that
# the step is disabled.
clear_ssh_backups() {
    if [ $USE_SSH != 1 ] || [ $CLEAR_SSH != 1 ]; then
        log "\tSSH: disabled (\$USE_SSH, \$CLEAR_SSH)"
        return
    fi
    log "\tSSH:"
    # unquoted on purpose: collapses the remote listing into one log line
    log $(ssh "$SSH_HOST" "find $SSH_BAK_DIR -type d -mtime +$TTL_SSH" | sort)
    ssh "$SSH_HOST" "find $SSH_BAK_DIR -type d -mtime +$TTL_SSH | xargs rm -rf"
}
# Remove S3 objects older than TTL_S3 days. s3cmd has no mtime filter, so we
# parse `s3cmd ls` output (date, time, size, path) and compare epoch stamps.
# https://gist.github.com/JProffitt71/9044744?permalink_comment_id=3539681#gistcomment-3539681
clear_s3_backups() {
    if [ $USE_S3 == 1 ] && [ $CLEAR_S3 == 1 ]; then
        log "\tS3:"
        OLDER_THAN=$(date -d "$TTL_S3 days ago" "+%s")
        s3cmd ls -r $S3_DIR | while read -r line; do
            FILETIME=$(echo "$line" | awk '{print $1" "$2}')
            FILETIME=$(date -d "$FILETIME" "+%s")
            if [[ $FILETIME -le $OLDER_THAN ]]; then
                FILEPATH=$(echo "$line" | awk '{print $4}')
                # BUGFIX: the original '[ $FILEPATH != "" ]' is a syntax
                # error when the path is empty; -n with quotes is safe
                if [ -n "$FILEPATH" ]; then
                    log "$line"
                    s3cmd del "$FILEPATH"
                fi
            fi
        done
    else
        log "\tS3: disabled (\$USE_S3 + \$CLEAR_S3)"
    fi
}
# Step 1/7: drop outdated backups from every configured storage
# (local, then SSH, then S3 — each helper checks its own flags).
clear_backups() {
    echo
    log "1/7 Removing old backups..."
    local storage
    for storage in local ssh s3; do
        "clear_${storage}_backups"
    done
}
# Step 2/7: dump the database, gzip it, and on success hand the archive
# over to the SSH (3/7) and S3 (4/7) senders; on failure notify and mark
# the dependent steps as skipped.
backup_db() {
    echo
    log "2/7 Dumping DB: $DBNAME..."
    mysqldump \
        --user=$DBUSER \
        --password=$DBPASS \
        --opt \
        --default-character-set=$DBCHARSET \
        --quick \
        $DBNAME | gzip > $LOCAL_SQL_PATH
    # BUGFIX: plain $? is the status of gzip (last pipeline stage), which is
    # almost always 0; mysqldump's real status lives in PIPESTATUS[0]
    local rc=${PIPESTATUS[0]}
    if [ $rc == 0 ]; then
        log "\t- OK"
        send_db_ssh
        send_db_s3
    else
        log "\t- ERROR: failed to create dump. Exit-code: $rc"
        ntfy_warn "ERROR: failed to create dump"
        log "3/7 Sending database backup to $SSH_HOST... skipped"
        log "4/7 Sending database backup to $S3_DIR... skipped"
    fi
}
# Step 3/7: upload the DB archive to the SSH storage via rsync.
# Logs progress; notifies on failure; no-op when SSH is disabled.
send_db_ssh() {
    echo
    log "3/7 Sending database backup to $SSH_HOST..."
    if [ $USE_SSH != 1 ]; then
        log "\t- disabled (\$USE_SSH)"
        return
    fi
    rsync --progress "$LOCAL_SQL_PATH" "$SSH_HOST:$SSH_SQL_FILE"
    if [ $? == 0 ]; then
        log "\t- OK"
    else
        log "\t- ERROR: failed to send DB backup to $SSH_HOST. Exit-code: $?"
        ntfy_warn "ERROR: failed to send DB backup to $SSH_HOST"
    fi
}
# Step 4/7: upload the DB archive to the S3 storage via s3cmd.
# Logs progress; notifies on failure; no-op when S3 is disabled.
send_db_s3() {
    echo
    log "4/7 Sending database backup to $S3_DIR..."
    if [ $USE_S3 == 1 ]; then
        s3cmd put "$LOCAL_SQL_PATH" "$S3_SQL_FILE"
        if [ $? == 0 ]; then
            log "\t- OK"
        else
            log "\t- ERROR: failed to send DB backup to $S3_DIR. Exit-code: $?"
            ntfy_warn "ERROR: failed to send DB backup to $S3_DIR"
        fi
    else
        # BUGFIX: the disabled message referenced $USE_SSH, but this branch
        # is governed by $USE_S3
        log "\t- disabled (\$USE_S3)"
    fi
}
# Step 5/7: archive the project sources; on success hand the archive over
# to the SSH (6/7) and S3 (7/7) senders; on failure notify and mark the
# dependent steps as skipped.
backup_src() {
    echo
    log "5/7 Compressing project dir: $LOCAL_SRC_DIR..."
    tar -zcf "$LOCAL_SRC_PATH" "$LOCAL_SRC_DIR"
    # BUGFIX: capture tar's status once — the old code re-read $? inside the
    # error message after [ ] had already overwritten it
    local rc=$?
    if [ $rc == 0 ]; then
        log "\t- OK"
        send_src_ssh
        send_src_s3
    else
        log "\t- ERROR: failed to compress project. Exit-code: $rc"
        ntfy_warn "ERROR: failed to compress project"
        log "6/7 Sending project backup to $SSH_HOST... skipped"
        log "7/7 Sending project backup to $S3_DIR... skipped"
    fi
}
# Step 6/7: upload the sources archive to the SSH storage via rsync.
# Logs progress; notifies on failure; no-op when SSH is disabled.
send_src_ssh() {
    echo
    log "6/7 Sending project backup to $SSH_HOST..."
    if [ $USE_SSH != 1 ]; then
        log "\t- disabled"
        return
    fi
    rsync --progress "$LOCAL_SRC_PATH" "$SSH_HOST:$SSH_SRC_FILE"
    if [ $? == 0 ]; then
        log "\t- OK"
    else
        log "\t- ERROR: failed to send project backup to $SSH_HOST. Exit-code: $?"
        ntfy_warn "ERROR: failed to send project backup to $SSH_HOST"
    fi
}
# Step 7/7: upload the sources archive to the S3 storage via s3cmd.
# BUGFIX: unlike its siblings, this function ignored the USE_S3 flag and
# always ran s3cmd; it now honors the flag. Its error message also wrongly
# said "database backup" — corrected to "project backup".
send_src_s3() {
    echo
    log "7/7 Sending project backup to $S3_DIR..."
    if [ $USE_S3 != 1 ]; then
        log "\t- disabled (\$USE_S3)"
        return
    fi
    s3cmd put "$LOCAL_SRC_PATH" "$S3_SRC_FILE"
    if [ $? == 0 ]; then
        log "\t- OK"
    else
        log "\t- ERROR: failed to send project backup to $S3_DIR. Exit-code: $?"
        ntfy_warn "ERROR: failed to send project backup to $S3_DIR"
    fi
}
# Print final stats: total size of today's backup directory and the free
# space remaining on the disk it lives on.
show_finish() {
    echo
    log "Finish!"
    # size of the backup directory for the current day
    log "Used space: $(du -h "$LOCAL_BAK_PATH" | tail -n1)"
    # free space left on the local disk
    log "Free space: $(df -h "$LOCAL_BAK_PATH" | tail -n1 | awk '{print $4}')"
    echo
}
# Upload today's log file to every enabled remote storage (SSH and/or S3).
send_log() {
    if [ $USE_SSH == 1 ]; then
        rsync --progress "$LOG_PATH" "$SSH_HOST:$SSH_LOG_FILE"
    fi
    [ $USE_S3 == 1 ] && s3cmd put "$LOG_PATH" "$S3_LOG_FILE"
}
# main flow =========================================================
log "Start ----------------------------------------------------------"
show_params         # dump effective settings into the log
init_dirs           # create local (and remote) backup directories
clear_backups       # 1/7: drop outdated backups on every storage
backup_db           # 2/7-4/7: dump the DB and upload the archive
backup_src          # 5/7-7/7: archive the sources and upload them
show_finish         # report used/free disk space
send_log            # ship the log file itself to the remotes
ntfy_info "Finish!"

View File

@ -1,19 +0,0 @@
#!/bin/bash
# https://gist.github.com/anthonyaxenov/925e2db217730a49f20600520b748039
# Original: https://gist.github.com/akostadinov/33bb2606afe1b334169dfbf202991d36
# The difference is that this func outputs stacktrace in reverse order (from top level to lower ones)

# Print the current bash call stack, outermost frame first, one
# "at <func> <source>:<line>" entry per frame. Empty FUNCNAME/BASH_SOURCE
# entries are rendered as MAIN / non_file_source.
function print_stacktrace () {
    STACK=""  # kept for backward compatibility with the original gist
    local i
    local stack_size=${#FUNCNAME[@]}
    echo "Stacktrace:"
    # skip this function and "MAIN non_file_source:0"
    for (( i = stack_size - 1; i >= 1; i-- )); do
        local func="${FUNCNAME[$i]}"
        # BUGFIX: '[ x$func = x ]' word-splits unquoted values; -z is safe
        [ -z "$func" ] && func=MAIN
        local linen="${BASH_LINENO[$(( i - 1 ))]}"
        local src="${BASH_SOURCE[$i]}"
        [ -z "$src" ] && src=non_file_source
        echo -e "\n at $func $src:$linen"
    done
}

View File

@ -1,95 +0,0 @@
#!/bin/bash
# https://gist.github.com/anthonyaxenov/b17c6fbd7895c6049e1ceddc7c54bb5b
. ./io.sh
########################################################
# Testing helper functions
# Allow checking the results of command execution
########################################################
# Tests that a command executes successfully (exit status 0) and prints a
# PASSED/FAILED line via print (from io.sh).
# $1 - command to test (required)
# NOTE(review): `if $($1 1>/dev/null 2>&1)` runs $1 inside a command
# substitution with all output discarded; the resulting empty expansion makes
# `if` evaluate the substitution's own exit status. Unusual but functional —
# confirm before refactoring.
expect_exec() {
[ "$1" ] || exit 1
local prefix="`dt`${BOLD}${FWHITE}[TEST EXEC]"
if $($1 1>/dev/null 2>&1); then
local text="${BGREEN} PASSED"
else
local text="${BLRED} FAILED"
fi
print "${prefix} ${text} ${BRESET} ($?):${RESET} $1"
}
# использование:
# func1() {
# return 0
# }
# func2() {
# return 1
# }
# expect_exec "func1" # PASSED
# expect_exec "func2" # FAILED
# expect_exec "whoami" # PASSED
# Tests a command's output: runs it (stderr folded into stdout) and checks
# that the output contains the expected substring; prints PASSED/FAILED.
# $1 - command to test (required)
# $2 - expected substring of the output (optional, default '')
expect_output() {
    [ "$1" ] || exit 1
    [ "$2" ] && local expected="$2" || local expected=''
    local prefix="`dt`${BOLD}${FWHITE}[TEST OUTP]"
    # BUGFIX: 'local output=$(...)' always left $? as the status of `local`
    # itself (0), so the reported exit code was never the command's one.
    # Splitting declaration and assignment captures the real status.
    local output
    output=$($1 2>&1)
    local code=$?
    if [[ "$output" == *"$expected"* ]]; then
        local text="${BGREEN} PASSED"
    else
        local text="${BLRED} FAILED"
    fi
    print "${prefix} ${text} ${BRESET} (${code}|${expected}):${RESET} $1"
    # print "\tOutput > $output"
}
# использование:
# func1() {
# echo "some string"
# }
# func2() {
# echo "another string"
# }
# expect_output "func1" "string" # PASSED
# expect_output "func2" "some" # FAILED
# expect_output "func2" "string" # PASSED
# Tests a command's exit code: runs it silently and compares the actual
# status against the expected one; prints PASSED/FAILED via print.
# $1 - command to test (required)
# $2 - expected exit code (optional, default 0)
expect_code() {
    [ "$1" ] || exit 1
    [ "$2" ] && local want=$2 || local want=0
    local tag="`dt`${BOLD}${FWHITE}[TEST CODE]"
    $($1 1>/dev/null 2>&1)
    local rc=$?
    local verdict
    if [[ $rc -eq $want ]]; then
        verdict="${BGREEN} PASSED"
    else
        verdict="${BLRED} FAILED"
    fi
    print "${tag} ${verdict} ${BRESET} (${rc}|${want}):${RESET} $1"
}
# использование:
# func1() {
# # exit 0
# return 0
# }
# func2() {
# # exit 1
# return 1
# }
# expect_code "func1" 0 # PASSED
# expect_code "func1" 1 # FAILED
# expect_code "func2" 0 # FAILED
# expect_code "func2" 1 # PASSED

View File

@ -1,50 +1,66 @@
#!/bin/bash #!/bin/bash
# Установка расширений vscode
# https://gist.github.com/anthonyaxenov/7ba8d648d80fdaca95c4a5b579d214dd
declare -a exts=( declare -a exts=(
'af4jm.vscode-m3u'
'ahmadalli.vscode-nginx-conf'
'akamud.vscode-theme-onedark' 'akamud.vscode-theme-onedark'
'Anjali.clipboard-history' 'AndrewButson.vscode-jwt-decoder'
'anweber.statusbar-commands' 'anweber.statusbar-commands'
'bmalehorn.shell-syntax' 'Avetis.nord-palette'
'baincd.mini-command-palettes'
'bungcip.better-toml'
'codezombiech.gitignore' 'codezombiech.gitignore'
'cweijan.vscode-redis-client'
'deitry.apt-source-list-syntax' 'deitry.apt-source-list-syntax'
'DEVSENSE.composer-php-vscode'
'DEVSENSE.phptools-vscode'
'DEVSENSE.profiler-php-vscode'
'DotJoshJohnson.xml' 'DotJoshJohnson.xml'
'dunstontc.vscode-go-syntax'
'dustypomerleau.rust-syntax'
'eamodio.gitlens' 'eamodio.gitlens'
'Equinusocio.vsc-community-material-theme' 'EditorConfig.EditorConfig'
'Equinusocio.vsc-material-theme'
'equinusocio.vsc-material-theme-icons'
'esbenp.prettier-vscode' 'esbenp.prettier-vscode'
'formulahendry.auto-rename-tag' 'golang.go'
'formulahendry.vscode-mysql'
'GrapeCity.gc-excelviewer' 'GrapeCity.gc-excelviewer'
'HookyQR.beautify'
'humao.rest-client' 'humao.rest-client'
'jakebathman.mysql-syntax' 'IronGeek.vscode-env'
'jebbs.plantuml' 'jebbs.plantuml'
'jeff-hykin.better-go-syntax'
'jinsihou.diff-tool' 'jinsihou.diff-tool'
'kenhowardpdx.vscode-gist' 'kenhowardpdx.vscode-gist'
'mads-hartmann.bash-ide-vscode'
'mamoru.vscode-fish-text' 'mamoru.vscode-fish-text'
'mhutchie.git-graph' 'mhutchie.git-graph'
'mp.vscode-oracle-format'
'mrmlnc.vscode-apache' 'mrmlnc.vscode-apache'
'ms-azuretools.vscode-docker' 'ms-azuretools.vscode-docker'
'MS-CEINTL.vscode-language-pack-ru' 'MS-CEINTL.vscode-language-pack-ru'
'ms-python.python' 'ms-vscode.hexeditor'
'neilbrayfield.php-docblocker' 'ms-vscode.makefile-tools'
'neonxp.gotools'
'nickdemayo.vscode-json-editor'
'nico-castell.linux-desktop-file'
'OPEN-RPC.OPEN-RPC'
'PKief.material-icon-theme' 'PKief.material-icon-theme'
'pranaygp.vscode-css-peek'
'qcz.text-power-tools' 'qcz.text-power-tools'
'rangav.vscode-thunder-client'
'rogalmic.bash-debug' 'rogalmic.bash-debug'
'rogalmic.zsh-debug' 'rust-lang.rust-analyzer'
'RomanPeshkov.vscode-text-tables'
'ryu1kn.partial-diff' 'ryu1kn.partial-diff'
'suntobright.vscode-sftp' 'serayuzgur.crates'
'WallabyJs.quokka-vscode' 'srmeyers.git-prefix'
'whatwedo.twig' 'sumneko.lua'
'william-voyek.vscode-nginx' 'Syler.ignore'
'Tyriar.lorem-ipsum'
'vitorsalgado.vscode-redis'
'waderyan.gitblame'
'wayou.vscode-todo-highlight'
'xyz.plsql-language' 'xyz.plsql-language'
'yinfei.luahelper'
'Yog.yog-plantuml-highlight' 'Yog.yog-plantuml-highlight'
'yves.schema-tree'
'yzane.markdown-pdf'
'yzhang.markdown-all-in-one' 'yzhang.markdown-all-in-one'
'zgm.cuesheet'
) )
for ext in "$exts[@]"; do for ext in "$exts[@]"; do
code --install-extension $ext code --install-extension $ext

93
shell/ytdlcue.sh Normal file
View File

@ -0,0 +1,93 @@
#!/bin/bash
# CUE-sheet generator for youtube-dl
# Usage:
# 0. Install 'jq' utility
# 1. Download any audio file with metadata from YouTube or Youtube Music, e.g.
# $ youtube-dl \
# --extract-audio \
# --audio-format flac \
# --audio-quality 0 \
# --format bestaudio \
# --write-info-json \
# --output "/tmp/ytm/%(playlist_title)s/%(channel)s - %(title)s.%(ext)s" \
# https://www.youtube.com/watch?v=lVpDQnXz34M
#
# If audio file is already downloaded earlier then just fetch only its metadata:
# $ youtube-dl \
# --write-info-json \
# --skip-download \
# --output "/tmp/ytm/%(playlist_title)s/%(channel)s - %(title)s.%(ext)s" \
# https://www.youtube.com/watch?v=lVpDQnXz34M
#
# 2. Audio and metadata files MUST be named exactly similar (except extenstion),
# but it is not necessary to keep original names. Also they MUST be placed in
# the same directory. Example:
# /tmp/ytm/ABGT496.flac
# /tmp/ytm/ABGT496.info.json
#
# 3. To create CUE file run ytdlcue with a path to audio file:
# $ ytdlcue.sh /tmp/ytm/ABGT496.flac
#
# A new file will be created in the same directory:
# /tmp/ytm/ABGT496.cue
# Return 0 when the given command exists in PATH, non-zero otherwise.
installed() {
    command -v -- "$1" >/dev/null 2>&1
}
# Bail out early unless jq is available — all metadata parsing relies on it.
! installed 'jq' && {
    echo "ERROR: you need to install jq!"
    exit 1
}
audio_path="$1"                              # path to audiofile
audio_file=$(basename "$audio_path")         # audiofile name with extension
audio_name=${audio_file%.*}                  # audiofile name without extension
audio_ext=${audio_file##*.}                  # audiofile name extension
path="$(dirname "$audio_path")/$audio_name"  # path to audiofile and its name without ext
json_path="$path.info.json"                  # path to json file with metadata created by youtube-dl
cue_path="$path.cue"                         # path to cue sheet to be generated
[ ! -f "$audio_path" ] && {
    echo "ERROR: File not found: $audio_path"
    exit 2
}
[ ! -f "$json_path" ] && {
    echo "ERROR: File not found: $json_path"
    exit 3
}
# jq -Mc on a string value prints it WITH surrounding double quotes,
# which is exactly what the CUE format wants for PERFORMER/TITLE.
echo "PERFORMER $(jq -Mc '.channel' "$json_path")" > "$cue_path"
echo "TITLE $(jq -Mc '.title' "$json_path")" >> "$cue_path"
echo "FILE \"$audio_file\" ${audio_ext^^}" >> "$cue_path"
counter=1 # track counter (works only inside loop!)
jq -Mc '.chapters[]' "$json_path" \
    | while IFS= read -r chapter; do
        number=$(printf %0.2d "$counter")                         # pad current counter with zeros
        time=$(echo "$chapter" | jq -Mc '.start_time')            # initial start time in seconds
        time=$(printf '%0.2d:%0.2d:00' $((time/60)) $((time%60))) # convert start time to minutes:seconds
        title=$(echo "$chapter" | jq -Mc '.title' | sed -r "s#[\"]##g")                          # chapter title, quotes stripped
        performer=$(echo "$title" | cut -d "-" -f 1 | sed 's#^[[:space:]]*##g' | sed 's# *$##g') # trimmed performer (before '-')
        title2=$(echo "$title" | cut -d "-" -f 2 | sed 's#^[[:space:]]*##g' | sed 's# *$##g')    # trimmed title (after '-')
        #TODO: what if dash is not delimiter between performer and title?
        #TODO: take $title2 if $performer and (or?) $title2 are empty
        printf "%-2sTRACK $number AUDIO\n" >> "$cue_path"
        printf "%-4sPERFORMER \"$performer\"\n" >> "$cue_path"
        printf "%-4sTITLE \"$title2\"\n" >> "$cue_path"
        printf "%-4sINDEX 01 $time\n" >> "$cue_path"
        counter=$((counter + 1)) # increase counter (arithmetic, no `expr` fork)
    done
echo "Done! Cue file:"
echo "$cue_path"