tools -> scripts
This commit is contained in:
6
scripts/README.md
Normal file
6
scripts/README.md
Normal file
@@ -0,0 +1,6 @@
|
||||
# Shell-скрипты
|
||||
|
||||
Эти скрипты я писал в разное время для решения разных задач.
|
||||
Чтобы они не растерялись по репозиториям и носителям, я решил собрать их здесь в одну кучу.
|
||||
|
||||
Я всегда использую Ubuntu в качестве своих настольных и серверных ОС, поэтому все эти скрипты писались и использовались в этих средах с версий 18.*.
|
||||
5
scripts/disable-ipv6.sh
Executable file
5
scripts/disable-ipv6.sh
Executable file
@@ -0,0 +1,5 @@
|
||||
#!/usr/bin/env bash
# Enable or disable IPv6 on all interfaces via sysctl.
#
# Usage: disable-ipv6.sh <0|1>
#   1 — disable IPv6, 0 — re-enable it.

# Refuse anything other than 0 or 1 so we never write garbage
# (or an empty value) into the sysctl keys.
if [[ "$1" != 0 && "$1" != 1 ]]; then
    echo "Usage: $(basename "$0") <0|1>  (1 = disable IPv6, 0 = enable)" >&2
    exit 1
fi

sudo sysctl -w net.ipv6.conf.all.disable_ipv6="$1"
sudo sysctl -w net.ipv6.conf.default.disable_ipv6="$1"
sudo sysctl -w net.ipv6.conf.lo.disable_ipv6="$1"
|
||||
55
scripts/display-resolution.sh
Executable file
55
scripts/display-resolution.sh
Executable file
@@ -0,0 +1,55 @@
|
||||
#!/usr/bin/env bash
# https://gist.github.com/anthonyaxenov/c16e1181d4b8a8644c57ec8a1f6cf21c
#########################################################################
#                                                                       #
#                        Set output resolution                          #
#                                                                       #
#                Author: Anthony Axenov (Антон Аксенов)                 #
#                            Version: 1.0                               #
#                           License: WTFPLv2                            #
#                                                                       #
#########################################################################
#                                                                       #
#        Using this script you can change your output resolution        #
#   to any one you need. Just adjust some vars below and run script     #
#                          (chmod +x needed).                           #
#                                                                       #
#########################################################################

# Set output name to work with. You can get it via 'xrandr --listactivemonitors'
output="HDMI-3"
# Set width of this output in px
width=1920
# Set height of this output in px
height=1080
# Set refresh rate in Hz of this output
refresh=120

# Sometimes cvt and gtf generate different modelines.
# You can play around and look which of them gives best result:
modeline=$(cvt "$width" "$height" "$refresh" | grep "Modeline")
# modeline=$(gtf "$width" "$height" "$refresh" | grep "Modeline")

# Bail out early if cvt/gtf produced nothing — every xrandr call
# below would otherwise fail with confusing errors.
if [ -z "$modeline" ]; then
    echo "ERROR: no modeline generated for ${width}x${height}@${refresh}" >&2
    exit 1
fi

# Some important data needed by xrandr:
modename="${width}x${height}@${refresh}_my"
params=$(echo "$modeline" | sed "s|^\s*Modeline\s*\"[0-9x_.]*\"\s*||")

echo "Set resolution ${width}x${height}@${refresh} on output $output:"
echo "$modename $params"

# Simple logic:
# 1. Switch output to safe mode which always exists (I believe) to avoid errors
xrandr --output "$output" --mode 640x480 --verbose
# 2. If output already has our mode -- we must delete it to avoid errors
#    (plain 'if cmd' — the original wrapped this in a useless $(...) substitution)
if xrandr | grep -q "$modename"; then
    # 2.1. Detach mode from output
    xrandr --delmode "$output" "$modename"
    # 2.2. Remove mode itself
    xrandr --rmmode "$modename"
fi
# 3. Create new mode with freshly generated parameters.
#    $params is intentionally unquoted: it holds several space-separated fields.
# shellcheck disable=SC2086
xrandr --newmode "$modename" $params --verbose
# 4. Attach mode to our output
xrandr --addmode "$output" "$modename" --verbose
# 5. Switch output to this mode immediately
xrandr --output "$output" --mode "$modename" --refresh "$refresh" --verbose
|
||||
11
scripts/display-rotate.sh
Executable file
11
scripts/display-rotate.sh
Executable file
@@ -0,0 +1,11 @@
|
||||
#!/usr/bin/env bash
# Rotate a display output (Wayland KDE via kscreen-doctor).
#
# X11 equivalent:
#   xrandr --listactivemonitors
#   xrandr --output $OUTPUT --rotate (left|right|normal|...)
#
# Wayland KDE: https://www.reddit.com/r/kde/comments/11vrbwc/how_do_i_rotate_the_screen_on_kde_with_wayland/
#   kscreen-doctor --outputs

OUTPUT='HDMI-A-1'
# Rotation direction: left|right|normal|inverted. Defaults to "normal";
# ${1:-normal} replaces the original 'test && A || B' chain.
DIRECTION="${1:-normal}"
kscreen-doctor "output.$OUTPUT.rotation.$DIRECTION"
|
||||
47
scripts/docker-volume-snapshot.sh
Normal file
47
scripts/docker-volume-snapshot.sh
Normal file
@@ -0,0 +1,47 @@
|
||||
#!/usr/bin/env bash
# Create or restore a snapshot (tar archive) of a docker volume.
#
# Original filename: docker-volume-snapshot
# Author: Juned Khatri
# License: MIT
# Repo: https://github.com/junedkhatri31/docker-volume-snapshot

set -e -o pipefail

# $( ) instead of legacy backticks throughout
programname=$(basename "$0")

# Print usage help and examples.
display_usage() {
    echo "usage: $programname (create|restore) source destination"
    echo "  create create snapshot file from docker volume"
    echo "  restore restore snapshot file to docker volume"
    echo "  source source path"
    echo "  destination destination path"
    echo
    echo "Tip: Supports tar's compression algorithms automatically"
    echo "  based on the file extention, for example .tar.gz"
    echo
    echo "Examples:"
    echo "docker-volume-snapshot create xyz_volume xyz_volume.tar"
    echo "docker-volume-snapshot create xyz_volume xyz_volume.tar.gz"
    echo "docker-volume-snapshot restore xyz_volume.tar xyz_volume"
    echo "docker-volume-snapshot restore xyz_volume.tar.gz xyz_volume"
}

case "$1" in
    "create")
        if [[ -z "$2" || -z "$3" ]]; then display_usage; exit 1; fi
        directory=$(dirname "$3")
        if [ "$directory" == "." ]; then directory=$(pwd); fi
        filename=$(basename "$3")
        # tar 'a' flag picks the compression from the file extension
        docker run --rm -v "$2:/source" -v "$directory:/dest" busybox tar cvaf "/dest/$filename" -C /source .
        ;;
    "restore")
        if [[ -z "$2" || -z "$3" ]]; then display_usage; exit 1; fi
        directory=$(dirname "$2")
        if [ "$directory" == "." ]; then directory=$(pwd); fi
        filename=$(basename "$2")
        docker run --rm -v "$3:/dest" -v "$directory:/source" busybox tar xvf "/source/$filename" -C /dest
        ;;
    *)
        display_usage
        exit 1 # Command to come out of the program with status 1
        ;;
esac
|
||||
3
scripts/duckdns/.env.example
Normal file
3
scripts/duckdns/.env.example
Normal file
@@ -0,0 +1,3 @@
|
||||
DUCK_TOKEN=
|
||||
DUCK_DOMAINS=
|
||||
DUCK_IP=
|
||||
2
scripts/duckdns/.gitignore
vendored
Normal file
2
scripts/duckdns/.gitignore
vendored
Normal file
@@ -0,0 +1,2 @@
|
||||
.env
|
||||
*.log
|
||||
9
scripts/duckdns/install.sh
Executable file
9
scripts/duckdns/install.sh
Executable file
@@ -0,0 +1,9 @@
|
||||
#!/bin/bash
# Install the DuckDNS updater: register a cron job that runs update.sh
# every 30 minutes, and create .env from the example template.
# https://www.duckdns.org/install.jsp
thisdir="$( dirname "$(readlink -e -- "${BASH_SOURCE[0]}")")"

croncmd="$thisdir/update.sh"
cronjob="*/30 * * * * $croncmd" # every 30 min
# 'crontab -l' exits non-zero and prints an error when the user has no
# crontab yet — silence it; grep -v keeps the job idempotent on re-install.
( crontab -l 2>/dev/null | grep -v -F "$croncmd" ; echo "$cronjob" ) | crontab -
# Never clobber an existing .env: it holds the user's real token/domains
# (the original 'cp -f' destroyed it on every re-install).
[ -f "$thisdir/.env" ] || cp "$thisdir/.env.example" "$thisdir/.env"
|
||||
3
scripts/duckdns/uninstall.sh
Executable file
3
scripts/duckdns/uninstall.sh
Executable file
@@ -0,0 +1,3 @@
|
||||
#!/bin/bash
# Uninstall the DuckDNS updater: drop every crontab line that mentions
# "duckdns" (i.e. the job added by install.sh) and re-install the rest.

( crontab -l | grep -v -F "duckdns" ) | crontab -
|
||||
30
scripts/duckdns/update.sh
Executable file
30
scripts/duckdns/update.sh
Executable file
@@ -0,0 +1,30 @@
|
||||
#!/bin/bash
# Update DuckDNS records with the current (or forced) IP address.
# Reads DUCK_TOKEN, DUCK_DOMAINS and optional DUCK_IP from .env next to
# this script, calls the DuckDNS update API and logs the API response
# into ./log/YYYY-MM-DD.log.
# shellcheck disable=SC1091
thisdir="$( dirname "$(readlink -e -- "${BASH_SOURCE[0]}")")"

# Append a timestamped message to today's log file.
log() {
    timestamp="$(date +'%Y-%m-%d %H:%M:%S')"
    datestamp="$(date +'%Y-%m-%d')"
    # mkdir -p is idempotent — replaces the '[ ! -d ] && mkdir' dance
    mkdir -p "$thisdir/log"
    echo "[$timestamp] $*" >> "$thisdir/log/$datestamp.log"
}

[ ! -f "$thisdir/.env" ] && {
    log "ERROR: .env not exists"
    exit 1
}

source "$thisdir/.env"

[ -z "$DUCK_TOKEN" ] && {
    log "ERROR: env var DUCK_TOKEN not specified"
    exit 1
}

[ -z "$DUCK_DOMAINS" ] && {
    log "ERROR: env var DUCK_DOMAINS not specified"
    exit 1
}

# Empty DUCK_IP lets DuckDNS auto-detect the caller's address.
result=$(curl -s "https://www.duckdns.org/update?domains=${DUCK_DOMAINS}&token=${DUCK_TOKEN}&ip=${DUCK_IP}")
log "$result"
|
||||
50
scripts/fix-obs-vcam.sh
Executable file
50
scripts/fix-obs-vcam.sh
Executable file
@@ -0,0 +1,50 @@
|
||||
#!/usr/bin/env bash
# Reload the v4l2loopback kernel module and start the OBS virtual camera.
#
# https://obsproject.com/kb/virtual-camera-troubleshooting
# https://obsproject.com/forum/threads/how-to-start-virtual-camera-without-sudo-privileges.139783/
# https://blog.csdn.net/qq_43008667/article/details/128041455
# https://blog.jbrains.ca/permalink/using-obs-studio-as-a-virtual-cam-on-linux
# https://github.com/obsproject/obs-studio/issues/4808

# requires package: v4l2loopback-dkms

# obs_start()
# {
#     #This function is intended to prevent blank cameras in OBS upon OBS restart / exit
#     #1. load/refresh uvcvideo before starting obs
#     if lsmod | grep -q 'uvcvideo'; then
#         sudo rmmod uvcvideo
#     fi
#     sudo modprobe uvcvideo
#     #2. since no environment with a keyring to prompt for allowing virtual webcams prior is a must
#     sudo modprobe v4l2loopback video_nr=10 card_label='OBS Virtual Camera'
#     sleep 1
#     sh -c "$obs_cmd --startvirtualcam || sleep 3; sudo rmmod uvcvideo"
# }

# check that a command is available in PATH ($1 was unquoted in the original)
installed () {
    command -v "$1" > /dev/null
}

installed 'obs' && obs_cmd='obs'
installed 'obs-studio' && obs_cmd='obs-studio'

# Bail out early if OBS is missing — the original would later expand an
# empty $obs_cmd and try to execute '--startvirtualcam' as a command.
if [ -z "$obs_cmd" ]; then
    echo "OBS not found in PATH (tried 'obs' and 'obs-studio')" >&2
    exit 3
fi

# obs executes this command when you start virtual camera
# sudo modprobe v4l2loopback exclusive_caps=1 card_label="OBS Virtual Camera"

# another version from one of links below
# sudo modprobe v4l2loopback video_nr=2 devices=1 card_label="OBS Virtual Camera"

# Test commands directly instead of inspecting $? afterwards.
if sudo modprobe -r v4l2loopback || sudo rmmod v4l2loopback; then
    if sudo modprobe v4l2loopback video_nr=2 devices=1 card_label="OBS Virtual Camera"; then
        $obs_cmd --startvirtualcam & disown
    else
        echo "Cannot run modprobe. Ensure v4l2loopback-dkms is installed and try again"
        exit 1
    fi
else
    echo "Cannot remove v4l2loopback device"
    exit 2
fi
|
||||
11
scripts/fix-repo-vivaldi.sh
Executable file
11
scripts/fix-repo-vivaldi.sh
Executable file
@@ -0,0 +1,11 @@
|
||||
#!/usr/bin/env bash
# Re-install the Vivaldi apt repository and its signing key.
# https://gist.github.com/pkorpine/16fcdbe070222cf1d99e67cf542e60c2

sudo rm -rf /etc/apt/sources.list.d/vivaldi*
# If cd fails, the key would be written into (and deleted from) the
# caller's current directory — abort instead.
cd /tmp || exit 1
wget -qO- https://repo.vivaldi.com/archive/linux_signing_key.pub | gpg --dearmor > packages.vivaldi.gpg
sudo install -o root -g root -m 644 packages.vivaldi.gpg /etc/apt/trusted.gpg.d
sudo sh -c 'echo "deb [arch=amd64,armhf signed-by=/etc/apt/trusted.gpg.d/packages.vivaldi.gpg] https://repo.vivaldi.com/archive/deb stable main" > /etc/apt/sources.list.d/vivaldi.list'
rm -f packages.vivaldi.gpg
sudo apt update
|
||||
12
scripts/fix-repo-vscode.sh
Executable file
12
scripts/fix-repo-vscode.sh
Executable file
@@ -0,0 +1,12 @@
|
||||
#!/usr/bin/env bash
# Re-install the VS Code apt repository and the Microsoft signing key.
# https://code.visualstudio.com/docs/setup/linux

sudo rm -rf /etc/apt/sources.list.d/vscode*
# If cd fails, the key would be written into (and deleted from) the
# caller's current directory — abort instead.
cd /tmp || exit 1
sudo apt install wget gpg apt-transport-https
wget -qO- https://packages.microsoft.com/keys/microsoft.asc | gpg --dearmor > packages.microsoft.gpg
sudo install -D -o root -g root -m 644 packages.microsoft.gpg /etc/apt/keyrings/packages.microsoft.gpg
echo "deb [arch=amd64,arm64,armhf signed-by=/etc/apt/keyrings/packages.microsoft.gpg] https://packages.microsoft.com/repos/code stable main" | sudo tee /etc/apt/sources.list.d/vscode.list > /dev/null
rm -f packages.microsoft.gpg
sudo apt update
|
||||
57
scripts/free-space.sh
Executable file
57
scripts/free-space.sh
Executable file
@@ -0,0 +1,57 @@
|
||||
#!/usr/bin/env bash
# Free up disk space: apt caches, old logs, trash/thumbnails, dangling
# docker objects, and disabled snap revisions.
# https://gist.github.com/anthonyaxenov/02c00c965be4eb5bb163a153abdf4c2b
# https://itsfoss.com/free-up-space-ubuntu-linux/

# show disk usage before cleanup (tmpfs excluded)
echo
echo
df -hx tmpfs
echo

echo
echo "[1/5] Removing apt caches and unused packages"
echo

sudo apt autoremove --purge
sudo apt autoclean
sudo apt clean

echo
echo "[2/5] Removing old system logs"
echo

sudo journalctl --vacuum-time=1d
sudo rm -rf /var/log/journal/user-*@*
sudo rm -rf /var/log/journal/system*@*
sudo rm /var/log/{syslog,dmesg,btmp}.*
sudo rm /var/log/{auth,dpkg,kern,alternatives,dmesg}.log.*

echo
echo "[3/5] Cleaning user trash and thumbnails"
echo

rm -rf ~/.local/share/Trash/files/*
rm -rf ~/.cache/thumbnails/*

echo
echo "[4/5] Cleaning out dangling docker objects"
echo

docker system prune -f
# docker system prune -af

echo
echo "[5/5] Removing disabled unused snaps"
echo

# 'read -r' — the original dropped -r, which mangles backslashes
sudo snap list --all \
    | awk '/disabled/{print $1, $3}' \
    | while read -r snapname revision; do
        sudo snap remove "$snapname" --revision="$revision"
    done

# show disk usage after cleanup
echo
echo
df -hx tmpfs
echo
echo
|
||||
115
scripts/frkn.sh
Executable file
115
scripts/frkn.sh
Executable file
@@ -0,0 +1,115 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
# Bring a WireGuard tunnel down and disable IPv6 again afterwards
# (mirrors connect(), which enables IPv6 before bringing the tunnel up).
# $1 - wireguard config/interface name, e.g. "frkn-nl"
function disconnect() {
    echo "Disconnecting: $1"
    sudo wg-quick down "$1"
    sudo sysctl -w net.ipv6.conf.all.disable_ipv6=1
    sudo sysctl -w net.ipv6.conf.default.disable_ipv6=1
    sudo sysctl -w net.ipv6.conf.lo.disable_ipv6=1
    echo
}
|
||||
|
||||
# Enable IPv6 and bring up the WireGuard tunnel "frkn-$1".
# $1 - country code, e.g. "nl" (must match /etc/wireguard/frkn-<code>.conf)
function connect() {
    echo "Connecting: frkn-$1"
    sudo sysctl -w net.ipv6.conf.all.disable_ipv6=0
    sudo sysctl -w net.ipv6.conf.default.disable_ipv6=0
    sudo sysctl -w net.ipv6.conf.lo.disable_ipv6=0
    sudo wg-quick up "frkn-$1"
    echo
}
|
||||
|
||||
# Return 0 if $1 equals any of the remaining arguments, 1 otherwise.
# Usage: in_array needle item1 item2 ...
function in_array() {
    local needle="$1"
    shift
    local candidate
    for candidate in "$@"; do
        if [[ "$candidate" == "$needle" ]]; then
            return 0
        fi
    done
    return 1
}
|
||||
|
||||
# Install/upgrade wireguard and jq via apt, then print wg's version.
# Returns non-zero if either step fails.
function update_wg() {
    sudo apt install -y wireguard jq && wg --version
}
|
||||
|
||||
# Download fresh WireGuard configs for every FRKN location from
# api.frkn.org and install them into /etc/wireguard as frkn-<code>.conf.
function update_frkn() {
    local countries=(uk ru nl nl2 ch)
    for idx in "${!countries[@]}"; do
        country=${countries[idx]}
        # $((idx + 1)) replaces the legacy 'expr' call
        echo "Downloading config for $country ($((idx + 1))/${#countries[@]})"

        json=$(curl -s "https://api.frkn.org/peer?location=$country" | jq)

        # "$json" quoted — unquoted echo word-splits the JSON before jq sees it
        iface_address=$(echo "$json" | jq -r .iface.address)
        iface_privkey=$(echo "$json" | jq -r .iface.key)
        iface_dns=$(echo "$json" | jq -r .iface.dns)
        peer_pubkey=$(echo "$json" | jq -r .peer.pubkey)
        peer_psk=$(echo "$json" | jq -r .peer.psk)
        peer_allowed_ips=$(echo "$json" | jq -r .peer.allowed_ips)
        peer_endpoint=$(echo "$json" | jq -r .peer.endpoint)

        cat << EOF > "./frkn-$country.conf"
[Interface]
Address = $iface_address
DNS = $iface_dns
PrivateKey = $iface_privkey

[Peer]
PublicKey = $peer_pubkey
PresharedKey = $peer_psk
AllowedIPs = $peer_allowed_ips
Endpoint = $peer_endpoint
PersistentKeepalive = 25
EOF
    done
    sudo mv -f ./frkn-*.conf /etc/wireguard/
}
|
||||
|
||||
# ------------------------------------------------------------------
# Entry point: frkn.sh [update|down|show|<country-code>]
# With no argument, prompts for a country code from the installed configs.
command="$1"
countries=()
current=$(sudo wg show | head -n 1 | awk '{print $2}')

# Collect country codes from installed frkn-*.conf files.
for file in /etc/wireguard/*.conf; do
    [ -e "$file" ] || continue  # glob may match nothing — skip the literal pattern
    filename=${file/\/etc\/wireguard\/frkn-}
    code=${filename/.conf/}
    countries+=("$code")        # quoted append (was unquoted)
done

correct=-1
if [ -z "$command" ] ; then
    while [ $correct -lt 0 ]; do
        # prompt text fixed (was the garbled "Entry on of country code")
        read -rp "Enter one of country codes (${countries[*]}): " command
        if in_array "$command" "${countries[@]}"; then
            correct=1
        else
            echo "Неверный код страны!"
        fi
    done
fi

case "$command" in
    "update" )
        if update_wg && update_frkn; then
            echo "Wireguard and FRKN updated"
        else
            echo "Something went wrong"
            exit 1
        fi
        ;;

    "down" )
        if [ -n "$current" ]; then
            disconnect "$current"
        fi
        ;;

    "show" )
        sudo wg show
        ;;

    * )
        # switch to the requested country: drop the current tunnel first
        if [ -n "$current" ]; then
            disconnect "$current"
        fi
        connect "$command"
        ;;
esac
|
||||
111
scripts/helpers/arg-parser/README.md
Normal file
111
scripts/helpers/arg-parser/README.md
Normal file
@@ -0,0 +1,111 @@
|
||||
# Argument parser for bash scripts v1.6
|
||||
|
||||
## Usage
|
||||
|
||||
```shell
|
||||
# 1. add these lines after shebang:
|
||||
|
||||
__RAW_ARGS__=("$@")
|
||||
source args.sh
|
||||
|
||||
# 2. read arguments as flags:
|
||||
|
||||
arg a 1 flag_a
|
||||
echo "Flag -a has value '$flag_a'"
|
||||
echo "Flag -a has value '$(arg a 1)'"
|
||||
|
||||
arg b 1 flag_b
|
||||
echo "Flag -b has value '$flag_b'"
|
||||
echo "Flag -b has value '$(arg b 1)'"
|
||||
|
||||
arg c 1 flag_c
|
||||
echo "Flag -c has value '$flag_c'"
|
||||
echo "Flag -c has value '$(arg c 1)'"
|
||||
|
||||
arg d 1 flag_d
|
||||
echo "Flag -d has value '$flag_d'"
|
||||
echo "Flag -d has value '$(arg d 1)'"
|
||||
|
||||
argl flag1 1 flag_1
|
||||
echo "Flag --flag1 has value '$flag_1'"
|
||||
echo "Flag --flag1 has value '$(argl flag1 1)'"
|
||||
|
||||
argl flag2 1 flag_2
|
||||
echo "Flag --flag2 has value '$flag_2'"
|
||||
echo "Flag --flag2 has value '$(argl flag2 1)'"
|
||||
|
||||
# 3. and/or read arguments' values:
|
||||
|
||||
arg a 0 arg_a
|
||||
echo "Arg -a has value '$arg_a'"
|
||||
echo "Arg -a has value '$(arg a 0)'"
|
||||
|
||||
arg b 0 arg_b
|
||||
echo "Arg -b has value '$arg_b'"
|
||||
echo "Arg -b has value '$(arg b 0)'"
|
||||
|
||||
argl arg1 0 arg_1
|
||||
echo "Arg --arg1 has value '$arg_1'"
|
||||
echo "Arg --arg1 has value '$(argl arg1 0)'"
|
||||
|
||||
argl arg2 0 arg_2
|
||||
echo "Arg --arg2 has value '$arg_2'"
|
||||
echo "Arg --arg2 has value '$(argl arg2 0)'"
|
||||
```
|
||||
|
||||
## How it works
|
||||
|
||||
1. Short arguments can be specified contiguously or separately
|
||||
and their order does not matter, but before each of them
|
||||
(or the first of them) one leading dash must be specified.
|
||||
|
||||
> Valid combinations: `-a -b -c`, `-cba`, `-b -azc "value of z"`
|
||||
|
||||
2. Short arguments can have values; when an argument takes a value, that value
   must go right after the argument itself.
|
||||
|
||||
> Valid combinations: `-ab avalue`, `-ba avalue`, `-a avalue -b`
|
||||
|
||||
3. Long arguments cannot be combined like short ones and each
|
||||
of them must be specified separately with two leading dashes.
|
||||
|
||||
> Valid combinations: `--foo --bar`, `--bar --foo`
|
||||
|
||||
4. Long arguments can have a value which must be specified after `=`.
|
||||
|
||||
> Valid combinations: `--foo value --bar`, `--bar --foo=value`
|
||||
|
||||
5. If arg value may contain space then value must be "double-quoted".
|
||||
|
||||
6. You can use arg() or argl() to check presence of any arg, no matter
|
||||
if it has value or not.
|
||||
|
||||
More info:
|
||||
* 🇷🇺 [axenov.dev/bash-args](https://axenov.dev/bash-args/)
|
||||
* 🇺🇸 [axenov.dev/en/bash-processing-arguments-in-a-script-when-called-from-the-shell/](https://axenov.dev/en/bash-processing-arguments-in-a-script-when-called-from-the-shell)
|
||||
|
||||
Tested in Ubuntu 20.04.2 LTS in:
|
||||
|
||||
```
|
||||
bash 5.0.17(1)-release (x86_64-pc-linux-gnu) and later
|
||||
zsh 5.8 (x86_64-ubuntu-linux-gnu) and later
|
||||
```
|
||||
|
||||
## Version history
|
||||
|
||||
```
|
||||
v1.0 - initial
|
||||
v1.1 - arg(): improved skipping uninteresting args
|
||||
- arg(): check next arg to be valid value
|
||||
v1.2 - removed all 'return' statements
|
||||
- arg(): error message corrected
|
||||
- new examples
|
||||
v1.3 - argl(): improved flag check
|
||||
- some text corrections
|
||||
v1.4 - new function argn()
|
||||
- some text corrections
|
||||
v1.5 - arg(), grep_match(): fixed searching for -e argument
|
||||
- grep_match(): redirect output into /dev/null
|
||||
v1.6 - removed useless argn()
|
||||
- arg() and argl() refactored and now support values with whitespaces
|
||||
```
|
||||
158
scripts/helpers/arg-parser/args.sh
Executable file
158
scripts/helpers/arg-parser/args.sh
Executable file
@@ -0,0 +1,158 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
# Argument parser for bash scripts
|
||||
#
|
||||
# Author: Anthony Axenov (Антон Аксенов)
|
||||
# Version: 1.6
|
||||
# License: MIT
|
||||
# Description: https://git.axenov.dev/anthony/shell/src/branch/master/helpers/arg-parser
|
||||
|
||||
#purpose Little helper to check if a string matches an extended regex (ERE)
#         (comment previously said PCRE, but grep -E is ERE, not -P)
#argument $1 - some string
#argument $2 - regex
#exitcode 0 - string matches
#exitcode 1 - string does not match
grep_match() {
    # -q already suppresses all output; the old >/dev/null was redundant
    printf "%s" "$1" | grep -qE "$2"
}
|
||||
|
||||
#purpose Find short argument or its value
#argument $1 - (string) argument (without leading dashes; only first letter will be processed)
#argument $2 - (number) is it flag? 1 if is, otherwise 0 or nothing
#argument $3 - (string) variable to return value into
#              (if not specified then it will be echo'ed in stdout)
#returns (string) 1 (if $2 == 1), value (if correct and if $2 != 1) or nothing
#usage To get value into var:    arg v 0 myvar    or myvalue=$(arg 'v')
#usage To find flag into var:    arg f 1 myvar    or flag=$(arg 'f')
#usage To echo value:            arg v
#usage To echo 1 if flag exists: arg f
arg() {
    [ "$1" ] || { echo "Argument name is not specified!" >&2 && exit 1; }
    local arg_name="${1:0:1}" # first character of argument name to find
    # NOTE: the original wrote  local is_flag="$2" || 0  — the '|| 0' was dead
    # code (local always succeeds; '0' would run as a command). Removed.
    local is_flag="$2"  # 1 if we need just find a flag, 0/empty to get a value
    local var_name="$3" # variable name to return value into; empty => echo to stdout
    local value=        # initialize empty value to check if we found one later
    local arg_found=0   # marker of found argument

    for idx in "${!__RAW_ARGS__[@]}"; do # going through all args
        local arg_search=${__RAW_ARGS__[idx]} # get current argument

        # skip $arg_search if it starts with '--' or a letter (not a '-x...' group)
        grep_match "$arg_search" "^(\w|--)" && continue

        # clear $arg_search from special and duplicate characters,
        # e.g. 'fas-)dfs' becomes 'fasd'
        local arg_chars="$(printf "%s" "$arg_search" \
            | tr -s "[$arg_search]" 2>/dev/null \
            | tr -d "[:punct:][:blank:]" 2>/dev/null)"

        # if $arg_name is not one of $arg_chars then skip it
        grep_match "-$arg_name" "^-[$arg_chars]$" || continue
        arg_found=1

        # flag requested => value is 1; otherwise the next raw argument is the value
        [ "$is_flag" = 1 ] && value=1 || value="${__RAW_ARGS__[idx+1]}"
        break
    done

    [ "$is_flag" = 1 ] && [ -z "$value" ] && value=0;

    # if value we found is empty or looks like another argument then exit with error message
    if [ "$arg_found" = 1 ] && ! grep_match "$value" "^[[:graph:]]+$" || grep_match "$value" "^--?\w+$"; then
        echo "ERROR: Argument '-$arg_name' must have correct value!" >&2 && exit 1
    fi

    # return '$value' back into $var_name (if exists) or echo in stdout
    [ "$var_name" ] && eval "$var_name='$value'" || echo "$value"
}
|
||||
|
||||
#purpose Find long argument or its value
#argument $1 - argument (without leading dashes)
#argument $2 - (number) is it flag? 1 if is, otherwise 0 or nothing
#argument $3 - (string) variable to return value into
#              (if not specified then it will be echo'ed in stdout)
#returns (string) 1 (if $2 == 1), value (if correct and if $2 != 1) or nothing
#usage To get value into var:    argl foo 0 myvar    or myvalue=$(argl 'foo')
#usage To find flag into var:    argl foo 1 myvar    or flag=$(argl 'foo')
#usage To echo value:            argl foo
#usage To echo 1 if flag exists: argl foo
argl() {
    [ "$1" ] || { echo "Argument name is not specified!" >&2 && exit 1; }
    local arg_name="$1" # argument name to find
    # NOTE: dead-code '|| 0' defaults removed (see arg() above)
    local is_flag="$2"  # 1 if we need just find a flag, 0/empty to get a value
    local var_name="$3" # variable name to return value into; empty => echo to stdout
    local value=        # initialize empty value to check if we found one later
    local arg_found=0   # marker of found argument

    for idx in "${!__RAW_ARGS__[@]}"; do # going through all args
        local arg_search="${__RAW_ARGS__[idx]}" # get current argument

        if [ "$arg_search" = "--$arg_name" ]; then # exact '--name' match
            # FIX: arg_found was never set in the original, so the
            # invalid-value guard below could never fire (arg() sets it).
            arg_found=1
            # flag requested => value is 1; otherwise next raw argument is the value
            [ "$is_flag" = 1 ] && value=1 || value="${__RAW_ARGS__[idx+1]}"
            break # stop the loop
        elif grep_match "$arg_search" "^--$arg_name=.+$"; then # '--name=value' form
            arg_found=1
            # flag requested => value is 1; otherwise take everything after '='
            [ "$is_flag" = 1 ] && value=1 || value="${arg_search#*=}"
            break # stop the loop
        fi
    done

    [ "$is_flag" = 1 ] && [ -z "$value" ] && value=0;

    # if value we found is empty or looks like another argument then exit with error message
    if [ "$arg_found" = 1 ] && ! grep_match "$value" "^[[:graph:]]+$" || grep_match "$value" "^--?\w+$"; then
        echo "ERROR: Argument '--$arg_name' must have correct value!" >&2 && exit 1;
    fi

    # return '$value' back into $var_name (if exists) or echo in stdout
    [ "$var_name" ] && eval "$var_name='$value'" || echo "$value"
}
|
||||
|
||||
################################
|
||||
|
||||
# This is simple examples which you can play around with.
|
||||
# 1. uncomment code below
|
||||
# 2. call this script to see what happens:
|
||||
# /args.sh -abcd --flag1 --flag2 -e evalue -f fvalue --arg1=value1 --arg2 value2
|
||||
|
||||
# __RAW_ARGS__=("$@")
|
||||
|
||||
# arg a 1 flag_a
|
||||
# echo "Flag -a has value '$flag_a'"
|
||||
# echo "Flag -a has value '$(arg a 1)'"
|
||||
|
||||
# arg b 1 flag_b
|
||||
# echo "Flag -b has value '$flag_b'"
|
||||
# echo "Flag -b has value '$(arg b 1)'"
|
||||
|
||||
# arg c 1 flag_c
|
||||
# echo "Flag -c has value '$flag_c'"
|
||||
# echo "Flag -c has value '$(arg c 1)'"
|
||||
|
||||
# arg d 1 flag_d
|
||||
# echo "Flag -d has value '$flag_d'"
|
||||
# echo "Flag -d has value '$(arg d 1)'"
|
||||
|
||||
# argl flag1 1 flag_1
|
||||
# echo "Flag --flag1 has value '$flag_1'"
|
||||
# echo "Flag --flag1 has value '$(argl flag1 1)'"
|
||||
|
||||
# argl flag2 1 flag_2
|
||||
# echo "Flag --flag2 has value '$flag_2'"
|
||||
# echo "Flag --flag2 has value '$(argl flag2 1)'"
|
||||
|
||||
# arg e 0 arg_e
|
||||
# echo "Arg -e has value '$arg_e'"
|
||||
# echo "Arg -e has value '$(arg e 0)'"
|
||||
|
||||
# arg f 0 arg_f
|
||||
# echo "Arg -f has value '$arg_f'"
|
||||
# echo "Arg -f has value '$(arg f 0)'"
|
||||
|
||||
# argl arg1 0 arg_1
|
||||
# echo "Arg --arg1 has value '$arg_1'"
|
||||
# echo "Arg --arg1 has value '$(argl arg1 0)'"
|
||||
|
||||
# argl arg2 0 arg_2
|
||||
# echo "Arg --arg2 has value '$arg_2'"
|
||||
# echo "Arg --arg2 has value '$(argl arg2 0)'"
|
||||
127
scripts/helpers/basic.sh
Normal file
127
scripts/helpers/basic.sh
Normal file
@@ -0,0 +1,127 @@
|
||||
#!/usr/bin/env bash
# Little handy helpers for scripting.
# Depends on io.sh (expected next to this file) for 'require' etc.
# shellcheck disable=SC1090
source "$( dirname "$(readlink -e -- "${BASH_SOURCE}")")/io.sh" || exit 255

########################################################
# Little handy helpers for scripting
########################################################

# check that the login shell is bash
# FIX: original compared with '!=', returning success for every shell
# EXCEPT bash — inverted relative to the function name.
# NOTE(review): $SHELL is the user's login shell, not necessarily the
# interpreter running this script — confirm this is the intended check.
is_bash() {
    [[ "$(basename "$SHELL")" == "bash" ]]
}

# check whether this file is being sourced rather than executed
is_sourced() {
    [[ "${BASH_SOURCE[0]}" != "$0" ]]
}

# NOTE(review): '-ne 0' succeeds for NON-root users, OR'ed with a
# "parent process is sudo" check — semantics look inverted relative to
# the name; kept as-is, verify against callers before changing.
is_root() {
    [[ "$(id -u)" -ne 0 || $(ps -o comm= -p $PPID) == "sudo" ]]
}

# print the OS family detected via uname
get_os() {
    case "$(uname -s)" in
        Linux*)   echo Linux  ;;
        Darwin*)  echo Macos  ;;
        CYGWIN*)  echo Cygwin ;;
        MINGW*)   echo MinGw  ;;
        MSYS_NT*) echo Git    ;;
        *) return 1 ;;
    esac
}

# print the distro id from /etc/os-release (e.g. 'ubuntu')
get_os_id() {
    [ -f /etc/os-release ] && source /etc/os-release
    echo "$ID"
}

# convert relative path $1 to a full one ('~' expanded to $HOME)
# (direct call — the original wrapped it in a useless 'echo $(...)')
abspath() {
    realpath -q "${1/#\~/$HOME}"
}

# check if path $1 is writable
is_writable() {
    [ -w "$(abspath "$1")" ]
}

# check if path $1 is a directory
is_dir() {
    [ -d "$(abspath "$1")" ]
}

# check if path $1 is a file
is_file() {
    [ -f "$(abspath "$1")" ]
}

# check if $1 is the name of a declared shell function
is_function() {
    declare -F "$1" > /dev/null
}

# check if string $1 matches perl-compatible regex $2
regex_match() {
    printf "%s" "$1" | grep -qP "$2"
}

# check if any of arguments $2.. equals string $1
in_array() {
    local find=$1
    shift
    for e in "$@"; do
        [[ "$e" == "$find" ]] && return 0
    done
    return 1
}

# join arguments $2.. with delimiter $1
implode() {
    local d=${1-}
    local f=${2-}
    if shift 2; then
        printf %s "$f" "${@/#/$d}"
    fi
}

# open url $1 in the system web-browser
# ('command -v' instead of non-portable 'which')
open_url() {
    if command -v xdg-open > /dev/null; then
        xdg-open "$1" </dev/null >/dev/null 2>&1 & disown
    elif command -v gnome-open > /dev/null; then
        gnome-open "$1" </dev/null >/dev/null 2>&1 & disown
    fi
}

# unpack .tar.gz file $1 into path $2 ('require' comes from io.sh)
unpack_targz() {
    require tar
    tar -xzvf "$1" -C "$2"
}

# make soft symbolic link of path $1 to path $2
symlink() {
    ln -sf "$1" "$2"
}

# download file $1 into path $2 using wget
download() {
    require wget
    wget "$1" -O "$2"
}

# download file $1 into path $2 using curl
cdownload() {
    require curl
    curl -fsSL "$1" -o "$2"
}

# check if $1 is a non-negative integer
is_int() {
    [[ "$1" =~ ^[0-9]+$ ]]
}

# check if $1 is a number with an optional '.' or ',' fractional part
is_number() {
    [[ "$1" =~ ^[0-9]+([.,][0-9]+)?$ ]]
}

# strip leading/trailing whitespace from $1
# NOTE(review): xargs also collapses inner whitespace and interprets
# quotes — confirm callers only pass simple tokens.
trim() {
    echo "$1" | xargs
}
|
||||
3
scripts/helpers/debug.sh
Normal file
3
scripts/helpers/debug.sh
Normal file
@@ -0,0 +1,3 @@
|
||||
#!/usr/bin/env bash
# Debug helpers (depends on io.sh next to this file).
# Path quoted — the original unquoted $( dirname ... ) broke on
# directories containing spaces.
# shellcheck disable=SC1090
source "$( dirname "$(readlink -e -- "${BASH_SOURCE}")")/io.sh" || exit 255
|
||||
|
||||
79
scripts/helpers/docker.sh
Normal file
79
scripts/helpers/docker.sh
Normal file
@@ -0,0 +1,79 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
########################################################
|
||||
# Docker wrappers
|
||||
########################################################
|
||||
|
||||
# Run the correct "docker compose" command (compose v2 plugin if present,
# legacy docker-compose v1 binary otherwise, with a deprecation warning).
# Supports a --profiles=<a,b,c> pseudo-option exported as COMPOSE_PROFILES.
# Depends on require/installed_pkg/warn/error/die/debug and argl from
# sibling helper files.
docker.compose() {
    require docker

    argl profiles 0 profiles
    args=${*/--profiles=[a-zA-Z_,0-9]*/}

    # Probe for the compose plugin explicitly; the original wrapped the
    # probe in a useless $(...) command substitution.
    if docker compose version &>/dev/null; then
        local cmd="docker compose $args"
    elif installed_pkg "docker-compose"; then
        local cmd="docker-compose $args"
        warn
        warn "docker-compose v1 устарел и не поддерживается, его поведение непредсказуемо."
        warn "Обнови docker согласно документации: https://docs.docker.com/engine/install/"
        warn
    else
        error "Должен быть установлен docker-compose-plugin!"
        die "Установи docker согласно документации: https://docs.docker.com/engine/install/" 2
    fi

    if [[ "$profiles" ]]; then
        export COMPOSE_PROFILES=$profiles
        debug "Выполнено: export COMPOSE_PROFILES=$profiles"
    fi

    debug "Команда: $cmd"
    $cmd
}
||||
|
||||
# Print docker object details (containers, images, networks, ...).
# All arguments are forwarded to `docker inspect`; stderr is muted.
docker.inspect() {
    # NOTE: cmd is intentionally global (no `local`) — matches file style
    cmd="docker inspect $*"
    debug "Команда: $cmd"
    $cmd 2>/dev/null
}
|
||||
|
||||
# Run a command inside a container as root with an interactive TTY.
# $* - container name/id followed by the command to execute
docker.exec() {
    cmd="docker exec -u root -it $*"
    debug "Команда: $cmd"
    $cmd
}
|
||||
|
||||
# NOTE(review): exact duplicate of docker.inspect defined above — this
# later definition silently wins; consider deleting one of the two.
docker.inspect() {
    cmd="docker inspect $*"
    debug "Команда: $cmd"
    $cmd 2>/dev/null
}
|
||||
|
||||
# Show container IP addresses.
#   (no args)  - name + IP of every running container
#   -a         - same, but for ALL containers (including stopped)
#   -c         - containers of the current docker-compose project
#   <name|id>  - IP and published ports of a single container
docker.ip() { # not finished
    if [ "$1" ]; then
        if [ "$1" = "-a" ]; then
            docker ps -aq \
                | xargs -n 1 docker inspect --format '{{.Name}}{{range .NetworkSettings.Networks}} {{.IPAddress}}{{end}}' \
                | sed -e 's#^/##' \
                | column -t
        elif [ "$1" = "-c" ]; then
            # NOTE(review): uses legacy docker-compose v1 binary directly
            docker-compose ps -q \
                | xargs -n 1 docker inspect --format '{{.Name}}{{range .NetworkSettings.Networks}} {{.IPAddress}}{{end}}' \
                | sed -e 's#^/##' \
                | column -t
        else
            docker inspect --format '{{range .NetworkSettings.Networks}}{{.IPAddress}}{{end}}' "$1"
            docker port "$1"
        fi
    else
        docker ps -q \
            | xargs -n 1 docker inspect --format '{{.Name}}{{range .NetworkSettings.Networks}} {{.IPAddress}}{{end}}' \
            | sed -e 's#^/##' \
            | column -t
    fi
}
|
||||
178
scripts/helpers/git.sh
Normal file
178
scripts/helpers/git.sh
Normal file
@@ -0,0 +1,178 @@
|
||||
#!/usr/bin/env bash
|
||||
_dir=$( dirname $(readlink -e -- "${BASH_SOURCE}"))
|
||||
source "$_dir/io.sh" || exit 255
|
||||
source "$_dir/basic.sh" || exit 255
|
||||
source "$_dir/packages.sh" || exit 255
|
||||
|
||||
########################################################
|
||||
# Shorthands for git
|
||||
########################################################
|
||||
|
||||
# Shallow clone: single branch, history depth 1.
# All arguments are forwarded to `git clone`.
git.clone_quick() {
    require git
    git clone --single-branch --depth=1 "$@"
}
|
||||
|
||||
# Check whether directory $1 is a git repository (contains .git/).
# Relies on require_dir/check_dir helpers defined elsewhere in this project.
git.is_repo() {
    require git
    [ "$1" ] || die "Path is not specified" 101
    require_dir "$1/"
    check_dir "$1/.git"
}
|
||||
|
||||
# Die (exit 10) unless $1 is a git repository.
git.require_repo() {
    require git
    if ! git.is_repo "$1"; then
        die "'$1' is not git repository!" 10
    fi
}
|
||||
|
||||
# Read or write a global git config value.
# $1 - config key
# $2 - optional value; when given, replaces all existing entries for the key
git.cfg() {
    require git
    [ "$1" ] || die "Key is not specified" 101
    if [[ "$2" ]]; then
        git config --global --replace-all "$1" "$2"
    else
        # FIX: the old `echo $(git config ...)` word-split multi-value
        # (multi-line) output into one space-joined line; print directly
        git config --global --get-all "$1"
    fi
}
|
||||
|
||||
# Configure the global git user identity.
# $1 - repo path (validated only), $2 - user name, $3 - email
git.set_user() {
    require git
    [ "$1" ] || die "git.set_user: Repo is not specified" 100
    # FIX: the old code called `git.cfg "$1" "user.name" "$2"`, passing the
    # REPO PATH as the config key (git.cfg expects key, value) — the name
    # and email were never actually written.
    git.cfg "user.name" "$2"
    git.cfg "user.email" "$3"
    # FIX: $name/$email were never defined; report the actual arguments
    success "User set to '$2 <$3>' in ${FWHITE}$1"
}
|
||||
|
||||
# Fetch branch $1 from origin (or everything when no branch is given).
# A missing remote branch is only warned about; a failing fetch dies (12).
git.fetch() {
    require git
    if [ "$1" ]; then
        if git.remote_branch_exists "origin/$1"; then
            git fetch origin "refs/heads/$1:refs/remotes/origin/$1" --progress --prune --quiet 2>&1 || die "Could not fetch $1 from origin" 12
        else
            warn "Tried to fetch branch 'origin/$1' but it does not exist."
        fi
    else
        git fetch origin --progress --prune --quiet 2>&1 || exit 12
    fi
}
|
||||
|
||||
# Destructive cleanup: discard ALL local modifications, then remove
# untracked files and directories.
git.reset() {
    require git
    # drop tracked modifications first...
    git reset --hard "HEAD"
    # ...then untracked files/dirs
    git clean -fd
}
|
||||
|
||||
# Clone a repository; all arguments pass through to `git clone`.
git.clone() {
    require git
    # FIX: "$@" keeps each argument a separate word — the old "$*" joined
    # every argument into ONE word, so any multi-argument call failed
    git clone "$@" 2>&1
}
|
||||
|
||||
# Checkout; all arguments pass through to `git checkout`.
git.co() {
    require git
    # FIX: "$@" instead of "$*" — with "$*", calls such as
    # `git.co -b branch base` (see git.new_branch) collapsed into the
    # single argument "-b branch base" and always failed
    git checkout "$@" 2>&1
}
|
||||
|
||||
# Succeed when $1 names the branch currently checked out.
git.is_it_current_branch() {
    require git
    [ "$1" ] || die "git.is_it_current_branch: Branch is not specified" 19
    local current
    current="$(git.current_branch)"
    [ "$current" = "$1" ]
}
|
||||
|
||||
# Pull a branch from origin (current branch when $1 is omitted),
# then force-fetch all tags. Exits 13 on either failure.
git.pull() {
    require git
    [ "$1" ] && BRANCH=$1 || BRANCH=$(git.current_branch)
    # note "Updating branch $BRANCH..."
    git pull origin "refs/heads/$BRANCH:refs/remotes/origin/$BRANCH" --prune --force --quiet 2>&1 || exit 13
    git pull origin --tags --force --quiet 2>&1 || exit 13
    # [ "$1" ] || die "git.pull: Branch is not specified" 19
    # if [ "$1" ]; then
    #     note "Updating branch $1..."
    #     git pull origin "refs/heads/$1:refs/remotes/origin/$1" --prune --force --quiet 2>&1 || exit 13
    # else
    #     note "Updating current branch..."
    #     git pull
    # fi
}
|
||||
|
||||
# Print the name of the currently checked-out branch (exit 18 on failure,
# e.g. detached HEAD in older git versions prints nothing).
git.current_branch() {
    require git
    git branch --show-current || exit 18
}
|
||||
|
||||
# Succeed when a local branch named $1 exists.
git.local_branch_exists() {
    require git
    # FIX: quote the ref — an unquoted $1 was subject to word splitting
    # and glob expansion
    [ -n "$(git for-each-ref --format='%(refname:short)' "refs/heads/$1")" ]
}
|
||||
|
||||
# Refresh remote-tracking refs from origin, pruning deleted branches.
# Exits 18 when the remote update fails.
git.update_refs() {
    require git
    info "Updating local refs..."
    git remote update origin --prune >/dev/null 2>&1 || exit 18
}
|
||||
|
||||
# Delete branch $1 on origin; warn and return 1 when it does not exist.
git.delete_remote_branch() {
    require git
    # NOTE(review): message names git.remote_branch_exists — copy-paste leftover
    [ "$1" ] || die "git.remote_branch_exists: Branch is not specified" 19
    if git.remote_branch_exists "origin/$1"; then
        # push an empty ref to the remote branch == delete it
        git push origin :"$1" # || die "Could not delete the remote $1 in $ORIGIN"
        return 0
    else
        warn "Trying to delete the remote branch $1, but it does not exists in origin"
        return 1
    fi
}
|
||||
|
||||
# Return 0 when the worktree is clean,
# 1 on unstaged changes, 2 on staged-but-uncommitted changes.
# Exits 18 when there is no HEAD to verify.
git.is_clean_worktree() {
    require git
    git rev-parse --verify HEAD >/dev/null || exit 18
    # refresh the index so stat-only differences don't count as dirty
    git update-index -q --ignore-submodules --refresh
    git diff-files --quiet --ignore-submodules || return 1
    git diff-index --quiet --ignore-submodules --cached HEAD -- || return 2
    return 0
}
|
||||
|
||||
# Succeed when branch $1 is fully merged into branch $2
# (i.e. the merge base of the two equals $1's tip).
git.is_branch_merged_into() {
    require git
    # FIX: the die messages previously named git.remote_branch_exists
    # (copy-paste), hiding which function actually failed
    [ "$1" ] || die "git.is_branch_merged_into: Branch1 is not specified" 19
    [ "$2" ] || die "git.is_branch_merged_into: Branch2 is not specified" 19
    git.update_refs
    # declare and assign separately so a failing git call is not masked
    # by the exit status of `local`
    local merge_hash base_hash
    merge_hash=$(git merge-base "$1"^{} "$2"^{})
    base_hash=$(git rev-parse "$1"^{})
    [ "$merge_hash" = "$base_hash" ]
}
|
||||
|
||||
# Succeed when remote-tracking ref $1 (e.g. "origin/main") exists.
git.remote_branch_exists() {
    require git
    [ "$1" ] || die "git.remote_branch_exists: Branch is not specified" 19
    git.update_refs
    # FIX: quote the ref — an unquoted $1 was subject to word splitting
    # and glob expansion
    [ -n "$(git for-each-ref --format='%(refname:short)' "refs/remotes/$1")" ]
}
|
||||
|
||||
# Create branch $1, optionally based on $2.
# When $2 exists only on the remote, base the new branch on origin/$2.
git.new_branch() {
    require git
    [ "$1" ] || die "git.new_branch: Branch is not specified" 19
    if [ "$2" ] && ! git.local_branch_exists "$2" && git.remote_branch_exists "origin/$2"; then
        git.co -b "$1" origin/"$2"
    else
        # NOTE(review): with an empty $2 this passes an empty string as the
        # start point — TODO confirm git.co tolerates that
        git.co -b "$1" "$2"
    fi
}
|
||||
|
||||
# Ensure the worktree is clean; otherwise show the status and interactively
# offer to: continue as-is (0), hard reset (1), stash (2, default), cancel (3).
git.require_clean_worktree() {
    require git
    if ! git.is_clean_worktree; then
        warn "Your working tree is dirty! Look at this:"
        git status -bs
        _T="What should you do now?\n"
        _T="${_T}\t${BOLD}${FWHITE}0.${RESET} try to continue as is\t- errors may occur!\n"
        _T="${_T}\t${BOLD}${FWHITE}1.${RESET} hard reset\t\t\t- clear current changes and new files\n"
        _T="${_T}\t${BOLD}${FWHITE}2.${RESET} stash changes (default)\t- save all changes in safe to apply them later via 'git stash pop'\n"
        _T="${_T}\t${BOLD}${FWHITE}3.${RESET} cancel\n"
        ask "${_T}${BOLD}${FWHITE}Your choice [0-3]" reset_answer
        case $reset_answer in
            1 ) warn "Clearing your work..." && git.reset ;;
            3 ) exit ;;
            # NOTE(review): $branch_task is a global set by some caller —
            # TODO confirm it is defined whenever this runs
            * ) git stash -a -u -m "WIP before switch to $branch_task" ;;
        esac
    fi
}
|
||||
28
scripts/helpers/help.sh
Normal file
28
scripts/helpers/help.sh
Normal file
@@ -0,0 +1,28 @@
|
||||
#!/usr/bin/env bash
|
||||
#TODO source basic.sh
|
||||
#TODO source args-parser/args.sh
|
||||
|
||||
########################################################
|
||||
# Help functions
|
||||
########################################################
|
||||
|
||||
# Detect a help flag (via the arg/argl parsers defined elsewhere) and,
# when present, print help for the CALLING command.
process_help_arg() {
    # FUNCNAME[1] is the function that invoked us, i.e. the command name
    command="${FUNCNAME[1]}"
    need_help=$(arg help 1)
    [[ "$need_help" -eq 0 ]] && need_help=$(argl help 1)
    # help() exits after printing, so the caller never continues
    [[ "$need_help" -eq 1 ]] && help "$command"
}
|
||||
|
||||
# Print help for command $1: when a help.$1 function exists and succeeds,
# run it and exit; otherwise fall back to the generic message.
help() {
    if is_function "help.$1"; then
        help."$1" && exit
    fi
    echo "Main help message"
}
|
||||
|
||||
# Help text for the `example` demo command.
help.example() {
    printf '%s\n' "Example help message"
}
|
||||
|
||||
# Demo command showing the process_help_arg pattern:
# `example --help` prints help.example's text instead of running.
example() {
    process_help_arg
    echo "Example command"
}
|
||||
393
scripts/helpers/io.sh
Normal file
393
scripts/helpers/io.sh
Normal file
@@ -0,0 +1,393 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
########################################################
|
||||
# Simple and fancy input & output
|
||||
########################################################
|
||||
|
||||
# Detect color support: tput must exist and the terminal must report >8 colors.
which tput > /dev/null 2>&1 && [[ $(tput -T$TERM colors) -gt 8 ]] && CAN_USE_COLORS=1 || CAN_USE_COLORS=0
# The caller may force colors on/off by exporting USE_COLORS beforehand.
USE_COLORS=${USE_COLORS:-$CAN_USE_COLORS}

# Icons (message prefixes) — empty strings when colors are disabled
if [[ "$USE_COLORS" == 1 ]]; then
    IINFO="( i )"; INOTE="( * )"; IWARN="( # )"; IERROR="( ! )"
    IFATAL="( @ )"; ISUCCESS="( ! )"; IASK="( ? )"; IDEBUG="(DBG)"; IVRB="( + )"
else
    IINFO=''; INOTE=''; IWARN=''; IERROR=''
    IFATAL=''; ISUCCESS=''; IASK=''; IDEBUG=''; IVRB=''
fi

# Palette variable names; list index == tput color number.
# 0-7 are normal colors, 8-15 the bright variants.
_io_fg=(FBLACK FRED FGREEN FYELLOW FBLUE FPURPLE FCYAN FWHITE
        FLBLACK FLRED FLGREEN FLYELLOW FLBLUE FLPURPLE FLCYAN FLWHITE)
_io_bg=(FBBLACK FBRED FBGREEN FBYELLOW FBBLUE FBPURPLE FBCYAN FBWHITE
        FBLBLACK FBLRED FBLGREEN FBLYELLOW FBLBLUE FBLPURPLE FBLCYAN FBLWHITE)

if [[ "$USE_COLORS" == 1 ]]; then
    # Text attributes
    FRESET="$(tput sgr0)"       # Normal
    FBOLD="$(tput bold)"        # Bold
    FDIM="$(tput dim)"          # Dimmed
    FLINE="$(tput smul)"        # Underlined
    FENDLINE="$(tput rmul)"     # End of underlined
    FBLINK="$(tput blink)"      # Blink
    FREV="$(tput rev)"          # Reversed
    # Foreground (setaf) and background (setab) colors, 0..15
    for _io_i in "${!_io_fg[@]}"; do
        printf -v "${_io_fg[$_io_i]}" '%s' "$(tput setaf "$_io_i")"
        printf -v "${_io_bg[$_io_i]}" '%s' "$(tput setab "$_io_i")"
    done
else
    # No colors: define every variable as an empty string
    for _io_v in FRESET FBOLD FDIM FLINE FENDLINE FBLINK FREV "${_io_fg[@]}" "${_io_bg[@]}"; do
        printf -v "$_io_v" '%s' ''
    done
fi
unset _io_fg _io_bg _io_i _io_v
|
||||
|
||||
# Timestamp prefix for log lines, e.g. "[12:34:56] " (with trailing space).
now() {
    printf '[%s] \n' "$(date +'%H:%M:%S')"
}
|
||||
|
||||
# Prompt the user and read the reply into the variable NAMED by $2.
# $1 - prompt text (styled with the ask icon)
# $2 - destination variable name (intentionally unquoted: read wants a name)
ask() {
    # the unquoted `\ ` keeps exactly one space between icon and prompt text
    IFS= read -rp "$(print ${FBOLD}${FBBLUE}${FWHITE}${IASK}${FRESET}\ ${FBOLD}$1 ): " $2
}
|
||||
|
||||
# Print all arguments followed by a style reset. Uses `echo -e`, so
# callers may embed escape sequences such as \t and \n in the message.
print() {
    local message="$*"
    echo -e "${message}${FRESET}"
}
|
||||
|
||||
# Print an OSC 8 terminal hyperlink.
# $1 - visible text, $2 - URL
link() {
    local text="$1" url="$2"
    echo -e "\e]8;;${url}\a${text}\e]8;;\a"
}
|
||||
|
||||
# Print a dimmed, timestamped debug line to stderr.
# $1 - message
# $2 - any non-empty value also prints the CALLER's name and line number
debug() {
    if [ "$2" ]; then
        # FUNCNAME[1]/BASH_LINENO identify whoever called debug()
        print "${FDIM}${FBOLD}${FRESET}${FDIM}$(now)${IDEBUG} ${FUNCNAME[1]:-?}():${BASH_LINENO:-?}\t$1 " >&2
    else
        print "${FDIM}${FBOLD}${FRESET}${FDIM}$(now)${IDEBUG} $1 " >&2
    fi
}
|
||||
|
||||
# Dump a variable as "name = value" via debug().
# $1 - the variable NAME (indirection via ${!name})
var_dump() {
    local var_name="$1"
    debug "${var_name} = ${!var_name}"
}
|
||||
|
||||
# Print the current bash call stack via debug(), innermost frame first.
print_stacktrace() {
    STACK=""
    local i
    local stack_size=${#FUNCNAME[@]}
    debug "Callstack:"
    # for (( i=$stack_size-1; i>=1; i-- )); do
    for (( i=1; i<$stack_size; i++ )); do
        local func="${FUNCNAME[$i]}"
        # the x-prefix comparison detects an empty frame name (top level)
        [ x$func = x ] && func=MAIN
        # BASH_LINENO[i-1] is the line from which frame i was called
        local linen="${BASH_LINENO[$(( i - 1 ))]}"
        local src="${BASH_SOURCE[$i]}"
        [ x"$src" = x ] && src=non_file_source
        debug " at $func $src:$linen"
    done
}
|
||||
|
||||
# Leveled output helpers. All build on print()/now(); each deliberately
# keeps a trailing space after the message, matching the rest of this file.

# Verbose-level message (yellow).
verbose() {
    print "${FBOLD}$(now)${IVRB}${FRESET}${FYELLOW} $1 "
}

# Informational message.
info() {
    print "${FBOLD}$(now)${FWHITE}${FBLBLUE}${IINFO}${FRESET}${FWHITE} $1 "
}

# Low-importance note (dimmed).
note() {
    print "${FBOLD}$(now)${FDIM}${FWHITE}${INOTE}${FRESET} $1 "
}

# Success message (green).
success() {
    print "${FBOLD}$(now)${FBGREEN}${FWHITE}${ISUCCESS}${FRESET}$FGREEN $1 "
}

# Warning (yellow), prefixed with "Warning:".
warn() {
    print "${FBOLD}$(now)${FBYELLOW}${FBLACK}${IWARN}${FRESET}${FYELLOW} Warning:${FRESET} $1 "
}

# Error (red), written to stderr.
error() {
    print "${FBOLD}$(now)${FBLRED}${FWHITE}${IERROR} Error: ${FRESET}${FLRED} $1 " >&2
}

# Fatal error with a call stack, written to stderr. Does NOT exit by itself.
fatal() {
    print "${FBOLD}$(now)${FBRED}${FWHITE}${IFATAL} FATAL: $1 " >&2
    print_stacktrace
}

# Print an error and terminate.
# $1 - message (default "halted"), $2 - exit code (default 255)
die() {
    error "${1:-halted}"
    exit ${2:-255}
}
|
||||
|
||||
# var='test var_dump'
|
||||
# var_dump var
|
||||
# debug 'test debug'
|
||||
# verbose 'test verbose'
|
||||
# info 'test info'
|
||||
# note 'test note'
|
||||
# success 'test success'
|
||||
# warn 'test warn'
|
||||
# error 'test error'
|
||||
# fatal 'test fatal'
|
||||
# die 'test die'
|
||||
|
||||
# experiments ==============================================================================
|
||||
|
||||
# spinner() {
|
||||
# local frames=('⠋' '⠙' '⠹' '⠸' '⠼' '⠴' '⠦' '⠧' '⠇' '⠏')
|
||||
# local spin_i=0
|
||||
# local interval=0.1
|
||||
# printf "\e[?25l"
|
||||
|
||||
# local color="${FGREEN}"
|
||||
|
||||
# while true; do
|
||||
# printf "\r ${color}%s${CL}" "${frames[spin_i]}"
|
||||
# spin_i=$(( (spin_i + 1) % ${#frames[@]} ))
|
||||
# sleep "$interval"
|
||||
# done
|
||||
# }
|
||||
|
||||
# echo "lorem ipsum dolor sit amet"
|
||||
# spinner &
|
||||
# SPINNER_PID=$!
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
# ===========
|
||||
|
||||
|
||||
# https://unix.stackexchange.com/a/269085
|
||||
# https://en.wikipedia.org/wiki/ANSI_escape_code#8-bit
|
||||
# https://linuxcommand.org/lc3_adv_tput.php
|
||||
# https://gist.github.com/nowmilano/4055d6df5b6e4ea87c5a72dc2d604193
|
||||
# https://gist.github.com/nick3499/402a6d7dccd474f2bdb679f4311b1238
|
||||
# https://gist.github.com/connorjan/2b02126868157c2b69f9aa0a052cdc86
|
||||
|
||||
# tput setaf 0
|
||||
# echo "BLACK FOREGROUND"
|
||||
# tput setaf 1
|
||||
# echo "RED FOREGROUND"
|
||||
# tput setaf 2
|
||||
# echo "GREEN FOREGROUND"
|
||||
# tput setaf 3
|
||||
# echo "YELLOW FOREGROUND"
|
||||
# tput setaf 4
|
||||
# echo "BLUE FOREGROUND"
|
||||
# tput setaf 5
|
||||
# echo "MAGENTA FOREGROUND"
|
||||
# tput setaf 6
|
||||
# echo "CYAN FOREGROUND"
|
||||
# tput setaf 7
|
||||
# echo "WHITE FOREGROUND"
|
||||
# tput reset
|
||||
|
||||
# ===========
|
||||
|
||||
|
||||
# ===========
|
||||
|
||||
# tohex(){
|
||||
# dec=$(($1%256)) ### input must be a number in range 0-255.
|
||||
# if [ "$dec" -lt "16" ]; then
|
||||
# bas=$(( dec%16 ))
|
||||
# mul=128
|
||||
# [ "$bas" -eq "7" ] && mul=192
|
||||
# [ "$bas" -eq "8" ] && bas=7
|
||||
# [ "$bas" -gt "8" ] && mul=255
|
||||
# a="$(( (bas&1) *mul ))"
|
||||
# b="$(( ((bas&2)>>1)*mul ))"
|
||||
# c="$(( ((bas&4)>>2)*mul ))"
|
||||
# printf 'dec= %3s basic= #%02x%02x%02x\n' "$dec" "$a" "$b" "$c"
|
||||
# elif [ "$dec" -gt 15 ] && [ "$dec" -lt 232 ]; then
|
||||
# b=$(( (dec-16)%6 )); b=$(( b==0?0: b*40 + 55 ))
|
||||
# g=$(( (dec-16)/6%6)); g=$(( g==0?0: g*40 + 55 ))
|
||||
# r=$(( (dec-16)/36 )); r=$(( r==0?0: r*40 + 55 ))
|
||||
# printf 'dec= %3s color= #%02x%02x%02x\n' "$dec" "$r" "$g" "$b"
|
||||
# else
|
||||
# gray=$(( (dec-232)*10+8 ))
|
||||
# printf 'dec= %3s gray= #%02x%02x%02x\n' "$dec" "$gray" "$gray" "$gray"
|
||||
# fi
|
||||
# }
|
||||
|
||||
# for i in $(seq 0 255); do
|
||||
# tohex ${i}
|
||||
# done
|
||||
|
||||
# ===========
|
||||
|
||||
# fromhex(){
|
||||
# hex=${1#"#"}
|
||||
# r=$(printf '0x%0.2s' "$hex")
|
||||
# g=$(printf '0x%0.2s' ${hex#??})
|
||||
# b=$(printf '0x%0.2s' ${hex#????})
|
||||
# printf '%03d' "$(( (r<75?0:(r-35)/40)*6*6 +
|
||||
# (g<75?0:(g-35)/40)*6 +
|
||||
# (b<75?0:(b-35)/40) + 16 ))"
|
||||
# }
|
||||
|
||||
# fromhex 00fc7b
|
||||
|
||||
# ===========
|
||||
|
||||
# mode2header(){
|
||||
# #### For 16 Million colors use \e[0;38;2;R;G;Bm each RGB is {0..255}
|
||||
# printf '\e[mR\n' # reset the colors.
|
||||
# printf '\n\e[m%59s\n' "Some samples of colors for r;g;b. Each one may be 000..255"
|
||||
# printf '\e[m%59s\n' "for the ansi option: \e[0;38;2;r;g;bm or \e[0;48;2;r;g;bm :"
|
||||
# }
|
||||
|
||||
# mode2colors(){
|
||||
# # foreground or background (only 3 or 4 are accepted)
|
||||
# local fb="$1"
|
||||
# [[ $fb != 3 ]] && fb=4
|
||||
# local samples=(0 63 127 191 255)
|
||||
# for r in "${samples[@]}"; do
|
||||
# for g in "${samples[@]}"; do
|
||||
# for b in "${samples[@]}"; do
|
||||
# printf '\e[0;%s8;2;%s;%s;%sm%03d;%03d;%03d ' "$fb" "$r" "$g" "$b" "$r" "$g" "$b"
|
||||
# done; printf '\e[m\n'
|
||||
# done; printf '\e[m'
|
||||
# done; printf '\e[mReset\n'
|
||||
# }
|
||||
# mode2header
|
||||
# mode2colors 3
|
||||
# mode2colors 4
|
||||
|
||||
# ===========
|
||||
|
||||
# printf '\e[48;5;%dm ' {0..255}; printf '\e[0m \n'
|
||||
# for r in {200..255..5}; do
|
||||
# fb=4
|
||||
# g=1
|
||||
# b=1
|
||||
# printf '\e[0;%s8;2;%s;%s;%sm ' "$fb" "$r" "$g" "$b"
|
||||
# done
|
||||
# echo
|
||||
|
||||
# ===========
|
||||
|
||||
# color(){
|
||||
# for c; do
|
||||
# printf '\e[48;5;%dm%03d' $c $c
|
||||
# done
|
||||
# printf '\e[0m \n'
|
||||
# }
|
||||
|
||||
# IFS=$' \t\n'
|
||||
# color {0..15}
|
||||
# for ((i=0;i<6;i++)); do
|
||||
# color $(seq $((i*36+16)) $((i*36+51)))
|
||||
# done
|
||||
# color {232..255}
|
||||
|
||||
# ===========
|
||||
|
||||
# for ((i=0; i<256; i++)) ;do
|
||||
# echo -n ' '
|
||||
# tput setab $i
|
||||
# tput setaf $(( ( (i>231&&i<244 ) || ( (i<17)&& (i%8<2)) ||
|
||||
# (i>16&&i<232)&& ((i-16)%6 <(i<100?3:2) ) && ((i-16)%36<15) )?7:16))
|
||||
# printf " C %03d " $i
|
||||
# tput op
|
||||
# (( ((i<16||i>231) && ((i+1)%8==0)) || ((i>16&&i<232)&& ((i-15)%6==0)) )) &&
|
||||
# printf "\n" ''
|
||||
# done
|
||||
|
||||
# ===========
|
||||
|
||||
# echo "tput character test"
|
||||
# echo "==================="
|
||||
# echo
|
||||
|
||||
# tput bold; echo "This text has the bold attribute."; tput sgr0
|
||||
|
||||
# tput smul; echo "This text is underlined (smul)."; tput rmul
|
||||
|
||||
# # Most terminal emulators do not support blinking text (though xterm
|
||||
# # does) because blinking text is considered to be in bad taste ;-)
|
||||
# tput blink; echo "This text is blinking (blink)."; tput sgr0
|
||||
|
||||
# tput rev; echo "This text has the reverse attribute"; tput sgr0
|
||||
|
||||
# # Standout mode is reverse on many terminals, bold on others.
|
||||
# tput smso; echo "This text is in standout mode (smso)."; tput rmso
|
||||
|
||||
# tput sgr0
|
||||
# echo
|
||||
|
||||
|
||||
# experiments ==============================================================================
|
||||
|
||||
# function delay_spinner(){
|
||||
# ##
|
||||
# ## Usage:
|
||||
# ##
|
||||
# ## $ long-running-command &
|
||||
# ## $ delay_spinner " Please wait msg..."
|
||||
# ##
|
||||
# ## Spinner exists when long-running-command completes
|
||||
# ##
|
||||
# local PROGRESSTXT
|
||||
# if [ ! "$1" ]; then
|
||||
# PROGRESSTXT=" Please wait..."
|
||||
# else
|
||||
# PROGRESSTXT="$1"
|
||||
# fi
|
||||
# # visual progress marker function
|
||||
# # http://stackoverflow.com/users/2869509/wizurd
|
||||
# # vars
|
||||
# local pid=$!
|
||||
# local delay=0.1
|
||||
# local spinstr='|/-\'
|
||||
# echo -e "\n\n"
|
||||
# while [ "$(ps a | awk '{print $1}' | grep $pid)" ]; do
|
||||
# local temp=${spinstr#?}
|
||||
# printf "\r$PROGRESSTXT[%c] " "$spinstr"
|
||||
# local spinstr=$temp${spinstr%"$temp"}
|
||||
# sleep $delay
|
||||
# printf "\b\b\b\b\b\b"
|
||||
# done
|
||||
# printf -- '\n\n'
|
||||
# #
|
||||
# # <-- end function ec2cli_spinner -->
|
||||
# #
|
||||
# }
|
||||
# sleep 10 && echo 'test' &
|
||||
# delay_spinner "Please wait msg..."
|
||||
13
scripts/helpers/log.sh
Normal file
13
scripts/helpers/log.sh
Normal file
@@ -0,0 +1,13 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
########################################################
|
||||
# Logging functions
|
||||
########################################################
|
||||
|
||||
# Append a timestamped message to today's logfile AND echo it to stdout.
# Logs go to ~/logs when that directory exists, otherwise to ./log
# (created on demand). One file per day: YYYYMMDD.log.
log_path="/home/$USER/logs"
log() {
    if [ ! -d "$log_path" ]; then
        log_path="./log"
    fi
    if [ ! -d "$log_path" ]; then
        mkdir -p "$log_path"
    fi
    echo -e "[$(date '+%H:%M:%S')] $*" | tee -a "$log_path/$(date '+%Y%m%d').log"
}
|
||||
29
scripts/helpers/misc.sh
Normal file
29
scripts/helpers/misc.sh
Normal file
@@ -0,0 +1,29 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
########################################################
|
||||
# Misc
|
||||
########################################################
|
||||
|
||||
# Return 0 when the currently focused X11 window covers the whole screen.
# Requires xwininfo, xdotool and xdpyinfo.
# https://askubuntu.com/a/30414
is_full_screen() {
    # active window geometry; NOTE(review): the grep yields separate
    # Width/Height lines — verify the final value really matches $SCREEN's
    # "WxH" format on your setup
    local WINDOW=$(xwininfo -id "$(xdotool getactivewindow)" -stats \
        | grep -E '(Width|Height):' \
        | awk '{print $NF}' \
        | sed -e 's/ /x/')
    # full screen dimensions, e.g. "1920x1080"
    local SCREEN=$(xdpyinfo | grep -m1 dimensions | awk '{print $2}')
    if [ "$WINDOW" = "$SCREEN" ]; then
        return 0
    fi
    return 1
}
|
||||
|
||||
# Download audio from YouTube (Music) as FLAC into ~/Downloads/ytm,
# grouped by playlist. All extra arguments pass through to youtube-dl.
ytm() {
    local outdir="$HOME/Downloads/ytm"
    youtube-dl \
        --extract-audio \
        --audio-format flac \
        --audio-quality 0 \
        --format bestaudio \
        --write-info-json \
        --output "$outdir/%(playlist_title)s/%(channel)s - %(title)s.%(ext)s" \
        "$@"
}
|
||||
51
scripts/helpers/net.sh
Normal file
51
scripts/helpers/net.sh
Normal file
@@ -0,0 +1,51 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
########################################################
|
||||
# Networking functions
|
||||
########################################################
|
||||
|
||||
# Print this machine's primary IPv4 address, detected per-distro via
# /etc/os-release ($ID). Prints "Unknown" for unsupported distros.
get_current_ip() {
    local CURRENT_IP
    # sets $ID (debian/ubuntu/alpine/...) in the current shell
    [ -f /etc/os-release ] && source /etc/os-release
    case "$ID" in
        debian|ubuntu) CURRENT_IP=$(hostname -I | awk '{print $1}') ;;
        # assumes the primary interface on alpine is eth0 — TODO confirm
        alpine) CURRENT_IP=$(ip -4 addr show eth0 | awk '/inet / {print $2}' | cut -d/ -f1 | head -n 1) ;;
        *) CURRENT_IP="Unknown" ;;
    esac
    echo "$CURRENT_IP"
}
|
||||
|
||||
# Print this machine's public IP as reported by api.myip.com.
get_external_ip() {
    local ip
    # jq -r (raw output) emits the bare string, so the old
    # `jq .ip | tr -d '"'` quote-stripping pipeline is unnecessary
    ip="$(curl -s https://api.myip.com | jq -r .ip)"
    echo "$ip"
}
|
||||
|
||||
# Validate a dotted-quad IPv4 address: four octets, each 0-255.
is_valid_ipv4() {
    local ip="$1"
    local regex="^([0-9]{1,3}\.){3}[0-9]{1,3}$"

    if [[ $ip =~ $regex ]]; then
        IFS='.' read -r -a parts <<< "$ip"
        local part
        for part in "${parts[@]}"; do
            # 10# forces base-10 arithmetic: without it, octets with leading
            # zeros ("08") raised a bash "value too great for base" error.
            # The old digit-regex and `part < 0` checks were redundant —
            # the address regex already guarantees 1-3 digits per octet.
            if (( 10#$part > 255 )); then
                return 1
            fi
        done
        return 0
    fi
    return 1
}
|
||||
|
||||
# Print curl's timing breakdown (DNS, TCP, TLS, transfer) for a URL.
# Extra curl options may be appended; the response body goes to /dev/null.
curltime() {
    # -w @- reads the write-out template from the heredoc on stdin
    curl -w @- -o /dev/null -s "$@" <<'EOF'
time_namelookup: %{time_namelookup} sec\n
time_connect: %{time_connect} sec\n
time_appconnect: %{time_appconnect} sec\n
time_pretransfer: %{time_pretransfer} sec\n
time_redirect: %{time_redirect} sec\n
time_starttransfer: %{time_starttransfer} sec\n
---------------\n
time_total: %{time_total} sec\n
EOF
}
|
||||
|
||||
63
scripts/helpers/notif.sh
Normal file
63
scripts/helpers/notif.sh
Normal file
@@ -0,0 +1,63 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
########################################################
|
||||
# Notifications
|
||||
########################################################
|
||||
|
||||
TITLE="$0"
|
||||
NTFY_CHANNEL="example"
|
||||
|
||||
# Send a plain low-priority notification to the ntfy channel.
# $1 - message text; uses the script-level $TITLE and $NTFY_CHANNEL.
ntfy_info() {
    require ntfy
    ntfy send \
        --title "$TITLE" \
        --message "$1" \
        --priority 1 \
        "$NTFY_CHANNEL"
}
|
||||
|
||||
# Send a max-priority warning notification to the ntfy channel.
# $1 - message text; uses the script-level $TITLE and $NTFY_CHANNEL.
ntfy_warn() {
    require ntfy
    ntfy send \
        --title "$TITLE" \
        --tags "warning" \
        --message "$1" \
        --priority 5 \
        "$NTFY_CHANNEL"
}
|
||||
|
||||
|
||||
# Show a desktop notification via notify-send.
# $1 - title, $2 - text, $3 - urgency level (critical|low|*), $4 - icon
notify () {
    if ! installed "notify-send"; then
        # FIX: this previously called `warning`, which is defined nowhere —
        # the io helper is named warn()
        warn "Notifications toggled on, but 'notify-send' is not installed!"
        return 1
    fi
    [ -n "$1" ] && local title="$1"
    local text="$2"
    local level="$3"
    local icon="$4"
    # critical notifications stay until dismissed; low ones 5s; rest 10s
    case "$level" in
        critical) local timeout=0 ;;
        low) local timeout=5000 ;;
        *) local timeout=10000 ;;
    esac
    debug "$title / $text / $level / $icon / $timeout"
    notify-send "$title" "$text" -a "$0" -u "$level" -i "$icon" -t $timeout
}
|
||||
|
||||
# Critical error notification (persists until dismissed).
notify_error() {
    notify "Error" "$1" "critical" "dialog-error"
}

# Normal-urgency warning notification.
notify_warning() {
    notify "Warning" "$1" "normal" "dialog-warning"
}

# Low-urgency informational notification (no title).
notify_info() {
    notify "" "$1" "low" "dialog-information"
}
|
||||
32
scripts/helpers/notifications.sh
Normal file
32
scripts/helpers/notifications.sh
Normal file
@@ -0,0 +1,32 @@
|
||||
#!/usr/bin/env bash
|
||||
source $( dirname $(readlink -e -- "${BASH_SOURCE}"))/packages.sh || exit 255
|
||||
|
||||
########################################################
|
||||
# Desktop notifications
|
||||
########################################################
|
||||
|
||||
# Show a desktop notification via notify-send (installed on demand by require).
# $1 - title (defaults to "My notification"), $2 - text, $3 - urgency, $4 - icon
notify () {
    require "notify-send"
    [ -n "$1" ] && local title="$1" || local title="My notification"
    local text="$2"
    local level="$3"
    local icon="$4"
    # critical: persist until dismissed; low: 5s; otherwise 10s
    case $level in
        "critical") local timeout=0 ;;
        "low") local timeout=5000 ;;
        *) local timeout=10000 ;;
    esac
    notify-send "$title" "$text" -a "MyScript" -u "$level" -i "$icon" -t $timeout
}
|
||||
|
||||
# Critical error notification (persists until dismissed).
notify_error() {
    notify "Error" "$1" "critical" "dialog-error"
}

# Normal-urgency warning notification.
notify_warning() {
    notify "Warning" "$1" "normal" "dialog-warning"
}

# Low-urgency informational notification (no title).
notify_info() {
    notify "" "$1" "low" "dialog-information"
}
|
||||
72
scripts/helpers/packages.sh
Normal file
72
scripts/helpers/packages.sh
Normal file
@@ -0,0 +1,72 @@
|
||||
#!/usr/bin/env bash
|
||||
source $( dirname $(readlink -e -- "${BASH_SOURCE}"))/io.sh || exit 255
|
||||
|
||||
########################################################
|
||||
# Functions to control system packages
|
||||
########################################################
|
||||
|
||||
# Return 0 when $1 is runnable: a command in PATH, a builtin or a function.
installed() {
    command -v -- "$1" >/dev/null 2>&1
}
|
||||
|
||||
# Return 0 when a dpkg package named $1 is installed.
installed_pkg() {
    # FIX: `dpkg --list | grep -qw "ii $1"` used ONE space while dpkg pads
    # the status column with TWO ("ii  name"), so it never matched; it could
    # also match substrings of longer package names. Query the package
    # status directly instead.
    dpkg-query -W -f='${Status}' "$1" 2>/dev/null | grep -q "ok installed"
}
|
||||
|
||||
# Thin sudo wrappers around apt/dpkg. All arguments are forwarded with
# "$@" — the previous unquoted $* re-split every argument on whitespace.

# Add an apt PPA repository.
apt_ppa_add() {
    sudo add-apt-repository -y "$@"
}

# Remove an apt PPA repository.
apt_ppa_remove() {
    sudo add-apt-repository -ry "$@"
}

# Refresh the package lists.
apt_update() {
    sudo apt update "$@"
}

# Install packages without a confirmation prompt.
apt_install() {
    sudo apt install -y "$@"
}

# Purge packages (remove incl. config) without a confirmation prompt.
apt_remove() {
    sudo apt purge -y "$@"
}

# Install local .deb files.
dpkg_install() {
    sudo dpkg -i "$@"
}

# Remove packages via dpkg.
dpkg_remove() {
    sudo dpkg -r "$@"
}

# Print the machine's dpkg architecture (e.g. amd64).
dpkg_arch() {
    dpkg --print-architecture
}
|
||||
|
||||
# Ensure every given command/package is available, installing any missing
# ones via apt after announcing them. Dies (201) when installation fails.
require() {
    sw=()
    local package
    for package in "$@"; do
        if ! installed "$package" && ! installed_pkg "$package"; then
            sw+=("$package")
        fi
    done
    if [ ${#sw[@]} -gt 0 ]; then
        info "These packages will be installed in your system:\n${sw[*]}"
        # FIX: quote the array expansion ("${sw[@]}") so package names are
        # passed intact, and test the command directly instead of the
        # fragile unquoted-${sw[*]} + `[ $? -gt 0 ]` two-step
        apt_install "${sw[@]}" || die "installation cancelled" 201
    fi
}
|
||||
|
||||
# Like require(), but never installs anything: dies (200) listing every
# command/package that is missing.
require_pkg() {
    sw=()
    local package
    for package in "$@"; do
        installed "$package" && continue
        installed_pkg "$package" && continue
        sw+=("$package")
    done
    if [ ${#sw[@]} -gt 0 ]; then
        die "These packages must be installed in your system:\n${sw[*]}" 200
    fi
}
|
||||
89
scripts/helpers/testing.sh
Normal file
89
scripts/helpers/testing.sh
Normal file
@@ -0,0 +1,89 @@
|
||||
#!/usr/bin/env bash
|
||||
source $( dirname $(readlink -e -- "${BASH_SOURCE}"))/io.sh || exit 255
|
||||
|
||||
########################################################
|
||||
# Testing functions
|
||||
########################################################
|
||||
|
||||
# Run command $1 silently and report PASSED/FAILED by its exit status.
# $1 - command to exec
# NOTE(review): `dt` is not defined in io.sh (which provides now()) — TODO confirm
assert_exec() {
    [ "$1" ] || exit 1
    local prefix="$(dt)${BOLD}${FWHITE}[TEST EXEC]"
    # `if $(cmd)` branches on the exit status of the command substitution
    if $($1 1>/dev/null 2>&1); then
        local text="${BGREEN} PASSED"
    else
        local text="${BLRED} FAILED"
    fi
    print "${prefix} ${text} ${BRESET} ($?):${RESET} $1"
}
|
||||
# usage:
|
||||
|
||||
# func1() {
|
||||
# return 0
|
||||
# }
|
||||
# func2() {
|
||||
# return 1
|
||||
# }
|
||||
# assert_exec "func1" # PASSED
|
||||
# assert_exec "func2" # FAILED
|
||||
# assert_exec "whoami" # PASSED
|
||||
|
||||
|
||||
# Run command $1 and report PASSED when its combined stdout+stderr
# CONTAINS $2 (substring match).
# $1 - command to exec
# $2 - expected output
assert_output() {
    [ "$1" ] || exit 1
    [ "$2" ] && local expected="$2" || local expected=''
    local prefix="$(dt)${BOLD}${FWHITE}[TEST OUTP]"
    local output=$($1 2>&1)
    local code=$?
    if [[ "$output" == *"$expected"* ]]; then
        local text="${BGREEN} PASSED"
    else
        local text="${BLRED} FAILED"
    fi
    print "${prefix} ${text} ${BRESET} (${code}|${expected}):${RESET} $1"
    # print "\tOutput > $output"
}
|
||||
# usage:
|
||||
|
||||
# func1() {
|
||||
# echo "some string"
|
||||
# }
|
||||
# func2() {
|
||||
# echo "another string"
|
||||
# }
|
||||
# expect_output "func1" "string" # PASSED
|
||||
# expect_output "func2" "some" # FAILED
|
||||
# expect_output "func2" "string" # PASSED
|
||||
|
||||
|
||||
# Run command $1 silently and report PASSED when its exit code equals $2.
# $1 - command to exec
# $2 - expected exit-code (default 0)
assert_code() {
    [ "$1" ] || exit 1
    [ "$2" ] && local expected=$2 || local expected=0
    local prefix="$(dt)${BOLD}${FWHITE}[TEST CODE]"
    # run inside a command substitution so only the status is observed
    $($1 1>/dev/null 2>&1)
    local code=$?
    if [[ $code -eq $expected ]]; then
        local text="${BGREEN} PASSED"
    else
        local text="${BLRED} FAILED"
    fi
    print "${prefix} ${text} ${BRESET} (${code}|${expected}):${RESET} $1"
}
|
||||
# usage:
|
||||
|
||||
# func1() {
|
||||
# # exit 0
|
||||
# return 0
|
||||
# }
|
||||
# func2() {
|
||||
# # exit 1
|
||||
# return 1
|
||||
# }
|
||||
# expect_code "func1" 0 # PASSED
|
||||
# expect_code "func1" 1 # FAILED
|
||||
# expect_code "func2" 0 # FAILED
|
||||
# expect_code "func2" 1 # PASSED
|
||||
21
scripts/helpers/traps.sh
Normal file
21
scripts/helpers/traps.sh
Normal file
@@ -0,0 +1,21 @@
|
||||
#!/usr/bin/env bash
#TODO source basic.sh
#TODO source args-parser/args.sh

########################################################
# Trap usage examples
########################################################

# for sig in SIGHUP SIGINT SIGQUIT SIGABRT SIGTERM SIGTSTP; do
#     # shellcheck disable=SC2064
#     trap "set +x && echo && echo && echo '*** Прервано сигналом $sig, остановка ***' && exit" $sig
# done

# Define the handler before registering it, so a signal arriving right
# after the loop always finds the function in place.
myfunc() {
    echo "trapped!"
    exit
}

# BUGFIX: SIGKILL removed from the list — it can never be caught or
# ignored (same for SIGSTOP), so a trap on it would never fire.
for sig in SIGHUP SIGINT SIGQUIT SIGABRT SIGTERM SIGTSTP; do
    trap "myfunc" "$sig"
done
|
||||
13
scripts/ignore-veth.sh
Executable file
13
scripts/ignore-veth.sh
Executable file
@@ -0,0 +1,13 @@
|
||||
#!/usr/bin/env bash
# Tell NetworkManager to leave container/virtual veth* interfaces unmanaged.
# https://forums.opensuse.org/t/networkmanager-shows-connection-to-lo/164441/13
# https://man.archlinux.org/man/NetworkManager.conf.5.en#Device_List_Format
# https://access.redhat.com/documentation/ru-ru/red_hat_enterprise_linux/8/html/configuring_and_managing_networking/configuring-networkmanager-to-ignore-certain-devices_configuring-and-managing-networking

# NOTE: the redundant 'sudo touch' was removed — tee creates the target
# file itself, so the extra privileged call added nothing.
sudo tee <<EOF /etc/NetworkManager/conf.d/10-ignore-veth.conf > /dev/null
# Disable virtual interfaces to be managed via NetworkManager
[keyfile]
unmanaged-devices=interface-name:veth*

EOF
# 'reload' is enough: NetworkManager re-reads conf.d without dropping links.
sudo systemctl reload NetworkManager
|
||||
57
scripts/inotifywait-cp.sh
Executable file
57
scripts/inotifywait-cp.sh
Executable file
@@ -0,0 +1,57 @@
|
||||
#!/usr/bin/env bash
# My use case:
# syncthing synchronizes ALL changes in DCIM directory on my android to PC.
# I wanted files to be copied somewhere else on my PC to stay forever, so I
# could sort them later and safely free some space on mobile without loss.
# Also I wish to have some stupid log with history of such events.

# inotify-tools package must be installed!

# CHANGE THESE PARAMETERS to the ones you need
dir_src="$HOME/Syncthing/Mobile/Camera"
dir_dest="$HOME/some/safe/place"
dir_logs="$HOME/inotifywait-cp-logs"
regexp="[0-9]{8}_[0-9]{6}.*\.(jpg|mp4|gif)"
mkdir -p "$dir_dest" "$dir_logs"

# Prints a timestamped message to stdout and appends it to today's log file.
print() {
    echo -e "[$(date '+%H:%M:%S')] $*" \
        | tee -a "$dir_logs/$(date '+%Y%m%d').log"
}

# Copies file named $1 from $dir_src to $dir_dest unless it already exists
# there (never overwrites an earlier backup).
copy () {
    if [ -f "$dir_dest/$1" ]; then
        print "SKIPPED:\t$dir_dest/$1"
    else
        cp "$dir_src/$1" "$dir_dest/$1"
        print "COPIED:\t$dir_src/$1 => $dir_dest/$1"
    fi
}

print "START\t========================="

# First, try to backup files synced since last exec of this script.
# BUGFIX: iterate a glob and match with [[ =~ ]] (same ERE syntax as the
# original 'grep -E') instead of parsing 'ls' output, which breaks on
# filenames containing whitespace or glob characters.
for path in "$dir_src"/*; do
    filename="${path##*/}"
    [[ "$filename" =~ ^${regexp}$ ]] && copy "$filename"
done

# Next, run inotifywait against source directory with args:
# --quiet    -- print less (only print events)
# --monitor  -- don't stop after first event (like infinite loop)
# --event    -- first syncthing creates hidden file to write data into,
#               then renames it according to source file name, so here
#               we listen to MOVED_TO event to catch final filename
# --format %f -- print only filename
# --include  -- filename regexp to catch events from; ensure your $regexp
#               is correct or remove that line to catch ALL synced files
inotifywait \
    --quiet \
    --monitor \
    --event moved_to \
    --format %f \
    --include "$regexp" \
    "$dir_src" \
    | while IFS= read -r filename; do copy "$filename"; done

print "FINISH\t========================="
|
||||
5
scripts/inotifywait-cp/README.md
Normal file
5
scripts/inotifywait-cp/README.md
Normal file
@@ -0,0 +1,5 @@
|
||||
# Backing up photos from Syncthing
|
||||
|
||||
More info:
|
||||
* 🇷🇺 [axenov.dev/резервное-копирование-фотографий-со](https://axenov.dev/резервное-копирование-фотографий-со/)
|
||||
* 🇺🇸 (planned to translate)
|
||||
19
scripts/inotifywait-cp/inotifywait-cp.service
Normal file
19
scripts/inotifywait-cp/inotifywait-cp.service
Normal file
@@ -0,0 +1,19 @@
|
||||
# Daemon file
# Place or symlink it to /etc/systemd/system/inotifywait-cp.service
# Enable and start: sudo systemctl enable --now inotifywait-cp
# Check it: sudo systemctl status inotifywait-cp

[Unit]
Description=Photosync from android
# Start only after the network stack is up — syncthing fills the watched
# directory over the network.
After=network.target

[Service]
Type=simple
Restart=always
# correct these parameters as needed:
User=user
WorkingDirectory=/home/user
ExecStart=bash /home/user/.local/bin/photosync-a53.sh

[Install]
# BUGFIX: was 'WantedBy=network.target', which is not an enablement
# target — with it 'systemctl enable' does not start the unit on boot.
WantedBy=multi-user.target
|
||||
59
scripts/inotifywait-cp/inotifywait-cp.sh
Normal file
59
scripts/inotifywait-cp/inotifywait-cp.sh
Normal file
@@ -0,0 +1,59 @@
|
||||
#!/usr/bin/env bash
# My use case:
# syncthing synchronizes ALL changes in DCIM directory on my android to PC.
# I wanted files to be copied somewhere else on my PC to stay forever, so I
# could sort them later and safely free some space on mobile without loss.
# Also I wish to have some stupid log with history of such events.

# inotify-tools package must be installed!

# CHANGE THESE PARAMETERS to the ones you need
dir_src="$HOME/Syncthing/Mobile/Camera"
dir_dest="$HOME/some/safe/place"
dir_logs="$HOME/inotifywait-cp-logs"
regexp="[0-9]{8}_[0-9]{6}.*\.(jpg|mp4|gif)"

# Prints a timestamped message to stdout and appends it to today's log file.
print() {
    echo -e "[$(date '+%H:%M:%S')] $*" \
        | tee -a "$dir_logs/$(date '+%Y%m%d').log"
}

# Copies file named $1 from $dir_src to $dir_dest unless it already exists
# there (never overwrites an earlier backup).
# NOTE: the per-call 'mkdir -p' was removed — directories are created once
# below, before any copy can happen, so re-running it on every event only
# cost a fork.
copy () {
    if [ -f "$dir_dest/$1" ]; then
        print "SKIPPED:\t$dir_dest/$1"
    else
        cp "$dir_src/$1" "$dir_dest/$1"
        print "COPIED:\t$dir_src/$1 => $dir_dest/$1"
    fi
}

mkdir -p "$dir_src" "$dir_dest" "$dir_logs"

print "START\t========================="

# First, try to backup files synced since last exec of this script.
# BUGFIX: iterate a glob and match with [[ =~ ]] (same ERE syntax as the
# original 'grep -E') instead of parsing 'ls' output, which breaks on
# filenames containing whitespace or glob characters.
for path in "$dir_src"/*; do
    filename="${path##*/}"
    [[ "$filename" =~ ^${regexp}$ ]] && copy "$filename"
done

# Next, run inotifywait against source directory with args:
# --quiet    -- print less (only print events)
# --monitor  -- don't stop after first event (like infinite loop)
# --event    -- first syncthing creates hidden file to write data into,
#               then renames it according to source file name, so here
#               we listen to MOVED_TO event to catch final filename
# --format %f -- print only filename
# --include  -- filename regexp to catch events from; ensure your $regexp
#               is correct or remove that line to catch ALL synced files
inotifywait \
    --quiet \
    --monitor \
    --event moved_to \
    --format %f \
    --include "$regexp" \
    "$dir_src" \
    | while IFS= read -r filename; do copy "$filename"; done

print "FINISH\t========================="
|
||||
31
scripts/make-swapfile.sh
Executable file
31
scripts/make-swapfile.sh
Executable file
@@ -0,0 +1,31 @@
|
||||
#!/bin/bash
# Creates a 1G /swapfile, activates it and persists the configuration.
set -exo pipefail

# https://www.linode.com/docs/guides/how-to-increase-swap-space-in-ubuntu/
# https://help.ubuntu.com/community/SwapFaq

# Show the state before any change.
sudo swapon --show
free -h
df -h

if [ -f /swapfile ]; then
    set +x
    echo "Already exists"
    exit 1
fi

sudo fallocate -l 1G /swapfile
sudo chmod 600 /swapfile
sudo mkswap /swapfile
sudo swapon /swapfile

sudo swapon --show
free -h
df -h

# Persist the swapfile across reboots.
sudo cp /etc/fstab /etc/fstab.bak
echo "/swapfile swap swap defaults 0 0" | sudo tee -a /etc/fstab
# Apply swappiness now and persist it.
sudo sysctl vm.swappiness=50
# BUGFIX: the original appended 'vm.swappiness=50' to /etc/fstab, where it
# is meaningless; sysctl settings persist via /etc/sysctl.conf.
echo "vm.swappiness=50" | sudo tee -a /etc/sysctl.conf

sudo swapon --show
|
||||
157
scripts/netbeans-php-wrapper/php
Normal file
157
scripts/netbeans-php-wrapper/php
Normal file
@@ -0,0 +1,157 @@
|
||||
#!/usr/bin/env bash

# Welcome to amusement park!

if [[ "$1" = '--help' ]] || [[ "$1" = '-h' ]]; then
    cat <<EOF
NetBeans docker wrapper for php
===============================
Anthony Axenov (c) 2023, The MIT License
https://axenov.dev
https://opensource.org/license/mit
Replacement host php interpreter with dockerized one to run & debug cli php scripts.
Usage:
./$(basename "$0") --container=<NAME> [--map=<PATH1>:<PATH2>] [PHP_ARGS] <SCRIPT> [SCRIPT_ARGS]
Arguments:
--container : docker container where your SCRIPT is located. Required.
--map : sources path mapped from the host to container. Not required.
PATH1 is an absolute path to php sources directory on the host.
PATH2 is an absolute path of the same directory inside of container.
Delimiter ':' is required. If PATH1, PATH2 or delimiter is missed
or value is empty then error will be thrown.
PHP_ARGS : arguments you can pass to real php interpreter according to its --help.
Not required.
SCRIPT : a path to script file (.php) to be executed in container. Required.
Note that this file must exist inside or be available from that container.
SCRIPT_ARGS : arguments to call your script with. They will be passed to script as is.
Not required.
Read this article to know how to set this helper as interpreter for NetBeans:
ru: https://axenov.dev/netbeans-php-docker-xdebug-cli
en: https://axenov.dev/en/netbeans-php-docker-xdebug-cli-en
EOF
    exit
fi

pwd=$(pwd)           # current working directory
# BUGFIX: quoted "$@" — the original 'cmdline=($@)' word-split and
# re-globbed every argument that contained spaces or wildcards.
cmdline=("$@")       # copy of the command line as received from netbeans
collect_php_args=1   # should we collect php args or script ones?
quiet=0              # should we print some useful data before executing?

# find a path where this wrapper is located
wrapper_dir="$(dirname "$0")"

# find a path where project is probably located
project_dir="$(dirname "$wrapper_dir")"

# here we check if this wrapper is global or local
# but if it is set as global from nbproject dir of
# current project then it is not detected as global
# anyway behavior will be correct
nbproject="$(basename "$wrapper_dir")"
[ "$nbproject" = 'nbproject' ] && is_global=0 || is_global=1

# prepare new array to collect php args
declare -a php_cmd=("docker" "exec")

# and another one for script args
declare -a script_args=()

# and one more for directory mapping
declare -a map_arr=()

# iterate over arguments we received from netbeans
for arg in "${cmdline[@]}"; do

    # if this is a container name
    if [ "${arg::11}" = '--container' ]; then
        container="${arg:12}"         # save it
        php_cmd+=("$container" 'php') # add php itself
        continue                      # jump to next iteration
    fi

    # if this is a path map
    if [ "${arg::5}" = '--map' ]; then
        map="${arg:6}"        # save it
        map_arr=(${map//:/ }) # split it and check if it is correct
        if [ -z "${map_arr[0]}" ] || [ -z "${map_arr[1]}" ]; then
            echo "ERROR: directory map is incorrect!"
            echo "Use $0 --help to get info about how to use this wrapper."
            echo "Exit code 3."
            exit 3
        fi
        continue # jump to next iteration
    fi

    # if this is the quiet flag
    if [ "${arg::7}" = '--quiet' ]; then
        quiet=1
        continue # jump to next iteration
    fi

    # if this is an absolute path to a script file
    if [ -f "$arg" ]; then
        # make its path correct for container
        if [ "$map" ]; then # when paths are mapped
            # remove first part of map from an absolute filepath and append
            # result to second map part (pattern quoted so map characters
            # like '*' are taken literally)
            filepath="${map_arr[1]}${arg##"${map_arr[0]}"}"
        else # when paths are NOT mapped
            # remove project path from absolute filepath
            filepath="${arg##"$project_dir/"}"
        fi
        php_cmd+=("$filepath") # append php args with filepath
        collect_php_args=0     # now we need to collect script args
        continue               # jump to next iteration
    fi

    if [ "$collect_php_args" = 1 ]; then # if we collect php args
        php_cmd+=("$arg")                # add current arg to php args as is
        continue                         # jump to next iteration
    fi

    script_args+=("$arg") # otherwise add current arg to script args as is
done

# docker container name is required so we must halt here if there is no one
if [ -z "$container" ]; then
    echo "ERROR: no docker container is specified!" >&2
    echo "Use $0 --help to get info about how to use this wrapper." >&2
    echo "Exit code 1." >&2
    exit 1
fi

# path to php script is also required so we must halt here too if there is no one
if [ -z "$filepath" ]; then
    echo "ERROR: no script filepath is specified!" >&2
    echo "Use $0 --help to get info about how to use this wrapper." >&2
    echo "Exit code 2." >&2
    exit 2
fi

# human-readable command line, for display only
cmdline="${php_cmd[*]} ${script_args[*]}"

# print some important data collected above
if [ "$quiet" = 0 ]; then
    echo "NetBeans docker wrapper for php"
    echo "==============================="
    echo -e "Container name: $container"
    echo -e "Script path: $filepath"
    echo -e "Directory mapping: ${map:-(none)}"
    echo -e "Command line:\n$cmdline\n"
fi

# some debug output
# echo "=== some debug output ========="
# cat <<EOF | column -t
# is_global $is_global
# container $container
# pwd $pwd
# wrapper_dir $wrapper_dir
# nbproject $nbproject
# project_dir $project_dir
# map $map
# map_arr[0] ${map_arr[0]}
# map_arr[1] ${map_arr[1]}
# filepath $filepath
# EOF
# echo "==============================="

# BUGFIX: execute the arrays directly. The original ran the flattened
# string '$cmdline', which word-split and re-globbed every argument —
# any php/script arg containing a space or wildcard broke.
"${php_cmd[@]}" "${script_args[@]}"

# that's folks!
|
||||
107
scripts/quick-backup.sh
Executable file
107
scripts/quick-backup.sh
Executable file
@@ -0,0 +1,107 @@
|
||||
#!/usr/bin/env bash
# https://gist.github.com/anthonyaxenov/b8336a2bc9e6a742b5a050fa2588d71e
#####################################################################
#                                                                   #
#          Stupidly simple backup script for own projects           #
#                                                                   #
#  Author: Anthony Axenov (Антон Аксенов)                           #
#  Version: 1.0                                                     #
#  License: WTFPLv2  More info: https://axenov.dev/?p=1423          #
#                                                                   #
#####################################################################

# database credentials ==============================================

DBUSER=
DBPASS=
DBNAME=
DBCHARSET="utf8"

# date formats ======================================================

FMT_DT_DIR="%Y.%m.%d"   # 2021.03.19
FMT_DT_FILE="%H.%M"     # 08.24
FMT_DT_LOG="%H:%M:%S"   # 08:24:15

# local storage =====================================================

LOCAL_BAK_DIR="/backup/$(date +$FMT_DT_DIR)"

# database backup file
LOCAL_SQL_FILE="$(date +$FMT_DT_FILE)-db.sql.gz"
LOCAL_SQL_PATH="$LOCAL_BAK_DIR/$LOCAL_SQL_FILE"

# project path and backup file
LOCAL_SRC_DIR="/var/www/"
LOCAL_SRC_FILE="$(date +$FMT_DT_FILE)-src.tar.gz"
LOCAL_SRC_PATH="$LOCAL_BAK_DIR/$LOCAL_SRC_FILE"

# log file
LOG_FILE="$(date +$FMT_DT_FILE).log"
LOG_PATH="$LOCAL_BAK_DIR/$LOG_FILE"

# Prints a timestamped message to stdout and appends it to the log file.
log() {
    echo -e "[$(date +$FMT_DT_LOG)] $*" | tee -a "$LOG_PATH"
}

# remote storage ====================================================

REMOTE_HOST="user@example.com"
REMOTE_BAK_DIR="/backup/$(date +$FMT_DT_DIR)"
REMOTE_SQL_PATH="$REMOTE_BAK_DIR/$LOCAL_SQL_FILE"
REMOTE_SRC_PATH="$REMOTE_BAK_DIR/$LOCAL_SRC_FILE"
REMOTE_LOG_PATH="$REMOTE_BAK_DIR/$LOG_FILE"

# start =============================================================

echo
log "Start ----------------------------------------------------------------"
log "Initialized parameters:"
# BUGFIX: the original logged $DB_USER/$DB_NAME/$DB_CHARSET, which were
# never defined (the variables are DBUSER/DBNAME/DBCHARSET) — those
# lines always printed empty values.
log "\tDBUSER\t\t= $DBUSER"
log "\tDBNAME\t\t= $DBNAME"
log "\tDBCHARSET\t= $DBCHARSET"
log "\tLOCAL_SRC_DIR\t= $LOCAL_SRC_DIR"
log "\tLOCAL_SRC_PATH\t= $LOCAL_SRC_PATH"
log "\tLOCAL_SQL_PATH\t= $LOCAL_SQL_PATH"
log "\tLOG_PATH\t= $LOG_PATH"
log "\tREMOTE_HOST\t= $REMOTE_HOST"
log "\tREMOTE_SQL_PATH\t= $REMOTE_SQL_PATH"
log "\tREMOTE_SRC_PATH\t= $REMOTE_SRC_PATH"
log "\tREMOTE_LOG_PATH\t= $REMOTE_LOG_PATH"

mkdir -p "$LOCAL_BAK_DIR"
log "Created local dir: $LOCAL_BAK_DIR"

ssh "$REMOTE_HOST" mkdir -p "$REMOTE_BAK_DIR"
log "Created remote dir: $REMOTE_BAK_DIR"

log "1/4 Dumping DB: $DBNAME..."
mysqldump \
    --user="$DBUSER" \
    --password="$DBPASS" \
    --default-character-set="$DBCHARSET" \
    --opt \
    --quick \
    "$DBNAME" | gzip > "$LOCAL_SQL_PATH"
# --opt  Same as --add-drop-table, --add-locks, --create-options,
#        --quick, --extended-insert, --lock-tables, --set-charset,
#        and --disable-keys
# BUGFIX: capture mysqldump's status via PIPESTATUS — plain '$?' only
# reflected gzip, and the '$?' inside the message was already clobbered
# by the '[' test itself.
rc=${PIPESTATUS[0]}
[ "$rc" -gt 0 ] && log "ERROR: failed to create dump. Exit-code: $rc" || log "\t- OK"

log "2/4 Sending database backup to $REMOTE_HOST..."
rsync --progress "$LOCAL_SQL_PATH" "$REMOTE_HOST:$REMOTE_SQL_PATH"
rc=$?
[ "$rc" -gt 0 ] && log "ERROR: failed to send database backup. Exit-code: $rc" || log "\t- OK"

log "3/4 Compressing project dir: $LOCAL_SRC_DIR..."
tar -zcf "$LOCAL_SRC_PATH" "$LOCAL_SRC_DIR"
rc=$?
[ "$rc" -gt 0 ] && log "ERROR: failed to compress project. Exit-code: $rc" || log "\t- OK"

log "4/4 Sending project backup to ${REMOTE_HOST}..."
rsync --progress "$LOCAL_SRC_PATH" "$REMOTE_HOST:$REMOTE_SRC_PATH"
rc=$?
[ "$rc" -gt 0 ] && log "ERROR: failed to send project backup. Exit-code: $rc" || log "\t- OK"

rsync --progress "$LOG_PATH" "$REMOTE_HOST:$REMOTE_LOG_PATH"

log "Finish!"
log "Used space: $(du -h "$LOCAL_BAK_DIR" | tail -n1)"
log "Free space: $(df -h | tail -n1 | awk '{print $4}')"
|
||||
72
scripts/rsync-backup.sh
Executable file
72
scripts/rsync-backup.sh
Executable file
@@ -0,0 +1,72 @@
|
||||
#!/usr/bin/env bash
# Mirrors one mounted partition onto another with rsync, logging every
# line and optionally notifying via ntfy.

RS_SRC_DEV=/dev/sdb1
RS_DST_DEV=/dev/sdc1
LOG_DIR="/home/$USER/rsync-logs"
USE_NTFY=0
NTFY_TITLE="Backup: $RS_SRC_DEV => $RS_DST_DEV"
NTFY_CHANNEL=""

# Prints a timestamped message to stdout and appends it to today's log file.
log() {
    [ ! -d "$LOG_DIR" ] && mkdir -p "$LOG_DIR"
    echo -e "[$(date '+%H:%M:%S')] $*" | tee -a "$LOG_DIR/$(date '+%Y%m%d').log"
}

# Sends a plain informational notification (no-op unless USE_NTFY=1).
ntfy_info() {
    [ $USE_NTFY == 1 ] && ntfy send \
        --title "$NTFY_TITLE" \
        --message "$1" \
        --priority 1 \
        "$NTFY_CHANNEL"
}

# Sends a warning notification (no-op unless USE_NTFY=1).
ntfy_warn() {
    [ $USE_NTFY == 1 ] && ntfy send \
        --title "$NTFY_TITLE" \
        --tags "warning" \
        --message "$1" \
        --priority 5 \
        "$NTFY_CHANNEL"
}

log "START\t========================="

mnt_check=$(findmnt -nf "$RS_SRC_DEV")
if [ $? -gt 0 ]; then
    log "Source partition '$RS_SRC_DEV' is not mounted. Exit 1."
    exit 1
fi

RS_SRC_PATH=$(echo "$mnt_check" | awk '{ print $1 }')
log "Source partition '$RS_SRC_DEV' is mounted at '$RS_SRC_PATH'"

mnt_check=$(findmnt -nf "$RS_DST_DEV")
if [ $? -gt 0 ]; then
    log "Destination partition '$RS_DST_DEV' is not mounted. Exit 1."
    exit 1
fi

RS_DST_PATH=$(echo "$mnt_check" | awk '{ print $1 }')
log "Destination partition '$RS_DST_DEV' is mounted at '$RS_DST_PATH'"

log "Executing rsync:"

rsync -huva \
    --progress \
    --delete \
    --exclude='lost+found' \
    --exclude='.Trash' \
    "$RS_SRC_PATH/" \
    "$RS_DST_PATH/" \
    | while IFS= read -r line; do log "$line"; done

# BUGFIX: '$?' after the pipeline reflected the 'while' loop (always 0),
# so rsync failures were silently reported as success. PIPESTATUS[0]
# holds rsync's real exit code.
rsync_rc=${PIPESTATUS[0]}
if [ "$rsync_rc" -gt 0 ]; then
    log "Something went wrong. Exit 3."
    ntfy_warn "Something went wrong, check log"
    exit 3
fi
ntfy_info "Success!"

log "FINISH\t========================="
|
||||
7
scripts/rutracker.sh
Executable file
7
scripts/rutracker.sh
Executable file
@@ -0,0 +1,7 @@
|
||||
#!/usr/bin/env bash
# Pins rutracker announce hosts in /etc/hosts.
# BUGFIX: made idempotent — the original appended the same lines on every
# run, filling /etc/hosts with duplicates.

# Appends $1 to /etc/hosts only if the exact line is not there yet.
# -x: whole-line match, -F: fixed string (the comment line has regex chars).
add_host_line() {
    grep -qxF "$1" /etc/hosts || echo "$1" | sudo tee -a /etc/hosts # > /dev/null
}

add_host_line "# https://rutracker.net/forum/viewtopic.php?p=75274766#75274766"
add_host_line "185.15.211.203 bt.t-ru.org"
add_host_line "185.15.211.203 bt2.t-ru.org"
add_host_line "185.15.211.203 bt3.t-ru.org"
add_host_line "185.15.211.203 bt4.t-ru.org"
|
||||
18
scripts/s3-backup-old.sh
Executable file
18
scripts/s3-backup-old.sh
Executable file
@@ -0,0 +1,18 @@
|
||||
#!/usr/bin/env bash
# Lists S3 objects older than TTL_DAYS and prints the delete commands
# that would remove them (dry-run: 's3cmd del' is only echoed).

TTL_DAYS=1
S3="s3://......"
OLDER_THAN=$(date -d "$TTL_DAYS days ago" "+%s")
echo "$OLDER_THAN"
s3cmd ls -r "$S3" | while read -r line; do
    # s3cmd ls columns: date time size path
    FILETIME=$(echo "$line" | awk '{print $1" "$2}')
    FILETIME=$(date -d "$FILETIME" "+%s")
    echo "$FILETIME - $OLDER_THAN"
    if [[ $FILETIME -le $OLDER_THAN ]]; then
        FILEPATH=$(echo "$line" | awk '{print $4}')
        # BUGFIX: the original unquoted '[ $FILEPATH != "" ]' crashes with
        # "unary operator expected" whenever the field is empty.
        if [ -n "$FILEPATH" ]; then
            printf 'Must delete: %s\n' "$FILEPATH"
            echo "s3cmd del $FILEPATH"
        fi
    fi
done
|
||||
342
scripts/s3-backup.sh
Normal file
342
scripts/s3-backup.sh
Normal file
@@ -0,0 +1,342 @@
|
||||
#!/usr/bin/env bash
|
||||
#####################################################################
|
||||
# #
|
||||
# Stupidly simple backup script for own projects #
|
||||
# #
|
||||
# Author: Anthony Axenov (Антон Аксенов) #
|
||||
# Version: 1.2 #
|
||||
# License: WTFPLv2 #
|
||||
# More info (RU): https://axenov.dev/?p=1272 #
|
||||
# #
|
||||
#####################################################################
|
||||
|
||||
# use remote storages ===============================================
|
||||
|
||||
USE_SSH=1
|
||||
USE_S3=1
|
||||
|
||||
# database credentials ==============================================
|
||||
|
||||
DBUSER=
|
||||
DBPASS=
|
||||
DBNAME=
|
||||
DBCHARSET="utf8"
|
||||
|
||||
# dates for file structure ==========================================
|
||||
|
||||
TODAY_DIR="$(date +%Y.%m.%d)"
|
||||
TODAY_FILE="$(date +%H.%M)"
|
||||
|
||||
# local storage =====================================================
|
||||
|
||||
LOCAL_BAK_DIR="/backup"
|
||||
LOCAL_BAK_PATH="$LOCAL_BAK_DIR/$TODAY_DIR"
|
||||
|
||||
# database backup file
|
||||
LOCAL_SQL_FILE="$TODAY_FILE-db.sql.gz"
|
||||
LOCAL_SQL_PATH="$LOCAL_BAK_PATH/$LOCAL_SQL_FILE"
|
||||
|
||||
# project path and backup file
|
||||
LOCAL_SRC_DIR="/var/www/html"
|
||||
LOCAL_SRC_FILE="$TODAY_FILE-src.tar.gz"
|
||||
LOCAL_SRC_PATH="$LOCAL_BAK_PATH/$LOCAL_SRC_FILE"
|
||||
|
||||
# log file
|
||||
LOG_FILE="$TODAY_FILE.log"
|
||||
LOG_PATH="$LOCAL_BAK_PATH/$LOG_FILE"
|
||||
|
||||
# remote storages ===================================================
|
||||
|
||||
SSH_HOST="user@example.com"
|
||||
SSH_BAK_DIR="/backup"
|
||||
SSH_BAK_PATH="$SSH_BAK_DIR/$TODAY_DIR"
|
||||
SSH_SQL_FILE="$SSH_BAK_PATH/$LOCAL_SQL_FILE"
|
||||
SSH_SRC_FILE="$SSH_BAK_PATH/$LOCAL_SRC_FILE"
|
||||
SSH_LOG_FILE="$SSH_BAK_PATH/$LOG_FILE"
|
||||
|
||||
S3_BUCKET="s3://my.bucket"
|
||||
S3_DIR="$S3_BUCKET/$TODAY_DIR"
|
||||
S3_SQL_FILE="$S3_DIR/$LOCAL_SQL_FILE"
|
||||
S3_SRC_FILE="$S3_DIR/$LOCAL_SRC_FILE"
|
||||
S3_LOG_FILE="$S3_DIR/$LOG_FILE"
|
||||
|
||||
# autoremove ========================================================
|
||||
|
||||
# time to live on different storages
|
||||
TTL_LOCAL=3
|
||||
TTL_SSH=7
|
||||
TTL_S3=60
|
||||
|
||||
# autoremove flags
|
||||
CLEAR_SSH=1
|
||||
CLEAR_S3=1
|
||||
|
||||
# notifications =====================================================
|
||||
|
||||
USE_NTFY=1
|
||||
NTFY_TITLE="Backup script"
|
||||
NTFY_CHANNEL=
|
||||
|
||||
#====================================================================
|
||||
#
|
||||
# Functions used for the whole backup flow
|
||||
#
|
||||
#====================================================================
|
||||
|
||||
# prints arguments to stdout and into log file
|
||||
log() {
|
||||
echo -e "[$(date +%H:%M:%S)] $*" | tee -a "$LOG_PATH"
|
||||
}
|
||||
|
||||
# sends notification with information
|
||||
ntfy_info() {
|
||||
[ $USE_NTFY == 1 ] && ntfy send \
|
||||
--title "$NTFY_TITLE" \
|
||||
--message "$1" \
|
||||
--priority 1 \
|
||||
"$NTFY_CHANNEL"
|
||||
}
|
||||
|
||||
# sends notification with warning
|
||||
ntfy_warn() {
|
||||
[ $USE_NTFY == 1 ] && ntfy send \
|
||||
--title "$NTFY_TITLE" \
|
||||
--tags "warning" \
|
||||
--message "$1" \
|
||||
--priority 5 \
|
||||
"$NTFY_CHANNEL"
|
||||
}
|
||||
|
||||
# prints initialized parameters
|
||||
show_params() {
|
||||
log "Initialized parameters:"
|
||||
|
||||
log "├ [ Remotes ]"
|
||||
log "│\t├ USE_SSH = $USE_SSH"
|
||||
[ $USE_SSH == 1 ] && log "│\t├ SSH_HOST = $SSH_HOST"
|
||||
log "│\t├ USE_S3 = $USE_S3"
|
||||
[ $USE_S3 == 1 ] && log "│\t├ S3_BUCKET = $S3_BUCKET"
|
||||
|
||||
log "├ [ Database ]"
|
||||
log "│\t├ DBUSER = $DBUSER"
|
||||
log "│\t├ DBNAME = $DBNAME"
|
||||
log "│\t├ DBCHARSET = $DBCHARSET"
|
||||
log "│\t├ LOCAL_SQL_PATH = $LOCAL_SQL_PATH"
|
||||
[ $USE_SSH == 1 ] && log "│\t├ SSH_SQL_FILE = $SSH_SQL_FILE"
|
||||
[ $USE_S3 == 1 ] && log "│\t├ S3_SQL_FILE = $S3_SQL_FILE"
|
||||
|
||||
log "├ [ Sources ]"
|
||||
log "│\t├ LOCAL_SRC_DIR = $LOCAL_SRC_DIR"
|
||||
log "│\t├ LOCAL_SRC_PATH = $LOCAL_SRC_PATH"
|
||||
[ $USE_SSH == 1 ] && log "│\t├ SSH_SRC_FILE = $SSH_SRC_FILE"
|
||||
[ $USE_S3 == 1 ] && log "│\t├ S3_SRC_FILE = $S3_SRC_FILE"
|
||||
|
||||
log "├ [ Log ]"
|
||||
log "│\t├ LOG_PATH = $LOG_PATH"
|
||||
[ $USE_SSH == 1 ] && log "│\t├ SSH_LOG_FILE = $SSH_LOG_FILE"
|
||||
[ $USE_S3 == 1 ] && log "│\t├ S3_LOG_FILE = $S3_LOG_FILE"
|
||||
|
||||
log "├ [ Autoclear ]"
|
||||
log "│\t├ TTL_LOCAL = $TTL_LOCAL"
|
||||
[ $USE_SSH == 1 ] && {
|
||||
log "│\t├ CLEAR_SSH = $CLEAR_SSH"
|
||||
log "│\t├ TTL_SSH = $TTL_SSH"
|
||||
}
|
||||
[ $USE_S3 == 1 ] && {
|
||||
log "│\t├ CLEAR_S3 = $CLEAR_S3"
|
||||
log "│\t├ TTL_S3 = $TTL_S3"
|
||||
}
|
||||
|
||||
log "└ [ ntfy ]"
|
||||
log "\t├ USE_NTFY = $USE_NTFY"
|
||||
[ $USE_NTFY == 1 ] && log "\t├ NTFY_TITLE = $NTFY_TITLE"
|
||||
[ $USE_NTFY == 1 ] && log "\t└ NTFY_CHANNEL = $NTFY_CHANNEL"
|
||||
}
|
||||
|
||||
# initializes directories for backup
|
||||
init_dirs() {
|
||||
if [ ! -d "$LOCAL_BAK_PATH" ]; then
|
||||
mkdir -p $LOCAL_BAK_PATH
|
||||
fi
|
||||
[ $USE_SSH == 1 ] && ssh $SSH_HOST "mkdir -p $SSH_BAK_PATH"
|
||||
}
|
||||
|
||||
# clears old local backups
|
||||
clear_local_backups() {
|
||||
log "\tLocal:"
|
||||
log $(find "$LOCAL_BAK_DIR" -type d -mtime +"$TTL_LOCAL" | sort)
|
||||
find "$LOCAL_BAK_DIR" -type d -mtime +"$TTL_LOCAL" | xargs rm -rf
|
||||
}
|
||||
|
||||
# clears old backups on remote ssh storage
|
||||
clear_ssh_backups() {
|
||||
if [ $USE_SSH == 1 ] && [ $CLEAR_SSH == 1 ]; then
|
||||
log "\tSSH:"
|
||||
log $(ssh "$SSH_HOST" "find $SSH_BAK_DIR -type d -mtime +$TTL_SSH" | sort)
|
||||
ssh "$SSH_HOST" "find $SSH_BAK_DIR -type d -mtime +$TTL_SSH | xargs rm -rf"
|
||||
else
|
||||
log "\tSSH: disabled (\$USE_SSH, \$CLEAR_SSH)"
|
||||
fi
|
||||
}
|
||||
|
||||
# clears backups on remote s3 storage
|
||||
# Clears backups on the remote s3 storage that are older than TTL_S3 days.
# Enabled only when both $USE_S3 and $CLEAR_S3 are 1.
# https://gist.github.com/JProffitt71/9044744?permalink_comment_id=3539681#gistcomment-3539681
clear_s3_backups() {
    if [ $USE_S3 == 1 ] && [ $CLEAR_S3 == 1 ]; then
        log "\tS3:"
        OLDER_THAN=$(date -d "$TTL_S3 days ago" "+%s")
        s3cmd ls -r "$S3_DIR" | while read -r line; do
            # s3cmd ls columns: date time size path
            FILETIME=$(echo "$line" | awk '{print $1" "$2}')
            FILETIME=$(date -d "$FILETIME" "+%s")
            if [[ $FILETIME -le $OLDER_THAN ]]; then
                FILEPATH=$(echo "$line" | awk '{print $4}')
                # BUGFIX: the original unquoted '[ $FILEPATH != "" ]' errors
                # out ("unary operator expected") when the field is empty.
                if [ -n "$FILEPATH" ]; then
                    log "$line"
                    s3cmd del "$FILEPATH"
                fi
            fi
        done
    else
        log "\tS3: disabled (\$USE_S3 + \$CLEAR_S3)"
    fi
}
|
||||
|
||||
# clears old backups
|
||||
clear_backups() {
|
||||
echo
|
||||
log "1/7 Removing old backups..."
|
||||
clear_local_backups
|
||||
clear_ssh_backups
|
||||
clear_s3_backups
|
||||
}
|
||||
|
||||
# makes archive with database dump
|
||||
# Makes a gzipped database dump; on success ships it to the ssh and s3
# remotes, on failure notifies and marks the send steps as skipped.
backup_db() {
    echo
    log "2/7 Dumping DB: $DBNAME..."
    mysqldump \
        --user="$DBUSER" \
        --password="$DBPASS" \
        --opt \
        --default-character-set="$DBCHARSET" \
        --quick \
        "$DBNAME" | gzip > "$LOCAL_SQL_PATH"
    # BUGFIX: plain '$?' only reflected gzip, so mysqldump failures were
    # reported as success; PIPESTATUS exposes both stages. The original
    # also logged '$?' after the '[' test, which had already reset it.
    local dump_rc=${PIPESTATUS[0]} gzip_rc=${PIPESTATUS[1]}
    if [ "$dump_rc" == 0 ] && [ "$gzip_rc" == 0 ]; then
        log "\t- OK"
        send_db_ssh
        send_db_s3
    else
        log "\t- ERROR: failed to create dump. Exit-code: $dump_rc"
        ntfy_warn "ERROR: failed to create dump"
        log "3/7 Sending database backup to $SSH_HOST... skipped"
        log "4/7 Sending database backup to $S3_DIR... skipped"
    fi
}
|
||||
|
||||
# sends database archive into ssh remote storage
|
||||
send_db_ssh() {
|
||||
echo
|
||||
log "3/7 Sending database backup to $SSH_HOST..."
|
||||
if [ $USE_SSH == 1 ]; then
|
||||
rsync --progress "$LOCAL_SQL_PATH" "$SSH_HOST:$SSH_SQL_FILE"
|
||||
if [ $? == 0 ]; then
|
||||
log "\t- OK"
|
||||
else
|
||||
log "\t- ERROR: failed to send DB backup to $SSH_HOST. Exit-code: $?"
|
||||
ntfy_warn "ERROR: failed to send DB backup to $SSH_HOST"
|
||||
fi
|
||||
else
|
||||
log "\t- disabled (\$USE_SSH)"
|
||||
fi
|
||||
}
|
||||
|
||||
# sends database archive into s3 remote storage
|
||||
# Sends the database archive to the s3 remote storage (step 4/7).
send_db_s3() {
    echo
    log "4/7 Sending database backup to $S3_DIR..."
    if [ $USE_S3 == 1 ]; then
        s3cmd put "$LOCAL_SQL_PATH" "$S3_SQL_FILE"
        local rc=$?
        if [ "$rc" == 0 ]; then
            log "\t- OK"
        else
            log "\t- ERROR: failed to send DB backup to $S3_DIR. Exit-code: $rc"
            ntfy_warn "ERROR: failed to send DB backup to $S3_DIR"
        fi
    else
        # BUGFIX: the original printed 'disabled ($USE_SSH)' although this
        # branch is controlled by $USE_S3.
        log "\t- disabled (\$USE_S3)"
    fi
}
|
||||
|
||||
# makes archive with project sources
|
||||
backup_src() {
|
||||
echo
|
||||
log "5/7 Compressing project dir: $LOCAL_SRC_DIR..."
|
||||
tar -zcf "$LOCAL_SRC_PATH" "$LOCAL_SRC_DIR"
|
||||
if [ $? == 0 ]; then
|
||||
log "\t- OK"
|
||||
send_src_ssh
|
||||
send_src_s3
|
||||
else
|
||||
log "\t- ERROR: failed to compress project. Exit-code: $?"
|
||||
ntfy_warn "ERROR: failed to compress project"
|
||||
log "6/7 Sending project backup to $SSH_HOST... skipped"
|
||||
log "7/7 Sending project backup to $S3_DIR... skipped"
|
||||
fi
|
||||
}
|
||||
|
||||
# sends sources archive into ssh remote storage
|
||||
# Sends the sources archive to the ssh remote storage (step 6/7).
send_src_ssh() {
    echo
    log "6/7 Sending project backup to $SSH_HOST..."
    if [ $USE_SSH == 1 ]; then
        rsync --progress "$LOCAL_SRC_PATH" "$SSH_HOST:$SSH_SRC_FILE"
        # Capture the status immediately: the original interpolated '$?'
        # into the message after the 'if [ $? == 0 ]' test had reset it.
        local rc=$?
        if [ "$rc" == 0 ]; then
            log "\t- OK"
        else
            log "\t- ERROR: failed to send project backup to $SSH_HOST. Exit-code: $rc"
            ntfy_warn "ERROR: failed to send project backup to $SSH_HOST"
        fi
    else
        # Consistency: sibling senders name the flag that disabled them.
        log "\t- disabled (\$USE_SSH)"
    fi
}
|
||||
|
||||
# sends sources archive into s3 remote storage
|
||||
# Sends the sources archive to the s3 remote storage (step 7/7).
send_src_s3() {
    echo
    log "7/7 Sending project backup to $S3_DIR..."
    # BUGFIX: the original skipped the $USE_S3 guard every sibling sender
    # performs, so it attempted the upload even with S3 disabled.
    if [ $USE_S3 == 1 ]; then
        s3cmd put "$LOCAL_SRC_PATH" "$S3_SRC_FILE"
        local rc=$?
        if [ "$rc" == 0 ]; then
            log "\t- OK"
        else
            # BUGFIX: the original message said "database backup" inside
            # the project-backup sender.
            log "\t- ERROR: failed to send project backup to $S3_DIR. Exit-code: $rc"
            ntfy_warn "ERROR: failed to send project backup to $S3_DIR"
        fi
    else
        log "\t- disabled (\$USE_S3)"
    fi
}
|
||||
|
||||
# prints used/free space on local storage
|
||||
show_finish() {
|
||||
echo
|
||||
log "Finish!"
|
||||
log "Used space: $(du -h "$LOCAL_BAK_PATH" | tail -n1)" # вывод размера папки с бэкапами за текущий день
|
||||
log "Free space: $(df -h "$LOCAL_BAK_PATH" | tail -n1 | awk '{print $4}')" # вывод свободного места на локальном диске
|
||||
echo
|
||||
}
|
||||
|
||||
# sends log file into both remote storage
|
||||
send_log() {
|
||||
[ $USE_SSH == 1 ] && rsync --progress "$LOG_PATH" "$SSH_HOST:$SSH_LOG_FILE"
|
||||
[ $USE_S3 == 1 ] && s3cmd put "$LOG_PATH" "$S3_LOG_FILE"
|
||||
}
|
||||
|
||||
# main flow =========================================================
|
||||
|
||||
log "Start ----------------------------------------------------------"
|
||||
show_params
|
||||
init_dirs
|
||||
clear_backups
|
||||
backup_db
|
||||
backup_src
|
||||
show_finish
|
||||
send_log
|
||||
ntfy_info "Finish!"
|
||||
40
scripts/setup-wakeonlan.sh
Executable file
40
scripts/setup-wakeonlan.sh
Executable file
@@ -0,0 +1,40 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
# Print all arguments as a single line, honouring backslash escapes
# such as \t and \n (echo -e semantics).
print() {
    local message="$*"
    echo -e "$message"
}
|
||||
|
||||
# Prints the current Wake-on-LAN mode of the interface named by the
# global $iface (e.g. "d" = disabled, "g" = wake on magic packet).
# Requires root to query ethtool, hence sudo.
state() {
    sudo ethtool "$iface" | grep -E '^\s+Wake-on:\s\w+' | awk '{print $2}'
}
|
||||
|
||||
# Pick the network interface: first CLI argument, default enp3s0.
[ "$1" ] && iface="$1" || iface=enp3s0

# Read the MAC address from sysfs; an unknown interface has no entry there.
[ -f "/sys/class/net/$iface/address" ] && mac=$(cat "/sys/class/net/$iface/address") || mac=''
[ -z "$mac" ] && {
    print "Wrong interface! $iface" >&2
    exit 1
}

state=$(state)

print "Interface\t: $iface"
print "MAC-address\t: $mac"
print "WoL state\t: $state"

# 'd' means WoL is disabled: enable it now and persist the setting with a
# networkd-dispatcher hook so it survives interface reconfiguration.
# BUG FIX: quote $state — the unquoted test broke when ethtool printed nothing.
if [ "$state" = 'd' ]; then
    sudo ethtool -s "$iface" wol gu || true
    sudo mkdir -p /etc/networkd-dispatcher/configuring.d
    sudo tee /etc/networkd-dispatcher/configuring.d/wol <<EOF >/dev/null
#!/usr/bin/env bash

ethtool -s $iface wol gu || true
EOF
    sudo chmod 755 /etc/networkd-dispatcher/configuring.d/wol
    print "* New WOL state\t: $(state)"
fi

print "\nTo wake up this device run this command from another one:\n"
print "\twakeonlan -p 8 $mac\n"
print "\twol $mac\n"
|
||||
|
||||
39
scripts/ubuntu/desktop-upgrade.sh
Executable file
39
scripts/ubuntu/desktop-upgrade.sh
Executable file
@@ -0,0 +1,39 @@
|
||||
#!/usr/bin/env bash
# Upgrade an Ubuntu desktop to the next release using the official
# dist-upgrader tarball (works even for EOL releases).
# https://dev.to/chefgs/upgrading-an-end-of-life-eol-ubuntu-os-to-lts-version-3a36
# https://changelogs.ubuntu.com/meta-release

# shellcheck disable=SC1091
source /etc/os-release

echo "Loading..."
# All Ubuntu codenames in release order, oldest first.
mapfile -t codenames < <(curl -s https://changelogs.ubuntu.com/meta-release | grep -oP '(?<=Dist:\s).*')
thisCodename="$VERSION_CODENAME"
nextCodename=''
for idx in "${!codenames[@]}"; do
    if [ "${codenames[idx]}" = "$thisCodename" ]; then
        nextCodename=${codenames[((idx+1))]}
    fi
done

# BUG FIX: bail out instead of downloading a garbage URL when the current
# release is unknown to the meta-release list or is already the latest one.
if [ -z "$nextCodename" ]; then
    echo "No release found after '$thisCodename' — nothing to upgrade to." >&2
    exit 1
fi

targetDownloadPath="$(pwd)/upgrade-$nextCodename"
targetToolPath="$targetDownloadPath/unpacked"
targetToolFile="$targetDownloadPath/$nextCodename.tar.gz"

echo "Current dist: $thisCodename"
echo "Next dist: $nextCodename"
echo "Target path: $targetToolFile"

rm -rf "$targetToolPath"
mkdir -p "$targetToolPath"

echo "Downloading..."
cd "$targetDownloadPath" || exit 1
wget "http://archive.ubuntu.com/ubuntu/dists/${nextCodename}-updates/main/dist-upgrader-all/current/${nextCodename}.tar.gz"

echo "Unpacking..."
tar -xaf "$targetToolFile" -C "$targetToolPath"

echo "Starting..."
cd ./unpacked || exit 1

# The upgrader binary is named after the release codename.
# shellcheck disable=SC2086
sudo ./$nextCodename
|
||||
33
scripts/ubuntu/lemp.sh
Normal file
33
scripts/ubuntu/lemp.sh
Normal file
@@ -0,0 +1,33 @@
|
||||
#!/usr/bin/env bash
# Provision a LEMP-style web server: base tooling plus MariaDB, nginx
# and certbot. Runs apt directly (no sudo) — execute as root.

apt update && apt upgrade -y --autoremove
apt install -y \
    apt-transport-https \
    build-essential \
    ca-certificates \
    cmake \
    curl \
    dialog \
    gettext \
    gnupg \
    htop \
    libaio1 \
    libcurl4-gnutls-dev \
    libexpat1-dev \
    libghc-zlib-dev \
    libssl-dev \
    make \
    mc \
    nano \
    net-tools \
    nmap \
    p7zip-full \
    software-properties-common \
    unzip \
    inotify-tools \
    git \
    mariadb-server \
    mariadb-client \
    nginx \
    certbot \
    python3-certbot-nginx
|
||||
114
scripts/ubuntu/mediasrv.sh
Normal file
114
scripts/ubuntu/mediasrv.sh
Normal file
@@ -0,0 +1,114 @@
|
||||
#!/usr/bin/env bash
# Provision an Ubuntu media server / HTPC: base packages, Samba share,
# ulauncher, youtube-dl, Chrome, rtl8812au Wi-Fi driver (DKMS),
# Syncthing and Kodi.

sudo apt update && sudo apt upgrade -y --autoremove
sudo apt install -y \
    alien \
    apt-transport-https \
    build-essential \
    ca-certificates \
    cmake \
    curl \
    dconf-editor \
    default-jdk \
    dialog \
    gettext \
    gnupg \
    gparted \
    hardinfo \
    htop \
    libaio1 \
    libcurl4-gnutls-dev \
    libexpat1-dev \
    libghc-zlib-dev \
    libssl-dev \
    lsb-release \
    lsp-plugins \
    make \
    mc \
    nano \
    neofetch \
    net-tools \
    nmap \
    p7zip-full \
    easyeffects \
    software-properties-common \
    ubuntu-restricted-extras \
    unzip \
    vlc \
    ffmpeg \
    xclip \
    inotify-tools \
    notify-osd \
    fonts-open-sans \
    libnotify-bin \
    samba \
    dkms


# Samba: strip comments from the stock config, enable the service and add
# the current user. Reference material:
# https://selectel.ru/blog/tutorials/how-to-install-and-configure-samba-on-ubuntu-20-04/
# https://linuxconfig.org/how-to-configure-samba-server-share-on-ubuntu-22-04-jammy-jellyfish-linux
# https://phoenixnap.com/kb/ubuntu-samba
# https://computingforgeeks.com/install-and-configure-samba-server-share-on-ubuntu/
# https://linux.how2shout.com/how-to-install-samba-on-ubuntu-22-04-lts-jammy-linux/
sudo cp /etc/samba/smb.conf /etc/samba/smb.conf.bak
sudo bash -c 'grep -v -E "^#|^;" /etc/samba/smb.conf.bak | grep . > /etc/samba/smb.conf'
sudo systemctl enable --now smbd
sudo usermod -aG sambashare "$USER"
sudo smbpasswd -a "$USER"


sudo add-apt-repository -y ppa:agornostal/ulauncher && \
    sudo apt install -y --autoremove ulauncher

curl -L https://yt-dl.org/downloads/latest/youtube-dl -o "${HOME}/.local/bin/youtube-dl" && \
    sudo chmod +rx "${HOME}/.local/bin/youtube-dl"

wget "https://dl.google.com/linux/direct/google-chrome-stable_current_amd64.deb" && \
    sudo dpkg -i google-chrome-stable_current_amd64.deb

git clone https://github.com/aircrack-ng/rtl8812au.git && \
    cd rtl8812au && \
    sudo make dkms_install

# BUG FIX: the candidate-channel line used to be written with a plain 'tee',
# overwriting the stable-channel entry written just before it; append with
# 'tee -a' so both apt entries survive.
sudo curl -s -o /usr/share/keyrings/syncthing-archive-keyring.gpg https://syncthing.net/release-key.gpg && \
    echo "deb [signed-by=/usr/share/keyrings/syncthing-archive-keyring.gpg] https://apt.syncthing.net/ syncthing stable" | sudo tee /etc/apt/sources.list.d/syncthing.list && \
    echo "deb [signed-by=/usr/share/keyrings/syncthing-archive-keyring.gpg] https://apt.syncthing.net/ syncthing candidate" | sudo tee -a /etc/apt/sources.list.d/syncthing.list && \
    sudo apt update && sudo apt install -y --autoremove syncthing && \
    wget "https://raw.githubusercontent.com/syncthing/syncthing/main/etc/linux-desktop/syncthing-start.desktop" -O "$HOME/.local/share/applications/syncthing-start.desktop" && \
    wget "https://raw.githubusercontent.com/syncthing/syncthing/main/etc/linux-desktop/syncthing-ui.desktop" -O "$HOME/.local/share/applications/syncthing-ui.desktop" && \
    ln -sf "$HOME/.local/share/applications/syncthing-start.desktop" "$HOME/.config/autostart/syncthing-start.desktop"


#####################################################################

sudo apt install -y kodi kodi-pvr-iptvsimple
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
51
scripts/ubuntu/server.sh
Normal file
51
scripts/ubuntu/server.sh
Normal file
@@ -0,0 +1,51 @@
|
||||
#!/usr/bin/env bash
# Provision an Ubuntu server: registers the official Docker apt repository,
# then installs base tooling, nginx + certbot and the Docker engine in one
# '&&' chain (any failing step aborts the rest).
# NOTE(review): comments cannot be interleaved inside the continuation chain
# below without breaking it, so the whole flow is documented here.

sudo apt install -y ca-certificates curl && \
sudo install -m 0755 -d /etc/apt/keyrings && \
sudo curl -fsSL https://download.docker.com/linux/ubuntu/gpg -o /etc/apt/keyrings/docker.asc && \
sudo chmod a+r /etc/apt/keyrings/docker.asc && \
source /etc/os-release && \
echo "deb [arch=$(dpkg --print-architecture) signed-by=/etc/apt/keyrings/docker.asc] https://download.docker.com/linux/ubuntu \
$(. /etc/os-release && echo "${UBUNTU_CODENAME:-$VERSION_CODENAME}") stable" | \
sudo tee /etc/apt/sources.list.d/docker.list > /dev/null && \
sudo apt update && \
sudo apt upgrade -y --autoremove && \
sudo apt install -y \
    apt-transport-https \
    build-essential \
    ca-certificates \
    git \
    cmake \
    curl \
    dialog \
    gettext \
    gnupg \
    htop \
    libcurl4-gnutls-dev \
    libexpat1-dev \
    libghc-zlib-dev \
    libssl-dev \
    lsb-release \
    make \
    mc \
    meld \
    nano \
    neofetch \
    net-tools \
    nmap \
    p7zip-full \
    unzip \
    ffmpeg \
    inotify-tools \
    notify-osd \
    fonts-open-sans \
    libnotify-bin \
    tree \
    nginx \
    certbot \
    python3-certbot-nginx \
    docker-ce \
    docker-ce-cli \
    containerd.io \
    docker-buildx-plugin \
    docker-compose-plugin
|
||||
6
scripts/utils/clock
Executable file
6
scripts/utils/clock
Executable file
@@ -0,0 +1,6 @@
|
||||
#!/usr/bin/env bash
# Fullscreen terminal clock: redraw the current time once per second.
set -euo pipefail

exec watch -t -n 1 date '+%l:%M:%S%p'
|
||||
18
scripts/utils/copy
Executable file
18
scripts/utils/copy
Executable file
@@ -0,0 +1,18 @@
|
||||
#!/usr/bin/env bash
# Copy stdin (or the file named by $1) to the system clipboard, trying the
# platform tools in order: pbcopy (macOS), xclip (X11), putclip (Cygwin).
# When none exists, fall back to a plain file at /tmp/clipboard (read back
# by the companion 'pasta' script).
set -e
set -u

if hash pbcopy 2>/dev/null; then
    exec pbcopy
fi
if hash xclip 2>/dev/null; then
    exec xclip -selection clipboard
fi
if hash putclip 2>/dev/null; then
    exec putclip
fi

# No clipboard tool available: emulate one with a temp file.
rm -f /tmp/clipboard 2>/dev/null
if [ $# -eq 0 ]; then
    cat >/tmp/clipboard
else
    cat "$1" >/tmp/clipboard
fi
|
||||
26
scripts/utils/dc
Executable file
26
scripts/utils/dc
Executable file
@@ -0,0 +1,26 @@
|
||||
#!/usr/bin/env bash
# Thin docker-compose wrapper: known verbs manage the stack, anything else
# is passed through to 'docker exec' inside $CONTAINER.
CONTAINER="my-container" # the name of the container in which to 'exec' something
# Resolve the compose file next to the real script location (follow symlink).
# BUG FIX: $0 and the substitutions are now quoted so paths with spaces work.
CONFIG="$(dirname "$([ -L "$0" ] && readlink -f "$0" || echo "$0")")/docker-compose.yml" # path to compose yml file
CMD="docker-compose -f $CONFIG" # docker-compose command; word-splitting on use is intentional
APP_URL='http://localhost:8000/'

# Open the given URL in the default browser, fully detached from this shell.
open_browser() {
    if which xdg-open > /dev/null; then
        xdg-open "$1" </dev/null >/dev/null 2>&1 & disown
    elif which gnome-open > /dev/null; then
        gnome-open "$1" </dev/null >/dev/null 2>&1 & disown
    fi
}

case "$1" in
    '' | 'help' ) echo -e "Provide one of operations: \t start, stop, up, down, restart, rebuild, open";
                  echo "Otherwise all args will be passed to 'docker exec -ti $CONTAINER ...'" ;;
    'open' ) open_browser "$APP_URL" ;;
    'up' ) $CMD up -d --build ;; # build and start containers
    'down' ) $CMD down --remove-orphans ;; # stop and remove containers
    'start' ) $CMD start ;; # start containers
    'stop' ) $CMD stop ;; # stop containers
    'restart' ) $CMD stop && $CMD start ;; # restart containers
    'rebuild' ) $CMD down --remove-orphans && $CMD up -d --build ;; # rebuild containers
    # BUG FIX: quote container name and "$@" so arguments with spaces survive.
    * ) docker exec -ti "$CONTAINER" "$@" # exec anything in container
esac
|
||||
69
scripts/utils/httpcode
Executable file
69
scripts/utils/httpcode
Executable file
@@ -0,0 +1,69 @@
|
||||
#!/usr/bin/env bash
# HTTP status code lookup: with no arguments prints the whole table,
# otherwise greps it case-insensitively (by code or by phrase).
set -e
set -u

statuses="100 Continue
101 Switching Protocols
102 Processing
200 OK
201 Created
202 Accepted
203 Non-Authoritative Information
204 No Content
205 Reset Content
206 Partial Content
207 Multi-Status
208 Already Reported
300 Multiple Choices
301 Moved Permanently
302 Found
303 See Other
304 Not Modified
305 Use Proxy
307 Temporary Redirect
400 Bad Request
401 Unauthorized
402 Payment Required
403 Forbidden
404 Not Found
405 Method Not Allowed
406 Not Acceptable
407 Proxy Authentication Required
408 Request Timeout
409 Conflict
410 Gone
411 Length Required
412 Precondition Failed
413 Request Entity Too Large
414 Request-URI Too Large
415 Unsupported Media Type
416 Request Range Not Satisfiable
417 Expectation Failed
418 I'm a teapot
420 Blaze it
422 Unprocessable Entity
423 Locked
424 Failed Dependency
425 No code
426 Upgrade Required
428 Precondition Required
429 Too Many Requests
431 Request Header Fields Too Large
449 Retry with
500 Internal Server Error
501 Not Implemented
502 Bad Gateway
503 Service Unavailable
504 Gateway Timeout
505 HTTP Version Not Supported
506 Variant Also Negotiates
507 Insufficient Storage
509 Bandwidth Limit Exceeded
510 Not Extended
511 Network Authentication Required"

# NOTE(review): a single search term is expected; with several arguments
# grep would treat the extras as file names.
if [ $# -eq 0 ]; then
    echo "$statuses"
else
    echo "$statuses" | grep -i --color=never "$@"
fi
|
||||
5
scripts/utils/len
Executable file
5
scripts/utils/len
Executable file
@@ -0,0 +1,5 @@
|
||||
#!/usr/bin/env bash
# Print the length in bytes of all arguments joined by single spaces.
set -e
set -u

# BUG FIX: use printf instead of 'echo -n' — echo swallows arguments such
# as '-e' or '-n' as its own options and miscounts them. "$*" joins the
# args with spaces exactly as 'echo "$@"' did.
printf '%s' "$*" | wc -c | awk '{print $1}'
|
||||
6
scripts/utils/line
Executable file
6
scripts/utils/line
Executable file
@@ -0,0 +1,6 @@
|
||||
#!/usr/bin/env bash
# Print a single line by number from the given file(s) or stdin.
# Usage: line N [file...]
set -e
set -u

lineno="$1"; shift
sed -n "${lineno}p" -- "$@"
|
||||
23
scripts/utils/mksh
Executable file
23
scripts/utils/mksh
Executable file
@@ -0,0 +1,23 @@
|
||||
#!/usr/bin/env bash
# Scaffold a new executable bash script (strict-mode template) and open
# it in $EDITOR. Refuses to overwrite an existing file.
# Usage: mksh <path>
set -e
set -u
set -o pipefail

if [ ! $# -eq 1 ]; then
    echo 'mksh takes one argument' 1>&2
    exit 1
elif [ -e "$1" ]; then
    echo "$1 already exists" 1>&2
    exit 1
fi

# Write the strict-mode template; the trailing blank line leaves room
# for the first command.
echo '#!/usr/bin/env bash
set -e
set -u
set -o pipefail

' > "$1"

chmod u+x "$1"

"$EDITOR" "$1"
|
||||
73
scripts/utils/note
Executable file
73
scripts/utils/note
Executable file
@@ -0,0 +1,73 @@
|
||||
#!/usr/bin/env bash

# Note taking tool
#
# Usage:
#   note [name]           - Create a new note with optional name (defaults to "New note")
#   note -e|--edit [name] - Edit an existing note by name
#
# Arguments explanation:
#   [name]            - Note filename (without extension) and title
#   -e, --edit [name] - Edit existing note by name
#
# Detailed usage:
#   note                          - Creates "New note" with current timestamp
#   note my-idea                  - Creates note titled "my-idea"
#   note my-idea "My Great Idea"  - Creates note file "my-idea" but titled "My Great Idea"
#   note -e my-idea               - Edits existing note with name "my-idea"
#
# Notes are stored as markdown files in ~/notes/ with timestamps
# When multiple notes have the same name, you'll be prompted to select which one to edit

arg1="$1"
arg2="$2"
path="$HOME/notes"
[[ ! -d "$path" ]] && mkdir -p "$path"

# Collect existing notes; nullglob keeps the array empty when there are none.
shopt -s nullglob
files=("$path"/*.md)
shopt -u nullglob

case "$arg1" in
    -e|--edit)
        [[ -z "$arg2" ]] && {
            echo "Note name is required"
            exit 1
        }

        # BUG FIX: the original piped 'echo "${files[@]}"' into grep, which
        # joins every filename onto ONE line — any match returned ALL notes.
        # Print one path per line so grep filters individual files.
        # shellcheck disable=SC2207
        found=($(printf '%s\n' "${files[@]}" | grep -P "[0-9]{10}-$arg2.md"))
        [[ ${#found[@]} -eq 0 ]] && {
            echo "Note with name '$arg2' not found."
            echo "Create it with using 'note $arg2'"
            exit
        }

        [[ ${#found[@]} -eq 1 ]] && {
            nano "${found[0]}"
            exit
        }

        # Several notes share the name: let the user pick one.
        PS3="Select a note to edit: "
        select selection in "${found[@]}" "Exit"; do
            [[ "$selection" == "Exit" ]] && exit
            [[ -f "$selection" ]] && {
                nano "$selection"
                exit
            }
            continue
        done
        ;;

    *)
        # Title falls back to the filename, then to "New note".
        [[ -z "$arg2" ]] && arg2="${arg1:-New note}"
        file="$path/$(date +%s)-$arg1.md"
        cat <<EOF > "$file"
# $arg2

Note taken: $(date '+%d.%m.%Y %H:%M:%S')

EOF
        nano "$file"
        ;;
esac
|
||||
45
scripts/utils/notes
Executable file
45
scripts/utils/notes
Executable file
@@ -0,0 +1,45 @@
|
||||
#!/usr/bin/env bash
set -e
set -o pipefail

# Notes listing tool
#
# Purpose:
#   Lists all markdown notes stored in ~/notes/ directory
#
# Usage:
#   notes - Display all available notes
#
# Output:
#   - Shows filenames of all .md files in ~/notes/
#   - If no notes exist or directory is empty, displays "Empty"
#   - Provides hint to use 'note -e' for editing
#
# Example output:
#   1703123456-my-idea.md
#   1703123789-shopping-list.md
#   1703124012-project-notes.md
#
# Use 'note -e' to edit existing notes

path="$HOME/notes"

[[ ! -d "$path" ]] && {
    echo "Empty"
    exit 0
}

# nullglob keeps the array empty when there are no notes at all.
shopt -s nullglob
files=("$path"/*.md)
shopt -u nullglob

# BUG FIX: ${#files} is the string length of the FIRST element, not the
# element count; ${#files[@]} is the array length. The original only worked
# by accident (an empty array yields 0 either way).
[[ "${#files[@]}" -eq 0 ]] && {
    echo "Empty"
    exit 0
}
# Print the bare filenames without the leading directory.
for file in "${files[@]}"; do
    echo "${file/$path\//}"
done

echo
echo "Use 'note -e' to edit existing notes"
|
||||
3
scripts/utils/now
Executable file
3
scripts/utils/now
Executable file
@@ -0,0 +1,3 @@
|
||||
#!/usr/bin/env bash
# Print the current local timestamp as DD.MM.YYYY HH:MM:SS.

timestamp_format='+%d.%m.%Y %H:%M:%S'
date "$timestamp_format"
|
||||
13
scripts/utils/pasta
Executable file
13
scripts/utils/pasta
Executable file
@@ -0,0 +1,13 @@
|
||||
#!/usr/bin/env bash
# Paste the clipboard to stdout: pbpaste (macOS), then xclip (X11), then
# the /tmp/clipboard file written by the companion 'copy' script.
# Prints an empty line when no clipboard source exists.
set -e
set -u

if hash pbpaste 2>/dev/null; then
    exec pbpaste
fi
if hash xclip 2>/dev/null; then
    exec xclip -selection clipboard -o
fi
if [[ -e /tmp/clipboard ]]; then
    exec cat /tmp/clipboard
fi

echo ''
|
||||
13
scripts/utils/running
Executable file
13
scripts/utils/running
Executable file
@@ -0,0 +1,13 @@
|
||||
#!/usr/bin/env bash
# List running processes (pid + command), optionally filtered by a
# case-insensitive, fixed-string, whole-word pattern.
# NOTE(review): a single pattern argument is expected; extra args would be
# treated by grep as file names.
set -e
set -u

process_list="$(ps -eo 'pid command')"
if [[ $# != 0 ]]; then
    process_list="$(echo "$process_list" | grep -Fiw "$@")"
fi

# Hide this script itself and the grep helpers, then highlight the PIDs.
echo "$process_list" |
grep -Fv "${BASH_SOURCE[0]}" |
grep -Fv grep |
GREP_COLORS='mt=00;35' grep -E --colour=auto '^\s*[[:digit:]]+'
|
||||
8
scripts/utils/scratch
Executable file
8
scripts/utils/scratch
Executable file
@@ -0,0 +1,8 @@
|
||||
#!/usr/bin/env bash
# Open $EDITOR on a brand-new temporary file for throwaway notes.
set -euo pipefail

scratch_file="$(mktemp)"
echo "Editing $scratch_file"
exec "$EDITOR" "$scratch_file"
|
||||
25
scripts/utils/serve
Executable file
25
scripts/utils/serve
Executable file
@@ -0,0 +1,25 @@
|
||||
#!/usr/bin/env bash
# Serve the current directory over HTTP on localhost.
# Usage: serve [port]    (default: 8888)
# Prefers php's built-in server, then python3, then a bare 'python'.
set -e
set -u
set -o pipefail

port='8888'
if [ $# -eq 1 ]; then
    port="$1"
fi

if hash php 2>/dev/null; then
    exec php -S "localhost:$port"
elif hash python3 2>/dev/null; then
    exec python3 -m http.server "$port"
elif hash python 2>/dev/null; then
    # A bare 'python' may be 2 or 3; pick the matching server module.
    major_version="$(python -c 'import platform as p;print(p.python_version_tuple()[0])')"
    if [[ "$major_version" == '3' ]]; then
        exec python -m http.server "$port"
    else
        exec python -m SimpleHTTPServer "$port"
    fi
else
    echo 'unable to start HTTP server' 1>&2
    exit 1
fi
|
||||
11
scripts/utils/timer
Executable file
11
scripts/utils/timer
Executable file
@@ -0,0 +1,11 @@
|
||||
#!/usr/bin/env bash
# Countdown timer: sleep for the given duration (any sleep(1) format,
# e.g. '90' or '5m') and then raise a desktop notification.
set -e
set -u
set -o pipefail

sleep "$1"
notify-send 'Timer complete!' \
    -u normal \
    -t 10000 \
    -i clock \
    -a 'Timer script'
|
||||
6
scripts/utils/trash
Executable file
6
scripts/utils/trash
Executable file
@@ -0,0 +1,6 @@
|
||||
#!/usr/bin/env bash
# Move the given paths to the desktop trash (recoverable) via GIO,
# instead of deleting them outright with rm.
set -euo pipefail

exec gio trash "$@"
|
||||
8
scripts/utils/tryna
Executable file
8
scripts/utils/tryna
Executable file
@@ -0,0 +1,8 @@
|
||||
#!/usr/bin/env bash
# Re-run the given command every 0.5 s until it finally succeeds.
set -u

"$@"
until [[ "$?" -eq 0 ]]; do
    sleep 0.5
    "$@"
done
|
||||
8
scripts/utils/trynafail
Executable file
8
scripts/utils/trynafail
Executable file
@@ -0,0 +1,8 @@
|
||||
#!/usr/bin/env bash
# Re-run the given command every 0.5 s for as long as it keeps
# succeeding; stops at the first failure (handy for flaky commands).
set -u

"$@"
while (( $? == 0 )); do
    sleep 0.5
    "$@"
done
|
||||
14
scripts/utils/url2md
Executable file
14
scripts/utils/url2md
Executable file
@@ -0,0 +1,14 @@
|
||||
#!/usr/bin/env bash
# Convert a web page to Markdown via the Jina Reader service (r.jina.ai)
# and print the result to stdout.
# Usage: url2md <url>
set -e
set -u
set -o pipefail

# The X-* headers tune the Markdown rendering (engine, heading/link style).
curl "https://r.jina.ai/$1" \
    -sS \
    -H "DNT: 1" \
    -H "X-Base: final" \
    -H "X-Engine: direct" \
    -H "X-Md-Em-Delimiter: *" \
    -H "X-Md-Heading-Style: setext" \
    -H "X-Md-Link-Reference-Style: collapsed" \
    -H "X-Md-Link-Style: referenced"
|
||||
13
scripts/utils/waitfor
Executable file
13
scripts/utils/waitfor
Executable file
@@ -0,0 +1,13 @@
|
||||
#!/usr/bin/env bash
# Block until the process with the given PID exits.
# Usage: waitfor <pid>
# 'tail --pid' does the actual waiting; when systemd-inhibit is available
# it also keeps the machine from sleeping while we wait.
set -e
set -u
set -o pipefail

if hash systemd-inhibit 2>/dev/null; then
    systemd-inhibit \
        --who=waitfor \
        --why="Awaiting PID $1" \
        tail --pid="$1" -f /dev/null
else
    tail --pid="$1" -f /dev/null
fi
|
||||
82
scripts/vscode-ext.sh
Normal file
82
scripts/vscode-ext.sh
Normal file
@@ -0,0 +1,82 @@
|
||||
#!/usr/bin/env bash
# Install the author's standard set of VS Code extensions.
exts=(
    'af4jm.vscode-m3u'
    'ahmadalli.vscode-nginx-conf'
    'akamud.vscode-theme-onedark'
    'anweber.statusbar-commands'
    'baincd.mini-command-palettes'
    'bmewburn.vscode-intelephense-client'
    'codezombiech.gitignore'
    'cweijan.vscode-redis-client'
    'darkriszty.markdown-table-prettify'
    'davidmarek.jsonpath-extract'
    'deitry.apt-source-list-syntax'
    'devsense.composer-php-vscode'
    'devsense.intelli-php-vscode'
    'devsense.phptools-vscode'
    'devsense.profiler-php-vscode'
    'dotjoshjohnson.xml'
    'dunstontc.vscode-go-syntax'
    'dustypomerleau.rust-syntax'
    'eamodio.gitlens'
    'editorconfig.editorconfig'
    'esbenp.prettier-vscode'
    'furkanozalp.go-syntax'
    'gigacode.gigacode-vscode'
    'golang.go'
    'grapecity.gc-excelviewer'
    'humao.rest-client'
    'irongeek.vscode-env'
    'jebbs.plantuml'
    'jeff-hykin.better-go-syntax'
    'jeppeandersen.vscode-kafka'
    'jflbr.jwt-decoder'
    'jinsihou.diff-tool'
    'jtr.vscode-position'
    'kenhowardpdx.vscode-gist'
    'leavesster.jsonpath'
    'mads-hartmann.bash-ide-vscode'
    'mamoru.vscode-fish-text'
    'mechatroner.rainbow-csv'
    'mehedidracula.php-namespace-resolver'
    'mhutchie.git-graph'
    'mrmlnc.vscode-apache'
    'ms-azuretools.vscode-docker'
    'ms-ceintl.vscode-language-pack-ru'
    'ms-vscode.hexeditor'
    'ms-vscode.makefile-tools'
    'neilbrayfield.php-docblocker'
    'neonxp.gotools'
    'nickdemayo.vscode-json-editor'
    'nico-castell.linux-desktop-file'
    'open-rpc.open-rpc'
    'pejmannikram.vscode-auto-scroll'
    'pkief.material-icon-theme'
    'qcz.text-power-tools'
    'rogalmic.bash-debug'
    'rust-lang.rust-analyzer'
    'ryu1kn.partial-diff'
    'srmeyers.git-prefix'
    'sumneko.lua'
    'syler.ignore'
    'takumii.markdowntable'
    'tamasfe.even-better-toml'
    'tyriar.lorem-ipsum'
    'vitorsalgado.vscode-redis'
    'waderyan.gitblame'
    'wayou.vscode-todo-highlight'
    'weijunyu.vscode-json-path'
    'xdebug.php-debug'
    'yinfei.luahelper'
    'yog.yog-plantuml-highlight'
    'yves.schema-tree'
    'yzane.markdown-pdf'
    'yzhang.markdown-all-in-one'
    'zgm.cuesheet'
    'zh9528.file-size'
    'zobo.php-intellisense'
)

# BUG FIX: the original iterated over "$exts[@]" (no braces), which expands
# to the FIRST element plus a literal '[@]' — only one bogus extension id was
# ever passed to 'code'. "${exts[@]}" iterates over every entry.
for ext in "${exts[@]}"; do
    code --install-extension "$ext"
done
|
||||
93
scripts/ytdlcue.sh
Normal file
93
scripts/ytdlcue.sh
Normal file
@@ -0,0 +1,93 @@
|
||||
#!/usr/bin/env bash
|
||||
# CUE-sheet generator for youtube-dl
|
||||
|
||||
# Usage:
|
||||
# 0. Install 'jq' utility
|
||||
# 1. Download any audio file with metadata from YouTube or Youtube Music, e.g.
|
||||
# $ youtube-dl \
|
||||
# --extract-audio \
|
||||
# --audio-format flac \
|
||||
# --audio-quality 0 \
|
||||
# --format bestaudio \
|
||||
# --write-info-json \
|
||||
# --output "/tmp/ytm/%(playlist_title)s/%(channel)s - %(title)s.%(ext)s" \
|
||||
# https://www.youtube.com/watch?v=lVpDQnXz34M
|
||||
#
|
||||
# If audio file is already downloaded earlier then just fetch only its metadata:
|
||||
# $ youtube-dl \
|
||||
# --write-info-json \
|
||||
# --skip-download \
|
||||
# --output "/tmp/ytm/%(playlist_title)s/%(channel)s - %(title)s.%(ext)s" \
|
||||
# https://www.youtube.com/watch?v=lVpDQnXz34M
|
||||
#
|
||||
# 2. Audio and metadata files MUST be named identically (except the extension),
|
||||
# but it is not necessary to keep original names. Also they MUST be placed in
|
||||
# the same directory. Example:
|
||||
# /tmp/ytm/ABGT496.flac
|
||||
# /tmp/ytm/ABGT496.info.json
|
||||
#
|
||||
# 3. To create CUE file run ytdlcue with a path to audio file:
|
||||
# $ ytdlcue.sh /tmp/ytm/ABGT496.flac
|
||||
#
|
||||
# A new file will be created in the same directory:
|
||||
# /tmp/ytm/ABGT496.cue
|
||||
|
||||
# Succeeds (exit 0) when the named command is available in PATH.
installed() {
    local tool_name="$1"
    command -v "$tool_name" >/dev/null 2>&1
}
|
||||
|
||||
# jq is required to read the youtube-dl .info.json metadata.
! installed 'jq' && {
    echo "ERROR: you need to install jq!"
    exit 1
}

# Derive every path from the audio file given as $1.
audio_path="$1" # path to audiofile
audio_file=`basename "$audio_path"` # audiofile name with extension
audio_name=${audio_file%.*} # audiofile name without extension
audio_ext=${audio_file##*.} # audiofile name extension
path="`dirname "$audio_path"`/$audio_name" # path to audiofile and its name without ext
json_path="$path.info.json" # path to json file with metadata created by youtube-dl
cue_path="$path.cue" # path to cue sheet to be generated

# echo -e "audio_path:\t$audio_path"
# echo -e "audio_file:\t$audio_file"
# echo -e "audio_name:\t$audio_name"
# echo -e "audio_ext:\t$audio_ext"
# echo -e "path:\t\t$path"
# echo -e "json_path:\t$json_path"
# echo -e "cue_path:\t$cue_path"

[ ! -f "$audio_path" ] && {
    echo "ERROR: File not found: $audio_path"
    exit 2
}
[ ! -f "$json_path" ] && {
    echo "ERROR: File not found: $json_path"
    exit 3
}

# CUE header. jq -Mc prints JSON strings with their double quotes, which
# conveniently doubles as CUE quoting.
echo "PERFORMER `cat "$json_path" | jq -Mc '.channel'`" > "$cue_path"
echo "TITLE `cat "$json_path" | jq -Mc '.title'`" >> "$cue_path"
echo "FILE \"$audio_file\" ${audio_ext^^}" >> "$cue_path"

# One TRACK entry per chapter. NOTE(review): the while-body runs in a
# pipeline subshell, so $counter is only meaningful inside the loop.
counter=1 # track counter (works only inside loop!)
cat "$json_path" | jq -Mc '.chapters[]' \
| while read chapter; do
    number=`printf %0.2d $counter` # pad current counter with zeros
    time=`echo "$chapter" | jq -Mc '.start_time'` # get initial start time in seconds
    time=`printf '%0.2d:%0.2d:00' $((time/60)) $((time%60))` # convert start time to minutes:seconds
    title=`echo "$chapter" | jq -Mc '.title' | sed -r "s#[\"]##g"` # get initial chapter title
    performer=`echo "$title" | cut -d "-" -f 1 | sed 's#^[[:space:]]*##g' | sed 's# *$##g'` # get and trim chapter's performer (before '-')
    title2=`echo "$title" | cut -d "-" -f 2 | sed 's#^[[:space:]]*##g' | sed 's# *$##g'` # get and trim chapter's title (after '-')
    #TODO: what if dash is not delimiter between performer and title?
    #TODO: take $title2 if $performer and (or?) $title2 are empty

    printf "%-2sTRACK $number AUDIO\n" >> "$cue_path"
    printf "%-4sPERFORMER \"$performer\"\n" >> "$cue_path"
    printf "%-4sTITLE \"$title2\"\n" >> "$cue_path"
    printf "%-4sINDEX 01 $time\n" >> "$cue_path"

    counter=`expr $counter + 1` # increase counter
done
echo "Done! Cue file:"
echo "$cue_path"
|
||||
Reference in New Issue
Block a user