Mirror of https://github.com/ohmyzsh/ohmyzsh.git (synced 2026-01-30 02:44:42 +01:00)

Commit a452b7a9f1 ("updates"), parent e47ef872e3.
15 changed files with 768 additions and 3574 deletions.
@@ -1 +1 @@
/Users/peter/.gitconfig
/Users/peter/Dropbox/git-config-files/.gitconfig

@@ -1 +0,0 @@
/Users/peter/.p10k.zsh

1641  .p10k.zsh.bak
File diff suppressed because it is too large

1730  .p10k.zsh~
File diff suppressed because it is too large
49  .vscode/settings.json (vendored)
@@ -1,26 +1,27 @@
{
"python.testing.promptToConfigure": false,
"python.testing.pytestEnabled": false,
"python.testing.unittestEnabled": false,
"python.testing.nosetestsEnabled": false,
"workbench.colorCustomizations": {
"activityBar.activeBackground": "#fbed80",
"activityBar.activeBorder": "#06b9a5",
"activityBar.background": "#fbed80",
"activityBar.foreground": "#15202b",
"activityBar.inactiveForeground": "#15202b99",
"activityBarBadge.background": "#06b9a5",
"activityBarBadge.foreground": "#15202b",
"sash.hoverBorder": "#fbed80",
"statusBar.background": "#f9e64f",
"statusBar.foreground": "#15202b",
"statusBarItem.hoverBackground": "#f7df1e",
"statusBarItem.remoteBackground": "#f9e64f",
"statusBarItem.remoteForeground": "#15202b",
"titleBar.activeBackground": "#f9e64f",
"titleBar.activeForeground": "#15202b",
"titleBar.inactiveBackground": "#f9e64f99",
"titleBar.inactiveForeground": "#15202b99"
},
"peacock.color": "#f9e64f"
"python.testing.promptToConfigure": false,
"python.testing.pytestEnabled": false,
"python.testing.unittestEnabled": false,
"python.testing.nosetestsEnabled": false,
"workbench.colorCustomizations": {
"activityBar.activeBackground": "#fbed80",
"activityBar.activeBorder": "#06b9a5",
"activityBar.background": "#fbed80",
"activityBar.foreground": "#15202b",
"activityBar.inactiveForeground": "#15202b99",
"activityBarBadge.background": "#06b9a5",
"activityBarBadge.foreground": "#15202b",
"sash.hoverBorder": "#fbed80",
"statusBar.background": "#f9e64f",
"statusBar.foreground": "#15202b",
"statusBarItem.hoverBackground": "#f7df1e",
"statusBarItem.remoteBackground": "#f9e64f",
"statusBarItem.remoteForeground": "#15202b",
"titleBar.activeBackground": "#f9e64f",
"titleBar.activeForeground": "#15202b",
"titleBar.inactiveBackground": "#f9e64f99",
"titleBar.inactiveForeground": "#15202b99",
"commandCenter.border": "#15202b99"
},
"peacock.color": "#f9e64f"
}
28  alias-ls.sh
@@ -17,25 +17,27 @@

# NEW SKOOL LS ALIASES
######################
# TODO alias lgr="exa $COMMON_EXA_PARAMS | grep -Ei 'Permissions Size|;'
COMMON_EXA_PARAMS=" --long --header --icons --git --all "
alias l="exa $COMMON_EXA_PARAMS --group --group-directories-first --time-style long-iso --git --git-ignore"
alias ls1="exa $COMMON_EXA_PARAMS --oneline --group-directories-first"
alias ls-tree="exa $COMMON_EXA_PARAMS --tree"
alias lrt="exa $COMMON_EXA_PARAMS --sort newest"
alias lsd="exa $COMMON_EXA_PARAMS --only-dirs"
# TODO alias lgr="eza $COMMON_EZA_PARAMS | grep -Ei 'Permissions Size|;'
COMMON_EZA_PARAMS=" --long --header --icons --git --all --time-style long-iso --no-quotes"
alias l="eza $COMMON_EZA_PARAMS --group --group-directories-first --time-style long-iso --git "
alias ls1="eza $COMMON_EZA_PARAMS --oneline --group-directories-first"
alias ls-tree="eza $COMMON_EZA_PARAMS --tree"
alias tree-ls="eza $COMMON_EZA_PARAMS --tree"
alias lrt="eza $COMMON_EZA_PARAMS --sort newest"
alias lsd="eza $COMMON_EZA_PARAMS --only-dirs"

# PSK 07-09-2022 undoing exa as it's hanging for ages on attached volumes
alias exa-default="exa $COMMON_EXA_PARAMS"
alias l="exa-default "
alias lf="exa-default | grep -v /" # list files only
alias lgr='l | grep -i ' # ls grep
# PSK 07-09-2022 undoing eza as it's hanging for ages on attached volumes
alias eza-default="eza $COMMON_EZA_PARAMS"
# alias l="eza-default "
alias ls="eza"
alias lf="eza-default | grep -v /" # list files only
alias lgr='l | grep -i ' # ls grep
alias lhx="ls ~"
alias list-aliases=n-aliases # wot about my aliases?
alias list-functions=n-functions # wot about my functions?
alias list-javas="l ~/.jenv/versions" # where the fuck is my javas?
alias list-themes="cat ${HOME}/.zsh_favlist" # oh-my-zsh stuff
alias ll="exa-default -t modified | tail -1" # list last file
alias ll="eza-default -t modified | tail -1" # list last file
alias lla='ls -lat'
alias llf="clf"
alias lock="/System/Library/CoreServices/Menu\ Extras/User.menu/Contents/Resources/CGSession -suspend"
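For context, a minimal sketch of how the shared parameter string above behaves (hypothetical illustration, not part of the commit): because the alias bodies are double-quoted, $COMMON_EZA_PARAMS is interpolated at alias-definition time, so eza must be installed and the variable set before this file is sourced.

# Hypothetical illustration: the variable is expanded when the alias is defined.
COMMON_EZA_PARAMS=" --long --header --icons --git --all --time-style long-iso --no-quotes"
alias l="eza $COMMON_EZA_PARAMS --group --group-directories-first"
# so 'l Downloads' runs roughly:
#   eza --long --header --icons --git --all --time-style long-iso --no-quotes --group --group-directories-first Downloads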
79  aliases.sh
@@ -1,59 +1,93 @@
###############################################################################
# PSK's aliases. All of 'em. Well, nearly all.
###############################################################################

source $HOME/.oh-my-zsh/alias-ls.sh
# source $HOME/.oh-my-zsh/alias-sonos.sh

alias activate-venv=". ./venv/bin/activate"
alias ag="anyguard" # command that shows a light whilst command is running then green/red
alias aliases-n=n-aliases
alias aliases=n-aliases
alias all-aliases=n-aliases
alias any="anyguard" # command that shows a light whilst command is running then green/red
alias battery-info="pmset -g batt"
alias bmake="today-time && make "
alias brew-x86="/usr/local/homebrew/bin/brew"
alias cerebro="/usr/local/cerebro-0.9.4/bin/cerebro"
alias clf="exa-default -t modified | tail -1 | xargs less" # cat the last file
alias code="/Applications/Visual\ Studio\ Code.app/Contents/Resources/app/bin/code"
alias cf="copyfile"
alias clf="eza-default -t modified | tail -1 | xargs less" # cat the last file
alias cpwd="copypath"
alias crontab="EDITOR=vi \crontab" # This is because code editing crontab doesn't work
alias denv="ddp"
alias dock-toggle="osascript -e 'tell application \"System Events\" to set autohide of dock preferences to not (autohide of dock preferences)'"
alias egr='env | sort | grep -i '
alias envgr='env | sort | grep -i '
alias errorsx="^FATA|^ERR|^WAR|^FAIL"
alias errx="^FATA|^ERR|^WAR|^FAIL"
alias fdd='\fd -t d -uu -i -L ' # find directories
alias fdda="fd -t d -H -uu -i --no-ignore-vcs -I -L " # find directories in horrible places
alias fdir='\fd -t d -uu -i -L ' # find directories
alias ff='fd -t f -uu -i -L ' # find files
alias ffa="fd -t f -H -uu -i --no-ignore-vcs -I -L " # find files in horrible places
alias es="env|sort | bat"
alias fdd-exact='\fd --type d --exclude "__pycache__" --glob --max-depth 7' # don't use regex as is the default
alias fdd='\fd --type d --ignore-case --exclude "__pycache__" --max-depth 7' # find directories
alias fdda-exact='\fd --type d -H -uu --exclude "__pycache__" --no-ignore-vcs --glob --max-depth 7' # don't use regex as is the default
alias fdda="fd --type d -H -uu --ignore-case --no-ignore-vcs -I -L --max-depth 7" # find directories in horrible places
alias fdir='\fd --type d -uu --ignore-case --exclude "__pycache__" --max-depth 7' # find directories
alias ff-exact='\fd --type f --exclude "__pycache__" --glob --max-depth 7' # don't use regex as is the default
alias ff='\fd --type f --ignore-case --exclude "__pycache__" --max-depth 7' # find files
alias ffa-exact='\fd --type f -H -uu --exclude "__pycache__" --no-ignore-vcs --glob --max-depth 7' # don't use regex as is the default
alias ffa="\fd --type f -H -uu --ignore-case --no-ignore-vcs -I --max-depth 7" # find files in horrible places
alias fin="echo -n "blue" | nc -4u -w0 localhost 1738"
alias flush-dns="flush"
alias flush="sudo dscacheutil -flushcache; sudo killall -HUP mDNSResponder"
alias ftp-DS="sftp -P 699 pskadmin@ds718-psk.synology.me"
alias functions-n=n-functions
alias g="google"
alias fzf="\fzf --preview 'bat --color=always --style=numbers --line-range=:500 {}'"
alias g="google-search"
alias yt="youtube-search"
alias az="amazon-search"
alias gfind-filename=gfindf
alias gfind-matches-only="gfind-only-matching-files"
alias gfinda-matches-only="gfind-all-only-matching-files"
alias gfinda="gfind-all"
alias ggm="cd ~\/src\/rune\/go-mono"
alias ghub-code='ghub browse -- tree/main'
alias ghub-pr='ghub browse -- pulls'
alias ghub='\hub '
alias gi="git-info"
alias git-info='echo "BRANCH ———————" && git branch -vv && echo "\nREMOTE———————" && git remote -vv'
alias go-go-mono="cd ~\/src\/rune\/go-mono"
alias git-reset='git checkout HEAD -- '
alias go-omz="cd ~\/.oh-my-zsh"
alias go-tortilla="cd ~\/src\/rune\/go-mono\/tortilla"
alias gomz="cd ~\/.oh-my-zsh"
alias goomz="cd ~\/.oh-my-zsh"
alias gpm="cd ~\/src\/rune\/python-mono"
alias grad="cd ~\/src\/rune\/python-mono\/radish"
alias gtor="cd ~\/src\/rune\/go-mono\/tortilla"
alias gtx="cd ~\/src\/rune\/go-mono\/tortilla"
alias grep-nf="grep --color=always --ignore-case --exclude-dir={.bzr,CVS,.git,.hg,.svn,.idea,.tox}"
alias help="tldr"
alias hgr="history -500 | grep -i "
alias hub-code='hub browse -- tree/main'
alias hub-pr='hub browse -- pulls'
alias list-aliases=n-aliases # wot about my aliases?
alias list-functions=n-functions # wot about my functions?
alias info-battery="battery-info"
alias ip-address="what-is-my-ip"
alias ip="what-is-my-ip"
alias list-aliases=n-aliases # wot about my aliases?
alias list-all-widgets="zle -al"
alias list-functions=n-functions # wot about my functions?
alias list-shell-widgets="list-all-widgets"
alias list-themes="cat ${HOME}/.zsh_favlist" # oh-my-zsh stuff
alias lock="/System/Library/CoreServices/Menu\ Extras/User.menu/Contents/Resources/CGSession -suspend"
source $HOME/.oh-my-zsh/alias-ls.sh
alias lock="/System/Library/CoreServices/Menu\ Extras/User.menu/Contents/Resources/CGSession -suspend"
alias lz='lazygit'
alias maketi="today-time&& make test-integration 2>&1 | tee .\/make-local.log ; docker-stop-all-containers >\/dev\/null &"
alias maketil="make-big-break&& make test-integration-local 2>&1 | tee .\/make-local.log ; docker-stop-all-containers >\/dev\/null &"
alias mdf=mdfind-current-dir
alias mdfh=mdfind-home-dir
alias mdfgh=mdfind-grep-home-dir # mdfind grep starting in $HOME
alias mdfhg=mdfgh
alias mdfg=mdfind-grep-current-dir # mdfind grep starting in .
alias mmake="today-time && make "
alias mount-DS718="nohup /Users/peter/bin/mount-DS718.sh 2>/dev/null &"
alias mti="today-time&& make test-integration 2>&1 | tee .\/make-local.log ; docker-stop-all-containers >\/dev\/null &"
alias mtil="make-big-break&& make test-integration-local 2>&1 | tee .\/make-local.log ; docker-stop-all-containers >\/dev\/null &"
alias my-ip="what-is-my-ip"
alias o="open ."
alias ohmyzsh="vscode ~/.oh-my-zsh"
alias old-brew="/usr/local/homebrew/bin/brew"
alias q='echo you are not in pdb, ya twit' # This is cos I keep on hitting q to exit pdb when I've already exited and just want to repeat the container run
alias s="/Users/peter/.Sidekick/s-macos" # Sidekick is a ChatGPT ... clone? It sits in VS Code usually
alias set-timer="termdown"
alias sftp-DS="sftp -P 699 pskadmin@ds718-psk.synology.me"
alias slate="code ~/.slate.js"

@@ -64,10 +98,13 @@ alias stopwatch="termdown"
alias sudo="nocorrect sudo"
alias t="tmux"
alias timer="termdown"
alias tldr-cool="cht.sh"
alias vscode="/Applications/Visual\ Studio\ Code.app/Contents/Resources/app/bin/code"
alias wtf="tldr"
alias wts="tldr"
alias yoink="open -a Yoink"
alias z="vscode ~/.zshrc"
# alias ls="exa --header --git -1 --long"
# alias ls="eza --header --git -1 --long"
# WARNING: these might balls things up
# alias cat="bat"
# alias man="batman"
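A note on the leading backslashes used in several definitions above (\crontab, \fd, \grep, \fzf, \hub): a backslash before a command name suppresses alias expansion, so the wrapper alias can call the real binary without recursing. A small sketch, not from the commit:

# Hypothetical illustration of the backslash-escape pattern used above.
alias grep='grep --color=always'    # wrapper alias
\grep -n TODO aliases.sh            # the backslash bypasses the alias and runs the real grep
alias crontab="EDITOR=vi \crontab"  # inside the alias body, \crontab avoids recursive expansion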
@@ -1,43 +0,0 @@

function run_cicd_test_containers() {
echo "Starting local CI/CD tests at `date`"
echo "Running bandit..."
docker-compose run --no-deps --rm clever-sync-api python -m bandit -r . -x /tests/
RES2=$?
echo "Running black..."
docker-compose run --no-deps --rm clever-sync-api black . --check --line-length=120
RES4=$?
echo "Running flake8..."
docker-compose run --no-deps --rm clever-sync-api flake8 --config /app/dev-tools-config/.flake8 .
RES3=$?
# echo "Running pylint..."
# docker-compose run --no-deps --rm clever-sync-api pylint --rcfile /app/dev-tools-config/.pylintrc classkick
# RES2=$?
echo "Running isort..."
docker-compose run --no-deps --rm clever-sync-api isort . --profile black --line-length=120 --check
RES1=$?
echo "Running coverage..."
docker-compose run --no-deps --rm clever-sync-api pytest --cov .
RES5=$?
echo "Finished local CI/CD tests at `date`"

if [[ $RES1 -ne 0 || $RES2 -ne 0 || $RES3 -ne 0 || $RES4 -ne 0 || $RES5 -ne 0 ]]
then
return 1
fi

return 0
}

function run_cicd_fix_containers() {
echo "Starting CI/CD containers in 'fix mode' at `date`."
echo "Are you sure you want to continue?"
read

echo "Running black..."
docker-compose run --no-deps --rm clever-sync-api black . --line-length=120
RES4=$?
echo "Running isort..."
docker-compose run --no-deps --rm clever-sync-api isort . --profile black --line-length=120
RES1=$?
}
@@ -1,8 +0,0 @@
function git-diff-subdirs() {
for DIR in */; do
pushd $DIR
echo "Git diffing $PWD"
git --no-pager diff -- ':!*poetry.lock'
popd
done
}
@@ -2,15 +2,19 @@
### DOCKER UTIL
###

alias dc='docker-compose '
alias ddp="echo '\$DOCKER_DEFAULT_PLATFORM': $DOCKER_DEFAULT_PLATFORM"
alias dk="docker"
alias docker-killall="killall com.docker.hyperkit"
alias killall-docker="killall com.docker.hyperkit"

alias dps="docker ps"
alias dl="docker logs "
alias dlf="docker logs -f "
alias dkl="dl"
alias dlf="dlf"
alias docker-killall="killall com.docker.hyperkit"
alias dps="docker ps"
alias dkps="dps"
alias killall-docker="killall com.docker.hyperkit"
alias dkls="dk container ls"

# stop all containers
function docker-stop-all-containers () {
echo "Stopping all containers..." ; docker container stop -t 2 $(docker container ls -q) 2>/dev/null ; echo ""
}

@@ -19,6 +23,19 @@ function docker-lsg () {
docker image ls | grep -Ei "'IMAGE ID'|$1"
}


# kill most recent container instance
alias docker-kill-latest='docker ps -l --format="{{.Names}}" | xargs docker kill'
alias docker-kill-last='docker-kill-latest'
alias docker-kill-most-recent='docker-kill-latest'

function docker-ls-grep () {
docker image ls | grep -Ei "'IMAGE ID'|$1"
}


alias dkl='docker-ls-grep'

# kill most recent container instance
alias docker-kill-latest='docker ps -l --format="{{.Names}}" | xargs docker kill'
alias kill-docker-latest=docker-kill-latest
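For reference, a hedged usage sketch of the container helpers defined above (assumes a running Docker daemon; illustrative only):

# Hypothetical usage of the helpers above.
docker-stop-all-containers   # stops every running container with a 2-second timeout
docker-kill-latest           # kills only the most recently created container
docker-ls-grep python        # lists images whose name or tag matches "python", keeping the header row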
@@ -22,7 +22,7 @@ alias 9='cd -9'
alias md='mkdir -p'
alias rd=rmdir

function d () {
function d() {
if [[ -n $1 ]]; then
dirs "$@"
else

@@ -35,4 +35,5 @@ compdef _dirs d
alias lsa='ls -lah'
alias l='ls -lah'
alias ll='ls -lh'
alias la='ls -lAh'
# Note that if I switch back to native ls from eza, change the a to an A
alias la='ls -lah'
@@ -1,4 +1,4 @@
source ~/.oh-my-zsh/classkick-functions.zsh
source ~/.oh-my-zsh/work-functions.zsh

function zsh_stats() {
fc -l 1 \

@@ -269,24 +269,31 @@ function omz_urldecode {
##################################
# ls grep
lsg() {
la | grep -iE "$1"
la | grep --color=always -iE "$1"
}

function alg() {
beenv() {
echo "\$MYSQL_USER=$MYSQL_USER"
echo "\$MYSQL_PASSWORD=$MYSQL_PASSWORD"
echo "\$MYSQL_HOST_IP=$MYSQL_HOST_IP"
echo "\$PYTHONPATH=$PYTHONPATH"
echo "\$TEST_DB=$TEST_DB"
}

alg () {
FN=/tmp/alg.$$
echo -e "\nAliases ———————" > $FN
alias | grep -i $1 >> $FN
alias | grep --color=always --ignore-case -h --exclude-dir={.bzr,CVS,.git,.hg,.svn,.idea,.tox} -i $1 >> $FN
echo -e "\nFunctions ———————" >> $FN
functions | grep -i $1 >> $FN
bat $FN
functions | grep --color=always --ignore-case -h --exclude-dir={.bzr,CVS,.git,.hg,.svn,.idea,.tox} -i $1 >> $FN
less $FN
rm -f $FN
}

alias agr="alg"
alias alias-grep="alg"

# These need to be here since they're required by gfind*
alias ag-no-pager="/opt/homebrew/bin/ag --ignore '*.svg' --ignore '*.xlt' --ignore '*.tsx' --ignore '*.js' --ignore '*.snap' --ignore '*.json' --ignore '*.dat' --ignore '*.builds' --ignore '*.tsv' --ignore '*.csv' --ignore '*.lock' --ignore '*.patch' --ignore '*.sum'"
alias ag-no-pager="/opt/homebrew/bin/ag --ignore '*.snap' --ignore '*.dat' --ignore '*.builds' --ignore '*.tsv' --ignore '*.csv' --ignore '*.lock' --ignore '*.patch' --ignore '*.sum' --ignore-dir={venv,node_modules,mounts}"
alias ag="ag-no-pager --pager=bat"
alias "git-grep"="git \grep"
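A short usage sketch for the alg helper above (illustrative only): it collects matching aliases and functions into a temp file and pages it.

# Hypothetical usage of alg ("alias grep") as defined above.
alg docker      # page through every alias and function whose definition mentions "docker"
agr docker      # same thing via the agr / alias-grep aliases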
@@ -318,6 +325,7 @@ function today-time() {
}

alias make-big-break=page-break
alias space=page-break

# the ol' gfind. Doesn't take a file pattern.
function gfind-all() {
@@ -329,34 +337,35 @@ function gfind-all() {
ag-no-pager --ignore-case --hidden --ignore-case --pager bat "$@"
}

function gfind-all-only-matching-files() {
ag-no-pager --ignore-case --hidden --ignore-case --pager bat --count "$@"
}

# the ol' gfind. Doesn't take a file pattern.
function gfind() {
# fd -t f . -x grep --color=always -Hi ${1}
ag-no-pager --ignore-case --pager bat "$@"
}

# Print out the matches only
function gfindf() {
ack -l $1 --pager=bat --color
function gfind-only-matching-files() {
ag-no-pager --ignore-case --pager bat --count "$@"
}

# function h() {
# NUM_LINES = ${1:-1000}
# history | tail -n $NUM_LINES
# }
# Print out the matches only
function gfindf() {
ack -l $1 --pager=less --color
}

# function h() {
# set -x
# NUM_LINES = ${1:-25}
# \history -${NUM_LINES}
# }
batdiff() {
git diff --name-only --diff-filter=d | xargs bat --diff
}

function agl() {
ag --pager less "$@"
ag --pager bat "$@"
}

function lsofgr() {
sudo lsof -i -P | grep -E "$1|LISTEN" | grep -E "$1|:"
sudo lsof -i -P | grep -E "$1|COMMAND" | grep -E "$1|:"
}

function kill-em-all() {
@@ -396,27 +405,60 @@ alias cls=clean-slate
function print-hashes() {
repeat $1 echo -n "#" ; echo
}

function h() {
print-hashes 60
NUM_LINES=$1
if [ -z "$NUM_LINES" ]; then
NUM_LINES=35
fi
\history -$NUM_LINES
print-hashes 60
# check if we passed any parameters
if [ -z "$*" ]; then
# if no parameters were passed print entire history
print-hashes 60
history -200
print-hashes 60
else
# if words were passed use it as a search
history 1 | egrep --color=auto "$@"
fi
}

function psgr() {
ps -e | grep -v 'grep ' | grep -iE "TIME CMD|$1"
# function h() {
# print-hashes 60
# NUM_LINES=$1
# if [ -z "$NUM_LINES" ]; then
# NUM_LINES=35
# fi
# history -$NUM_LINES
# print-hashes 60
# }


function psgr () {
# echo " UID PID PPID C STIME TTY TIME CMD"
# ps -h -A -o %cpu,%mem,command,cputime,state,user,pid,ppid,ruser,start,time,tt,tty | \grep -v 'grep ' | awk '{$1=$1};1' | \grep -iE "$1"
ps -ef | \grep -v 'grep ' | awk '{$1=$1};1' | \grep -iE "$1|PPID"
}

# Sort on the command
function psgr-sorted() {
echo " PID TTY TIME CMD"
ps -e | grep -v 'grep ' | grep -iE "$1" | sort -k 4
echo " UID PID PPID C STIME TTY TIME CMD"
ps -ef | grep -v 'grep ' | awk '{$1=$1};1' | grep -iE "$1" | sort -k 4
}

psgr-pid-only () {
ps -e | \grep -v 'grep ' | awk '{$1=$1};1' | \grep -iE "$1" | cut -f 1 -d" "
}

kill-em-all () {
PROCESS_NAME=$1
echo "This will absolutely kill all processes matching *$PROCESS_NAME*. Are you sure?"
read -n 1
psgr-pid-only $PROCESS_NAME|cut -f 1 -d" "| tr '\n' ' ' | xargs kill -9
RES=$?
echo "RES: • ${RES} •"
}

function list-app-url-schemes() {
defaults read /Applications/${1}.app/Contents/Info.plist CFBundleURLTypes
}
alias url-schemes-of-app=list-app-url-schemes

function lsofgr-listen() {
echo "Searching for processes listening on port $1..."
#echo "ℹ️ lsof can take up to 2 minutes to complete"
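A quick usage sketch of the rewritten h history helper above (illustrative): with no arguments it prints the last 200 history entries between rows of hashes; with arguments it greps the full history instead.

# Hypothetical usage of h() as defined above.
h               # last 200 history entries, framed by print-hashes 60
h "git push"    # grep the whole history (history 1) for the given pattern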
@@ -435,6 +477,11 @@ function zshrc() {
popd
}


function what-is-my-ip() {
ifconfig en0 | awk -v OFS="\n" '{ print $1 " " $2, $NF }' | grep "inet 192"
}

function dir-sizes() {
du -sh ./* | sort -h
}
@@ -460,11 +507,23 @@ function git-show-branch() {
git branch -vv | grep `git branch --show-current`
}

function git-show-stash() {
git stash show stash@{$1}
}
alias gss="git-show-stash"

function git-apply-stash() {
git stash apply stash@{$1}
}
alias gas="git-apply-stash"
alias gsa="git-apply-stash"

function git-show-all-stashes() {
echo "Hit 'q' to go to next file"
echo ""
git stash list | awk -F: '{ print "\n\n\n\n"; print $0; print "\n\n"; system("git stash show -p " $1); }'
}
alias gsas="git-show-all-stashes"

# Check whether the supplied file is under SCM/git.
# Check whether the supplied file is under SCM/git.
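Usage sketch for the stash helpers added above (illustrative only): the numeric argument is interpolated into stash@{N}.

# Hypothetical usage of the stash helpers above.
git stash list   # find the index of the stash you want
gss 0            # git stash show stash@{0}
gsa 2            # git stash apply stash@{2}
gsas             # page through a patch for every stash, one 'q' per file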
@@ -472,18 +531,6 @@ function git-status() {
git ls-files | grep $1
}

# kill most recent container instance
alias docker-kill-latest='docker ps -l --format="{{.Names}}" | xargs docker kill'

# stop all containers
function docker-stop-all-containers () {
docker container stop -t 2 $(docker container ls -q) 2>/dev/null ; echo ""
}

function docker-lsg () {
docker image ls | grep -Ei "'IMAGE ID'|$1"
}

function find-gig-files() {
find . -size +1G -ls | sort -k7n # Find files larger than 1GB and then order the list by the file size
}
@@ -528,42 +575,8 @@ function open-job-docs() {
open 'https://docs.google.com/document/d/1gPNcLjrZJnJnWy0-k5SqpgP4VAUZ_ikRLR9qYEB50M0/edit'
}

goclean() {
local pkg=$1; shift || return 1
local ost
local cnt
local scr

# Clean removes object files from package source directories (ignore error)
go clean -i $pkg &>/dev/null

# Set local variables
[[ "$(uname -m)" == "x86_64" ]] \
&& ost="$(uname)";ost="${ost,,}_amd64" \
&& cnt="${pkg//[^\/]}"

# Delete the source directory and compiled package directory(ies)
if (("${#cnt}" == "2")); then
rm -rf "${GOPATH%%:*}/src/${pkg%/*}"
rm -rf "${GOPATH%%:*}/pkg/${ost}/${pkg%/*}"
elif (("${#cnt}" > "2")); then
rm -rf "${GOPATH%%:*}/src/${pkg%/*/*}"
rm -rf "${GOPATH%%:*}/pkg/${ost}/${pkg%/*/*}"
fi
}

function _open-all-chrome-apps() {
for APP in "${1}"/*.app; do
echo "Opening $APP ..."
nohup open -a "$APP" > /dev/null 2>&1 &
done
}

function open-all-chrome-apps() {
CHROME_APP_DIR='/Users/peter/Dropbox (Personal)/_Settings/Chrome Apps/Chrome Apps.localized'
_open-all-chrome-apps $CHROME_APP_DIR
CHROME_APP_DIR='/Users/peter/Dropbox (Personal)/_Settings/Chrome/Chrome Apps/Chrome Apps.localized'
_open-all-chrome-apps $CHROME_APP_DIR
/Users/peter/.oh-my-zsh/bin/launch-browser-apps.sh & 2>&1 > /dev/null
}

function post-boot-tasks() {
@@ -581,6 +594,39 @@ function kill-cloud-storage() {
killall -v "FinderSyncExtension" -SIGKILL &
}

function mdfind() {
/usr/bin/mdfind $@ 2> >(grep --invert-match ' \[UserQueryParser\] ' >&2)
}

function mdfind-current-dir() { # find file in .
mdfind -onlyin . -name $1
}

function mdfind-home-dir() { # mdfind file in $HOME
mdfind -onlyin $HOME -name $1
}

function mdfind-grep-current-dir() { # mdfind grep in current dir
mdfind -onlyin . $1
}

function mdfind-grep-home-dir() { # mdfind grep in $HOME
mdfind -onlyin $HOME $1
}


function google-search() {
s $1 -p google
}

function amazon-search() {
s $1 -p amazon
}

function youtube-search() {
s $1 -p youtube
}

function explain-command {
command="https://explainshell.com/explain?cmd=${1}"
osascript <<EOD
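Usage sketch for the Spotlight and web-search wrappers above (illustrative; the short forms are the aliases defined earlier in aliases.sh, and the search helpers assume the s CLI aliased there):

# Hypothetical usage of the mdfind wrappers and search helpers above.
mdf budget.xlsx                # mdfind -onlyin . -name budget.xlsx
mdfgh invoice                  # Spotlight content search under $HOME
g "zsh parameter expansion"    # google-search via the s CLI
yt "eza demo"                  # youtube-search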
@@ -704,6 +750,7 @@ alias kill-percol="ppkill"
# tab to a random color
PRELINE="\r\033[A"

# From https://stackoverflow.com/questions/59090903/is-there-any-way-to-get-iterm2-to-color-each-new-tab-with-a-different-color-usi
function random {
echo -e "\033]6;1;bg;red;brightness;$((1 + $RANDOM % 255))\a"$PRELINE
echo -e "\033]6;1;bg;green;brightness;$((1 + $RANDOM % 255))\a"$PRELINE
@@ -731,3 +778,59 @@ function color {
random
esac
}

function git-diff-repos() {
RESULTS="diff-results.patch"
echo "" > $RESULTS

for DIR in */; do
pushd $DIR
echo "\n———————\nGit diffing $PWD\n———————\n" >> ../$RESULTS
git --no-pager diff --ignore-space-change -- ':!*poetry.lock' -- ':^.vscode' . >> ../$RESULTS
popd
done

bat $RESULTS
}

function git-info-repos() {
RESULTS="info-results.txt"
echo "" > $RESULTS

for DIR in */; do
pushd $DIR
echo "\n———————\nGit info-ing $PWD\n———————\n" >> ../$RESULTS
git-info 2>&1 >> ../$RESULTS
popd
done

bat $RESULTS
}

function git-merge-develop() {
echo "\nChecking out develop..." && gco develop && \
echo "\nPulling develop..." && git pull && \
echo "\nChecking out -..." && gco - && \
echo "\nMerging develop in..." && git merge develop
}

function scrcpy-s20() {
# presumably this once worked but does not now
# /opt/homebrew/bin/scrcpy --serial R5CN20MZEKJ
/opt/homebrew/bin/scrcpy -sadb-R5CN20MZEKJ-ogJwLS._adb-tls-connect._tcp.
if [[ "$?" -ne 0 ]]
then
echo "first run usually fails, trying a second time..."
fi
}
alias run-scrcpy=launch-scrcpy
alias scr=launch-scrcpy


function dir-with-most-files() {
~/bin/dir-with-most-files.sh "$@"
}

alias most-files-dir=dir-with-most-files
alias count-files=dir-with-most-files
alias biggest-dirs=dir-with-most-files
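Usage sketch for the multi-repo helpers above (illustrative; run from a parent directory whose immediate children are git checkouts, e.g. the mono-repo parent paths aliased earlier):

# Hypothetical usage of git-diff-repos / git-info-repos above.
cd ~/src/rune          # example parent directory containing one git repo per subdirectory
git-diff-repos         # writes a combined diff-results.patch and pages it with bat
git-info-repos         # writes branch/remote info for every repo to info-results.txt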
@@ -15,10 +15,9 @@ else
# Check for --exclude-dir, otherwise check for --exclude. If --exclude
# isn't available, --color won't be either (they were released at the same
# time (v2.5): https://git.savannah.gnu.org/cgit/grep.git/tree/NEWS?id=1236f007
if grep-flags-available --color=auto --exclude-dir=.cvs; then
GREP_OPTIONS="--exclude-dir=$EXC_FOLDERS"
elif grep-flags-available --color=auto --exclude=.cvs; then
GREP_OPTIONS="--exclude=$EXC_FOLDERS"
if grep-flags-available --color=always --exclude-dir=.cvs; then
GREP_OPTIONS="--color=always --ignore-case --exclude-dir=$EXC_FOLDERS"
elif grep-flags-available --color=always --exclude=.cvs; then
fi

if [[ -n "$GREP_OPTIONS" ]]; then

@@ -26,7 +25,6 @@ else
alias grep="grep $GREP_OPTIONS"
alias egrep="egrep $GREP_OPTIONS"
alias fgrep="fgrep $GREP_OPTIONS"

# write to cache file if cache directory is writable
if [[ -w "$ZSH_CACHE_DIR" ]]; then
alias -L grep egrep fgrep >| "$__GREP_CACHE_FILE"
212  raycast-completions.sh (new file)
@@ -0,0 +1,212 @@
#compdef ray
compdef _ray ray

# zsh completion for ray -*- shell-script -*-

__ray_debug()
{
# local file="$BASH_COMP_DEBUG_FILE"
# if [[ -n ${file} ]]; then
# echo "$*" >> "${file}"
# fi
}

_ray()
{
local shellCompDirectiveError=1
local shellCompDirectiveNoSpace=2
local shellCompDirectiveNoFileComp=4
local shellCompDirectiveFilterFileExt=8
local shellCompDirectiveFilterDirs=16
local shellCompDirectiveKeepOrder=32

local lastParam lastChar flagPrefix requestComp out directive comp lastComp noSpace keepOrder
local -a completions

__ray_debug "\n========= starting completion logic =========="
__ray_debug "CURRENT: ${CURRENT}, words[*]: ${words[*]}"

# The user could have moved the cursor backwards on the command-line.
# We need to trigger completion from the $CURRENT location, so we need
# to truncate the command-line ($words) up to the $CURRENT location.
# (We cannot use $CURSOR as its value does not work when a command is an alias.)
words=("${=words[1,CURRENT]}")
__ray_debug "Truncated words[*]: ${words[*]},"

lastParam=${words[-1]}
lastChar=${lastParam[-1]}
__ray_debug "lastParam: ${lastParam}, lastChar: ${lastChar}"

# For zsh, when completing a flag with an = (e.g., ray -n=<TAB>)
# completions must be prefixed with the flag
setopt local_options BASH_REMATCH
if [[ "${lastParam}" =~ '-.*=' ]]; then
# We are dealing with a flag with an =
flagPrefix="-P ${BASH_REMATCH}"
fi

# Prepare the command to obtain completions
requestComp="${words[1]} __complete ${words[2,-1]}"
if [ "${lastChar}" = "" ]; then
# If the last parameter is complete (there is a space following it)
# We add an extra empty parameter so we can indicate this to the go completion code.
__ray_debug "Adding extra empty parameter"
requestComp="${requestComp} \"\""
fi

__ray_debug "About to call: eval ${requestComp}"

# Use eval to handle any environment variables and such
out=$(eval ${requestComp} 2>/dev/null)
__ray_debug "completion output: ${out}"

# Extract the directive integer following a : from the last line
local lastLine
while IFS='\n' read -r line; do
lastLine=${line}
done < <(printf "%s\n" "${out[@]}")
__ray_debug "last line: ${lastLine}"

if [ "${lastLine[1]}" = : ]; then
directive=${lastLine[2,-1]}
# Remove the directive including the : and the newline
local suffix
(( suffix=${#lastLine}+2))
out=${out[1,-$suffix]}
else
# There is no directive specified. Leave $out as is.
__ray_debug "No directive found. Setting do default"
directive=0
fi

__ray_debug "directive: ${directive}"
__ray_debug "completions: ${out}"
__ray_debug "flagPrefix: ${flagPrefix}"

if [ $((directive & shellCompDirectiveError)) -ne 0 ]; then
__ray_debug "Completion received error. Ignoring completions."
return
fi

local activeHelpMarker="_activeHelp_ "
local endIndex=${#activeHelpMarker}
local startIndex=$((${#activeHelpMarker}+1))
local hasActiveHelp=0
while IFS='\n' read -r comp; do
# Check if this is an activeHelp statement (i.e., prefixed with $activeHelpMarker)
if [ "${comp[1,$endIndex]}" = "$activeHelpMarker" ];then
__ray_debug "ActiveHelp found: $comp"
comp="${comp[$startIndex,-1]}"
if [ -n "$comp" ]; then
compadd -x "${comp}"
__ray_debug "ActiveHelp will need delimiter"
hasActiveHelp=1
fi

continue
fi

if [ -n "$comp" ]; then
# If requested, completions are returned with a description.
# The description is preceded by a TAB character.
# For zsh's _describe, we need to use a : instead of a TAB.
# We first need to escape any : as part of the completion itself.
comp=${comp//:/\\:}

local tab="$(printf '\t')"
comp=${comp//$tab/:}

__ray_debug "Adding completion: ${comp}"
completions+=${comp}
lastComp=$comp
fi
done < <(printf "%s\n" "${out[@]}")

# Add a delimiter after the activeHelp statements, but only if:
# - there are completions following the activeHelp statements, or
# - file completion will be performed (so there will be choices after the activeHelp)
if [ $hasActiveHelp -eq 1 ]; then
if [ ${#completions} -ne 0 ] || [ $((directive & shellCompDirectiveNoFileComp)) -eq 0 ]; then
__ray_debug "Adding activeHelp delimiter"
compadd -x "--"
hasActiveHelp=0
fi
fi

if [ $((directive & shellCompDirectiveNoSpace)) -ne 0 ]; then
__ray_debug "Activating nospace."
noSpace="-S ''"
fi

if [ $((directive & shellCompDirectiveKeepOrder)) -ne 0 ]; then
__ray_debug "Activating keep order."
keepOrder="-V"
fi

if [ $((directive & shellCompDirectiveFilterFileExt)) -ne 0 ]; then
# File extension filtering
local filteringCmd
filteringCmd='_files'
for filter in ${completions[@]}; do
if [ ${filter[1]} != '*' ]; then
# zsh requires a glob pattern to do file filtering
filter="\*.$filter"
fi
filteringCmd+=" -g $filter"
done
filteringCmd+=" ${flagPrefix}"

__ray_debug "File filtering command: $filteringCmd"
_arguments '*:filename:'"$filteringCmd"
elif [ $((directive & shellCompDirectiveFilterDirs)) -ne 0 ]; then
# File completion for directories only
local subdir
subdir="${completions[1]}"
if [ -n "$subdir" ]; then
__ray_debug "Listing directories in $subdir"
pushd "${subdir}" >/dev/null 2>&1
else
__ray_debug "Listing directories in ."
fi

local result
_arguments '*:dirname:_files -/'" ${flagPrefix}"
result=$?
if [ -n "$subdir" ]; then
popd >/dev/null 2>&1
fi
return $result
else
__ray_debug "Calling _describe"
if eval _describe $keepOrder "completions" completions $flagPrefix $noSpace; then
__ray_debug "_describe found some completions"

# Return the success of having called _describe
return 0
else
__ray_debug "_describe did not find completions."
__ray_debug "Checking if we should do file completion."
if [ $((directive & shellCompDirectiveNoFileComp)) -ne 0 ]; then
__ray_debug "deactivating file completion"

# We must return an error code here to let zsh know that there were no
# completions found by _describe; this is what will trigger other
# matching algorithms to attempt to find completions.
# For example zsh can match letters in the middle of words.
return 1
else
# Perform file completion
__ray_debug "Activating file completion"

# We must return the result of this command, so it must be the
# last command, or else we must store its result to return it.
_arguments '*:filename:_files'" ${flagPrefix}"
fi
fi
fi
}

# don't run the completion function when being source-ed or eval-ed
if [ "$funcstack[1]" = "_ray" ]; then
_ray
fi
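One way to wire up the generated completion above (a hedged sketch, not part of the commit; the install paths are assumptions): either source the file directly, since it registers itself with compdef, or place it on $fpath as _ray before compinit runs so the #compdef tag handles registration.

# Hypothetical installation of raycast-completions.sh.
source ~/.oh-my-zsh/raycast-completions.sh      # the file calls `compdef _ray ray` itself
# or, for autoloading, copy it onto fpath as _ray before compinit:
#   cp raycast-completions.sh ~/.oh-my-zsh/completions/_ray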
246  work-functions.zsh (new file)
@@ -0,0 +1,246 @@
function run_cicd_test_containers() {
echo "Starting local CI/CD tests at $(date)"
echo "Running bandit..."
docker-compose run --no-deps --rm clever-sync-api python -m bandit -r . -x /tests/
RES2=$?
echo "Running black..."
docker-compose run --no-deps --rm clever-sync-api black . --check --line-length=120
RES4=$?
echo "Running flake8..."
docker-compose run --no-deps --rm clever-sync-api flake8 --config /app/dev-tools-config/.flake8 .
RES3=$?
# echo "Running pylint..."
# docker-compose run --no-deps --rm clever-sync-api pylint --rcfile /app/dev-tools-config/.pylintrc classkick
# RES2=$?
echo "Running isort..."
docker-compose run --no-deps --rm clever-sync-api isort . --profile black --line-length=120 --check
RES1=$?
echo "Running coverage..."
docker-compose run --no-deps --rm clever-sync-api pytest --cov .
RES5=$?
echo "Finished local CI/CD tests at $(date)"

if [[ $RES1 -ne 0 || $RES2 -ne 0 || $RES3 -ne 0 || $RES4 -ne 0 || $RES5 -ne 0 ]]; then
return 1
fi

return 0
}

function run-cicd-fix() {
echo "Starting CI/CD containers in 'fix mode' at $(date)."
echo "Running black..."
docker-compose run --no-deps --rm $APP black . --line-length=120
RES4=$?
echo "Running ruff fix..."
docker-compose run --rm $APP ruff check --fix --config /app/dev-tools-config/.ruff.toml .
RES1=$?
}

alias grep-branches="find-branch"
alias search-branches="find-branch"


function run-cicd-tests() {
LOGDIR="./cicd-test-logs"
mkdir -p $LOGDIR
docker-compose run --rm $APP black . --line-length=120
docker-compose run --rm $APP ruff check --config /app/dev-tools-config/.ruff.toml .
docker-compose run --rm $APP python -m bandit -r . -x /tests/
docker-compose run -e TEST_DB=$APP --rm $APP pytest .
docker-compose run --rm $APP pytest --cov .
# docker-compose logs -t -f --tail 5 # Doesn't work - only tails "proper containers", not these temp ones
}

function run-bandit() {
docker-compose run --rm $APP python -m bandit -r . -x /tests/
}

function run-pytest() {
docker-compose run -e TEST_DB=$APP --rm $APP pytest .
}

function run-black() {
docker-compose run --rm $APP black . --line-length=120
}

function run-isort() {
docker-compose run --rm $APP isort . --profile black --check
}

function run-flake8() {
docker-compose run --rm $APP flake8 --config /app/dev-tools-config/.flake8 .
}

function run-flake8-on-local-dir() {
autoflake --remove-unused-variables --remove-all-unused-imports --in-place --recursive $1
}

function classkick-web-build() {
docker compose build --no-cache --build-arg GITHUB_TOKEN=$GITHUB_TOKEN
}

function run-pylint() {
docker-compose run --rm $APP pylint --rcfile /app/dev-tools-config/.pylintrc classkick
}

# Obvs adapt to current needs
function run-pytest-dir() {
cls
DIR="${1:-.}"
docker-compose run -e TEST_DB=$APP --rm $APP pytest $DIR
}

function run-pytest-dir-pdb() {
cls
DIR="${1:-.}"
docker-compose run -e TEST_DB=$APP --rm $APP pytest --pdb $DIR
}

function rebuild-db() {
echo "Deleting backend-db and db containers..."
docker container rm db 2>/dev/null
docker container rm backend-db 2>/dev/null

echo "Running initdev..." &&
docker-compose run --rm -w /app/tasks salesbuddy-api inv initdev &&
echo "Running alembic upgrade..." &&
ops/alembic-ops development upgrade head &&
echo "Seeding DB..." &&
docker-compose run --rm -w /app/tasks salesbuddy-api inv seed &&
echo "Running dbUpdate..." &&
ops/classkick-v1-dev classkick-schema-init dbUpdate &&
echo "All done."
}

alias reinit-db=rebuild-db
alias init-db=rebuild-db

function print-ck-env-vars() {
echo -e "\nDATABASE\n——————————————"
echo \$MYSQL_USER=$MYSQL_USER
echo \$MYSQL_PASSWORD=$MYSQL_PASSWORD
echo \$MYSQL_ROOT_PASSWORD=$MYSQL_ROOT_PASSWORD
echo \$MYSQL_HOST_IP=$MYSQL_HOST_IP
echo \$MYSQL_RO_PORT=$MYSQL_RO_PORT
echo \$MYSQL_TX_PORT=$MYSQL_TX_PORT
echo \$SSL_CA_FILE=$SSL_CA_FILE
echo \$SSL_CERT_FILE=$SSL_CERT_FILE
echo \$SSL_KEY_FILE=$SSL_KEY_FILE
echo \$TEST_DB=$TEST_DB
echo \$DOCKER_DB_NAME=$DOCKER_DB_NAME

echo -e "\nNOT DATABASE\n——————————————"
echo \$PYTHONPATH=$PYTHONPATH
echo \$DOCKER_DEFAULT_PLATFORM=$DOCKER_DEFAULT_PLATFORM
echo \$ENVIRONMENT=$ENVIRONMENT
echo \$APP=$APP
}

alias ck-env=print-ck-env-vars
alias dump-env=print-ck-env-vars
alias echo-ck-env=print-ck-env-vars

function set-ck-prod-env-vars() {
export MYSQL_USER='pkingswell'
export MYSQL_PASSWORD="PxeAnQ(0>'I<d^FV"
export MYSQL_HOST_IP='35.232.226.146'
export MYSQL_TX_PORT=3306
export MYSQL_RO_PORT=3306
export SSL_CA_FILE='/Users/peter/src/ck/network-ops/certs/production-mysql80/server-ca.pem'
export SSL_CERT_FILE='/Users/peter/src/ck/network-ops/certs/production-mysql80/client-cert.pem'
export SSL_KEY_FILE='/Users/peter/src/ck/network-ops/certs/production-mysql80/client-key.pem'
export PYTHONPATH=$(pwd)/libs:$(pwd)/apps/background-jobs

echo "PROD env vars set:"
print-ck-env-vars
}

function set-ck-dev-env-vars() {
export MYSQL_USER='root'
export MYSQL_PASSWORD="dev"
export MYSQL_ROOT_PASSWORD="dev"
export MYSQL_HOST_IP='127.0.0.1'
export MYSQL_TX_PORT=3306
export MYSQL_RO_PORT=3306
export PYTHONPATH=$(pwd)/libs:$(pwd)/apps/background-jobs

echo "DEV env vars set:"
print-ck-env-vars
}

function set-ck-staging-env-vars() {
export MYSQL_USER='root' # TODO get pkingswell working
export MYSQL_PASSWORD="^T%#wE%yArYPAgP2f8@k"
export MYSQL_HOST_IP='104.197.69.215'
export MYSQL_TX_PORT=3306
export MYSQL_RO_PORT=3306
export SSL_CA_FILE='/Users/peter/src/ck/network-ops/certs/staging-mysql80/server-ca.pem'
export SSL_CERT_FILE='/Users/peter/src/ck/network-ops/certs/staging-mysql80/client-cert.pem'
export SSL_KEY_FILE='/Users/peter/src/ck/network-ops/certs/staging-mysql80/client-key.pem'
export PYTHONPATH=$(pwd)/libs:$(pwd)/apps/background-jobs

echo "STAGING env vars set:"
print-ck-env-vars
}

########################################################
# HANDY CONTAINER FUNCTIONS
########################################################
make-break ()
{
echo -e "—————————————————————————————————————————— \n\n——————————————————————————————————————————\n";
echo -e "—————————————————————————————————————————— \n\n——————————————————————————————————————————\n";
echo -e "—————————————————————————————————————————— \n\n——————————————————————————————————————————\n";
echo -e "—————————————————————————————————————————— \n\n——————————————————————————————————————————\n";
echo -e "—————————————————————————————————————————— \n\n——————————————————————————————————————————\n";
echo -e "——————————————————————————————————————————
\n——————————————————————————"
date +%H:%m:%S
echo -e "——————————————————————————\n"
}
alias cls=make-break

function run-test-file() {
cls; pytest -rP --capture=tee-sys -rs $1 # --pdb
}

function do_settings() {
alias l="ls -l"
alias lrt="ls -lart"
export TEST_DB=background-jobs
export SQLALCHEMY_SILENCE_UBER_WARNING=1
}

function switch-app() {
APP=""
case "$1" in
sb)
APP="salesbuddy-api" ;;
bg)
APP="background-jobs" ;;
cs)
APP="clever-sync-api" ;;
*)
echo $"Usage: $0 {sb|bg|cs}"
exit 1
esac
export APP
export TEST_DB=$APP
}


function ck-cloud-proxy() {
CONN=""
case "$1" in
staging)
CONN='"classkick-907:us-central1:staging-mysql80?port=3306" "classkick-907:us-central1:staging-mysql80-replica?port=3307" "classkick-907:us-central1:production-postgres-reporting?port=5432"' ;;
production)
CONN='"classkick-907:us-central1:production-mysql80?port=3306" "classkick-907:us-central1:production-mysql80-replica?port=3307" "classkick-907:us-central1:production-postgres-reporting?port=5432"' ;;
*)
echo $"Usage: $0 {staging|production}"
exit 1
esac

cloud-sql-proxy $CONN
}
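A hedged usage sketch of the per-app workflow defined above (illustrative; assumes the docker-compose services named in the file exist in the current project):

# Hypothetical usage of the work helpers above.
switch-app cs               # exports APP=clever-sync-api and TEST_DB=clever-sync-api
run-black                   # docker-compose run --rm $APP black . --line-length=120
run-pytest-dir tests/unit   # pytest against one directory inside the container
set-ck-dev-env-vars         # point MYSQL_* at the local dev database and print the result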