mirror of
https://github.com/ohmyzsh/ohmyzsh.git
synced 2026-01-30 02:44:42 +01:00
very large update
This commit is contained in:
parent
597092aa83
commit
ef398bf673
22 changed files with 2982 additions and 79 deletions
1
.bash_profile
Symbolic link
1
.bash_profile
Symbolic link
|
|
@ -0,0 +1 @@
|
|||
/Users/peter/.bash_profile
|
||||
1
.gitconfig
Symbolic link
1
.gitconfig
Symbolic link
|
|
@ -0,0 +1 @@
|
|||
/Users/peter/.gitconfig
|
||||
515
.history/lib/functions_20210826171710.zsh
Normal file
515
.history/lib/functions_20210826171710.zsh
Normal file
|
|
@ -0,0 +1,515 @@
|
|||
# List the 20 most frequently used commands from the whole shell history,
# with usage counts and percentage of total, numbered and columnized.
function zsh_stats() {
  fc -l 1 \
    | awk '{ CMD[$2]++; count++; } END { for (a in CMD) print CMD[a] " " CMD[a]*100/count "% " a }' \
    | grep -v "./" | sort -nr | head -20 | column -c3 -s " " -t | nl
}
|
||||
|
||||
# Run the bundled oh-my-zsh uninstall script with $ZSH exported for it.
function uninstall_oh_my_zsh() {
  env ZSH="$ZSH" sh "$ZSH/tools/uninstall.sh"
}
|
||||
|
||||
# Deprecated wrapper kept for backwards compatibility; use `omz update`.
function upgrade_oh_my_zsh() {
  echo >&2 "${fg[yellow]}Note: \`$0\` is deprecated. Use \`omz update\` instead.$reset_color"
  omz update
}
|
||||
|
||||
# Create one or more directories (with parents) and cd into the last one.
# Usage: takedir <dir>...
function takedir() {
  # Quote the expansions so directory names containing spaces survive.
  mkdir -p "$@" && cd "${@:$#}"
}
|
||||
|
||||
# Cross-platform "open" wrapper: launches "$@" with the platform's opener
# (macOS open, Cygwin cygstart, Linux xdg-open, WSL cmd.exe start, MSYS start).
function open_command() {
  local open_cmd

  # define the open command
  case "$OSTYPE" in
    darwin*)  open_cmd='open' ;;
    cygwin*)  open_cmd='cygstart' ;;
    # WSL kernels contain "icrosoft" (Microsoft/microsoft) in `uname -r`
    linux*)   [[ "$(uname -r)" != *icrosoft* ]] && open_cmd='nohup xdg-open' || {
                open_cmd='cmd.exe /c start ""'
                # Translate a local path argument to a Windows path for cmd.exe
                [[ -e "$1" ]] && { 1="$(wslpath -w "${1:a}")" || return 1 }
              } ;;
    msys*)    open_cmd='start ""' ;;
    *)        echo "Platform $OSTYPE not supported"
              return 1
              ;;
  esac

  # ${=open_cmd}: zsh word-splits the command string into separate words
  ${=open_cmd} "$@" &>/dev/null
}
|
||||
|
||||
# Download the tarball at URL $1, extract it in the cwd, and cd into its
# top-level directory. Note: data/thedir stay global, as before.
function takeurl() {
  data=$(mktemp)
  # Quote expansions so URLs / paths with special characters survive.
  curl -L "$1" > "$data"
  tar xf "$data"
  thedir=$(tar tf "$data" | head -1)
  rm "$data"
  cd "$thedir"
}
|
||||
|
||||
# Clone the git repo at $1 and cd into the checkout directory.
function takegit() {
  # Quoted so URLs with unusual characters don't word-split.
  git clone "$1"
  cd "$(basename "${1%%.git}")"
}
|
||||
|
||||
# Smart "take": dispatch on the shape of $1 —
# a tarball URL is downloaded and extracted (takeurl), a git URL is
# cloned (takegit), anything else is treated as a directory (takedir).
function take() {
  local archive_re='^(https?|ftp).*\.tar\.(gz|bz2|xz)$'
  local repo_re='^([A-Za-z0-9]\+@|https?|git|ssh|ftps?|rsync).*\.git/?$'
  if [[ $1 =~ $archive_re ]]; then
    takeurl $1
  elif [[ $1 =~ $repo_re ]]; then
    takegit $1
  else
    takedir $1
  fi
}
|
||||
|
||||
#
# Get the value of an alias.
#
# Arguments:
#    1. alias - The alias to get its value from
# STDOUT:
#    The value of alias $1 (if it has one).
# Return value:
#    0 if the alias was found,
#    1 if it does not exist
#
function alias_value() {
  # zsh: $+aliases[$1] is 1 iff the alias exists; when it doesn't, the
  # failed (( )) test becomes the function's non-zero return status.
  (( $+aliases[$1] )) && echo $aliases[$1]
}
|
||||
|
||||
#
# Try to get the value of an alias,
# otherwise return the input.
#
# Arguments:
#    1. alias - The alias to get its value from
# STDOUT:
#    The value of alias $1, or $1 if there is no alias $1.
# Return value:
#    Always 0
#
function try_alias_value() {
  if ! alias_value "$1"; then
    echo "$1"
  fi
}
|
||||
|
||||
#
# Set variable "$1" to default value "$2" if "$1" is not yet defined.
#
# Arguments:
#    1. name - The variable to set
#    2. val - The default value
# Return value:
#    0 if the variable exists, 3 if it was set
#
function default() {
  # zsh: $+parameters[$1] is 1 iff a parameter named $1 is defined
  (( $+parameters[$1] )) && return 0
  typeset -g "$1"="$2" && return 3
}
|
||||
|
||||
#
# Set environment variable "$1" to default value "$2" if "$1" is not yet defined.
#
# Arguments:
#    1. name - The env variable to set
#    2. val - The default value
# Return value:
#    0 if the env variable exists, 3 if it was set
#
function env_default() {
  # The parameter's attribute string contains "-export-" iff it is exported.
  [[ ${parameters[$1]} = *-export* ]] && return 0
  export "$1=$2" && return 3
}
|
||||
|
||||
|
||||
# Required for $langinfo
|
||||
zmodload zsh/langinfo
|
||||
|
||||
# URL-encode a string
#
# Encodes a string using RFC 2396 URL-encoding (%-escaped).
# See: https://www.ietf.org/rfc/rfc2396.txt
#
# By default, reserved characters and unreserved "mark" characters are
# not escaped by this function. This allows the common usage of passing
# an entire URL in, and encoding just special characters in it, with
# the expectation that reserved and mark characters are used appropriately.
# The -r and -m options turn on escaping of the reserved and mark characters,
# respectively, which allows arbitrary strings to be fully escaped for
# embedding inside URLs, where reserved characters might be misinterpreted.
#
# Prints the encoded string on stdout.
# Returns nonzero if encoding failed.
#
# Usage:
#  omz_urlencode [-r] [-m] [-P] <string>
#
#    -r causes reserved characters (;/?:@&=+$,) to be escaped
#
#    -m causes "mark" characters (_.!~*''()-) to be escaped
#
#    -P causes spaces to be encoded as '%20' instead of '+'
function omz_urlencode() {
  emulate -L zsh
  local -a opts
  zparseopts -D -E -a opts r m P

  local in_str=$1
  local url_str=""
  local spaces_as_plus
  # (r) subscript flag: look the option up by value in the opts array
  if [[ -z $opts[(r)-P] ]]; then spaces_as_plus=1; fi
  local str="$in_str"

  # URLs must use UTF-8 encoding; convert str to UTF-8 if required
  local encoding=$langinfo[CODESET]
  local safe_encodings
  safe_encodings=(UTF-8 utf8 US-ASCII)
  if [[ -z ${safe_encodings[(r)$encoding]} ]]; then
    str=$(echo -E "$str" | iconv -f $encoding -t UTF-8)
    if [[ $? != 0 ]]; then
      echo "Error converting string from $encoding to UTF-8" >&2
      return 1
    fi
  fi

  # Use LC_CTYPE=C to process text byte-by-byte
  local i byte ord LC_ALL=C
  export LC_ALL
  local reserved=';/?:@&=+$,'
  local mark='_.!~*''()-'
  local dont_escape="[A-Za-z0-9"
  if [[ -z $opts[(r)-r] ]]; then
    dont_escape+=$reserved
  fi
  # $mark must be last because of the "-"
  if [[ -z $opts[(r)-m] ]]; then
    dont_escape+=$mark
  fi
  dont_escape+="]"

  # Implemented to use a single printf call and avoid subshells in the loop,
  # for performance (primarily on Windows).
  local url_str=""
  for (( i = 1; i <= ${#str}; ++i )); do
    byte="$str[i]"
    if [[ "$byte" =~ "$dont_escape" ]]; then
      url_str+="$byte"
    else
      if [[ "$byte" == " " && -n $spaces_as_plus ]]; then
        url_str+="+"
      else
        # zsh arithmetic: #byte is the char's ordinal, [##16] prints it
        # in base 16 without the "16#" prefix.
        ord=$(( [##16] #byte ))
        url_str+="%$ord"
      fi
    fi
  done
  echo -E "$url_str"
}
|
||||
|
||||
# URL-decode a string
#
# Decodes a RFC 2396 URL-encoded (%-escaped) string.
# This decodes the '+' and '%' escapes in the input string, and leaves
# other characters unchanged. Does not enforce that the input is a
# valid URL-encoded string. This is a convenience to allow callers to
# pass in a full URL or similar strings and decode them for human
# presentation.
#
# Outputs the decoded string on stdout.
# Returns nonzero if decoding failed.
#
# Usage:
#   omz_urldecode <urlstring>  - prints decoded string followed by a newline
function omz_urldecode {
  emulate -L zsh
  local encoded_url=$1

  # Work bytewise, since URLs escape UTF-8 octets
  local caller_encoding=$langinfo[CODESET]
  local LC_ALL=C
  export LC_ALL

  # Change + back to ' '
  local tmp=${encoded_url:gs/+/ /}
  # Protect other escapes to pass through the printf unchanged
  tmp=${tmp:gs/\\/\\\\/}
  # Handle %-escapes by turning them into `\xXX` printf escapes
  tmp=${tmp:gs/%/\\x/}
  local decoded
  # $'...' quoting interprets the \xXX escapes built above
  eval "decoded=\$'$tmp'"

  # Now we have a UTF-8 encoded string in the variable. We need to re-encode
  # it if caller is in a non-UTF-8 locale.
  local safe_encodings
  safe_encodings=(UTF-8 utf8 US-ASCII)
  if [[ -z ${safe_encodings[(r)$caller_encoding]} ]]; then
    decoded=$(echo -E "$decoded" | iconv -f UTF-8 -t $caller_encoding)
    if [[ $? != 0 ]]; then
      echo "Error converting string from UTF-8 to $caller_encoding" >&2
      return 1
    fi
  fi

  echo -E "$decoded"
}
|
||||
|
||||
##################################
# PSK Functions
##################################
# PSK List directories only
# Relies on the `l` listing alias; keeps lines whose mode begins with "d".
lsd() {
  l | grep -E "^d"
}
|
||||
|
||||
# ls grep
# Case-insensitive extended-regex filter over the `l` listing.
lsg() {
  l | grep -iE "$1"
}
|
||||
|
||||
# the ol' gfind. Doesn't take a file pattern.
# Searches ALL file types (ag -a), paging results through less.
function gfind-all() {
  # fd -H -t f . -x grep --color=always -Hi ${1}
  # Gah. Bye-bye gfind, here's an off-the-shelf improvement upon it https://github.com/burntsushi/ripgrep
  # $1 is search term, $2 is path
  # rg --no-ignore --hidden "$@"
  # even better is ag / silver searcher https://github.com/ggreer/the_silver_searcher
  ag -a --pager less "$@"
}
|
||||
|
||||
# the ol' gfind. Doesn't take a file pattern.
# Like gfind-all, but respects ag's default file-type/ignore rules.
function gfind() {
  # fd -t f . -x grep --color=always -Hi ${1}
  ag --pager less "$@"
}
|
||||
|
||||
# Print out the matching file names only (case-insensitive content search).
function gfindf() {
  # TODO make this a lot less shit e.g. don't search .git . Surely rg has
  # the ability to do this.
  # "$1" quoted so multi-word patterns work; `--` stops option parsing for
  # patterns starting with '-'; `+` batches files into fewer grep calls.
  find . -type f -exec grep --color=always -Hil -- "$1" {} +
}
|
||||
|
||||
# Copy the heroic test/gae configs from prism-tools into the current repo,
# list the heroic yaml files now present, and ensure a logs/ directory.
function heroic-repo-configure() {
  cp "${HOME}/src/spotify/prism-tools/heroic-test.yml" ./heroic-guc.yml
  cp "${HOME}/src/spotify/prism-tools/heroic-api-gae.yml" ./heroic-gae.yml
  ls -l | grep -E 'heroic.*yml|heroic.*yaml'
  # -p: re-running the function must not fail because logs/ already exists
  mkdir -p logs
}
|
||||
|
||||
# function h() {
|
||||
# NUM_LINES = ${1:-1000}
|
||||
# history | tail -n $NUM_LINES
|
||||
# }
|
||||
|
||||
# function h() {
|
||||
# set -x
|
||||
# NUM_LINES = ${1:-25}
|
||||
# \history -${NUM_LINES}
|
||||
# }
|
||||
|
||||
# ag with results paged through less.
function agl() {
  ag --pager less "$@"
}
|
||||
|
||||
# Kill every process matching $1: first by full command line (-f),
# then by process name. -l lists what was signalled, -i is case-insensitive.
function kill-em-all() {
  NAME=$1

  echo "Attempting to kill $NAME by arg match..."
  # Quote and use $NAME consistently (was a mix of unquoted $1/$NAME).
  pkill -fli "$NAME"
  echo "Attempting to kill $NAME by binary match..."
  pkill -li "$NAME"
  echo "...the killing... is done"
}
|
||||
|
||||
# Print the current date between two horizontal rules.
function dateline() {
  local rule="––––––––––––"
  echo "$rule"
  date
  echo "$rule"
}
|
||||
|
||||
# Clear the screen and print a fresh date header.
function clean-slate() {
  clear
  dateline
}

alias clr=clean-slate
alias cls=clean-slate
|
||||
|
||||
# Full-width ps listing filtered to the header row plus lines matching $1,
# excluding the grep processes themselves.
function psgr() {
  local pattern="%CPU|$1"
  ps auwwwwx | grep -v 'grep ' | grep -E "$pattern"
}
|
||||
|
||||
# Open file(s) in Visual Studio Code.
# Generalized: forwards all arguments, quoted, instead of a single
# unquoted $1 — paths with spaces and multiple files now work.
function edit() {
  /Applications/Visual\ Studio\ Code.app/Contents/Resources/app/bin/code "$@"
}
|
||||
|
||||
# Open the oh-my-zsh directory in the editor, then restore the cwd.
function zshrc() {
  # Bail out rather than editing the wrong directory if pushd fails.
  pushd ~/.oh-my-zsh || return
  edit .
  popd
}
|
||||
|
||||
# Human-readable size of each entry in the cwd, smallest first.
function dir-sizes() {
  du -sh ./* | sort -h
}
|
||||
|
||||
|
||||
# SSH to the Synology DS718 NAS on the LAN (non-standard port).
function ssh-ds718() {
  ssh -p 658 pskadmin@192.168.2.7
}
|
||||
|
||||
alias git-stash-list-all='gitk `git stash list --pretty=format:%gd`'
|
||||
|
||||
# Show the verbose-branch line for the current branch's origin counterpart.
function master-show-protection() {
  # $(...) instead of deprecated backticks.
  git branch -vv | grep "origin/$(git branch --show-current)"
}
|
||||
|
||||
# Show the verbose-branch line for the current branch.
function git-show-branch() {
  # Quote the pattern (it was an unquoted backtick substitution) and
  # use $(...) instead of backticks.
  git branch -vv | grep "$(git branch --show-current)"
}
|
||||
|
||||
# Page through the full patch of every stash entry, one after another.
function git-show-all-stashes() {
  echo "Hit 'q' to go to next file"
  echo ""
  git stash list | awk -F: '{ print "\n\n\n\n"; print $0; print "\n\n"; system("git stash show -p " $1); }'
}
|
||||
|
||||
# kill most recent container instance
# Fix: the inner single quotes around {{.Names}} terminated the outer
# single-quoted alias and were silently dropped; use double quotes inside.
alias docker-kill-latest='docker ps -l --format="{{.Names}}" | xargs docker kill'
|
||||
|
||||
# stop all containers
# Stops every running container with a 2-second grace period; errors
# (e.g. no containers running) are suppressed.
docker-stop-all-containers () {
  docker container stop -t 2 $(docker container ls -q) 2>/dev/null
}
|
||||
|
||||
# List files over 1 GB beneath the cwd, sorted by size (column 7 of -ls).
function find-gig-files() {
  find . -size +1G -ls | sort -k7n # Find files larger than 1GB and then order the list by the file size
}
|
||||
|
||||
# Launch the cloud sync apps from /Applications, then return to the
# previous directory.
function _start-cloud-storage() {
  bgnotify "Booting cloud sync apps..."
  # Guard the cd: otherwise a failed cd would launch from (and `cd -`
  # away from) the wrong directory.
  cd /Applications || return
  open Dropbox.app 2>/dev/null &
  open Google\ Drive.app 2>/dev/null &
  # Don't do this cos it downloads my backed up photos
  # open "Google Drive File Stream.app" 2>/dev/null &
  cd -
}
|
||||
|
||||
# Kick off cloud sync in the background: wait for the local unison sync
# to finish, pause briefly, then launch the sync apps.
function start-cloud-storage() {
  (
  bgnotify "Waiting for local unison sync..."
  /Users/peter/dotfiles_psk/bin/unison-cron-job.sh
  sleep 7
  _start-cloud-storage
  ) &
}
|
||||
|
||||
# Out of action - needs work
|
||||
# function tree() {
|
||||
# DIR=$1 ;
|
||||
# shift # pops $1 off
|
||||
# /usr/local/homebrew/bin/tree -a $DIR | colorize_less "$@"
|
||||
# }
|
||||
|
||||
# Open every .app bundle directly under directory $1.
function _open-all-chrome-apps() {
  for APP in "${1}"/*.app; do
    # With no matches the glob stays literal; don't try to open "*.app".
    [ -e "$APP" ] || continue
    echo "Opening $APP ..."
    nohup open -a "$APP" &
  done
}
|
||||
|
||||
# Open the Chrome app shortcuts from both Dropbox-synced locations.
# (CHROME_APP_DIR deliberately stays a shell-level variable, as before.)
function open-all-chrome-apps() {
  for CHROME_APP_DIR in \
    '/Users/peter/Dropbox (Personal)/_Settings/Chrome Apps/Chrome Apps.localized/' \
    '/Users/peter/Dropbox (Personal)/_Settings/Chrome/Chrome Apps/Chrome Apps.localized/'
  do
    _open-all-chrome-apps "$CHROME_APP_DIR"
  done
}
|
||||
|
||||
# Run after a reboot: relaunch Chrome apps and stop leftover containers.
function post-boot-tasks() {
  open-all-chrome-apps
  # Fix: "docker-stop-all" is not defined anywhere in this file; the
  # function defined above is docker-stop-all-containers.
  docker-stop-all-containers
}
|
||||
|
||||
# Stop the cloud sync apps and their Finder extension.
function kill-cloud-storage() {
  # TODO investigate pkill as alternative

  # Don't do this cos it downloads my backed up photos
  # killall "Google Drive File Stream" 2>/dev/null &

  killall Dropbox 2>/dev/null &
  killall "Google Drive" 2>/dev/null &
  # Fix: BSD killall stops option parsing at the first non-option
  # argument, so "-SIGKILL" placed after the name was treated as a
  # second process name; the signal must come before the name.
  killall -v -SIGKILL "FinderSyncExtension" &
}
|
||||
|
||||
# For photos, pictures, DS718
# Move every *.<ext> file in the cwd into a YYYY-MM-DD directory named
# after the file's modification date. Usage: organise-into-dated-dirs mov
function organise-into-dated-dirs() {
  if [ $# -eq 0 ]
  then
    echo "Please supply an extension e.g. mov or mp4"
    return
  fi

  for x in *.${1}; do
    # With no matches the glob stays literal; skip instead of feeding a
    # nonexistent name to date/mkdir/mv.
    [ -e "$x" ] || continue
    d=$(date -r "$x" +%Y-%m-%d)
    mkdir -p "$d"
    mv -- "$x" "$d/"
  done
}
|
||||
|
||||
### peco functions ###
|
||||
# Pick a directory with peco (via fd, else find) and insert the
# shell-quoted path into the ZLE command line at the cursor.
function peco-directories() {
  local current_lbuffer="$LBUFFER"
  local current_rbuffer="$RBUFFER"
  if command -v fd >/dev/null 2>&1; then
    local dir="$(command \fd --type directory --hidden --no-ignore --exclude .git/ --color never 2>/dev/null | peco )"
  else
    # Fallback: find, skipping dotdirs and dev/proc; the awk stage sorts
    # results by path length (GNU awk PROCINFO sort).
    local dir="$(
    command find \( -path '*/\.*' -o -fstype dev -o -fstype proc \) -type d -print 2>/dev/null \
      | sed 1d \
      | cut -b3- \
      | awk '{a[length($0)" "NR]=$0}END{PROCINFO["sorted_in"]="@ind_num_asc"; for(i in a) print a[i]}' - \
      | peco
    )"
  fi

  if [ -n "$dir" ]; then
    dir=$(echo "$dir" | tr -d '\n')
    # printf %q: shell-quote the path so spaces survive on the buffer
    dir=$(printf %q "$dir")
    # echo "PSK ${dir}"

    # Fix: this previously interpolated ${file} (copy-paste from
    # peco-files), which is unset here — the selection was never inserted.
    BUFFER="${current_lbuffer}${dir}${current_rbuffer}"
    CURSOR=$#BUFFER
  fi
}
|
||||
|
||||
# Pick a file with peco (using fd/rg/ag/find, whichever exists) and
# insert the shell-quoted path into the ZLE command line at the cursor.
function peco-files() {
  local current_lbuffer="$LBUFFER"
  local current_rbuffer="$RBUFFER"
  if command -v fd >/dev/null 2>&1; then
    local file="$(command \fd --type file --hidden --no-ignore --exclude .git/ --color never 2>/dev/null | peco)"
  elif command -v rg >/dev/null 2>&1; then
    local file="$(rg --glob "" --files --hidden --no-ignore-vcs --iglob !.git/ --color never 2>/dev/null | peco)"
  elif command -v ag >/dev/null 2>&1; then
    local file="$(ag --files-with-matches --unrestricted --skip-vcs-ignores --ignore .git/ --nocolor -g "" 2>/dev/null | peco)"
  else
    # Fallback: find, skipping dotfiles and dev/proc; the awk stage sorts
    # results by path length (GNU awk PROCINFO sort).
    local file="$(
    command find \( -path '*/\.*' -o -fstype dev -o -fstype proc \) -type f -print 2> /dev/null \
      | sed 1d \
      | cut -b3- \
      | awk '{a[length($0)" "NR]=$0}END{PROCINFO["sorted_in"]="@ind_num_asc"; for(i in a) print a[i]}' - \
      | peco
    )"
  fi

  if [ -n "$file" ]; then
    file=$(echo "$file" | tr -d '\n')
    # printf %q: shell-quote the path so spaces survive on the buffer
    file=$(printf %q "$file")
    BUFFER="${current_lbuffer}${file}${current_rbuffer}"
    CURSOR=$#BUFFER
  fi
}
|
||||
|
||||
# Register the peco pickers as ZLE widgets and bind them:
#   Ctrl-X f       -> insert a directory
#   Ctrl-X Ctrl-F  -> insert a file
zle -N peco-directories
bindkey '^Xf' peco-directories
zle -N peco-files
bindkey '^X^f' peco-files

### peco functions ###
|
||||
515
.history/lib/functions_20210826171712.zsh
Normal file
515
.history/lib/functions_20210826171712.zsh
Normal file
|
|
@ -0,0 +1,515 @@
|
|||
function zsh_stats() {
|
||||
fc -l 1 \
|
||||
| awk '{ CMD[$2]++; count++; } END { for (a in CMD) print CMD[a] " " CMD[a]*100/count "% " a }' \
|
||||
| grep -v "./" | sort -nr | head -20 | column -c3 -s " " -t | nl
|
||||
}
|
||||
|
||||
function uninstall_oh_my_zsh() {
|
||||
env ZSH="$ZSH" sh "$ZSH/tools/uninstall.sh"
|
||||
}
|
||||
|
||||
function upgrade_oh_my_zsh() {
|
||||
echo >&2 "${fg[yellow]}Note: \`$0\` is deprecated. Use \`omz update\` instead.$reset_color"
|
||||
omz update
|
||||
}
|
||||
|
||||
# Create one or more directories (with parents) and cd into the last one.
# Usage: takedir <dir>...
function takedir() {
  # Quote the expansions so directory names containing spaces survive.
  mkdir -p "$@" && cd "${@:$#}"
}
|
||||
|
||||
function open_command() {
|
||||
local open_cmd
|
||||
|
||||
# define the open command
|
||||
case "$OSTYPE" in
|
||||
darwin*) open_cmd='open' ;;
|
||||
cygwin*) open_cmd='cygstart' ;;
|
||||
linux*) [[ "$(uname -r)" != *icrosoft* ]] && open_cmd='nohup xdg-open' || {
|
||||
open_cmd='cmd.exe /c start ""'
|
||||
[[ -e "$1" ]] && { 1="$(wslpath -w "${1:a}")" || return 1 }
|
||||
} ;;
|
||||
msys*) open_cmd='start ""' ;;
|
||||
*) echo "Platform $OSTYPE not supported"
|
||||
return 1
|
||||
;;
|
||||
esac
|
||||
|
||||
${=open_cmd} "$@" &>/dev/null
|
||||
}
|
||||
|
||||
function takeurl() {
|
||||
data=$(mktemp)
|
||||
curl -L $1 > $data
|
||||
tar xf $data
|
||||
thedir=$(tar tf $data | head -1)
|
||||
rm $data
|
||||
cd $thedir
|
||||
}
|
||||
|
||||
function takegit() {
|
||||
git clone $1
|
||||
cd $(basename ${1%%.git})
|
||||
}
|
||||
|
||||
function take() {
|
||||
if [[ $1 =~ ^(https?|ftp).*\.tar\.(gz|bz2|xz)$ ]]; then
|
||||
takeurl $1
|
||||
elif [[ $1 =~ ^([A-Za-z0-9]\+@|https?|git|ssh|ftps?|rsync).*\.git/?$ ]]; then
|
||||
takegit $1
|
||||
else
|
||||
takedir $1
|
||||
fi
|
||||
}
|
||||
|
||||
#
|
||||
# Get the value of an alias.
|
||||
#
|
||||
# Arguments:
|
||||
# 1. alias - The alias to get its value from
|
||||
# STDOUT:
|
||||
# The value of alias $1 (if it has one).
|
||||
# Return value:
|
||||
# 0 if the alias was found,
|
||||
# 1 if it does not exist
|
||||
#
|
||||
function alias_value() {
|
||||
(( $+aliases[$1] )) && echo $aliases[$1]
|
||||
}
|
||||
|
||||
#
|
||||
# Try to get the value of an alias,
|
||||
# otherwise return the input.
|
||||
#
|
||||
# Arguments:
|
||||
# 1. alias - The alias to get its value from
|
||||
# STDOUT:
|
||||
# The value of alias $1, or $1 if there is no alias $1.
|
||||
# Return value:
|
||||
# Always 0
|
||||
#
|
||||
function try_alias_value() {
|
||||
alias_value "$1" || echo "$1"
|
||||
}
|
||||
|
||||
#
|
||||
# Set variable "$1" to default value "$2" if "$1" is not yet defined.
|
||||
#
|
||||
# Arguments:
|
||||
# 1. name - The variable to set
|
||||
# 2. val - The default value
|
||||
# Return value:
|
||||
# 0 if the variable exists, 3 if it was set
|
||||
#
|
||||
function default() {
|
||||
(( $+parameters[$1] )) && return 0
|
||||
typeset -g "$1"="$2" && return 3
|
||||
}
|
||||
|
||||
#
|
||||
# Set environment variable "$1" to default value "$2" if "$1" is not yet defined.
|
||||
#
|
||||
# Arguments:
|
||||
# 1. name - The env variable to set
|
||||
# 2. val - The default value
|
||||
# Return value:
|
||||
# 0 if the env variable exists, 3 if it was set
|
||||
#
|
||||
function env_default() {
|
||||
[[ ${parameters[$1]} = *-export* ]] && return 0
|
||||
export "$1=$2" && return 3
|
||||
}
|
||||
|
||||
|
||||
# Required for $langinfo
|
||||
zmodload zsh/langinfo
|
||||
|
||||
# URL-encode a string
|
||||
#
|
||||
# Encodes a string using RFC 2396 URL-encoding (%-escaped).
|
||||
# See: https://www.ietf.org/rfc/rfc2396.txt
|
||||
#
|
||||
# By default, reserved characters and unreserved "mark" characters are
|
||||
# not escaped by this function. This allows the common usage of passing
|
||||
# an entire URL in, and encoding just special characters in it, with
|
||||
# the expectation that reserved and mark characters are used appropriately.
|
||||
# The -r and -m options turn on escaping of the reserved and mark characters,
|
||||
# respectively, which allows arbitrary strings to be fully escaped for
|
||||
# embedding inside URLs, where reserved characters might be misinterpreted.
|
||||
#
|
||||
# Prints the encoded string on stdout.
|
||||
# Returns nonzero if encoding failed.
|
||||
#
|
||||
# Usage:
|
||||
# omz_urlencode [-r] [-m] [-P] <string>
|
||||
#
|
||||
# -r causes reserved characters (;/?:@&=+$,) to be escaped
|
||||
#
|
||||
# -m causes "mark" characters (_.!~*''()-) to be escaped
|
||||
#
|
||||
# -P causes spaces to be encoded as '%20' instead of '+'
|
||||
function omz_urlencode() {
|
||||
emulate -L zsh
|
||||
local -a opts
|
||||
zparseopts -D -E -a opts r m P
|
||||
|
||||
local in_str=$1
|
||||
local url_str=""
|
||||
local spaces_as_plus
|
||||
if [[ -z $opts[(r)-P] ]]; then spaces_as_plus=1; fi
|
||||
local str="$in_str"
|
||||
|
||||
# URLs must use UTF-8 encoding; convert str to UTF-8 if required
|
||||
local encoding=$langinfo[CODESET]
|
||||
local safe_encodings
|
||||
safe_encodings=(UTF-8 utf8 US-ASCII)
|
||||
if [[ -z ${safe_encodings[(r)$encoding]} ]]; then
|
||||
str=$(echo -E "$str" | iconv -f $encoding -t UTF-8)
|
||||
if [[ $? != 0 ]]; then
|
||||
echo "Error converting string from $encoding to UTF-8" >&2
|
||||
return 1
|
||||
fi
|
||||
fi
|
||||
|
||||
# Use LC_CTYPE=C to process text byte-by-byte
|
||||
local i byte ord LC_ALL=C
|
||||
export LC_ALL
|
||||
local reserved=';/?:@&=+$,'
|
||||
local mark='_.!~*''()-'
|
||||
local dont_escape="[A-Za-z0-9"
|
||||
if [[ -z $opts[(r)-r] ]]; then
|
||||
dont_escape+=$reserved
|
||||
fi
|
||||
# $mark must be last because of the "-"
|
||||
if [[ -z $opts[(r)-m] ]]; then
|
||||
dont_escape+=$mark
|
||||
fi
|
||||
dont_escape+="]"
|
||||
|
||||
# Implemented to use a single printf call and avoid subshells in the loop,
|
||||
# for performance (primarily on Windows).
|
||||
local url_str=""
|
||||
for (( i = 1; i <= ${#str}; ++i )); do
|
||||
byte="$str[i]"
|
||||
if [[ "$byte" =~ "$dont_escape" ]]; then
|
||||
url_str+="$byte"
|
||||
else
|
||||
if [[ "$byte" == " " && -n $spaces_as_plus ]]; then
|
||||
url_str+="+"
|
||||
else
|
||||
ord=$(( [##16] #byte ))
|
||||
url_str+="%$ord"
|
||||
fi
|
||||
fi
|
||||
done
|
||||
echo -E "$url_str"
|
||||
}
|
||||
|
||||
# URL-decode a string
|
||||
#
|
||||
# Decodes a RFC 2396 URL-encoded (%-escaped) string.
|
||||
# This decodes the '+' and '%' escapes in the input string, and leaves
|
||||
# other characters unchanged. Does not enforce that the input is a
|
||||
# valid URL-encoded string. This is a convenience to allow callers to
|
||||
# pass in a full URL or similar strings and decode them for human
|
||||
# presentation.
|
||||
#
|
||||
# Outputs the encoded string on stdout.
|
||||
# Returns nonzero if encoding failed.
|
||||
#
|
||||
# Usage:
|
||||
# omz_urldecode <urlstring> - prints decoded string followed by a newline
|
||||
function omz_urldecode {
|
||||
emulate -L zsh
|
||||
local encoded_url=$1
|
||||
|
||||
# Work bytewise, since URLs escape UTF-8 octets
|
||||
local caller_encoding=$langinfo[CODESET]
|
||||
local LC_ALL=C
|
||||
export LC_ALL
|
||||
|
||||
# Change + back to ' '
|
||||
local tmp=${encoded_url:gs/+/ /}
|
||||
# Protect other escapes to pass through the printf unchanged
|
||||
tmp=${tmp:gs/\\/\\\\/}
|
||||
# Handle %-escapes by turning them into `\xXX` printf escapes
|
||||
tmp=${tmp:gs/%/\\x/}
|
||||
local decoded
|
||||
eval "decoded=\$'$tmp'"
|
||||
|
||||
# Now we have a UTF-8 encoded string in the variable. We need to re-encode
|
||||
# it if caller is in a non-UTF-8 locale.
|
||||
local safe_encodings
|
||||
safe_encodings=(UTF-8 utf8 US-ASCII)
|
||||
if [[ -z ${safe_encodings[(r)$caller_encoding]} ]]; then
|
||||
decoded=$(echo -E "$decoded" | iconv -f UTF-8 -t $caller_encoding)
|
||||
if [[ $? != 0 ]]; then
|
||||
echo "Error converting string from UTF-8 to $caller_encoding" >&2
|
||||
return 1
|
||||
fi
|
||||
fi
|
||||
|
||||
echo -E "$decoded"
|
||||
}
|
||||
|
||||
##################################
|
||||
# PSK Functions
|
||||
##################################
|
||||
# PSK List directories only
|
||||
lsd() {
|
||||
l | grep -E "^d"
|
||||
}
|
||||
|
||||
# ls grep
|
||||
lsg() {
|
||||
l | grep -iE "$1"
|
||||
}
|
||||
|
||||
# the ol' gfind. Doesn't take a file pattern.
|
||||
function gfind-all() {
|
||||
# fd -H -t f . -x grep --color=always -Hi ${1}
|
||||
# Gah. Bye-bye gfind, here's an off-the-shelf improvement upon it https://github.com/burntsushi/ripgrep
|
||||
# $1 is search term, $2 is path
|
||||
# rg --no-ignore --hidden "$@"
|
||||
# even better is ag / silver searcher https://github.com/ggreer/the_silver_searcher
|
||||
ag -a --pager less "$@"
|
||||
}
|
||||
|
||||
# the ol' gfind. Doesn't take a file pattern.
|
||||
function gfind() {
|
||||
# fd -t f . -x grep --color=always -Hi ${1}
|
||||
ag --pager less "$@"
|
||||
}
|
||||
|
||||
# Print out the matches only
|
||||
# Print out the matching file names only (case-insensitive content search).
function gfindf() {
  # TODO make this a lot less shit e.g. don't search .git . Surely rg has
  # the ability to do this.
  # "$1" quoted so multi-word patterns work; `--` stops option parsing for
  # patterns starting with '-'; `+` batches files into fewer grep calls.
  find . -type f -exec grep --color=always -Hil -- "$1" {} +
}
|
||||
|
||||
function heroic-repo-configure() {
|
||||
cp ${HOME}/src/spotify/prism-tools/heroic-test.yml ./heroic-guc.yml
|
||||
cp ${HOME}/src/spotify/prism-tools/heroic-api-gae.yml ./heroic-gae.yml
|
||||
ls -l | grep -E 'heroic.*yml|heroic.*yaml'
|
||||
mkdir logs
|
||||
}
|
||||
|
||||
# function h() {
|
||||
# NUM_LINES = ${1:-1000}
|
||||
# history | tail -n $NUM_LINES
|
||||
# }
|
||||
|
||||
# function h() {
|
||||
# set -x
|
||||
# NUM_LINES = ${1:-25}
|
||||
# \history -${NUM_LINES}
|
||||
# }
|
||||
|
||||
function agl() {
|
||||
ag --pager less "$@"
|
||||
}
|
||||
|
||||
function kill-em-all() {
|
||||
NAME=$1
|
||||
|
||||
echo "Attempting to kill $NAME by arg match..."
|
||||
pkill -fli $1
|
||||
echo "Attempting to kill $NAME by binary match..."
|
||||
pkill -li $1
|
||||
echo "...the killing... is done"
|
||||
}
|
||||
|
||||
function dateline() {
|
||||
echo "––––––––––––"
|
||||
date
|
||||
echo "––––––––––––"
|
||||
}
|
||||
|
||||
function clean-slate() {
|
||||
clear
|
||||
dateline
|
||||
}
|
||||
|
||||
alias clr=clean-slate
|
||||
alias cls=clean-slate
|
||||
|
||||
function psgr() {
|
||||
ps auwwwwx | grep -v 'grep ' | grep -E "%CPU|$1"
|
||||
}
|
||||
|
||||
function edit() {
|
||||
/Applications/Visual\ Studio\ Code.app/Contents/Resources/app/bin/code $1
|
||||
}
|
||||
|
||||
function zshrc() {
|
||||
pushd ~/.oh-my-zsh
|
||||
edit .
|
||||
popd
|
||||
}
|
||||
|
||||
function dir-sizes() {
|
||||
du -sh ./* | sort -h
|
||||
}
|
||||
|
||||
|
||||
function ssh-ds718() {
|
||||
ssh -p 658 pskadmin@192.168.2.7
|
||||
}
|
||||
|
||||
alias git-stash-list-all='gitk `git stash list --pretty=format:%gd`'
|
||||
|
||||
function master-show-protection() {
|
||||
git branch -vv | grep "origin/`git branch --show-current`"
|
||||
}
|
||||
|
||||
function git-show-branch() {
|
||||
git branch -vv | grep `git branch --show-current`
|
||||
}
|
||||
|
||||
function git-show-all-stashes() {
|
||||
echo "Hit 'q' to go to next file"
|
||||
echo ""
|
||||
git stash list | awk -F: '{ print "\n\n\n\n"; print $0; print "\n\n"; system("git stash show -p " $1); }'
|
||||
}
|
||||
|
||||
# kill most recent container instance
# Fix: the inner single quotes around {{.Names}} terminated the outer
# single-quoted alias and were silently dropped; use double quotes inside.
alias docker-kill-latest='docker ps -l --format="{{.Names}}" | xargs docker kill'
|
||||
|
||||
# stop all containers
|
||||
docker-stop-all-containers () {
|
||||
docker container stop -t 2 $(docker container ls -q) 2>/dev/null
|
||||
}
|
||||
|
||||
function find-gig-files() {
|
||||
find . -size +1G -ls | sort -k7n # Find files larger than 1GB and then order the list by the file size
|
||||
}
|
||||
|
||||
function _start-cloud-storage() {
|
||||
bgnotify "Booting cloud sync apps..."
|
||||
cd /Applications
|
||||
open Dropbox.app 2>/dev/null &
|
||||
open Google\ Drive.app 2>/dev/null &
|
||||
# Don't do this cos it downloads my backed up photos
|
||||
# open "Google Drive File Stream.app" 2>/dev/null &
|
||||
cd -
|
||||
}
|
||||
|
||||
function start-cloud-storage() {
|
||||
(
|
||||
bgnotify "Waiting for local unison sync..."
|
||||
/Users/peter/dotfiles_psk/bin/unison-cron-job.sh
|
||||
sleep 7
|
||||
_start-cloud-storage
|
||||
) &
|
||||
}
|
||||
|
||||
# Out of action - needs work
|
||||
# function tree() {
|
||||
# DIR=$1 ;
|
||||
# shift # pops $1 off
|
||||
# /usr/local/homebrew/bin/tree -a $DIR | colorize_less "$@"
|
||||
# }
|
||||
|
||||
function _open-all-chrome-apps() {
|
||||
for APP in "${1}"/*.app; do
|
||||
echo "Opening $APP ..."
|
||||
nohup open -a "$APP" &
|
||||
done
|
||||
}
|
||||
|
||||
function open-all-chrome-apps() {
|
||||
CHROME_APP_DIR='/Users/peter/Dropbox (Personal)/_Settings/Chrome Apps/Chrome Apps.localized/'
|
||||
_open-all-chrome-apps "$CHROME_APP_DIR"
|
||||
CHROME_APP_DIR='/Users/peter/Dropbox (Personal)/_Settings/Chrome/Chrome Apps/Chrome Apps.localized/'
|
||||
_open-all-chrome-apps "$CHROME_APP_DIR"
|
||||
}
|
||||
|
||||
# Run after a reboot: relaunch Chrome apps and stop leftover containers.
function post-boot-tasks() {
  open-all-chrome-apps
  # Fix: "docker-stop-all" is not defined anywhere in this file; the
  # function defined above is docker-stop-all-containers.
  docker-stop-all-containers
}
|
||||
|
||||
# Stop the cloud sync apps and their Finder extension.
function kill-cloud-storage() {
  # TODO investigate pkill as alternative

  # Don't do this cos it downloads my backed up photos
  # killall "Google Drive File Stream" 2>/dev/null &

  killall Dropbox 2>/dev/null &
  killall "Google Drive" 2>/dev/null &
  # Fix: BSD killall stops option parsing at the first non-option
  # argument, so "-SIGKILL" placed after the name was treated as a
  # second process name; the signal must come before the name.
  killall -v -SIGKILL "FinderSyncExtension" &
}
|
||||
|
||||
# For photos, pictures, DS718
# Move every *.<ext> file in the cwd into a YYYY-MM-DD directory named
# after the file's modification date. Usage: organise-into-dated-dirs mov
function organise-into-dated-dirs() {
  if [ $# -eq 0 ]
  then
    echo "Please supply an extension e.g. mov or mp4"
    return
  fi

  for x in *.${1}; do
    # With no matches the glob stays literal; skip instead of feeding a
    # nonexistent name to date/mkdir/mv.
    [ -e "$x" ] || continue
    d=$(date -r "$x" +%Y-%m-%d)
    mkdir -p "$d"
    mv -- "$x" "$d/"
  done
}
|
||||
|
||||
### peco functions ###
|
||||
# Pick a directory with peco (via fd, else find) and insert the
# shell-quoted path into the ZLE command line at the cursor.
function peco-directories() {
  local current_lbuffer="$LBUFFER"
  local current_rbuffer="$RBUFFER"
  if command -v fd >/dev/null 2>&1; then
    local dir="$(command \fd --type directory --hidden --no-ignore --exclude .git/ --color never 2>/dev/null | peco )"
  else
    # Fallback: find, skipping dotdirs and dev/proc; the awk stage sorts
    # results by path length (GNU awk PROCINFO sort).
    local dir="$(
    command find \( -path '*/\.*' -o -fstype dev -o -fstype proc \) -type d -print 2>/dev/null \
      | sed 1d \
      | cut -b3- \
      | awk '{a[length($0)" "NR]=$0}END{PROCINFO["sorted_in"]="@ind_num_asc"; for(i in a) print a[i]}' - \
      | peco
    )"
  fi

  if [ -n "$dir" ]; then
    dir=$(echo "$dir" | tr -d '\n')
    # printf %q: shell-quote the path so spaces survive on the buffer
    dir=$(printf %q "$dir")
    # echo "PSK ${dir}"

    # Fix: this previously interpolated ${file} (copy-paste from
    # peco-files), which is unset here — the selection was never inserted.
    BUFFER="${current_lbuffer}${dir}${current_rbuffer}"
    CURSOR=$#BUFFER
  fi
}
|
||||
|
||||
# ZLE widget: fuzzy-pick a file with peco and splice it (shell-quoted)
# into the command line at the cursor. Tries fd, then ripgrep, then the
# silver searcher, and finally plain find sorted by path length.
function peco-files() {
  local lhs="$LBUFFER"
  local rhs="$RBUFFER"
  local file
  if command -v fd >/dev/null 2>&1; then
    file="$(command \fd --type file --hidden --no-ignore --exclude .git/ --color never 2>/dev/null | peco)"
  elif command -v rg >/dev/null 2>&1; then
    file="$(rg --glob "" --files --hidden --no-ignore-vcs --iglob !.git/ --color never 2>/dev/null | peco)"
  elif command -v ag >/dev/null 2>&1; then
    file="$(ag --files-with-matches --unrestricted --skip-vcs-ignores --ignore .git/ --nocolor -g "" 2>/dev/null | peco)"
  else
    file="$(
      command find \( -path '*/\.*' -o -fstype dev -o -fstype proc \) -type f -print 2> /dev/null \
        | sed 1d \
        | cut -b3- \
        | awk '{a[length($0)" "NR]=$0}END{PROCINFO["sorted_in"]="@ind_num_asc"; for(i in a) print a[i]}' - \
        | peco
    )"
  fi

  if [ -n "$file" ]; then
    file=$(echo "$file" | tr -d '\n')   # strip embedded newlines
    file=$(printf %q "$file")           # quote for safe insertion
    BUFFER="${lhs}${file}${rhs}"
    CURSOR=$#BUFFER
  fi
}
|
||||
|
||||
# Register the peco pickers as ZLE widgets and bind them:
#   Ctrl-X f      -> insert a directory
#   Ctrl-X Ctrl-F -> insert a file
zle -N peco-directories
zle -N peco-files
bindkey '^Xf' peco-directories
bindkey '^X^f' peco-files
|
||||
|
||||
### peco functions ###
|
||||
1
.hyper.js
Symbolic link
1
.hyper.js
Symbolic link
|
|
@ -0,0 +1 @@
|
|||
~/Library/Application Support/Hyper/.hyper.js
|
||||
1
.p10k.zsh
Symbolic link
1
.p10k.zsh
Symbolic link
|
|
@ -0,0 +1 @@
|
|||
/Users/peter/.p10k.zsh
|
||||
1641
.p10k.zsh.bak
Normal file
1641
.p10k.zsh.bak
Normal file
File diff suppressed because it is too large
Load diff
18
.tmux.conf
18
.tmux.conf
|
|
@ -45,6 +45,9 @@ set -g renumber-windows on
|
|||
# soften status bar color from harsh green to light gray
|
||||
set -g status-style bg='#262626',fg='#a1cfd1'
|
||||
|
||||
# set status bg to grey from https://protechnotes.com/comprehensive-tmux-tutorial-for-beginners-with-a-cheat-sheet/
|
||||
set -g status-bg colour236
|
||||
|
||||
# remove administrative debris (session name, hostname, time) in status bar
|
||||
#set -g status-left ''
|
||||
#set -g status-right ''
|
||||
|
|
@ -178,14 +181,13 @@ set -g @plugin 'tmux-plugins/tmux-open'
|
|||
set -g status-position bottom
|
||||
set -g status-justify left
|
||||
# Status line right side - 31-Oct 13:37
|
||||
set -g status-right "#[fg=cyan]%d %b %R"
|
||||
set -g status-right-length 50
|
||||
#set -g status-right "#[fg=cyan]%d %b %R"
|
||||
|
||||
setw -g window-status-current-style 'fg=colour1 bg=colour19 bold'
|
||||
setw -g window-status-current-format ' #I#[fg=colour249]:#[fg=colour255]#W#[fg=colour249]#F '
|
||||
|
||||
setw -g window-status-style 'fg=colour9 bg=colour18'
|
||||
setw -g window-status-format ' #I#[fg=colour237]:#[fg=colour250]#W#[fg=colour244]#F '
|
||||
#setw -g window-status-style 'fg=colour9 bg=colour18'
|
||||
#setw -g window-status-format ' #I#[fg=colour237]:#[fg=colour250]#W#[fg=colour244]#F '
|
||||
|
||||
# setw -g window-status-bell-style 'fg=colour255 bg=colour1 bold'
|
||||
|
||||
|
|
@ -197,7 +199,11 @@ set -g status-left "#{prefix_highlight} #[fg=green]Session: #S #[fg=yellow]#I #[
|
|||
set -g status-interval 5
|
||||
|
||||
# messages
|
||||
# set -g message-style 'fg=colour232 bg=colour16 bold'
|
||||
#set -g message-style 'fg=colour232 bg=colour16 bold'
|
||||
|
||||
# https://github.com/jonmosco/kube-tmux.git
|
||||
# set -g status-right-length 80 # long enough for our long cluster names
|
||||
# set -g status-right "#(/bin/bash $HOME/.tmux/kube-tmux/kube.tmux 250 red cyan)"
|
||||
|
||||
##########################
|
||||
# PLUGINS
|
||||
|
|
@ -205,6 +211,8 @@ set -g status-interval 5
|
|||
# List of plugins
|
||||
set -g @plugin 'tmux-plugins/tpm'
|
||||
set -g @plugin 'tmux-plugins/tmux-sensible'
|
||||
set -g @plugin 'jimeh/tmux-themepack'
|
||||
set -g @themepack 'powerline/block/blue'
|
||||
|
||||
# Other examples:
|
||||
# set -g @plugin 'github_username/plugin_name'
|
||||
|
|
|
|||
1
.zsh_favlist
Symbolic link
1
.zsh_favlist
Symbolic link
|
|
@ -0,0 +1 @@
|
|||
/Users/peter/.zsh_favlist
|
||||
1
IntelliJ-Shortcuts.md
Symbolic link
1
IntelliJ-Shortcuts.md
Symbolic link
|
|
@ -0,0 +1 @@
|
|||
../../IntelliJ-Shortcuts.md
|
||||
37
Path update for arm64 homebrew.sh
Normal file
37
Path update for arm64 homebrew.sh
Normal file
|
|
@ -0,0 +1,37 @@
|
|||
# Path update for arm64 homebrew
|
||||
/opt/homebrew/bin:
|
||||
/Users/peter/perl5/bin:
|
||||
/Users/peter/.jenv/bin:
|
||||
/usr/local/homebrew/opt/python@3.9/bin:
|
||||
/Users/peter/.autojump/bin:
|
||||
/Users/peter/perl5/bin:
|
||||
/Users/peter/.jenv/bin:
|
||||
/usr/local/go1.17.1.darwin-amd64/bin:
|
||||
/opt/homebrew/bin/:
|
||||
/opt/local/bin:
|
||||
/opt/local/sbin:
|
||||
/Users/peter/.jenv/shims:
|
||||
/usr/local/bin:
|
||||
/usr/bin:
|
||||
/bin:
|
||||
/usr/sbin:
|
||||
/sbin:
|
||||
/usr/local/go/bin:
|
||||
/opt/puppetlabs/bin:
|
||||
/Users/peter/perl5/bin:
|
||||
/Users/peter/.jenv/bin:
|
||||
/Users/peter/bin:
|
||||
/usr/libexec/:
|
||||
/Users/peter/.krew/bin:
|
||||
/usr/local/homebrew/opt/fzf/bin:
|
||||
/Users/peter/.kube/plugins/jordanwilson230:
|
||||
/Users/peter/bin/leetcode-cli:
|
||||
/Users/peter/bin:
|
||||
/opt/puppetlabs/bin:
|
||||
|
||||
# Removed the following amd64 homebrew paths:
|
||||
/usr/local/homebrew/bin:
|
||||
/usr/local/homebrew/opt/fzf/bin:
|
||||
/usr/local/homebrew/opt/python@3.9/bin:
|
||||
/usr/local/homebrew/opt/ruby/bin:
|
||||
/usr/local/homebrew/sbin:
|
||||
34
docker-util.sh
Normal file
34
docker-util.sh
Normal file
|
|
@ -0,0 +1,34 @@
|
|||
###
|
||||
### DOCKER UTIL
|
||||
###
|
||||
|
||||
alias dk="docker"
|
||||
alias docker-killall="killall com.docker.hyperkit"
|
||||
alias killall-docker="killall com.docker.hyperkit"
|
||||
|
||||
alias dps="docker ps"
|
||||
alias dl="docker logs "
|
||||
alias dlf="docker logs -f "
|
||||
|
||||
# stop all containers
|
||||
# Gracefully stop every running container (2-second grace period before
# Docker escalates to SIGKILL). Errors — e.g. nothing running — are
# deliberately suppressed.
function docker-stop-all-containers () {
  echo "Stopping all containers..."
  docker container stop -t 2 $(docker container ls -q) 2>/dev/null
  echo ""
}
|
||||
|
||||
# List docker images matching $1 (case-insensitive), keeping the header row.
function docker-lsg () {
  # Fix: the single quotes sat INSIDE the double-quoted pattern, so the
  # header alternative matched the literal text 'IMAGE ID' (with quotes)
  # and the header row was never actually shown.
  docker image ls | grep -Ei "IMAGE ID|$1"
}
|
||||
|
||||
# kill most recent container instance
|
||||
alias docker-kill-latest='docker ps -l --format="{{.Names}}" | xargs docker kill'
|
||||
alias kill-docker-latest=docker-kill-latest
|
||||
|
||||
# Params: container ID or name
|
||||
# Copy a container's full log output to the macOS clipboard.
# Params: container ID or name
function dlc() {
  # Quote the argument so an empty value can't silently widen the command.
  docker logs "$1" | pbcopy
}
|
||||
|
||||
# Params: container ID/name, human date or relative period
|
||||
# Show a container's logs since a point in time.
# Params: container ID/name, human date or relative period (for --since)
function dls() {
  docker logs "$1" --since "$2"
}
|
||||
|
|
@ -1,16 +1,3 @@
|
|||
alias ts-resolve-host="tsunami variables resolve --unit-type host "
|
||||
|
||||
# Show tsunami variables.
# $1/$2 - passed straight through to `tsunami variables show`.
function ts-variables-show() {
  tsunami variables show $1 $2
}
# Show the change history of a tsunami variable.
# $1/$2 - passed straight through to `tsunami variables history`.
function ts-variables-show-version() {
  tsunami variables history $1 $2
}
|
||||
|
||||
alias tsunami-resolve-host="ts-resolve-host"
|
||||
alias tsunami-variables-show="ts-variables-show"
|
||||
alias tsunami-variables-show-version="ts-variables-show-version"
|
||||
|
||||
# to avoid slow shells, we do it manually
|
||||
function kubectl() {
|
||||
if ! type __start_kubectl >/dev/null 2>&1; then
|
||||
|
|
@ -19,8 +6,9 @@ function kubectl() {
|
|||
|
||||
command kubectl "$@"
|
||||
}
|
||||
|
||||
function kube-ctx-show() {
|
||||
echo "`kube ctx -c` • `kc ns -c`"
|
||||
echo "`kubectl ctx -c` • `kubectl ns -c`"
|
||||
}
|
||||
|
||||
alias show-kube-ctx="kube-ctx-show"
|
||||
|
|
@ -32,12 +20,11 @@ function kube-list-local-contexts() {
|
|||
|
||||
alias kc-list-local-contexts="kube-list-local-contexts"
|
||||
|
||||
# List the names of all production-labelled GKE clusters in the
# gke-xpn-1 project.
function kube-list-prod-contexts() {
  gcloud container clusters list --project=gke-xpn-1 --filter="resourceLabels[env]=production" --format="value(name)"
}
|
||||
|
||||
alias kc-list-prod-contexts="kube-list-prod-contexts"
|
||||
|
||||
alias kc="kubectl"
|
||||
alias k="kubectl"
|
||||
alias mk="minikube"
|
||||
alias kube-list-contexts="kubectl config get-contexts"
|
||||
|
||||
alias kc-site="kubectl-site"
|
||||
|
|
@ -61,6 +61,14 @@ function take() {
|
|||
fi
|
||||
}
|
||||
|
||||
alias gdi="git diff --cached "
|
||||
alias gdc="git diff --cached "
|
||||
|
||||
# Params: branch A and branch B to be diffed
|
||||
# Diff two git branches.
# NOTE: shadows the GNU debugger binary `gdb`; use `command gdb` for it.
# Params: branch A and branch B to be diffed
function gdb() {
  # Quote so branch names survive word-splitting/globbing.
  git diff "$1".."$2"
}
|
||||
|
||||
#
|
||||
# Get the value of an alias.
|
||||
#
|
||||
|
|
@ -264,6 +272,38 @@ lsg() {
|
|||
l | grep -iE "$1"
|
||||
}
|
||||
|
||||
# Grep aliases AND functions for a pattern, paging the combined result.
# $1 - case-insensitive pattern
function alg() {
  # Fix: use mktemp instead of the predictable /tmp/alg.$$ name, and
  # quote/guard the pattern so leading dashes can't become grep options.
  local fn
  fn=$(mktemp) || return 1
  echo -e "\nAliases ———————" > "$fn"
  alias | grep -i -- "$1" >> "$fn"
  echo -e "\nFunctions ———————" >> "$fn"
  functions | grep -i -- "$1" >> "$fn"
  bat "$fn"
  rm -f "$fn"
}
|
||||
|
||||
alias agr="alg"
|
||||
alias alias-grep="alg"
|
||||
|
||||
# These need to be here since they're required by gfind*
|
||||
alias ag="/usr/local/homebrew/bin/ag --ignore '*.svg' --ignore '*.xlt' --ignore '*.tsx' --ignore '*.js' --ignore '*.snap' --ignore '*.json' --ignore '*.dat' --ignore '*.builds' --ignore '*.tsv' --ignore '*.csv' --ignore '*.lock' --ignore '*.patch' --ignore '*.sum' --pager=bat"
|
||||
alias ag-no-pager="/usr/local/homebrew/bin/ag --ignore '*.svg' --ignore '*.xlt' --ignore '*.tsx' --ignore '*.js' --ignore '*.snap' --ignore '*.json' --ignore '*.dat' --ignore '*.builds' --ignore '*.tsv' --ignore '*.csv' --ignore '*.lock' --ignore '*.patch' --ignore '*.sum'"
|
||||
alias "git-grep"="git \grep"
|
||||
|
||||
# Print a horizontal rule, two blank lines, and another rule — a visual
# separator for terminal scrollback.
function make-break() {
  # printf is predictable everywhere; echo -e's escape handling is not.
  printf '%s\n\n%s\n\n' \
    "——————————————————————————————————————————" \
    "——————————————————————————————————————————"
}
|
||||
|
||||
# Spits out a page of alternating white lines (hypens or thereabouts)
|
||||
# Spits out a page of alternating white lines (hyphens or thereabouts):
# nine separator blocks in a row.
function page-break() {
  local n
  for n in 1 2 3 4 5 6 7 8 9; do
    make-break
  done
}
|
||||
|
||||
alias make-big-break=page-break
|
||||
|
||||
# the ol' gfind. Doesn't take a file pattern.
|
||||
function gfind-all() {
|
||||
# fd -H -t f . -x grep --color=always -Hi ${1}
|
||||
|
|
@ -271,27 +311,18 @@ function gfind-all() {
|
|||
# $1 is search term, $2 is path
|
||||
# rg --no-ignore --hidden "$@"
|
||||
# even better is ag / silver searcher https://github.com/ggreer/the_silver_searcher
|
||||
ag -a --pager less "$@"
|
||||
ag-no-pager --ignore-case -a --pager bat "$@"
|
||||
}
|
||||
|
||||
# the ol' gfind. Doesn't take a file pattern.
|
||||
function gfind() {
|
||||
# fd -t f . -x grep --color=always -Hi ${1}
|
||||
ag --pager less "$@"
|
||||
ag-no-pager --ignore-case --pager bat "$@"
|
||||
}
|
||||
|
||||
# Print out the matches only
|
||||
# Print only the names of files whose contents match $1 (case-insensitive).
# TODO make this a lot less shit e.g. don't search .git . Surely rg has
# the ability to do this.
function gfindf() {
  # -e protects patterns starting with '-'; quoting protects spaces.
  find . -type f -exec grep --color=always -Hil -e "$1" {} \;
}
|
||||
|
||||
# Set up a heroic checkout: copy the test and GAE configs into the cwd,
# list the resulting yaml files, and create a logs dir.
# NOTE(review): the trailing `ack -l $1` looks unrelated to the copies
# above — confirm it belongs in this function.
function heroic-repo-configure() {
  cp ${HOME}/src/spotify/prism-tools/heroic-test.yml ./heroic-guc.yml
  cp ${HOME}/src/spotify/prism-tools/heroic-api-gae.yml ./heroic-gae.yml
  ls -l | grep -E 'heroic.*yml|heroic.*yaml'
  mkdir logs
  ack -l $1 --pager=bat --color
}
|
||||
|
||||
# function h() {
|
||||
|
|
@ -309,6 +340,10 @@ function agl() {
|
|||
ag --pager less "$@"
|
||||
}
|
||||
|
||||
# Show open network sockets matching $1, keeping LISTEN rows and rows
# containing ':' so the header/context stays readable.
function lsofgr() {
  sudo lsof -i -P | grep -E "$1|LISTEN" | grep -E "$1|:"
}
|
||||
|
||||
function kill-em-all() {
|
||||
NAME=$1
|
||||
|
||||
|
|
@ -320,9 +355,9 @@ function kill-em-all() {
|
|||
}
|
||||
|
||||
function dateline() {
|
||||
echo "––––––––––––"
|
||||
echo -e "\n––––––––––––"
|
||||
date
|
||||
echo "––––––––––––"
|
||||
echo -e "––––––––––––\n"
|
||||
}
|
||||
|
||||
function clean-slate() {
|
||||
|
|
@ -333,10 +368,32 @@ function clean-slate() {
|
|||
alias clr=clean-slate
|
||||
alias cls=clean-slate
|
||||
|
||||
function psgr() {
|
||||
ps auwwwwx | grep -v 'grep ' | grep -E "%CPU|$1"
|
||||
# Show the last N shell history entries.
# $1 - optional line count; defaults to 35.
function h() {
  NUM_LINES=${1:-35}
  \history -$NUM_LINES
}
|
||||
|
||||
# ps listing filtered case-insensitively by $1, with the header row kept
# (the "TIME CMD" alternative matches the ps header line).
function psgr() {
  ps -e | grep -v 'grep ' | grep -iE "TIME CMD|$1"
}
|
||||
|
||||
# Sort on the command
|
||||
# Like psgr, but prints its own header and sorts matches on the command
# column (field 4 of `ps -e` output).
function psgr-sorted() {
  echo " PID TTY TIME CMD"
  ps -e | grep -v 'grep ' | grep -iE "$1" | sort -k 4
}
|
||||
|
||||
# Show processes listening on port $1 (plus the lsof header row).
function lsofgr-listen() {
  echo "Searching for processes listening on port $1..."
  #echo "ℹ️ lsof can take up to 2 minutes to complete"
  # SECURITY FIX: the original piped a hardcoded password into
  # `sudo --stdin`. Never keep credentials in shell config — let sudo
  # prompt, or grant NOPASSWD for lsof in sudoers if this must be
  # non-interactive.
  sudo lsof -i -P | grep -E "COMMAND|.*:$1.*LISTEN"
}
|
||||
alias port-grep=lsofgr
|
||||
|
||||
# Open $1 in Visual Studio Code via its bundled CLI shim.
function edit() {
  # Fix: quote $1 — unquoted, any path containing spaces word-split.
  /Applications/Visual\ Studio\ Code.app/Contents/Resources/app/bin/code "$1"
}
|
||||
|
|
@ -351,6 +408,12 @@ function dir-sizes() {
|
|||
du -sh ./* | sort -h
|
||||
}
|
||||
|
||||
# Call from within the source TLD
|
||||
# Fetch sources and javadoc for every Maven dependency so IntelliJ can
# attach them. Call from within the source TLD.
function download-sources-intellij() {
  mvn dependency:sources
  mvn dependency:resolve -Dclassifier=javadoc
}
|
||||
|
||||
|
||||
function ssh-ds718() {
|
||||
ssh -p 658 pskadmin@192.168.2.7
|
||||
|
|
@ -358,28 +421,12 @@ function ssh-ds718() {
|
|||
|
||||
alias git-stash-list-all='gitk `git stash list --pretty=format:%gd`'
|
||||
|
||||
# Show the verbose `git branch -vv` line for the current branch's
# origin-tracking entry.
function master-show-protection() {
  git branch -vv | grep "origin/`git branch --show-current`"
}
|
||||
|
||||
# Show the verbose `git branch -vv` line for the current branch.
function git-show-branch() {
  # $() over backticks; quoted so branch names with regex chars or
  # slashes don't word-split or surprise grep as multiple args.
  git branch -vv | grep "$(git branch --show-current)"
}
|
||||
|
||||
# Page through a full patch for every stash entry, one after another.
function git-show-all-stashes() {
  echo "Hit 'q' to go to next file"
  echo ""
  # awk splits each "stash@{N}: ..." line on ':' and shells out to show
  # that stash's patch, padded with blank lines between entries.
  git stash list | awk -F: '{ print "\n\n\n\n"; print $0; print "\n\n"; system("git stash show -p " $1); }'
}
|
||||
|
||||
# kill most recent container instance
# Fix: the original nested single quotes ('{{.Names}}') inside a
# single-quoted alias body, terminating it early; use double quotes for
# the format argument instead.
alias docker-kill-latest='docker ps -l --format="{{.Names}}" | xargs docker kill'
|
||||
|
||||
# stop all containers
|
||||
docker-stop-all-containers () {
|
||||
docker container stop -t 2 $(docker container ls -q) 2>/dev/null
|
||||
}
|
||||
|
||||
# Find files larger than 1GB under the cwd, smallest first (sort on the
# size column of `find -ls` output).
function find-gig-files() {
  find . -size +1G -ls | sort -k7n # Find files larger than 1GB and then order the list by the file size
}
|
||||
|
|
@ -403,25 +450,63 @@ function start-cloud-storage() {
|
|||
) &
|
||||
}
|
||||
|
||||
# Out of action - needs work
|
||||
# function tree() {
|
||||
# DIR=$1 ;
|
||||
# shift # kubectl create -f hello-k8s-replicaset.yaml
|
||||
# ps $1 off
|
||||
# /usr/local/homebrew/bin/tree -a $DIR | colorize_less "$@"
|
||||
# }
|
||||
|
||||
function tree() {
|
||||
DIR=$1 ;
|
||||
shift # pops $1 off
|
||||
/usr/local/homebrew/bin/tree -a $DIR | colorize_less "$@"
|
||||
# Print five blank lines to push old output out of view.
function space() {
  # One printf instead of five chained echos.
  printf '\n\n\n\n\n'
}
|
||||
|
||||
alias s="space"
|
||||
|
||||
# Open the four job-search Google Docs in the default browser, in the
# same order as before.
function open-job-docs() {
  local doc
  for doc in \
    'https://docs.google.com/document/d/1O81om1F14fNhWhqt5VpIULfiCHmNXPkFcMoED09cidU/edit' \
    'https://docs.google.com/document/d/1pBJfqcWhn9Wz6p6wPpPrk6_9MdGG_24qmpluz4pM3AY/edit' \
    'https://docs.google.com/document/d/1nj_MidYJEDhk1uzhPFOZ6uFdXfZY2hdrV0_f8zJ4Lgs/edit' \
    'https://docs.google.com/document/d/1gPNcLjrZJnJnWy0-k5SqpgP4VAUZ_ikRLR9qYEB50M0/edit'
  do
    open "$doc"
  done
}
||||
|
||||
# Remove a Go package's compiled artifacts AND its source tree from the
# first GOPATH entry.
# $1 - import path, e.g. github.com/user/repo
# NOTE(review): destructive (rm -rf under $GOPATH/src and $GOPATH/pkg);
# the depth checks below only handle 2- and 3-segment import paths.
goclean() {
  local pkg=$1; shift || return 1
  local ost   # os_arch suffix used under $GOPATH/pkg, e.g. linux_amd64
  local cnt   # the '/' chars of $pkg; its length = path depth - 1
  local scr   # NOTE(review): declared but never used below

  # Clean removes object files from package source directories (ignore error)
  go clean -i $pkg &>/dev/null

  # Set local variables
  # NOTE(review): the ';' after ost="$(uname)" ends the && chain, so the
  # lowercase+"_amd64" assignment and cnt always run, even when the arch
  # test fails — confirm this is the intended behavior on non-x86_64.
  [[ "$(uname -m)" == "x86_64" ]] \
    && ost="$(uname)";ost="${ost,,}_amd64" \
    && cnt="${pkg//[^\/]}"

  # Delete the source directory and compiled package directory(ies)
  if (("${#cnt}" == "2")); then
    rm -rf "${GOPATH%%:*}/src/${pkg%/*}"
    rm -rf "${GOPATH%%:*}/pkg/${ost}/${pkg%/*}"
  elif (("${#cnt}" > "2")); then
    rm -rf "${GOPATH%%:*}/src/${pkg%/*/*}"
    rm -rf "${GOPATH%%:*}/pkg/${ost}/${pkg%/*/*}"
  fi
}
||||
|
||||
# Open every .app bundle found directly under the given directory.
# $1 - directory containing Chrome app shims
# NOTE(review): the source carried both an old and a new variant of the
# open line (diff residue); the redirected variant is kept so nohup
# output doesn't litter the cwd.
function _open-all-chrome-apps() {
  local APP
  for APP in "${1}"/*.app; do
    echo "Opening $APP ..."
    nohup open -a "$APP" > /dev/null 2>&1 &
  done
}
||||
|
||||
# Open all Chrome apps from both Dropbox-synced app directories.
# NOTE(review): diff residue carried quoted AND unquoted duplicates of
# each call; the quoted pair is kept — the paths contain spaces, so the
# unquoted calls would word-split and fail.
function open-all-chrome-apps() {
  CHROME_APP_DIR='/Users/peter/Dropbox (Personal)/_Settings/Chrome Apps/Chrome Apps.localized/'
  _open-all-chrome-apps "$CHROME_APP_DIR"
  CHROME_APP_DIR='/Users/peter/Dropbox (Personal)/_Settings/Chrome/Chrome Apps/Chrome Apps.localized/'
  _open-all-chrome-apps "$CHROME_APP_DIR"
}
|
||||
|
||||
function post-boot-tasks() {
|
||||
|
|
@ -439,21 +524,18 @@ function kill-cloud-storage() {
|
|||
killall -v "FinderSyncExtension" -SIGKILL &
|
||||
}
|
||||
|
||||
# For photos, pictures, DS718
|
||||
function organise-into-dated-dirs() {
|
||||
if [ $# -eq 0 ]
|
||||
then
|
||||
echo "Please supply an extension e.g. mov or mp4"
|
||||
return
|
||||
fi
|
||||
# Open explainshell.com for the given command string in a new Safari
# document (macOS only).
# $1 - the command line to explain
# NOTE(review): $1 is interpolated into the URL unencoded; commands with
# spaces or '&' should be URL-encoded first — TODO confirm and fix.
# (Reconstructed from diff residue that interleaved this function with a
# removed copy of organise-into-dated-dirs.)
function explain-command {
  command="https://explainshell.com/explain?cmd=${1}"
  osascript <<EOD
tell application "Safari" to make new document with properties {URL:"$command"}
return
EOD
}
|
||||
|
||||
alias explainer="explain-command"
|
||||
alias explain-args="explain-command"
|
||||
|
||||
### peco functions ###
|
||||
function peco-directories() {
|
||||
local current_lbuffer="$LBUFFER"
|
||||
|
|
@ -507,6 +589,9 @@ function peco-files() {
|
|||
fi
|
||||
}
|
||||
|
||||
# Include Rune funcs
|
||||
. $HOME/.oh-my-zsh/rune-shell-funcs.zsh
|
||||
|
||||
zle -N peco-directories
|
||||
bindkey '^Xf' peco-directories
|
||||
zle -N peco-files
|
||||
|
|
|
|||
1
liquidprompt
Symbolic link
1
liquidprompt
Symbolic link
|
|
@ -0,0 +1 @@
|
|||
/Users/peter/src/liquidprompt
|
||||
1
liquidpromptrc
Symbolic link
1
liquidpromptrc
Symbolic link
|
|
@ -0,0 +1 @@
|
|||
../.config/liquidpromptrc
|
||||
|
|
@ -15,10 +15,6 @@ fi
|
|||
# Homebrew mentioned this so I'm doing it
|
||||
export SCALA_HOME=/usr/local/homebrew/opt/scala/idea
|
||||
|
||||
# From Helios user guide https://github.com/spotify/helios/blob/master/docs/user_manual.md#using-the-helios-cli
|
||||
alias helios='helios -z http://localhost:5801'
|
||||
|
||||
|
||||
# Initializes Oh My Zsh
|
||||
|
||||
# add a function path
|
||||
|
|
|
|||
3
rune-env-vars.sh
Normal file
3
rune-env-vars.sh
Normal file
|
|
@ -0,0 +1,3 @@
|
|||
export TORTILLA_INTEGRATION_LOCALSTACK_ADDRESS=localhost:54566
|
||||
export TORTILLA_INTEGRATION_INFLUXDB_ADDRESS=localhost:58886
|
||||
export TORTILLA_INTEGRATION_REDIS_ADDRESS=localhost:56379
|
||||
71
rune-shell-funcs.zsh
Normal file
71
rune-shell-funcs.zsh
Normal file
|
|
@ -0,0 +1,71 @@
|
|||
##############################
|
||||
### Rune Labs Shell Functions
|
||||
##############################
|
||||
|
||||
alias output-blank-lines='echo "\n\n\n\n\n\n\n\n\n\n"'
|
||||
|
||||
alias mti="output-blank-lines && make test-integration"
|
||||
alias mtil="output-blank-lines && make test-integration-local"
|
||||
alias mtic="output-blank-lines && make test-integration 2>&1 | tee ./make-test-integration.log && code ./make-test-integration.log"
|
||||
alias mtilc="output-blank-lines && make test-integration-local 2>&1 | tee ./make-test-integration-local.log && code ./make-test-integration-local.log"
|
||||
|
||||
# Kill locally-running tortilla test servers (Go test binaries under
# /var/folders/bh/).
# Fixes: the original was missing '=' after the alias name (so it defined
# nothing), and left $2 unescaped inside double quotes, which would have
# expanded at definition time instead of inside awk at run time.
alias kill-tortillas="psgr '/var/folders/bh/' | grep 'server' | grep -v PID | awk '{ print \$2 };' | xargs kill -9"
|
||||
|
||||
##########################
|
||||
### Building funcs
|
||||
##########################
|
||||
|
||||
export RUNE_SRC=${HOME}/src/rune
|
||||
export GO_MONO=${RUNE_SRC}/go-mono
|
||||
# Params: the command to execute in each source sub dir
|
||||
# Run a command in each Go source dir under $GO_MONO.
# Params: the command to execute in each source sub dir
# Stops at the first failing directory (as its message always claimed).
iterate-source-dirs() {
  page-break
  cd "$GO_MONO" || return 1

  TLD=$(pwd)

  for DIR in gorune carrotstream tortilla; do
    echo "DIR: $DIR"
    cd "$DIR" || return 1
    "$@"            # fix: "$@" preserves quoted arguments; $* re-split them
    RESULT=$?
    if [ $RESULT -ne "0" ]; then
      echo "\n\n$* failed in $DIR, exiting."
      cd "$GO_MONO"
      return $RESULT   # fix: previously printed "exiting" but kept looping
    fi
    cd "$GO_MONO"
  done
}
|
||||
|
||||
# Thin wrappers: run one build/test command across every source dir.
go-make-mocks() { iterate-source-dirs make mocks; }

go-lint() { iterate-source-dirs golangci-lint run -v --timeout 2m0s ./...; }

go-build() { iterate-source-dirs go build ./...; }

go-make() { iterate-source-dirs make; }

go-test() { iterate-source-dirs go test ./...; }

go-make-clean() { iterate-source-dirs make clean; }

# ps listing filtered to the Rune service processes.
psgr-rune() { psgr 'taco|tortilla|broccoli|influx|carrotstream|artichoke'; }
|
||||
|
||||
# Append the prod-query ignore pattern to go-mono's .gitignore.
# NOTE(review): appends unconditionally — repeated runs duplicate the line.
gitignore-update() {
  pushd ${HOME}/src/rune/go-mono
  echo -e "\nprod-query*/" >>.gitignore
  popd
}
|
||||
1
starship.toml
Symbolic link
1
starship.toml
Symbolic link
|
|
@ -0,0 +1 @@
|
|||
/Users/peter/.config/starship.toml
|
||||
1
theme_chooser.sh
Symbolic link
1
theme_chooser.sh
Symbolic link
|
|
@ -0,0 +1 @@
|
|||
/Users/peter/.oh-my-zsh/tools/theme_chooser.sh
|
||||
1
themes/agnoster-psk.zsh-theme
Symbolic link
1
themes/agnoster-psk.zsh-theme
Symbolic link
|
|
@ -0,0 +1 @@
|
|||
../custom/themes/agnoster-psk.zsh-theme
|
||||
Loading…
Add table
Add a link
Reference in a new issue