Merge remote-tracking branch 'robbyrussell/master' into develop

Conflicts:
	.gitignore
	README.textile
	lib/aliases.zsh
	lib/functions.zsh
	lib/git.zsh
	lib/misc.zsh
	templates/zshrc.zsh-template
	tools/install.sh

Commit ced4f4decc: 111 changed files with 4273 additions and 378 deletions.
plugins/archlinux/archlinux.plugin.zsh (new normal file, 77 lines)
@@ -0,0 +1,77 @@
# Archlinux zsh aliases and functions
# Usage is also described at https://github.com/robbyrussell/oh-my-zsh/wiki/Plugins

# Look for yaourt, and add some useful functions if we have it.
if [[ -x `which yaourt` ]]; then
  upgrade () {
    yaourt -Syu
  }
  alias yaconf='yaourt -C'    # Fix all configuration files with vimdiff
  # Pacman - https://wiki.archlinux.org/index.php/Pacman_Tips
  alias yaupg='yaourt -Syu'   # Synchronize with repositories before upgrading packages that are out of date on the local system.
  alias yasu='yaourt --sucre' # Same as yaupg, but without confirmation
  alias yain='yaourt -S'      # Install specific package(s) from the repositories
  alias yains='yaourt -U'     # Install specific package not from the repositories but from a file
  alias yare='yaourt -R'      # Remove the specified package(s), retaining its configuration(s) and required dependencies
  alias yarem='yaourt -Rns'   # Remove the specified package(s), its configuration(s) and unneeded dependencies
  alias yarep='yaourt -Si'    # Display information about a given package in the repositories
  alias yareps='yaourt -Ss'   # Search for package(s) in the repositories
  alias yaloc='yaourt -Qi'    # Display information about a given package in the local database
  alias yalocs='yaourt -Qs'   # Search for package(s) in the local database
  # Additional yaourt alias examples
  if [[ -x `which abs` ]]; then
    alias yaupd='yaourt -Sy && sudo abs' # Update and refresh the local package and ABS databases against repositories
  else
    alias yaupd='yaourt -Sy'             # Update and refresh the local package database against repositories
  fi
  alias yainsd='yaourt -S --asdeps' # Install given package(s) as dependencies of another package
  alias yamir='yaourt -Syy'         # Force refresh of all package lists after updating /etc/pacman.d/mirrorlist
else
  upgrade() {
    sudo pacman -Syu
  }
fi

# Pacman - https://wiki.archlinux.org/index.php/Pacman_Tips
alias pacupg='sudo pacman -Syu'  # Synchronize with repositories before upgrading packages that are out of date on the local system.
alias pacin='sudo pacman -S'     # Install specific package(s) from the repositories
alias pacins='sudo pacman -U'    # Install specific package not from the repositories but from a file
alias pacre='sudo pacman -R'     # Remove the specified package(s), retaining its configuration(s) and required dependencies
alias pacrem='sudo pacman -Rns'  # Remove the specified package(s), its configuration(s) and unneeded dependencies
alias pacrep='pacman -Si'        # Display information about a given package in the repositories
alias pacreps='pacman -Ss'       # Search for package(s) in the repositories
alias pacloc='pacman -Qi'        # Display information about a given package in the local database
alias paclocs='pacman -Qs'       # Search for package(s) in the local database
# Additional pacman alias examples
if [[ -x `which abs` ]]; then
  alias pacupd='sudo pacman -Sy && sudo abs' # Update and refresh the local package and ABS databases against repositories
else
  alias pacupd='sudo pacman -Sy'             # Update and refresh the local package database against repositories
fi
alias pacinsd='sudo pacman -S --asdeps' # Install given package(s) as dependencies of another package
alias pacmir='sudo pacman -Syy'         # Force refresh of all package lists after updating /etc/pacman.d/mirrorlist

# https://bbs.archlinux.org/viewtopic.php?id=93683
paclist() {
  sudo pacman -Qei $(pacman -Qu|cut -d" " -f 1)|awk ' BEGIN {FS=":"}/^Name/{printf("\033[1;36m%s\033[1;37m", $2)}/^Description/{print $2}'
}

alias paclsorphans='sudo pacman -Qdt'
alias pacrmorphans='sudo pacman -Rs $(pacman -Qtdq)'

pacdisowned() {
  tmp=${TMPDIR-/tmp}/pacman-disowned-$UID-$$
  db=$tmp/db
  fs=$tmp/fs

  mkdir "$tmp"
  trap 'rm -rf "$tmp"' EXIT

  pacman -Qlq | sort -u > "$db"

  find /bin /etc /lib /sbin /usr \
    ! -name lost+found \
    \( -type d -printf '%p/\n' -o -print \) | sort > "$fs"

  comm -23 "$fs" "$db"
}
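To actually pick these aliases up, the plugin has to be listed in the oh-my-zsh plugins array. A minimal ~/.zshrc sketch under the usual oh-my-zsh layout (the plugin list shown here is only an example, not part of the commit):

    # ~/.zshrc
    plugins=(git archlinux)
    source $ZSH/oh-my-zsh.sh

    # afterwards, for example:
    yain firefox     # runs: yaourt -S firefox
    pacrep firefox   # runs: pacman -Si firefox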
@@ -1,3 +1,9 @@
if [ -f `brew --prefix`/etc/autojump ]; then
. `brew --prefix`/etc/autojump
if [ $commands[autojump] ]; then # check if autojump is installed
if [ -f /usr/share/autojump/autojump.zsh ]; then # debian and ubuntu package
. /usr/share/autojump/autojump.zsh
elif [ -f /etc/profile.d/autojump.zsh ]; then # manual installation
. /etc/profile.d/autojump.zsh
elif [ $commands[brew] -a -f `brew --prefix`/etc/autojump ]; then # mac os x with brew
. `brew --prefix`/etc/autojump
fi
fi
plugins/battery/battery.plugin.zsh (new normal file, 20 lines)
@@ -0,0 +1,20 @@
if [[ $(acpi 2>/dev/null | grep -c '^Battery.*Discharging') -gt 0 ]]; then
  function battery_pct_remaining() { echo "$(acpi | cut -f2 -d ',' | tr -cd '[:digit:]')" }
  function battery_time_remaining() { echo $(acpi | cut -f3 -d ',') }
  function battery_pct_prompt() {
    b=$(battery_pct_remaining)
    if [ $b -gt 50 ]; then
      color='green'
    elif [ $b -gt 20 ]; then
      color='yellow'
    else
      color='red'
    fi
    echo "%{$fg[$color]%}[$(battery_pct_remaining)%%]%{$reset_color%}"
  }
else
  error_msg='no battery'
  function battery_pct_remaining() { echo $error_msg }
  function battery_time_remaining() { echo $error_msg }
  function battery_pct_prompt() { echo '' }
fi
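battery_pct_prompt is intended to be embedded in a prompt. A small sketch of wiring it into the right-hand prompt (the RPROMPT assignment is illustrative and assumes the $fg color array is already available, as it is under oh-my-zsh):

    setopt prompt_subst                # re-evaluate the command substitution on every prompt
    RPROMPT='$(battery_pct_prompt)'    # shows e.g. [87%] in green/yellow/red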
82
plugins/bundler/_bundler
Normal file
82
plugins/bundler/_bundler
Normal file
|
|
@ -0,0 +1,82 @@
|
|||
#compdef bundle
|
||||
|
||||
local curcontext="$curcontext" state line _gems _opts ret=1
|
||||
|
||||
_arguments -C -A "-v" -A "--version" \
|
||||
'(- 1 *)'{-v,--version}'[display version information]' \
|
||||
'1: :->cmds' \
|
||||
'*:: :->args' && ret=0
|
||||
|
||||
case $state in
|
||||
cmds)
|
||||
_values "bundle command" \
|
||||
"install[Install the gems specified by the Gemfile or Gemfile.lock]" \
|
||||
"update[Update dependencies to their latest versions]" \
|
||||
"package[Package the .gem files required by your application]" \
|
||||
"exec[Execute a script in the context of the current bundle]" \
|
||||
"config[Specify and read configuration options for bundler]" \
|
||||
"check[Determine whether the requirements for your application are installed]" \
|
||||
"list[Show all of the gems in the current bundle]" \
|
||||
"show[Show the source location of a particular gem in the bundle]" \
|
||||
"console[Start an IRB session in the context of the current bundle]" \
|
||||
"open[Open an installed gem in the editor]" \
|
||||
"viz[Generate a visual representation of your dependencies]" \
|
||||
"init[Generate a simple Gemfile, placed in the current directory]" \
|
||||
"gem[Create a simple gem, suitable for development with bundler]" \
|
||||
"help[Describe available tasks or one specific task]"
|
||||
ret=0
|
||||
;;
|
||||
args)
|
||||
case $line[1] in
|
||||
help)
|
||||
_values 'commands' \
|
||||
'install' \
|
||||
'update' \
|
||||
'package' \
|
||||
'exec' \
|
||||
'config' \
|
||||
'check' \
|
||||
'list' \
|
||||
'show' \
|
||||
'console' \
|
||||
'open' \
|
||||
'viz' \
|
||||
'init' \
|
||||
'gem' \
|
||||
'help' && ret=0
|
||||
;;
|
||||
install)
|
||||
_arguments \
|
||||
'(--no-color)--no-color[disable colorization in output]' \
|
||||
'(--local)--local[do not attempt to connect to rubygems.org]' \
|
||||
'(--quiet)--quiet[only output warnings and errors]' \
|
||||
'(--gemfile)--gemfile=-[use the specified gemfile instead of Gemfile]:gemfile' \
|
||||
'(--system)--system[install to the system location]' \
|
||||
'(--deployment)--deployment[install using defaults tuned for deployment environments]' \
|
||||
'(--frozen)--frozen[do not allow the Gemfile.lock to be updated after this install]' \
|
||||
'(--path)--path=-[specify a different path than the system default]:path:_files' \
|
||||
'(--binstubs)--binstubs=-[generate bin stubs for bundled gems to ./bin]:directory:_files' \
|
||||
'(--without)--without=-[exclude gems that are part of the specified named group]:groups'
|
||||
ret=0
|
||||
;;
|
||||
exec)
|
||||
_normal && ret=0
|
||||
;;
|
||||
(open|show)
|
||||
_gems=( $(bundle show 2> /dev/null | sed -e '/^ \*/!d; s/^ \* \([^ ]*\) .*/\1/') )
|
||||
if [[ $_gems != "" ]]; then
|
||||
_values 'gems' $_gems && ret=0
|
||||
fi
|
||||
;;
|
||||
*)
|
||||
_opts=( $(bundle help $line[1] | sed -e '/^ \[-/!d; s/^ \[\(-[^=]*\)=.*/\1/') )
|
||||
_opts+=( $(bundle help $line[1] | sed -e '/^ -/!d; s/^ \(-.\), \[\(-[^=]*\)=.*/\1 \2/') )
|
||||
if [[ $_opts != "" ]]; then
|
||||
_values 'options' $_opts && ret=0
|
||||
fi
|
||||
;;
|
||||
esac
|
||||
;;
|
||||
esac
|
||||
|
||||
return ret
|
||||
|
@@ -1,3 +1,42 @@
alias be="bundle exec"
alias bi="bundle install"
alias bl="bundle list"
alias bp="bundle package"
alias bu="bundle update"

# The following is based on https://github.com/gma/bundler-exec

bundled_commands=(annotate cap capify cucumber ey foreman guard heroku middleman nanoc rackup rainbows rake rspec ruby shotgun spec spork thin thor unicorn unicorn_rails)

## Functions

_bundler-installed() {
  which bundle > /dev/null 2>&1
}

_within-bundled-project() {
  local check_dir=$PWD
  while [ $check_dir != "/" ]; do
    [ -f "$check_dir/Gemfile" ] && return
    check_dir="$(dirname $check_dir)"
  done
  false
}

_run-with-bundler() {
  if _bundler-installed && _within-bundled-project; then
    bundle exec $@
  else
    $@
  fi
}

## Main program
for cmd in $bundled_commands; do
  eval "function bundled_$cmd () { _run-with-bundler $cmd \$@ }"
  alias $cmd=bundled_$cmd

  if which _$cmd > /dev/null 2>&1; then
    compdef _$cmd bundled_$cmd=$cmd
  fi
done
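The net effect of the wrapper loop above is that the listed Ruby commands transparently become bundle exec invocations whenever a Gemfile exists in the current directory or any parent; a rough illustration (the project path is made up):

    cd ~/src/some-rails-app    # a directory tree containing a Gemfile
    rake db:migrate            # actually runs: bundle exec rake db:migrate
    cd /tmp
    rake --version             # no Gemfile found walking up, so plain rake runs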
plugins/cake/cake.plugin.zsh (new normal file, 36 lines)
@@ -0,0 +1,36 @@
# Set this to 1 if you want to cache the tasks
_cake_cache_task_list=1

# Cache filename
_cake_task_cache_file='.cake_task_cache'

_cake_get_target_list () {
  cake | grep '^cake ' | sed -e "s/cake \([^ ]*\) .*/\1/" | grep -v '^$'
}

_cake_does_target_list_need_generating () {
  if [ ${_cake_cache_task_list} -eq 0 ]; then
    return 1;
  fi

  if [ ! -f ${_cake_task_cache_file} ]; then return 0;
  else
    # stat -f%m is the BSD/macOS form for a file's modification time
    accurate=$(stat -f%m $_cake_task_cache_file)
    changed=$(stat -f%m Cakefile)
    return $(expr $accurate '>=' $changed)
  fi
}

_cake () {
  if [ -f Cakefile ]; then
    if _cake_does_target_list_need_generating; then
      _cake_get_target_list > ${_cake_task_cache_file}
      compadd `cat ${_cake_task_cache_file}`
    else
      compadd `_cake_get_target_list`
    fi
  fi
}

compdef _cake cake
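With caching enabled as above, the task list lives in .cake_task_cache next to the Cakefile and is refreshed automatically when the Cakefile is newer than the cache; deleting the cache file is another way to force a refresh (a manual step, not something the plugin does itself):

    rm -f .cake_task_cache   # the next "cake <TAB>" regenerates the task list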
plugins/cloudapp/cloudapp.plugin.zsh (new normal file, 2 lines)
@@ -0,0 +1,2 @@
#!/bin/zsh
alias cloudapp=$ZSH/plugins/cloudapp/cloudapp.rb
60
plugins/cloudapp/cloudapp.rb
Executable file
60
plugins/cloudapp/cloudapp.rb
Executable file
|
|
@ -0,0 +1,60 @@
|
|||
#!/usr/bin/env ruby
|
||||
#
|
||||
# cloudapp
|
||||
# Zach Holman / @holman
|
||||
#
|
||||
# Uploads a file from the command line to CloudApp, drops it into your
|
||||
# clipboard (on a Mac, at least).
|
||||
#
|
||||
# Example:
|
||||
#
|
||||
# cloudapp drunk-blake.png
|
||||
#
|
||||
# This requires Aaron Russell's cloudapp_api gem:
|
||||
#
|
||||
# gem install cloudapp_api
|
||||
#
|
||||
# Requires you set your CloudApp credentials in ~/.cloudapp as a simple file of:
|
||||
#
|
||||
# email
|
||||
# password
|
||||
|
||||
require 'rubygems'
|
||||
begin
|
||||
require 'cloudapp_api'
|
||||
rescue LoadError
|
||||
puts "You need to install cloudapp_api: gem install cloudapp_api"
|
||||
exit!(1)
|
||||
end
|
||||
|
||||
config_file = "#{ENV['HOME']}/.cloudapp"
|
||||
unless File.exist?(config_file)
|
||||
puts "You need to type your email and password (one per line) into "+
|
||||
"`~/.cloudapp`"
|
||||
exit!(1)
|
||||
end
|
||||
|
||||
email,password = File.read(config_file).split("\n")
|
||||
|
||||
class HTTParty::Response
|
||||
# Apparently HTTPOK.ok? IS NOT OKAY WTFFFFFFFFFFUUUUUUUUUUUUUU
|
||||
# LETS MONKEY PATCH IT I FEEL OKAY ABOUT IT
|
||||
def ok? ; true end
|
||||
end
|
||||
|
||||
if ARGV[0].nil?
|
||||
puts "You need to specify a file to upload."
|
||||
exit!(1)
|
||||
end
|
||||
|
||||
CloudApp.authenticate(email,password)
|
||||
url = CloudApp::Item.create(:upload, {:file => ARGV[0]}).url
|
||||
|
||||
# Say it for good measure.
|
||||
puts "Uploaded to #{url}."
|
||||
|
||||
# Get the embed link.
|
||||
url = "#{url}/#{ARGV[0].split('/').last}"
|
||||
|
||||
# Copy it to your (Mac's) clipboard.
|
||||
`echo '#{url}' | tr -d "\n" | pbcopy`
|
||||
|
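As the script header above explains, cloudapp.rb reads your credentials from ~/.cloudapp, a two-line file with the email on the first line and the password on the second; a sketch of setting it up (the values are placeholders):

    printf 'jane@example.com\ns3cretpassw0rd\n' > ~/.cloudapp
    chmod 600 ~/.cloudapp
    cloudapp drunk-blake.png   # uploads the file and copies the share URL to the clipboard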
@@ -1,13 +0,0 @@
# Aliases
alias as="aptitude -F \"* %p -> %d \n(%v/%V)\" \
                --no-gui --disable-columns search"      # search package
alias ad="sudo apt-get update"                          # update packages lists
alias au="sudo apt-get update && \
                sudo apt-get dselect-upgrade"           # upgrade packages
alias ai="sudo apt-get install"                         # install package
alias ar="sudo apt-get remove --purge && \
                sudo apt-get autoremove --purge"        # remove package
alias ap="apt-cache policy"                             # apt policy
alias av="apt-cache show"                               # show package info
alias acs="apt-cache search"                            # search package
alias ac="sudo apt-get clean && sudo apt-get autoclean" # clean apt cache
|
@ -1,53 +1,175 @@
|
|||
# https://github.com/dbbolton/
|
||||
# Authors:
|
||||
# https://github.com/AlexBio
|
||||
# https://github.com/dbb
|
||||
#
|
||||
# Debian-related zsh aliases and functions for zsh
|
||||
|
||||
# Use aptitude if installed, or apt-get if not.
|
||||
# You can just set apt_pref='apt-get' to override it.
|
||||
if [[ -e $( which aptitude ) ]]; then
|
||||
apt_pref='aptitude'
|
||||
else
|
||||
apt_pref='apt-get'
|
||||
fi
|
||||
|
||||
# Use sudo by default if it's installed
|
||||
if [[ -e $( which sudo ) ]]; then
|
||||
use_sudo=1
|
||||
fi
|
||||
|
||||
# Aliases ###################################################################
|
||||
# These are for more obscure uses of apt-get and aptitude that aren't covered
|
||||
# below.
|
||||
alias ag='apt-get'
|
||||
alias at='aptitude'
|
||||
|
||||
# Some self-explanatory aliases
|
||||
alias afs='apt-file search --regexp'
|
||||
alias acs="apt-cache search"
|
||||
alias aps='aptitude search'
|
||||
alias apsrc='apt-get source'
|
||||
alias apv='apt-cache policy'
|
||||
alias as="aptitude -F \"* %p -> %d \n(%v/%V)\" \
|
||||
--no-gui --disable-columns search" # search package
|
||||
|
||||
alias apdg='su -c "aptitude update && aptitude safe-upgrade"'
|
||||
alias apud='su -c "aptitude update"'
|
||||
alias apug='su -c "aptitude safe-upgrade"'
|
||||
# apt-file
|
||||
alias afs='apt-file search --regexp'
|
||||
|
||||
|
||||
# These are apt-get only
|
||||
alias asrc='apt-get source'
|
||||
alias ap='apt-cache policy'
|
||||
|
||||
# superuser operations ######################################################
|
||||
if [[ $use_sudo -eq 1 ]]; then
|
||||
# commands using sudo #######
|
||||
alias aac="sudo $apt_pref autoclean"
|
||||
alias abd="sudo $apt_pref build-dep"
|
||||
alias ac="sudo $apt_pref clean"
|
||||
alias ad="sudo $apt_pref update"
|
||||
alias adg="sudo $apt_pref update && sudo $apt_pref upgrade"
|
||||
alias adu="sudo $apt_pref update && sudo $apt_pref dist-upgrade"
|
||||
alias afu='sudo apt-file update'
|
||||
alias ag="sudo $apt_pref upgrade"
|
||||
alias ai="sudo $apt_pref install"
|
||||
alias ap="sudo $apt_pref purge"
|
||||
alias ar="sudo $apt_pref remove"
|
||||
|
||||
# apt-get only
|
||||
alias ads="sudo $apt_pref dselect-upgrade"
|
||||
|
||||
# Install all .deb files in the current directory.
|
||||
# Warning: you will need to put the glob in single quotes if you use:
|
||||
# glob_subst
|
||||
alias di='sudo dpkg -i ./*.deb'
|
||||
|
||||
# Remove ALL kernel images and headers EXCEPT the one in use
|
||||
alias kclean='sudo aptitude remove -P ?and(~i~nlinux-(ima|hea) \
|
||||
?not(~n`uname -r`))'
|
||||
|
||||
|
||||
# commands using su #########
|
||||
else
|
||||
alias aac='su -ls "'"$apt_pref"' autoclean" root'
|
||||
abd() {
|
||||
cmd="su -lc '$apt_pref build-dep $@' root"
|
||||
print "$cmd"
|
||||
eval "$cmd"
|
||||
}
|
||||
alias ac='su -ls "'"$apt_pref"' clean" root'
|
||||
alias ad='su -lc "'"$apt_pref"' update" root'
|
||||
alias adg='su -lc "'"$apt_pref"' update && aptitude safe-upgrade" root'
|
||||
alias adu='su -lc "'"$apt_pref"' update && aptitude dist-upgrade" root'
|
||||
alias afu='su -lc "apt-file update"'
|
||||
alias ag='su -lc "'"$apt_pref"' safe-upgrade" root'
|
||||
ai() {
|
||||
cmd="su -lc 'aptitude -P install $@' root"
|
||||
print "$cmd"
|
||||
eval "$cmd"
|
||||
}
|
||||
ap() {
|
||||
cmd="su -lc '$apt_pref -P purge $@' root"
|
||||
print "$cmd"
|
||||
eval "$cmd"
|
||||
}
|
||||
ar() {
|
||||
cmd="su -lc '$apt_pref -P remove $@' root"
|
||||
print "$cmd"
|
||||
eval "$cmd"
|
||||
}
|
||||
|
||||
# Install all .deb files in the current directory
|
||||
# Assumes glob_subst is off
|
||||
alias di='su -lc "dpkg -i ./*.deb" root'
|
||||
|
||||
# Remove ALL kernel images and headers EXCEPT the one in use
|
||||
alias kclean='su -lc '\''aptitude remove -P ?and(~i~nlinux-(ima|hea) \
|
||||
?not(~n`uname -r`))'\'' root'
|
||||
fi
|
||||
|
||||
|
||||
# Misc. #####################################################################
|
||||
# print all installed packages
|
||||
alias allpkgs='aptitude search -F "%p" --disable-columns ~i'
|
||||
|
||||
# Install all .deb files in the current directory.
|
||||
# Warning: you will need to put the glob in single quotes if you use:
|
||||
# glob_subst
|
||||
alias di='su -c "dpkg -i ./*.deb"'
|
||||
|
||||
# Create a basic .deb package
|
||||
alias mydeb='time dpkg-buildpackage -rfakeroot -us -uc'
|
||||
|
||||
# Remove ALL kernel images and headers EXCEPT the one in use
|
||||
alias kclean='su -c '\''aptitude remove -P ?and(~i~nlinux-(ima|hea) ?not(~n`uname -r`))'\'' root'
|
||||
|
||||
|
||||
|
||||
# Functions #################################################################
|
||||
|
||||
# create a simple script that can be used to 'duplicate' a system
|
||||
apt-copy() {
|
||||
print '#!/bin/sh'"\n" > apt-copy.sh
|
||||
print '#!/bin/sh'"\n" > apt-copy.sh
|
||||
|
||||
list=$(perl -m'AptPkg::Cache' -e '$c=AptPkg::Cache->new; for (keys %$c){ push @a, $_ if $c->{$_}->{'CurrentState'} eq 'Installed';} print "$_ " for sort @a;')
|
||||
cmd="$apt_pref install "
|
||||
|
||||
print 'aptitude install '"$list\n" >> apt-copy.sh
|
||||
for p in ${(f)"$(aptitude search -F "%p" --disable-columns \~i)"}; {
|
||||
cmd="${cmd} ${p}"
|
||||
}
|
||||
|
||||
chmod +x apt-copy.sh
|
||||
print $cmd "\n" >> apt-copy.sh
|
||||
|
||||
chmod +x apt-copy.sh
|
||||
}
|
||||
|
||||
# Prints apt history
|
||||
# Usage:
|
||||
# apt-history install
|
||||
# apt-history upgrade
|
||||
# apt-history remove
|
||||
# apt-history rollback
|
||||
# apt-history list
|
||||
# Based On: http://linuxcommando.blogspot.com/2008/08/how-to-show-apt-log-history.html
|
||||
apt-history () {
|
||||
case "$1" in
|
||||
install)
|
||||
zgrep --no-filename 'install ' $(ls -rt /var/log/dpkg*)
|
||||
;;
|
||||
upgrade|remove)
|
||||
zgrep --no-filename $1 $(ls -rt /var/log/dpkg*)
|
||||
;;
|
||||
rollback)
|
||||
zgrep --no-filename upgrade $(ls -rt /var/log/dpkg*) | \
|
||||
grep "$2" -A10000000 | \
|
||||
grep "$3" -B10000000 | \
|
||||
awk '{print $4"="$5}'
|
||||
;;
|
||||
list)
|
||||
zcat $(ls -rt /var/log/dpkg*)
|
||||
;;
|
||||
*)
|
||||
echo "Parameters:"
|
||||
echo " install - Lists all packages that have been installed."
|
||||
echo " upgrade - Lists all packages that have been upgraded."
|
||||
echo " remove - Lists all packages that have been removed."
|
||||
echo " rollback - Lists rollback information."
|
||||
echo " list - Lists all contains of dpkg logs."
|
||||
;;
|
||||
esac
|
||||
}
|
||||
|
||||
# Kernel-package building shortcut
|
||||
dbb-build () {
|
||||
MAKEFLAGS='' # temporarily unset MAKEFLAGS ( '-j3' will fail )
|
||||
kerndeb () {
|
||||
# temporarily unset MAKEFLAGS ( '-j3' will fail )
|
||||
MAKEFLAGS=$( print - $MAKEFLAGS | perl -pe 's/-j\s*[\d]+//g' )
|
||||
print '$MAKEFLAGS set to '"'$MAKEFLAGS'"
|
||||
appendage='-custom' # this shows up in $ (uname -r )
|
||||
revision=$(date +"%Y%m%d") # this shows up in the .deb file name
|
||||
|
||||
|
|
@ -57,4 +179,3 @@ dbb-build () {
|
|||
"$revision" kernel_image kernel_headers
|
||||
}
|
||||
|
||||
|
||||
|
|
|
|||
223
plugins/django/django.plugin.zsh
Normal file
223
plugins/django/django.plugin.zsh
Normal file
|
|
@ -0,0 +1,223 @@
|
|||
#compdef manage.py
|
||||
|
||||
typeset -ga nul_args
|
||||
nul_args=(
|
||||
'--settings=-[the Python path to a settings module.]:file:_files'
|
||||
'--pythonpath=-[a directory to add to the Python path.]::directory:_directories'
|
||||
'--traceback[print traceback on exception.]'
|
||||
"--version[show program's version number and exit.]"
|
||||
{-h,--help}'[show this help message and exit.]'
|
||||
)
|
||||
|
||||
_managepy-adminindex(){
|
||||
_arguments -s : \
|
||||
$nul_args \
|
||||
'*::directory:_directories' && ret=0
|
||||
}
|
||||
|
||||
_managepy-createcachetable(){
|
||||
_arguments -s : \
|
||||
$nul_args && ret=0
|
||||
}
|
||||
|
||||
_managepy-dbshell(){
|
||||
_arguments -s : \
|
||||
$nul_args && ret=0
|
||||
}
|
||||
|
||||
_managepy-diffsettings(){
|
||||
_arguments -s : \
|
||||
$nul_args && ret=0
|
||||
}
|
||||
|
||||
_managepy-dumpdata(){
|
||||
_arguments -s : \
|
||||
'--format=-[specifies the output serialization format for fixtures.]:format:(json yaml xml)' \
|
||||
'--indent=-[specifies the indent level to use when pretty-printing output.]:' \
|
||||
$nul_args \
|
||||
'*::appname:_applist' && ret=0
|
||||
}
|
||||
|
||||
_managepy-flush(){
|
||||
_arguments -s : \
|
||||
'--verbosity=-[verbosity level; 0=minimal output, 1=normal output, 2=all output.]:Verbosity:((0\:minimal 1\:normal 2\:all))' \
|
||||
'--noinput[tells Django to NOT prompt the user for input of any kind.]' \
|
||||
$nul_args && ret=0
|
||||
}
|
||||
|
||||
_managepy-help(){
|
||||
_arguments -s : \
|
||||
'*:command:_managepy_cmds' \
|
||||
$nul_args && ret=0
|
||||
}
|
||||
|
||||
_managepy_cmds(){
|
||||
local line
|
||||
local -a cmd
|
||||
_call_program help-command ./manage.py help \
|
||||
|& sed -n '/^ /s/[(), ]/ /gp' \
|
||||
| while read -A line; do cmd=($line $cmd) done
|
||||
_describe -t managepy-command 'manage.py command' cmd
|
||||
}
|
||||
|
||||
_managepy-inspectdb(){
|
||||
_arguments -s : \
|
||||
$nul_args && ret=0
|
||||
}
|
||||
|
||||
_managepy-loaddata(){
|
||||
_arguments -s : \
|
||||
'--verbosity=-[verbosity level; 0=minimal output, 1=normal output, 2=all output.]:Verbosity:((0\:minimal 1\:normal 2\:all))' \
|
||||
'*::file:_files' \
|
||||
$nul_args && ret=0
|
||||
}
|
||||
|
||||
_managepy-reset(){
|
||||
_arguments -s : \
|
||||
'--noinput[tells Django to NOT prompt the user for input of any kind.]' \
|
||||
'*::appname:_applist' \
|
||||
$nul_args && ret=0
|
||||
}
|
||||
|
||||
_managepy-runfcgi(){
|
||||
local state
|
||||
|
||||
local fcgi_opts
|
||||
fcgi_opts=(
|
||||
'protocol[fcgi, scgi, ajp, ... (default fcgi)]:protocol:(fcgi scgi ajp)'
|
||||
'host[hostname to listen on..]:'
|
||||
'port[port to listen on.]:'
|
||||
'socket[UNIX socket to listen on.]::file:_files'
|
||||
'method[prefork or threaded (default prefork)]:method:(prefork threaded)'
|
||||
'maxrequests[number of requests a child handles before it is killed and a new child is forked (0 = no limit).]:'
|
||||
'maxspare[max number of spare processes / threads.]:'
|
||||
'minspare[min number of spare processes / threads.]:'
|
||||
'maxchildren[hard limit number of processes / threads.]:'
|
||||
'daemonize[whether to detach from terminal.]:boolean:(False True)'
|
||||
'pidfile[write the spawned process-id to this file.]:file:_files'
|
||||
'workdir[change to this directory when daemonizing.]:directory:_files'
|
||||
'outlog[write stdout to this file.]:file:_files'
|
||||
'errlog[write stderr to this file.]:file:_files'
|
||||
)
|
||||
|
||||
_arguments -s : \
|
||||
$nul_args \
|
||||
'*: :_values "FCGI Setting" $fcgi_opts' && ret=0
|
||||
}
|
||||
|
||||
_managepy-runserver(){
|
||||
_arguments -s : \
|
||||
'--noreload[tells Django to NOT use the auto-reloader.]' \
|
||||
'--adminmedia[specifies the directory from which to serve admin media.]:directory:_files' \
|
||||
$nul_args && ret=0
|
||||
}
|
||||
|
||||
_managepy-shell(){
|
||||
_arguments -s : \
|
||||
'--plain[tells Django to use plain Python, not IPython.]' \
|
||||
$nul_args && ret=0
|
||||
}
|
||||
|
||||
_managepy-sql(){}
|
||||
_managepy-sqlall(){}
|
||||
_managepy-sqlclear(){}
|
||||
_managepy-sqlcustom(){}
|
||||
_managepy-sqlflush(){}
|
||||
_managepy-sqlindexes(){}
|
||||
_managepy-sqlinitialdata(){}
|
||||
_managepy-sqlreset(){}
|
||||
_managepy-sqlsequencereset(){}
|
||||
_managepy-startapp(){}
|
||||
|
||||
_managepy-syncdb() {
|
||||
_arguments -s : \
|
||||
'--verbosity=-[verbosity level; 0=minimal output, 1=normal output, 2=all output.]:Verbosity:((0\:minimal 1\:normal 2\:all))' \
|
||||
'--noinput[tells Django to NOT prompt the user for input of any kind.]' \
|
||||
$nul_args && ret=0
|
||||
}
|
||||
|
||||
_managepy-test() {
|
||||
_arguments -s : \
|
||||
'--verbosity=-[verbosity level; 0=minimal output, 1=normal output, 2=all output.]:Verbosity:((0\:minimal 1\:normal 2\:all))' \
|
||||
'--noinput[tells Django to NOT prompt the user for input of any kind.]' \
|
||||
'*::appname:_applist' \
|
||||
$nul_args && ret=0
|
||||
}
|
||||
|
||||
_managepy-testserver() {
|
||||
_arguments -s : \
|
||||
'--verbosity=-[verbosity level; 0=minimal output, 1=normal output, 2=all output.]:Verbosity:((0\:minimal 1\:normal 2\:all))' \
|
||||
'--addrport=-[port number or ipaddr:port to run the server on.]' \
|
||||
'*::fixture:_files' \
|
||||
$nul_args && ret=0
|
||||
}
|
||||
|
||||
_managepy-validate() {
|
||||
_arguments -s : \
|
||||
$nul_args && ret=0
|
||||
}
|
||||
|
||||
_managepy-commands() {
|
||||
local -a commands
|
||||
|
||||
commands=(
|
||||
'adminindex:prints the admin-index template snippet for the given app name(s).'
|
||||
'createcachetable:creates the table needed to use the SQL cache backend.'
|
||||
'dbshell:runs the command-line client for the current DATABASE_ENGINE.'
|
||||
"diffsettings:displays differences between the current settings.py and Django's default settings."
|
||||
'dumpdata:Output the contents of the database as a fixture of the given format.'
|
||||
'flush:Executes ``sqlflush`` on the current database.'
|
||||
'help:manage.py help.'
|
||||
'inspectdb:Introspects the database tables in the given database and outputs a Django model module.'
|
||||
'loaddata:Installs the named fixture(s) in the database.'
|
||||
'reset:Executes ``sqlreset`` for the given app(s) in the current database.'
|
||||
'runfcgi:Run this project as a fastcgi (or some other protocol supported by flup) application,'
|
||||
'runserver:Starts a lightweight Web server for development.'
|
||||
'shell:Runs a Python interactive interpreter.'
|
||||
'sql:Prints the CREATE TABLE SQL statements for the given app name(s).'
|
||||
'sqlall:Prints the CREATE TABLE, custom SQL and CREATE INDEX SQL statements for the given model module name(s).'
|
||||
'sqlclear:Prints the DROP TABLE SQL statements for the given app name(s).'
|
||||
'sqlcustom:Prints the custom table modifying SQL statements for the given app name(s).'
|
||||
'sqlflush:Returns a list of the SQL statements required to return all tables in the database to the state they were in just after they were installed.'
|
||||
'sqlindexes:Prints the CREATE INDEX SQL statements for the given model module name(s).'
|
||||
"sqlinitialdata:RENAMED: see 'sqlcustom'"
|
||||
'sqlreset:Prints the DROP TABLE SQL, then the CREATE TABLE SQL, for the given app name(s).'
|
||||
'sqlsequencereset:Prints the SQL statements for resetting sequences for the given app name(s).'
|
||||
"startapp:Creates a Django app directory structure for the given app name in this project's directory."
|
||||
"syncdb:Create the database tables for all apps in INSTALLED_APPS whose tables haven't already been created."
|
||||
'test:Runs the test suite for the specified applications, or the entire site if no apps are specified.'
|
||||
'testserver:Runs a development server with data from the given fixture(s).'
|
||||
'validate:Validates all installed models.'
|
||||
)
|
||||
|
||||
_describe -t commands 'manage.py command' commands && ret=0
|
||||
}
|
||||
|
||||
_applist() {
|
||||
local line
|
||||
local -a apps
|
||||
_call_program help-command "python -c \"import os.path as op, re, django.conf, sys;\\
|
||||
bn=op.basename(op.abspath(op.curdir));[sys\\
|
||||
.stdout.write(str(re.sub(r'^%s\.(.*?)$' %
|
||||
bn, r'\1', i)) + '\n') for i in django.conf.settings.\\
|
||||
INSTALLED_APPS if re.match(r'^%s' % bn, i)]\"" \
|
||||
| while read -A line; do apps=($line $apps) done
|
||||
_values 'Application' $apps && ret=0
|
||||
}
|
||||
|
||||
_managepy() {
|
||||
local curcontext=$curcontext ret=1
|
||||
|
||||
if ((CURRENT == 2)); then
|
||||
_managepy-commands
|
||||
else
|
||||
shift words
|
||||
(( CURRENT -- ))
|
||||
curcontext="${curcontext%:*:*}:managepy-$words[1]:"
|
||||
_call_function ret _managepy-$words[1]
|
||||
fi
|
||||
}
|
||||
|
||||
compdef _managepy manage.py
|
||||
compdef _managepy django
|
||||
compdef _managepy django-manage
|
||||
8
plugins/extract/_extract
Normal file
8
plugins/extract/_extract
Normal file
|
|
@ -0,0 +1,8 @@
|
|||
#compdef extract
|
||||
#autoload
|
||||
|
||||
_arguments \
|
||||
'(-r --remove)'{-r,--remove}'[Remove archive.]' \
|
||||
"*::archive file:_files -g '(#i)*.(tar|tgz|tbz|tbz2|txz|tlz|gz|bz2|xz|lzma|Z|zip|rar|7z|deb)(-.)'" && return 0
|
||||
|
||||
|
||||
80
plugins/extract/extract.plugin.zsh
Normal file
80
plugins/extract/extract.plugin.zsh
Normal file
|
|
@ -0,0 +1,80 @@
|
|||
# ------------------------------------------------------------------------------
|
||||
# FILE: extract.plugin.zsh
|
||||
# DESCRIPTION: oh-my-zsh plugin file.
|
||||
# AUTHOR: Sorin Ionescu (sorin.ionescu@gmail.com)
|
||||
# VERSION: 1.0.1
|
||||
# ------------------------------------------------------------------------------
|
||||
|
||||
|
||||
function extract() {
|
||||
local remove_archive
|
||||
local success
|
||||
local file_name
|
||||
local extract_dir
|
||||
|
||||
if (( $# == 0 )); then
|
||||
echo "Usage: extract [-option] [file ...]"
|
||||
echo
|
||||
echo Options:
|
||||
echo " -r, --remove Remove archive."
|
||||
echo
|
||||
echo "Report bugs to <sorin.ionescu@gmail.com>."
|
||||
fi
|
||||
|
||||
remove_archive=1
|
||||
if [[ "$1" == "-r" ]] || [[ "$1" == "--remove" ]]; then
|
||||
remove_archive=0
|
||||
shift
|
||||
fi
|
||||
|
||||
while (( $# > 0 )); do
|
||||
if [[ ! -f "$1" ]]; then
|
||||
echo "extract: '$1' is not a valid file" 1>&2
|
||||
shift
|
||||
continue
|
||||
fi
|
||||
|
||||
success=0
|
||||
file_name="$( basename "$1" )"
|
||||
extract_dir="$( echo "$file_name" | sed "s/\.${1##*.}//g" )"
|
||||
case "$1" in
|
||||
(*.tar.gz|*.tgz) tar xvzf "$1" ;;
|
||||
(*.tar.bz2|*.tbz|*.tbz2) tar xvjf "$1" ;;
|
||||
(*.tar.xz|*.txz) tar --xz --help &> /dev/null \
|
||||
&& tar --xz -xvf "$1" \
|
||||
|| xzcat "$1" | tar xvf - ;;
|
||||
(*.tar.zma|*.tlz) tar --lzma --help &> /dev/null \
|
||||
&& tar --lzma -xvf "$1" \
|
||||
|| lzcat "$1" | tar xvf - ;;
|
||||
(*.tar) tar xvf "$1" ;;
|
||||
(*.gz) gunzip "$1" ;;
|
||||
(*.bz2) bunzip2 "$1" ;;
|
||||
(*.xz) unxz "$1" ;;
|
||||
(*.lzma) unlzma "$1" ;;
|
||||
(*.Z) uncompress "$1" ;;
|
||||
(*.zip) unzip "$1" -d $extract_dir ;;
|
||||
(*.rar) unrar e -ad "$1" ;;
|
||||
(*.7z) 7za x "$1" ;;
|
||||
(*.deb)
|
||||
mkdir -p "$extract_dir/control"
|
||||
mkdir -p "$extract_dir/data"
|
||||
cd "$extract_dir"; ar vx "../${1}" > /dev/null
|
||||
cd control; tar xzvf ../control.tar.gz
|
||||
cd ../data; tar xzvf ../data.tar.gz
|
||||
cd ..; rm *.tar.gz debian-binary
|
||||
cd ..
|
||||
;;
|
||||
(*)
|
||||
echo "extract: '$1' cannot be extracted" 1>&2
|
||||
success=1
|
||||
;;
|
||||
esac
|
||||
|
||||
(( success = $success > 0 ? $success : $? ))
|
||||
(( $success == 0 )) && (( $remove_archive == 0 )) && rm "$1"
|
||||
shift
|
||||
done
|
||||
}
|
||||
|
||||
alias x=extract
|
||||
|
||||
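Typical use of the extract function and its x alias defined above (the archive names are only examples):

    extract project.tar.gz       # unpacks in the current directory
    x --remove old-release.zip   # unpacks into ./old-release, then deletes the archive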
plugins/gas/_gas (new normal file, 34 lines)
@@ -0,0 +1,34 @@
#compdef gas

local curcontext="$curcontext" state line cmds ret=1

_arguments -C \
  '(- 1 *)'{-v,--version}'[display version information]' \
  '(-h|--help)'{-h,--help}'[show help information]' \
  '1: :->cmds' \
  '*: :->args' && ret=0

case $state in
  cmds)
    cmds=(
      "version:Prints Gas's version"
      "use:Uses author"
      "show:Shows your current user"
      "list:Lists your authors"
      "import:Imports current user to gasconfig"
      "help:Describe available tasks or one specific task"
      "delete:Deletes author"
      "add:Adds author to gasconfig"
    )
    _describe -t commands 'gas command' cmds && ret=0
    ;;
  args)
    case $line[1] in
      (use|delete)
        _values -S , 'authors' $(cat ~/.gas | sed -n -e 's/^\[\(.*\)\]/\1/p') && ret=0
        ;;
    esac
    ;;
esac

return ret
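In the (use|delete) branch above, author nicknames are completed by extracting the bracketed section names from ~/.gas. A sketch of what that sed call does against a hypothetical ~/.gas (the nicknames and keys are made up; only the bracketed lines matter to the completion):

    # ~/.gas might contain, e.g.:
    #   [work]
    #     name = Jane Doe
    #     email = jane@example.com
    #   [oss]
    #     name = Jane Doe
    #     email = jane@oss.example.org
    sed -n -e 's/^\[\(.*\)\]/\1/p' ~/.gas   # prints "work" and "oss", one per line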
@@ -9,7 +9,7 @@ alias gup='git fetch && git rebase'
compdef _git gup=git-fetch
alias gp='git push'
compdef _git gp=git-push
gdv() { git-diff -w "$@" | view - }
gdv() { git diff -w "$@" | view - }
compdef _git gdv=git-diff
alias gc='git commit -v'
compdef _git gc=git-commit

@@ -17,6 +17,7 @@ alias gca='git commit -v -a'
compdef _git gca=git-commit
alias gco='git checkout'
compdef _git gco=git-checkout
alias gcm='git checkout master'
alias gb='git branch'
compdef _git gb=git-branch
alias gba='git branch -a'

@@ -27,11 +28,23 @@ alias gcp='git cherry-pick'
compdef _git gcp=git-cherry-pick
alias glg='git log --stat --max-count=5'
compdef _git glg=git-log
alias glgg='git log --graph --max-count=5'
compdef _git glgg=git-log
alias gss='git status -s'
compdef _git gss=git-status
alias ga='git add'
compdef _git ga=git-add
alias gm='git merge'
compdef _git gm=git-merge
alias grh='git reset HEAD'
alias grhh='git reset HEAD --hard'

# Git and svn mix
alias git-svn-dcommit-push='git svn dcommit && git push github master:svntrunk'
compdef git-svn-dcommit-push=git

alias gsr='git svn rebase'
alias gsd='git svn dcommit'
#
# Will return the current branch name
# Usage example: git pull origin $(current_branch)
@@ -1,6 +1,70 @@
# hub alias from defunkt
# https://github.com/defunkt/hub
if [ "$commands[(I)hub]" ]; then
# Setup hub function for git, if it is available; http://github.com/defunkt/hub
if [ "$commands[(I)hub]" ] && [ "$commands[(I)ruby]" ]; then
  # eval `hub alias -s zsh`
  function git(){hub "$@"}
  function git(){
    if ! (( $+_has_working_hub )); then
      hub --version &> /dev/null
      _has_working_hub=$(($? == 0))
    fi
    if (( $_has_working_hub )) ; then
      hub "$@"
    else
      command git "$@"
    fi
  }
fi

# Functions #################################################################

# https://github.com/dbb


# empty_gh [NAME_OF_REPO]
#
# Use this when creating a new repo from scratch.
empty_gh() { # [NAME_OF_REPO]
  repo=$1
  ghuser=$( git config github.user )

  mkdir "$repo"
  cd "$repo"
  git init
  touch README
  git add README
  git commit -m 'Initial commit.'
  git remote add origin git@github.com:${ghuser}/${repo}.git
  git push -u origin master
}

# new_gh [DIRECTORY]
#
# Use this when you have a directory that is not yet set up for git.
# This function will add all non-hidden files to git.
new_gh() { # [DIRECTORY]
  cd "$1"
  ghuser=$( git config github.user )

  git init
  # add all non-dot files
  print '.*'"\n"'*~' >> .gitignore
  git add ^.*
  git commit -m 'Initial commit.'
  git remote add origin git@github.com:${ghuser}/${repo}.git
  git push -u origin master
}

# exist_gh [DIRECTORY]
#
# Use this when you have a git repo that's ready to go and you want to add it
# to your GitHub.
exist_gh() { # [DIRECTORY]
  cd "$1"
  name=$( git config user.name )
  ghuser=$( git config github.user )

  git remote add origin git@github.com:${ghuser}/${repo}.git
  git push -u origin master
}

# End Functions #############################################################
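A quick usage sketch for the helpers above, assuming github.user has been set in your git config (the user and repository names here are invented):

    git config --global github.user janedoe   # one-time setup read by the functions above
    empty_gh my-scratch-repo                  # init, first commit, add the GitHub remote, push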
|
||||
|
||||
|
|
|
|||
80
plugins/gnu-utils/gnu-utils.plugin.zsh
Normal file
80
plugins/gnu-utils/gnu-utils.plugin.zsh
Normal file
|
|
@ -0,0 +1,80 @@
|
|||
# ------------------------------------------------------------------------------
|
||||
# FILE: gnu-utils.plugin.zsh
|
||||
# DESCRIPTION: oh-my-zsh plugin file.
|
||||
# AUTHOR: Sorin Ionescu (sorin.ionescu@gmail.com)
|
||||
# VERSION: 1.0.0
|
||||
# ------------------------------------------------------------------------------
|
||||
|
||||
|
||||
if [[ -x "${commands[gwhoami]}" ]]; then
|
||||
__gnu_utils() {
|
||||
emulate -L zsh
|
||||
local gcmds
|
||||
local gcmd
|
||||
local cmd
|
||||
local prefix
|
||||
|
||||
# coreutils
|
||||
gcmds=('g[' 'gbase64' 'gbasename' 'gcat' 'gchcon' 'gchgrp' 'gchmod'
|
||||
'gchown' 'gchroot' 'gcksum' 'gcomm' 'gcp' 'gcsplit' 'gcut' 'gdate'
|
||||
'gdd' 'gdf' 'gdir' 'gdircolors' 'gdirname' 'gdu' 'gecho' 'genv' 'gexpand'
|
||||
'gexpr' 'gfactor' 'gfalse' 'gfmt' 'gfold' 'ggroups' 'ghead' 'ghostid'
|
||||
'gid' 'ginstall' 'gjoin' 'gkill' 'glink' 'gln' 'glogname' 'gls' 'gmd5sum'
|
||||
'gmkdir' 'gmkfifo' 'gmknod' 'gmktemp' 'gmv' 'gnice' 'gnl' 'gnohup' 'gnproc'
|
||||
'god' 'gpaste' 'gpathchk' 'gpinky' 'gpr' 'gprintenv' 'gprintf' 'gptx' 'gpwd'
|
||||
'greadlink' 'grm' 'grmdir' 'gruncon' 'gseq' 'gsha1sum' 'gsha224sum'
|
||||
'gsha256sum' 'gsha384sum' 'gsha512sum' 'gshred' 'gshuf' 'gsleep' 'gsort'
|
||||
'gsplit' 'gstat' 'gstty' 'gsum' 'gsync' 'gtac' 'gtail' 'gtee' 'gtest'
|
||||
'gtimeout' 'gtouch' 'gtr' 'gtrue' 'gtruncate' 'gtsort' 'gtty' 'guname'
|
||||
'gunexpand' 'guniq' 'gunlink' 'guptime' 'gusers' 'gvdir' 'gwc' 'gwho'
|
||||
'gwhoami' 'gyes')
|
||||
|
||||
# Not part of coreutils, installed separately.
|
||||
gcmds+=('gsed' 'gtar' 'gtime')
|
||||
|
||||
for gcmd in "${gcmds[@]}"; do
|
||||
#
|
||||
# This method allows for builtin commands to be primary but it's
|
||||
# lost if hash -r or rehash -f is executed. Thus, those two
|
||||
# functions have to be wrapped.
|
||||
#
|
||||
(( ${+commands[$gcmd]} )) && hash ${gcmd[2,-1]}=${commands[$gcmd]}
|
||||
|
||||
#
|
||||
# This method generates wrapper functions.
|
||||
# It will override shell builtins.
|
||||
#
|
||||
# (( ${+commands[$gcmd]} )) && \
|
||||
# eval "function $gcmd[2,-1]() { \"${prefix}/${gcmd//"["/"\\["}\" \"\$@\"; }"
|
||||
|
||||
#
|
||||
# This method is inflexible since the aliases are at risk of being
|
||||
# overriden resulting in the BSD coreutils being called.
|
||||
#
|
||||
# (( ${+commands[$gcmd]} )) && \
|
||||
# alias "$gcmd[2,-1]"="${prefix}/${gcmd//"["/"\\["}"
|
||||
done
|
||||
|
||||
return 0
|
||||
}
|
||||
__gnu_utils;
|
||||
|
||||
function hash() {
|
||||
if [[ "$*" =~ "-(r|f)" ]]; then
|
||||
builtin hash "$@"
|
||||
__gnu_utils
|
||||
else
|
||||
builtin hash "$@"
|
||||
fi
|
||||
}
|
||||
|
||||
function rehash() {
|
||||
if [[ "$*" =~ "-f" ]]; then
|
||||
builtin rehash "$@"
|
||||
__gnu_utils
|
||||
else
|
||||
builtin rehash "$@"
|
||||
fi
|
||||
}
|
||||
fi
|
||||
|
||||
119
plugins/gradle/gradle.plugin.zsh
Normal file
119
plugins/gradle/gradle.plugin.zsh
Normal file
|
|
@ -0,0 +1,119 @@
|
|||
#!zsh
|
||||
##############################################################################
|
||||
# A descriptive listing of core Gradle commands
|
||||
############################################################################
|
||||
function _gradle_core_commands() {
|
||||
local ret=1 state
|
||||
_arguments ':subcommand:->subcommand' && ret=0
|
||||
|
||||
case $state in
|
||||
subcommand)
|
||||
subcommands=(
|
||||
"properties:Display all project properties"
|
||||
"tasks:Calculate and display all tasks"
|
||||
"dependencies:Calculate and display all dependencies"
|
||||
"projects:Discover and display all sub-projects"
|
||||
"build:Build the project"
|
||||
"help:Display help"
|
||||
)
|
||||
_describe -t subcommands 'gradle subcommands' subcommands && ret=0
|
||||
esac
|
||||
|
||||
return ret
|
||||
}
|
||||
|
||||
function _gradle_arguments() {
|
||||
_arguments -C \
|
||||
'-a[Do not rebuild project dependencies]' \
|
||||
'-h[Help]' \
|
||||
'-D[System property]' \
|
||||
'-d[Log at the debug level]' \
|
||||
'--gui[Launches the Gradle GUI app]' \
|
||||
'--stop[Stop the Gradle daemon]' \
|
||||
'--daemon[Use the Gradle daemon]' \
|
||||
'--no-daemon[Do not use the Gradle daemon]' \
|
||||
'--no-opt[Do not perform any task optimization]' \
|
||||
'-i[Log at the info level]' \
|
||||
'-m[Dry run]' \
|
||||
'-P[Set a project property]' \
|
||||
'--profile[Profile the build time]' \
|
||||
'-q[Log at the quiet level (only show errors)]' \
|
||||
'-v[Print the Gradle version info]' \
|
||||
'-x[Specify a task to be excluded]' \
|
||||
'*::command:->command' \
|
||||
&& return 0
|
||||
}
|
||||
|
||||
|
||||
##############################################################################
|
||||
# Are we in a directory containing a build.gradle file?
|
||||
############################################################################
|
||||
function in_gradle() {
|
||||
if [[ -f build.gradle ]]; then
|
||||
echo 1
|
||||
fi
|
||||
}
|
||||
|
||||
############################################################################
|
||||
# Define the stat_cmd command based on platform behavior
|
||||
##########################################################################
|
||||
stat -f%m . > /dev/null 2>&1
|
||||
if [ "$?" = 0 ]; then
|
||||
stat_cmd=(stat -f%m)
|
||||
else
|
||||
stat_cmd=(stat -L --format=%Y)
|
||||
fi
|
||||
|
||||
############################################################################## Examine the build.gradle file to see if its
|
||||
# timestamp has changed, and if so, regen
|
||||
# the .gradle_tasks cache file
|
||||
############################################################################
|
||||
_gradle_does_task_list_need_generating () {
|
||||
if [ ! -f .gradletasknamecache ]; then return 0;
|
||||
else
|
||||
accurate=$($stat_cmd .gradletasknamecache)
|
||||
changed=$($stat_cmd build.gradle)
|
||||
return $(expr $accurate '>=' $changed)
|
||||
fi
|
||||
}
|
||||
|
||||
|
||||
##############################################################################
|
||||
# Discover the gradle tasks by running "gradle tasks --all"
|
||||
############################################################################
|
||||
_gradle_tasks () {
|
||||
if [ in_gradle ]; then
|
||||
_gradle_arguments
|
||||
if _gradle_does_task_list_need_generating; then
|
||||
gradle tasks --all | grep "^[ ]*[a-zA-Z0-9]*\ -\ " | sed "s/ - .*$//" | sed "s/[\ ]*//" > .gradletasknamecache
|
||||
fi
|
||||
compadd -X "==== Gradle Tasks ====" `cat .gradletasknamecache`
|
||||
fi
|
||||
}
|
||||
|
||||
_gradlew_tasks () {
|
||||
if [ in_gradle ]; then
|
||||
_gradle_arguments
|
||||
if _gradle_does_task_list_need_generating; then
|
||||
gradlew tasks --all | grep "^[ ]*[a-zA-Z0-9]*\ -\ " | sed "s/ - .*$//" | sed "s/[\ ]*//" > .gradletasknamecache
|
||||
fi
|
||||
compadd -X "==== Gradlew Tasks ====" `cat .gradletasknamecache`
|
||||
fi
|
||||
}
|
||||
|
||||
|
||||
##############################################################################
|
||||
# Register the completions against the gradle and gradlew commands
|
||||
############################################################################
|
||||
compdef _gradle_tasks gradle
|
||||
compdef _gradlew_tasks gradlew
|
||||
|
||||
|
||||
##############################################################################
|
||||
# Open questions for future improvements:
|
||||
# 1) Should 'gradle tasks' use --all or just the regular set?
|
||||
# 2) Should gradlew use the same approach as gradle?
|
||||
# 3) Should only the " - " be replaced with a colon so it can work
|
||||
# with the richer descriptive method of _arguments?
|
||||
# gradle tasks | grep "^[a-zA-Z0-9]*\ -\ " | sed "s/ - /\:/"
|
||||
#############################################################################
|
||||
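The completion above caches the output of "gradle tasks --all" in .gradletasknamecache next to build.gradle and only regenerates it when build.gradle is newer; if the cache ever looks stale, it can simply be removed (a manual workaround, not part of the plugin):

    rm -f .gradletasknamecache   # the next gradle/gradlew <TAB> re-runs "gradle tasks --all"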
plugins/grails/grails.plugin.zsh (new executable file, 54 lines)
@@ -0,0 +1,54 @@
_enumerateGrailsScripts() {
    # Default directories
    directories=($GRAILS_HOME/scripts ~/.grails/scripts ./scripts)

    # Check all of the plugins directories, if they exist
    if [ -d plugins ]
    then
        directories+=(plugins/*/scripts)
    fi

    # Enumerate all of the Groovy files
    files=()
    for dir in $directories;
    do
        if [ -d $dir ]
        then
            files+=($dir/[^_]*.groovy)
        fi
    done

    # Don't try to basename ()
    if [ ${#files} -eq 0 ];
    then
        return
    fi

    # - Strip the path
    # - Remove all scripts with a leading '_'
    # - PackagePlugin_.groovy -> PackagePlugin
    # - PackagePlugin -> Package-Plugin
    # - Package-Plugin -> package-plugin
    basename $files \
        | sed -E -e 's/^_?([^_]+)_?.groovy/\1/'\
            -e 's/([a-z])([A-Z])/\1-\2/g' \
        | tr "[:upper:]" "[:lower:]" \
        | sort \
        | uniq
}

_grails() {
    if (( CURRENT == 2 )); then
        scripts=( $(_enumerateGrailsScripts) )

        if [ ${#scripts} -ne 0 ];
        then
            _multi_parts / scripts
            return
        fi
    fi

    _files
}

compdef _grails grails
158
plugins/heroku/_heroku
Normal file
158
plugins/heroku/_heroku
Normal file
|
|
@ -0,0 +1,158 @@
|
|||
#compdef heroku
|
||||
|
||||
# Heroku Autocomplete plugin for Oh-My-Zsh
|
||||
# Requires: The Heroku client gem (https://github.com/heroku/heroku)
|
||||
# Author: Ali B. (http://awhitebox.com)
|
||||
|
||||
local -a _1st_arguments
|
||||
_1st_arguments=(
|
||||
"account\:confirm_billing":"Confirm that your account can be billed at the end of the month"
|
||||
"addons":"list installed addons"
|
||||
"addons\:list":"list all available addons"
|
||||
"addons\:add":"install an addon"
|
||||
"addons\:upgrade":"upgrade an existing addon"
|
||||
"addons\:downgrade":"downgrade an existing addon"
|
||||
"addons\:remove":"uninstall an addon"
|
||||
"addons\:open":"open an addon's dashboard in your browser"
|
||||
"apps":"list your apps"
|
||||
"apps\:info":"show detailed app information"
|
||||
"apps\:create":"create a new app"
|
||||
"apps\:rename":"rename the app"
|
||||
"apps\:open":"open the app in a web browser"
|
||||
"apps\:destroy":"permanently destroy an app"
|
||||
"auth\:login":"log in with your heroku credentials"
|
||||
"auth\:logout":"clear local authentication credentials"
|
||||
"config":"display the config vars for an app"
|
||||
"config\:add":"add one or more config vars"
|
||||
"config\:remove":"remove a config var"
|
||||
"db\:push":"push local data up to your app"
|
||||
"db\:pull":"pull heroku data down into your local database"
|
||||
"domains":"list custom domains for an app"
|
||||
"domains\:add":"add a custom domain to an app"
|
||||
"domains\:remove":"remove a custom domain from an app"
|
||||
"domains\:clear":"remove all custom domains from an app"
|
||||
"help":"list available commands or display help for a specific command"
|
||||
"keys":"display keys for the current user"
|
||||
"keys\:add":"add a key for the current user"
|
||||
"keys\:remove":"remove a key from the current user"
|
||||
"keys\:clear":"remove all authentication keys from the current user"
|
||||
"logs":"display recent log output"
|
||||
"logs\:cron":"DEPRECATED: display cron logs from legacy logging"
|
||||
"logs\:drains":"manage syslog drains"
|
||||
"maintenance\:on":"put the app into maintenance mode"
|
||||
"maintenance\:off":"take the app out of maintenance mode"
|
||||
"pg\:info":"display database information"
|
||||
"pg\:ingress":"allow direct connections to the database from this IP for one minute"
|
||||
"pg\:promote":"sets DATABASE as your DATABASE_URL"
|
||||
"pg\:psql":"open a psql shell to the database"
|
||||
"pg\:reset":"delete all data in DATABASE"
|
||||
"pg\:unfollow":"stop a replica from following and make it a read/write database"
|
||||
"pg\:wait":"monitor database creation, exit when complete"
|
||||
"pgbackups":"list captured backups"
|
||||
"pgbackups\:url":"get a temporary URL for a backup"
|
||||
"pgbackups\:capture":"capture a backup from a database id"
|
||||
"pgbackups\:restore":"restore a backup to a database"
|
||||
"pgbackups\:destroy":"destroys a backup"
|
||||
"plugins":"list installed plugins"
|
||||
"plugins\:install":"install a plugin"
|
||||
"plugins\:uninstall":"uninstall a plugin"
|
||||
"ps\:dynos":"scale to QTY web processes"
|
||||
"ps\:workers":"scale to QTY background processes"
|
||||
"ps":"list processes for an app"
|
||||
"ps\:restart":"restart an app process"
|
||||
"ps\:scale":"scale processes by the given amount"
|
||||
"releases":"list releases"
|
||||
"releases\:info":"view detailed information for a release"
|
||||
"rollback":"roll back to an older release"
|
||||
"run":"run an attached process"
|
||||
"run\:rake":"remotely execute a rake command"
|
||||
"run\:console":"open a remote console session"
|
||||
"sharing":"list collaborators on an app"
|
||||
"sharing\:add":"add a collaborator to an app"
|
||||
"sharing\:remove":"remove a collaborator from an app"
|
||||
"sharing\:transfer":"transfer an app to a new owner"
|
||||
"ssl":"list certificates for an app"
|
||||
"ssl\:add":"add an ssl certificate to an app"
|
||||
"ssl\:remove":"remove an ssl certificate from an app"
|
||||
"ssl\:clear":"remove all ssl certificates from an app"
|
||||
"stack":"show the list of available stacks"
|
||||
"stack\:migrate":"prepare migration of this app to a new stack"
|
||||
"version":"show heroku client version"
|
||||
)
|
||||
|
||||
_arguments '*:: :->command'
|
||||
|
||||
if (( CURRENT == 1 )); then
|
||||
_describe -t commands "heroku command" _1st_arguments
|
||||
return
|
||||
fi
|
||||
|
||||
local -a _command_args
|
||||
case "$words[1]" in
|
||||
apps:info)
|
||||
_command_args=(
|
||||
'(-r|--raw)'{-r,--raw}'[output info as raw key/value pairs]' \
|
||||
)
|
||||
;;
|
||||
apps:create)
|
||||
_command_args=(
|
||||
'(-a|--addons)'{-a,--addons}'[a list of addons to install]' \
|
||||
'(-r|--remote)'{-r,--remote}'[the git remote to create, default "heroku"]' \
|
||||
'(-s|--stack)'{-s,--stack}'[the stack on which to create the app]' \
|
||||
)
|
||||
;;
|
||||
config)
|
||||
_command_args=(
|
||||
'(-s|--shell)'{-s,--shell}'[output config vars in shell format]' \
|
||||
)
|
||||
;;
|
||||
db:push)
|
||||
_command_args=(
|
||||
'(-c|--chunksize)'{-c,--chunksize}'[specify the number of rows to send in each batch]' \
|
||||
'(-d|--debug)'{-d,--debug}'[enable debugging output]' \
|
||||
'(-e|--exclude)'{-e,--exclude}'[exclude the specified tables from the push]' \
|
||||
'(-f|--filter)'{-f,--filter}'[only push certain tables]' \
|
||||
'(-r|--resume)'{-r,--resume}'[resume transfer described by a .dat file]' \
|
||||
'(-t|--tables)'{-t,--tables}'[only push the specified tables]' \
|
||||
)
|
||||
;;
|
||||
db:pull)
|
||||
_command_args=(
|
||||
'(-c|--chunksize)'{-c,--chunksize}'[specify the number of rows to send in each batch]' \
|
||||
'(-d|--debug)'{-d,--debug}'[enable debugging output]' \
|
||||
'(-e|--exclude)'{-e,--exclude}'[exclude the specified tables from the pull]' \
|
||||
'(-f|--filter)'{-f,--filter}'[only pull certain tables]' \
|
||||
'(-r|--resume)'{-r,--resume}'[resume transfer described by a .dat file]' \
|
||||
'(-t|--tables)'{-t,--tables}'[only pull the specified tables]' \
|
||||
)
|
||||
;;
|
||||
keys)
|
||||
_command_args=(
|
||||
'(-l|--long)'{-l,--long}'[display extended information for each key]' \
|
||||
)
|
||||
;;
|
||||
logs)
|
||||
_command_args=(
|
||||
'(-n|--num)'{-n,--num}'[the number of lines to display]' \
|
||||
'(-p|--ps)'{-p,--ps}'[only display logs from the given process]' \
|
||||
'(-s|--source)'{-s,--source}'[only display logs from the given source]' \
|
||||
'(-t|--tail)'{-t,--tail}'[continually stream logs]' \
|
||||
)
|
||||
;;
|
||||
pgbackups:capture)
|
||||
_command_args=(
|
||||
'(-e|--expire)'{-e,--expire}'[if no slots are available to capture, delete the oldest backup to make room]' \
|
||||
)
|
||||
;;
|
||||
stack)
|
||||
_command_args=(
|
||||
'(-a|--all)'{-a,--all}'[include deprecated stacks]' \
|
||||
)
|
||||
;;
|
||||
esac
|
||||
|
||||
_arguments \
|
||||
$_command_args \
|
||||
'(--app)--app[the app name]' \
|
||||
&& return 0
|
||||
|
||||
plugins/history-substring-search/README (new normal file, 7 lines)
@@ -0,0 +1,7 @@
To activate this script, load it into an interactive ZSH session:

    % source history-substring-search.zsh

See the "history-substring-search.zsh" file for more information:

    % sed -n '2,/^$/s/^#//p' history-substring-search.zsh | more
@@ -0,0 +1,12 @@
# This file integrates the history-substring-search script into oh-my-zsh.

source "$ZSH/plugins/history-substring-search/history-substring-search.zsh"

if test "$CASE_SENSITIVE" = true; then
  unset HISTORY_SUBSTRING_SEARCH_GLOBBING_FLAGS
fi

if test "$DISABLE_COLOR" = true; then
  unset HISTORY_SUBSTRING_SEARCH_HIGHLIGHT_FOUND
  unset HISTORY_SUBSTRING_SEARCH_HIGHLIGHT_NOT_FOUND
fi
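The two checks above piggyback on oh-my-zsh settings. A sketch of opting in from ~/.zshrc, placed before oh-my-zsh is sourced (whether your zshrc template already defines these variables depends on your setup; the names are exactly the ones tested above):

    CASE_SENSITIVE="true"    # makes the substring search case-sensitive
    DISABLE_COLOR="true"     # turns off match highlighting
    plugins=(history-substring-search)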
642
plugins/history-substring-search/history-substring-search.zsh
Normal file
642
plugins/history-substring-search/history-substring-search.zsh
Normal file
|
|
@ -0,0 +1,642 @@
|
|||
#!/usr/bin/env zsh
|
||||
#
|
||||
# This is a clean-room implementation of the Fish[1] shell's history search
|
||||
# feature, where you can type in any part of any previously entered command
|
||||
# and press the UP and DOWN arrow keys to cycle through the matching commands.
|
||||
#
|
||||
#-----------------------------------------------------------------------------
|
||||
# Usage
|
||||
#-----------------------------------------------------------------------------
|
||||
#
|
||||
# 1. Load this script into your interactive ZSH session:
|
||||
#
|
||||
# % source history-substring-search.zsh
|
||||
#
|
||||
# If you want to use the zsh-syntax-highlighting[6] script along with this
|
||||
# script, then make sure that you load it *before* you load this script:
|
||||
#
|
||||
# % source zsh-syntax-highlighting.zsh
|
||||
# % source history-substring-search.zsh
|
||||
#
|
||||
# 2. Type any part of any previous command and then:
|
||||
#
|
||||
# * Press the UP arrow key to select the nearest command that (1) contains
|
||||
# your query and (2) is older than the current command in the command
|
||||
# history.
|
||||
#
|
||||
# * Press the DOWN arrow key to select the nearest command that (1)
|
||||
# contains your query and (2) is newer than the current command in the
|
||||
# command history.
|
||||
#
|
||||
# * Press ^U (the Control and U keys simultaneously) to abort the search.
|
||||
#
|
||||
# 3. If a matching command spans more than one line of text, press the LEFT
|
||||
# arrow key to move the cursor away from the end of the command, and then:
|
||||
#
|
||||
# * Press the UP arrow key to move the cursor to the line above. When the
|
||||
# cursor reaches the first line of the command, pressing the UP arrow
|
||||
# key again will cause this script to perform another search.
|
||||
#
|
||||
# * Press the DOWN arrow key to move the cursor to the line below. When
|
||||
# the cursor reaches the last line of the command, pressing the DOWN
|
||||
# arrow key again will cause this script to perform another search.
|
||||
#
|
||||
#-----------------------------------------------------------------------------
|
||||
# Configuration
|
||||
#-----------------------------------------------------------------------------
|
||||
#
|
||||
# This script defines the following global variables. You may override their
|
||||
# default values only after having loaded this script into your ZSH session.
|
||||
#
|
||||
# * HISTORY_SUBSTRING_SEARCH_HIGHLIGHT_FOUND is a global variable that defines
|
||||
# how the query should be highlighted inside a matching command. Its default
|
||||
# value causes this script to highlight using bold, white text on a magenta
|
||||
# background. See the "Character Highlighting" section in the zshzle(1) man
|
||||
# page to learn about the kinds of values you may assign to this variable.
|
||||
#
|
||||
# * HISTORY_SUBSTRING_SEARCH_HIGHLIGHT_NOT_FOUND is a global variable that
|
||||
# defines how the query should be highlighted when no commands in the
|
||||
# history match it. Its default value causes this script to highlight using
|
||||
# bold, white text on a red background. See the "Character Highlighting"
|
||||
# section in the zshzle(1) man page to learn about the kinds of values you
|
||||
# may assign to this variable.
|
||||
#
|
||||
# * HISTORY_SUBSTRING_SEARCH_GLOBBING_FLAGS is a global variable that defines
|
||||
# how the command history will be searched for your query. Its default value
|
||||
# causes this script to perform a case-insensitive search. See the "Globbing
|
||||
# Flags" section in the zshexpn(1) man page to learn about the kinds of
|
||||
# values you may assign to this variable.
|
||||
#
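# For example (illustrative only, not part of the defaults), after loading
# this script you could make the search case-sensitive and change the
# "found" highlight like so:
#
#     % source history-substring-search.zsh
#     % unset HISTORY_SUBSTRING_SEARCH_GLOBBING_FLAGS
#     % HISTORY_SUBSTRING_SEARCH_HIGHLIGHT_FOUND='bg=cyan,fg=black,bold'
#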
|
||||
#-----------------------------------------------------------------------------
|
||||
# History
|
||||
#-----------------------------------------------------------------------------
|
||||
#
|
||||
# This script was originally written by Peter Stephenson[2], who published it
|
||||
# to the ZSH users mailing list (thereby making it public domain) in September
|
||||
# 2009. It was later revised by Guido van Steen and released under the BSD
|
||||
# license (see below) as part of the fizsh[3] project in January 2011.
|
||||
#
|
||||
# It was later extracted from fizsh[3] release 1.0.1, refactored heavily, and
|
||||
# repackaged as both an oh-my-zsh plugin[4] and as an independently loadable
|
||||
# ZSH script[5] by Suraj N. Kurapati in 2011.
|
||||
#
|
||||
# It was further developed[4] by Guido van Steen, Suraj N. Kurapati, Sorin
|
||||
# Ionescu, and Vincent Guerci in 2011.
|
||||
#
|
||||
# [1]: http://fishshell.com
|
||||
# [2]: http://www.zsh.org/mla/users/2009/msg00818.html
|
||||
# [3]: http://sourceforge.net/projects/fizsh/
|
||||
# [4]: https://github.com/robbyrussell/oh-my-zsh/pull/215
|
||||
# [5]: https://github.com/sunaku/zsh-history-substring-search
|
||||
# [6]: https://github.com/nicoulaj/zsh-syntax-highlighting
|
||||
#
|
||||
##############################################################################
|
||||
#
|
||||
# Copyright (c) 2009 Peter Stephenson
|
||||
# Copyright (c) 2011 Guido van Steen
|
||||
# Copyright (c) 2011 Suraj N. Kurapati
|
||||
# Copyright (c) 2011 Sorin Ionescu
|
||||
# Copyright (c) 2011 Vincent Guerci
|
||||
# All rights reserved.
|
||||
#
|
||||
# Redistribution and use in source and binary forms, with or without
|
||||
# modification, are permitted provided that the following conditions are met:
|
||||
#
|
||||
# * Redistributions of source code must retain the above copyright
|
||||
# notice, this list of conditions and the following disclaimer.
|
||||
#
|
||||
# * Redistributions in binary form must reproduce the above
|
||||
# copyright notice, this list of conditions and the following
|
||||
# disclaimer in the documentation and/or other materials provided
|
||||
# with the distribution.
|
||||
#
|
||||
# * Neither the name of the FIZSH nor the names of its contributors
|
||||
# may be used to endorse or promote products derived from this
|
||||
# software without specific prior written permission.
|
||||
#
|
||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
|
||||
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
||||
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
|
||||
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
|
||||
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
|
||||
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
|
||||
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
|
||||
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
|
||||
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
|
||||
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
||||
# POSSIBILITY OF SUCH DAMAGE.
|
||||
#
|
||||
##############################################################################
|
||||
|
||||
#-----------------------------------------------------------------------------
|
||||
# configuration variables
|
||||
#-----------------------------------------------------------------------------
|
||||
|
||||
HISTORY_SUBSTRING_SEARCH_HIGHLIGHT_FOUND='bg=magenta,fg=white,bold'
|
||||
HISTORY_SUBSTRING_SEARCH_HIGHLIGHT_NOT_FOUND='bg=red,fg=white,bold'
|
||||
HISTORY_SUBSTRING_SEARCH_GLOBBING_FLAGS='i'
|
||||
|
||||
#-----------------------------------------------------------------------------
|
||||
# the main ZLE widgets
|
||||
#-----------------------------------------------------------------------------
|
||||
|
||||
function history-substring-search-up() {
|
||||
_history-substring-search-begin
|
||||
|
||||
_history-substring-search-up-history ||
|
||||
_history-substring-search-up-buffer ||
|
||||
_history-substring-search-up-search
|
||||
|
||||
_history-substring-search-end
|
||||
}
|
||||
|
||||
function history-substring-search-down() {
|
||||
_history-substring-search-begin
|
||||
|
||||
_history-substring-search-down-history ||
|
||||
_history-substring-search-down-buffer ||
|
||||
_history-substring-search-down-search
|
||||
|
||||
_history-substring-search-end
|
||||
}
|
||||
|
||||
zle -N history-substring-search-up
|
||||
zle -N history-substring-search-down
|
||||
|
||||
bindkey '\e[A' history-substring-search-up
|
||||
bindkey '\e[B' history-substring-search-down
|
||||
|
||||
#-----------------------------------------------------------------------------
|
||||
# implementation details
|
||||
#-----------------------------------------------------------------------------
|
||||
|
||||
setopt extendedglob
|
||||
zmodload -F zsh/parameter
|
||||
|
||||
#
|
||||
# We have to "override" some keys and widgets if the
|
||||
# zsh-syntax-highlighting plugin has not been loaded:
|
||||
#
|
||||
# https://github.com/nicoulaj/zsh-syntax-highlighting
|
||||
#
|
||||
if [[ $+functions[_zsh_highlight] -eq 0 ]]; then
|
||||
#
|
||||
# Dummy implementation of _zsh_highlight()
|
||||
# that simply removes existing highlights
|
||||
#
|
||||
function _zsh_highlight() {
|
||||
region_highlight=()
|
||||
}
|
||||
|
||||
#
|
||||
# Remove existing highlights when the user
|
||||
# inserts printable characters into $BUFFER
|
||||
#
|
||||
function ordinary-key-press() {
|
||||
if [[ $KEYS == [[:print:]] ]]; then
|
||||
region_highlight=()
|
||||
fi
|
||||
zle .self-insert
|
||||
}
|
||||
zle -N self-insert ordinary-key-press
|
||||
|
||||
#
|
||||
# Override ZLE widgets to invoke _zsh_highlight()
|
||||
#
|
||||
# https://github.com/nicoulaj/zsh-syntax-highlighting/blob/
|
||||
# bb7fcb79fad797a40077bebaf6f4e4a93c9d8163/zsh-syntax-highlighting.zsh#L121
|
||||
#
|
||||
#--------------8<-------------------8<-------------------8<-----------------
|
||||
#
|
||||
# Copyright (c) 2010-2011 zsh-syntax-highlighting contributors
|
||||
# All rights reserved.
|
||||
#
|
||||
# Redistribution and use in source and binary forms, with or without
|
||||
# modification, are permitted provided that the following conditions are
|
||||
# met:
|
||||
#
|
||||
# * Redistributions of source code must retain the above copyright
|
||||
# notice, this list of conditions and the following disclaimer.
|
||||
#
|
||||
# * Redistributions in binary form must reproduce the above copyright
|
||||
# notice, this list of conditions and the following disclaimer in the
|
||||
# documentation and/or other materials provided with the distribution.
|
||||
#
|
||||
# * Neither the name of the zsh-syntax-highlighting contributors nor the
|
||||
# names of its contributors may be used to endorse or promote products
|
||||
# derived from this software without specific prior written permission.
|
||||
#
|
||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
|
||||
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
|
||||
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
|
||||
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
|
||||
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
|
||||
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
|
||||
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
|
||||
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
|
||||
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
|
||||
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
|
||||
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
# Load ZSH module zsh/zleparameter, needed to override user defined widgets.
|
||||
zmodload zsh/zleparameter 2>/dev/null || {
|
||||
echo 'zsh-syntax-highlighting: failed loading zsh/zleparameter, exiting.' >&2
|
||||
return -1
|
||||
}
|
||||
|
||||
# Override ZLE widgets to make them invoke _zsh_highlight.
|
||||
for event in ${${(f)"$(zle -la)"}:#(_*|orig-*|.run-help|.which-command)}; do
|
||||
if [[ "$widgets[$event]" == completion:* ]]; then
|
||||
eval "zle -C orig-$event ${${${widgets[$event]}#*:}/:/ } ; $event() { builtin zle orig-$event && _zsh_highlight } ; zle -N $event"
|
||||
else
|
||||
case $event in
|
||||
accept-and-menu-complete)
|
||||
eval "$event() { builtin zle .$event && _zsh_highlight } ; zle -N $event"
|
||||
;;
|
||||
|
||||
# The following widgets should NOT remove any previously
|
||||
# applied highlighting. Therefore we do not remap them.
|
||||
.forward-char|.backward-char|.up-line-or-history|.down-line-or-history)
|
||||
;;
|
||||
|
||||
.*)
|
||||
clean_event=$event[2,${#event}] # Remove the leading dot in the event name
|
||||
case ${widgets[$clean_event]-} in
|
||||
(completion|user):*)
|
||||
;;
|
||||
*)
|
||||
eval "$clean_event() { builtin zle $event && _zsh_highlight } ; zle -N $clean_event"
|
||||
;;
|
||||
esac
|
||||
;;
|
||||
*)
|
||||
;;
|
||||
esac
|
||||
fi
|
||||
done
|
||||
unset event clean_event
|
||||
#-------------->8------------------->8------------------->8-----------------
|
||||
fi
|
||||
|
||||
function _history-substring-search-begin() {
|
||||
_history_substring_search_move_cursor_eol=false
|
||||
_history_substring_search_query_highlight=
|
||||
|
||||
#
|
||||
# Continue using the previous $_history_substring_search_result by default,
|
||||
# unless the current query was cleared or a new/different query was entered.
|
||||
#
|
||||
if [[ -z $BUFFER || $BUFFER != $_history_substring_search_result ]]; then
|
||||
#
|
||||
# For the purpose of highlighting we will also keep
|
||||
# a version without doubly-escaped meta characters.
|
||||
#
|
||||
_history_substring_search_query=$BUFFER
|
||||
|
||||
#
|
||||
# $BUFFER contains the text that is in the command-line currently.
|
||||
# we put an extra "\\" before meta characters such as "\(" and "\)",
|
||||
# so that they become "\\\(" and "\\\)".
|
||||
#
|
||||
_history_substring_search_query_escaped=${BUFFER//(#m)[\][()|\\*?#<>~^]/\\$MATCH}
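
#
# For example (illustrative): a query such as "ls *.txt" becomes
# "ls \*.txt" here, so that the "*" is matched literally by the
# search below instead of being treated as a glob operator.
#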
|
||||
|
||||
#
|
||||
# Find all occurrences of the search query in the history file.
|
||||
#
|
||||
# (k) turns it into an array of line numbers.
|
||||
#
|
||||
# (on) seems to remove duplicates; these are the default
|
||||
# options and can be turned off with (ON).
|
||||
#
|
||||
_history_substring_search_matches=(${(kon)history[(R)(#$HISTORY_SUBSTRING_SEARCH_GLOBBING_FLAGS)*${_history_substring_search_query_escaped}*]})
|
||||
|
||||
#
|
||||
# Define the range of values that $_history_substring_search_match_index
|
||||
# can take: [0, $_history_substring_search_matches_count_plus].
|
||||
#
|
||||
_history_substring_search_matches_count=$#_history_substring_search_matches
|
||||
_history_substring_search_matches_count_plus=$(( _history_substring_search_matches_count + 1 ))
|
||||
_history_substring_search_matches_count_sans=$(( _history_substring_search_matches_count - 1 ))
|
||||
|
||||
#
|
||||
# If $_history_substring_search_match_index is equal to
|
||||
# $_history_substring_search_matches_count_plus, this indicates that we
|
||||
# are beyond the beginning of $_history_substring_search_matches.
|
||||
#
|
||||
# If $_history_substring_search_match_index is equal to 0, this indicates
|
||||
# that we are beyond the end of $_history_substring_search_matches.
|
||||
#
|
||||
# If we have initially pressed "up" we have to initialize
|
||||
# $_history_substring_search_match_index to
|
||||
# $_history_substring_search_matches_count_plus so that it will be
|
||||
# decreased to $_history_substring_search_matches_count.
|
||||
#
|
||||
# If we have initially pressed "down" we have to initialize
|
||||
# $_history_substring_search_match_index to
|
||||
# $_history_substring_search_matches_count so that it will be increased to
|
||||
# $_history_substring_search_matches_count_plus.
|
||||
#
|
||||
if [[ $WIDGET == history-substring-search-down ]]; then
|
||||
_history_substring_search_match_index=$_history_substring_search_matches_count
|
||||
else
|
||||
_history_substring_search_match_index=$_history_substring_search_matches_count_plus
|
||||
fi
|
||||
fi
|
||||
}
|
||||
|
||||
function _history-substring-search-end() {
|
||||
_history_substring_search_result=$BUFFER
|
||||
|
||||
# move the cursor to the end of the command line
|
||||
if [[ $_history_substring_search_move_cursor_eol == true ]]; then
|
||||
CURSOR=${#BUFFER}
|
||||
fi
|
||||
|
||||
# highlight command line using zsh-syntax-highlighting
|
||||
_zsh_highlight
|
||||
|
||||
# highlight the search query inside the command line
|
||||
if [[ -n $_history_substring_search_query_highlight && -n $_history_substring_search_query ]]; then
|
||||
#
|
||||
# The following expression yields a variable $MBEGIN, which
|
||||
# indicates the begin position + 1 of the first occurrence
|
||||
# of _history_substring_search_query_escaped in $BUFFER.
|
||||
#
|
||||
: ${(S)BUFFER##(#m$HISTORY_SUBSTRING_SEARCH_GLOBBING_FLAGS)($_history_substring_search_query##)}
|
||||
local begin=$(( MBEGIN - 1 ))
|
||||
local end=$(( begin + $#_history_substring_search_query ))
|
||||
region_highlight+=("$begin $end $_history_substring_search_query_highlight")
|
||||
fi
|
||||
|
||||
# For debugging purposes:
|
||||
# zle -R "mn: "$_history_substring_search_match_index" m#: "${#_history_substring_search_matches}
|
||||
# read -k -t 200 && zle -U $REPLY
|
||||
|
||||
# Exit successfully from the history-substring-search-* widgets.
|
||||
true
|
||||
}
|
||||
|
||||
function _history-substring-search-up-buffer() {
|
||||
#
|
||||
# Check if the UP arrow was pressed to move the cursor within a multi-line
|
||||
# buffer. This amounts to three tests:
|
||||
#
|
||||
# 1. $#buflines -gt 1.
|
||||
#
|
||||
# 2. $CURSOR -ne $#BUFFER.
|
||||
#
|
||||
# 3. Check if we are on the first line of the current multi-line buffer.
|
||||
# If so, pressing UP would amount to leaving the multi-line buffer.
|
||||
#
|
||||
# We check this by adding an extra "x" to $LBUFFER, which makes
|
||||
# sure that xlbuflines is always equal to the number of lines
|
||||
# until $CURSOR (including the line with the cursor on it).
|
||||
#
|
||||
local buflines XLBUFFER xlbuflines
|
||||
buflines=(${(f)BUFFER})
|
||||
XLBUFFER=$LBUFFER"x"
|
||||
xlbuflines=(${(f)XLBUFFER})
|
||||
|
||||
if [[ $#buflines -gt 1 && $CURSOR -ne $#BUFFER && $#xlbuflines -ne 1 ]]; then
|
||||
zle up-line-or-history
|
||||
return true
|
||||
fi
|
||||
|
||||
false
|
||||
}
|
||||
|
||||
function _history-substring-search-down-buffer() {
|
||||
#
|
||||
# Check if the DOWN arrow was pressed to move the cursor within a multi-line
|
||||
# buffer. This amounts to three tests:
|
||||
#
|
||||
# 1. $#buflines -gt 1.
|
||||
#
|
||||
# 2. $CURSOR -ne $#BUFFER.
|
||||
#
|
||||
# 3. Check if we are on the last line of the current multi-line buffer.
|
||||
# If so, pressing DOWN would amount to leaving the multi-line buffer.
|
||||
#
|
||||
# We check this by adding an extra "x" to $RBUFFER, which makes
|
||||
# sure that xrbuflines is always equal to the number of lines
|
||||
# from $CURSOR (including the line with the cursor on it).
|
||||
#
|
||||
local buflines XRBUFFER xrbuflines
|
||||
buflines=(${(f)BUFFER})
|
||||
XRBUFFER="x"$RBUFFER
|
||||
xrbuflines=(${(f)XRBUFFER})
|
||||
|
||||
if [[ $#buflines -gt 1 && $CURSOR -ne $#BUFFER && $#xrbuflines -ne 1 ]]; then
|
||||
zle down-line-or-history
|
||||
return true
|
||||
fi
|
||||
|
||||
false
|
||||
}
|
||||
|
||||
function _history-substring-search-up-history() {
|
||||
#
|
||||
# Behave like up-history in ZSH, except clear the $BUFFER
|
||||
# when beginning of history is reached like in Fish.
|
||||
#
|
||||
if [[ -z $_history_substring_search_query ]]; then
|
||||
|
||||
# we have reached the absolute top of history
|
||||
if [[ $HISTNO -eq 1 ]]; then
|
||||
BUFFER=
|
||||
|
||||
# going up from somewhere below the top of history
|
||||
else
|
||||
zle up-history
|
||||
fi
|
||||
|
||||
return true
|
||||
fi
|
||||
|
||||
false
|
||||
}
|
||||
|
||||
function _history-substring-search-down-history() {
|
||||
#
|
||||
# Behave like down-history in ZSH, except clear the
|
||||
# $BUFFER when end of history is reached like in Fish.
|
||||
#
|
||||
if [[ -z $_history_substring_search_query ]]; then
|
||||
|
||||
# going down from the absolute top of history
|
||||
if [[ $HISTNO -eq 1 && -z $BUFFER ]]; then
|
||||
BUFFER=${history[1]}
|
||||
_history_substring_search_move_cursor_eol=true
|
||||
|
||||
# going down from somewhere above the bottom of history
|
||||
else
|
||||
zle down-history
|
||||
fi
|
||||
|
||||
return true
|
||||
fi
|
||||
|
||||
false
|
||||
}
|
||||
|
||||
function _history-substring-search-up-search() {
|
||||
_history_substring_search_move_cursor_eol=true
|
||||
|
||||
#
|
||||
# Highlight matches during history-substring-up-search:
|
||||
#
|
||||
# The following constants have been initialized in
|
||||
# _history-substring-search-up/down-search():
|
||||
#
|
||||
# $_history_substring_search_matches is the current list of matches
|
||||
# $_history_substring_search_matches_count is the current number of matches
|
||||
# $_history_substring_search_matches_count_plus is the current number of matches + 1
|
||||
# $_history_substring_search_matches_count_sans is the current number of matches - 1
|
||||
# $_history_substring_search_match_index is the index of the current match
|
||||
#
|
||||
# The range of values that $_history_substring_search_match_index can take
|
||||
# is: [0, $_history_substring_search_matches_count_plus]. A value of 0
|
||||
# indicates that we are beyond the end of
|
||||
# $_history_substring_search_matches. A value of
|
||||
# $_history_substring_search_matches_count_plus indicates that we are beyond
|
||||
# the beginning of $_history_substring_search_matches.
|
||||
#
|
||||
# In _history-substring-search-up-search() the initial value of
|
||||
# $_history_substring_search_match_index is
|
||||
# $_history_substring_search_matches_count_plus. This value is set in
|
||||
# _history-substring-search-begin(). _history-substring-search-up-search()
|
||||
# will initially decrease it to $_history_substring_search_matches_count.
|
||||
#
|
||||
if [[ $_history_substring_search_match_index -ge 2 ]]; then
|
||||
#
|
||||
# Highlight the next match:
|
||||
#
|
||||
# 1. Decrease the value of $_history_substring_search_match_index.
|
||||
#
|
||||
# 2. Use $HISTORY_SUBSTRING_SEARCH_HIGHLIGHT_FOUND
|
||||
# to highlight the current buffer.
|
||||
#
|
||||
(( _history_substring_search_match_index-- ))
|
||||
BUFFER=$history[$_history_substring_search_matches[$_history_substring_search_match_index]]
|
||||
_history_substring_search_query_highlight=$HISTORY_SUBSTRING_SEARCH_HIGHLIGHT_FOUND
|
||||
|
||||
elif [[ $_history_substring_search_match_index -eq 1 ]]; then
|
||||
#
|
||||
# We will move beyond the end of $_history_substring_search_matches:
|
||||
#
|
||||
# 1. Decrease the value of $_history_substring_search_match_index.
|
||||
#
|
||||
# 2. Save the current buffer in $_history_substring_search_old_buffer,
|
||||
# so that it can be retrieved by
|
||||
# _history-substring-search-down-search() later.
|
||||
#
|
||||
# 3. Make $BUFFER equal to $_history_substring_search_query.
|
||||
#
|
||||
# 4. Use $HISTORY_SUBSTRING_SEARCH_HIGHLIGHT_NOT_FOUND
|
||||
# to highlight the current buffer.
|
||||
#
|
||||
(( _history_substring_search_match_index-- ))
|
||||
_history_substring_search_old_buffer=$BUFFER
|
||||
BUFFER=$_history_substring_search_query
|
||||
_history_substring_search_query_highlight=$HISTORY_SUBSTRING_SEARCH_HIGHLIGHT_NOT_FOUND
|
||||
|
||||
elif [[ $_history_substring_search_match_index -eq $_history_substring_search_matches_count_plus ]]; then
|
||||
#
|
||||
# We were beyond the beginning of $_history_substring_search_matches but
|
||||
# UP makes us move back to $_history_substring_search_matches:
|
||||
#
|
||||
# 1. Decrease the value of $_history_substring_search_match_index.
|
||||
#
|
||||
# 2. Restore $BUFFER from $_history_substring_search_old_buffer.
|
||||
#
|
||||
# 3. Use $HISTORY_SUBSTRING_SEARCH_HIGHLIGHT_FOUND
|
||||
# to highlight the current buffer.
|
||||
#
|
||||
(( _history_substring_search_match_index-- ))
|
||||
BUFFER=$_history_substring_search_old_buffer
|
||||
_history_substring_search_query_highlight=$HISTORY_SUBSTRING_SEARCH_HIGHLIGHT_FOUND
|
||||
fi
|
||||
}
|
||||
|
||||
function _history-substring-search-down-search() {
|
||||
_history_substring_search_move_cursor_eol=true
|
||||
|
||||
#
|
||||
# Highlight matches during history-substring-up-search:
|
||||
#
|
||||
# The following constants have been initialized in
|
||||
# _history-substring-search-up/down-search():
|
||||
#
|
||||
# $_history_substring_search_matches is the current list of matches
|
||||
# $_history_substring_search_matches_count is the current number of matches
|
||||
# $_history_substring_search_matches_count_plus is the current number of matches + 1
|
||||
# $_history_substring_search_matches_count_sans is the current number of matches - 1
|
||||
# $_history_substring_search_match_index is the index of the current match
|
||||
#
|
||||
# The range of values that $_history_substring_search_match_index can take
|
||||
# is: [0, $_history_substring_search_matches_count_plus]. A value of 0
|
||||
# indicates that we are beyond the end of
|
||||
# $_history_substring_search_matches. A value of
|
||||
# $_history_substring_search_matches_count_plus indicates that we are beyond
|
||||
# the beginning of $_history_substring_search_matches.
|
||||
#
|
||||
# In _history-substring-search-down-search() the initial value of
|
||||
# $_history_substring_search_match_index is
|
||||
# $_history_substring_search_matches_count. This value is set in
|
||||
# _history-substring-search-begin().
|
||||
# _history-substring-search-down-search() will initially increase it to
|
||||
# $_history_substring_search_matches_count_plus.
|
||||
#
|
||||
if [[ $_history_substring_search_match_index -le $_history_substring_search_matches_count_sans ]]; then
|
||||
#
|
||||
# Highlight the next match:
|
||||
#
|
||||
# 1. Increase $_history_substring_search_match_index by 1.
|
||||
#
|
||||
# 2. Use $HISTORY_SUBSTRING_SEARCH_HIGHLIGHT_FOUND
|
||||
# to highlight the current buffer.
|
||||
#
|
||||
(( _history_substring_search_match_index++ ))
|
||||
BUFFER=$history[$_history_substring_search_matches[$_history_substring_search_match_index]]
|
||||
_history_substring_search_query_highlight=$HISTORY_SUBSTRING_SEARCH_HIGHLIGHT_FOUND
|
||||
|
||||
elif [[ $_history_substring_search_match_index -eq $_history_substring_search_matches_count ]]; then
|
||||
#
|
||||
# We will move beyond the beginning of $_history_substring_search_matches:
|
||||
#
|
||||
# 1. Increase $_history_substring_search_match_index by 1.
|
||||
#
|
||||
# 2. Save the current buffer in $_history_substring_search_old_buffer, so
|
||||
# that it can be retrieved by _history-substring-search-up-search()
|
||||
# later.
|
||||
#
|
||||
# 3. Make $BUFFER equal to $_history_substring_search_query.
|
||||
#
|
||||
# 4. Use $HISTORY_SUBSTRING_SEARCH_HIGHLIGHT_NOT_FOUND
|
||||
# to highlight the current buffer.
|
||||
#
|
||||
(( _history_substring_search_match_index++ ))
|
||||
_history_substring_search_old_buffer=$BUFFER
|
||||
BUFFER=$_history_substring_search_query
|
||||
_history_substring_search_query_highlight=$HISTORY_SUBSTRING_SEARCH_HIGHLIGHT_NOT_FOUND
|
||||
|
||||
elif [[ $_history_substring_search_match_index -eq 0 ]]; then
|
||||
#
|
||||
# We were beyond the end of $_history_substring_search_matches but DOWN
|
||||
# makes us move back to the $_history_substring_search_matches:
|
||||
#
|
||||
# 1. Increase $_history_substring_search_match_index by 1.
|
||||
#
|
||||
# 2. Restore $BUFFER from $_history_substring_search_old_buffer.
|
||||
#
|
||||
# 3. Use $HISTORY_SUBSTRING_SEARCH_HIGHLIGHT_FOUND
|
||||
# to highlight the current buffer.
|
||||
#
|
||||
(( _history_substring_search_match_index++ ))
|
||||
BUFFER=$_history_substring_search_old_buffer
|
||||
_history_substring_search_query_highlight=$HISTORY_SUBSTRING_SEARCH_HIGHLIGHT_FOUND
|
||||
fi
|
||||
}
|
||||
|
||||
# -*- mode: zsh; sh-indentation: 2; indent-tabs-mode: nil; sh-basic-offset: 2; -*-
|
||||
# vim: ft=zsh sw=2 ts=2 et
|
||||
14
plugins/jake-node/jake-node.plugin.zsh
Normal file

@ -0,0 +1,14 @@
#---oh-my-zsh plugin : task Autocomplete for Jake tool---
# Jake : https://github.com/mde/jake
# Warning : Jakefile should have the right case : Jakefile or jakefile
# Tested on : MacOSX 10.7 (Lion), Ubuntu 11.10
# Author : Alexandre Lacheze (@al3xstrat)
# Inspiration : http://weblog.rubyonrails.org/2006/3/9/fast-rake-task-completion-for-zsh

function _jake () {
if [ -f Jakefile ]||[ -f jakefile ]; then
compadd `jake -T | cut -d " " -f 2 | sed -E "s/.\[([0-9]{1,2}(;[0-9]{1,2})?)?[m|K]//g"`
fi
}

compdef _jake jake
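# Illustrative usage (not part of the plugin): with a Jakefile in the current
# directory, typing `jake <TAB>` completes with the task names that `jake -T`
# prints.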
9
plugins/kate/kate.plugin.zsh
Normal file

@ -0,0 +1,9 @@

# Kate
# Start kate always silent
alias kate='kate >/dev/null 2>&1'

function kt () {
cd $1
kate $1
}
181
plugins/knife/_knife
Normal file
|
|
@ -0,0 +1,181 @@
|
|||
#compdef knife
|
||||
|
||||
# These flags should be available everywhere according to man knife
|
||||
knife_general_flags=( --help --server-url --key --config --editor --format --log_level --logfile --no-editor --user --print-after --version --yes )
|
||||
|
||||
# knife has a very special syntax, some example calls are:
|
||||
# knife status
|
||||
# knife cookbook list
|
||||
# knife role show ROLENAME
|
||||
# knife data bag show DATABAGNAME
|
||||
# knife role show ROLENAME --attribute ATTRIBUTENAME
|
||||
# knife cookbook show COOKBOOKNAME COOKBOOKVERSION recipes
|
||||
|
||||
# The -Q switch in compadd allows completions of things like "data bag" without having to go through two rounds of completion, and avoids zsh inserting a \ to escape spaces
|
||||
_knife() {
|
||||
local curcontext="$curcontext" state line
|
||||
typeset -A opt_args
|
||||
cloudproviders=(bluebox ec2 rackspace slicehost terremark)
|
||||
_arguments \
|
||||
'1: :->knifecmd'\
|
||||
'2: :->knifesubcmd'\
|
||||
'3: :->knifesubcmd2' \
|
||||
'4: :->knifesubcmd3' \
|
||||
'5: :->knifesubcmd4' \
|
||||
'6: :->knifesubcmd5'
|
||||
|
||||
case $state in
|
||||
knifecmd)
|
||||
compadd -Q "$@" bootstrap client configure cookbook "cookbook site" "data bag" exec environment index node recipe role search ssh status windows $cloudproviders
|
||||
;;
|
||||
knifesubcmd)
|
||||
case $words[2] in
|
||||
(bluebox|ec2|rackspace|slicehost|terremark)
|
||||
compadd "$@" server images
|
||||
;;
|
||||
client)
|
||||
compadd -Q "$@" "bulk delete" list create show delete edit reregister
|
||||
;;
|
||||
configure)
|
||||
compadd "$@" client
|
||||
;;
|
||||
cookbook)
|
||||
compadd -Q "$@" test list create download delete "metadata from" show "bulk delete" metadata upload
|
||||
;;
|
||||
environment)
|
||||
compadd -Q "$@" list create delete edit show "from file"
|
||||
;;
|
||||
node)
|
||||
compadd -Q "$@" "from file" create show edit delete list run_list "bulk delete"
|
||||
;;
|
||||
recipe)
|
||||
compadd "$@" list
|
||||
;;
|
||||
role)
|
||||
compadd -Q "$@" "bulk delete" create delete edit "from file" list show
|
||||
;;
|
||||
windows)
|
||||
compadd "$@" bootstrap
|
||||
;;
|
||||
*)
|
||||
_arguments '2:Subsubcommands:($(_knife_options1))'
|
||||
esac
|
||||
;;
|
||||
knifesubcmd2)
|
||||
case $words[3] in
|
||||
server)
|
||||
compadd "$@" list create delete
|
||||
;;
|
||||
images)
|
||||
compadd "$@" list
|
||||
;;
|
||||
site)
|
||||
compadd "$@" vendor show share search download list unshare
|
||||
;;
|
||||
(show|delete|edit)
|
||||
_arguments '3:Subsubcommands:($(_chef_$words[2]s_remote))'
|
||||
;;
|
||||
(upload|test)
|
||||
_arguments '3:Subsubcommands:($(_chef_$words[2]s_local) --all)'
|
||||
;;
|
||||
list)
|
||||
compadd -a "$@" knife_general_flags
|
||||
;;
|
||||
bag)
|
||||
compadd -Q "$@" show edit list "from file" create delete
|
||||
;;
|
||||
*)
|
||||
_arguments '3:Subsubcommands:($(_knife_options2))'
|
||||
esac
|
||||
;;
|
||||
knifesubcmd3)
|
||||
case $words[3] in
|
||||
show)
|
||||
case $words[2] in
|
||||
cookbook)
|
||||
versioncomp=1
|
||||
_arguments '4:Cookbookversions:($(_cookbook_versions) latest)'
|
||||
;;
|
||||
(node|client|role)
|
||||
compadd "$@" --attribute
|
||||
esac
|
||||
esac
|
||||
case $words[4] in
|
||||
(show|edit)
|
||||
_arguments '4:Subsubsubcommands:($(_chef_$words[2]_$words[3]s_remote))'
|
||||
;;
|
||||
file)
|
||||
_arguments '*:file or directory:_files -g "*.(rb|json)"'
|
||||
;;
|
||||
list)
|
||||
compadd -a "$@" knife_general_flags
|
||||
;;
|
||||
*)
|
||||
_arguments '*:Subsubcommands:($(_knife_options3))'
|
||||
esac
|
||||
;;
|
||||
knifesubcmd4)
|
||||
if (( versioncomp > 0 )); then
|
||||
compadd "$@" attributes definitions files libraries providers recipes resources templates
|
||||
else
|
||||
_arguments '*:Subsubcommands:($(_knife_options2))'
|
||||
fi
|
||||
;;
|
||||
knifesubcmd5)
|
||||
_arguments '*:Subsubcommands:($(_knife_options3))'
|
||||
esac
|
||||
}
|
||||
|
||||
# Helper functions to provide the argument completion for several depths of commands
|
||||
_knife_options1() {
|
||||
( for line in $( knife $words[2] --help | grep -v "^knife" ); do echo $line | grep "\-\-"; done )
|
||||
}
|
||||
|
||||
_knife_options2() {
|
||||
( for line in $( knife $words[2] $words[3] --help | grep -v "^knife" ); do echo $line | grep "\-\-"; done )
|
||||
}
|
||||
|
||||
_knife_options3() {
|
||||
( for line in $( knife $words[2] $words[3] $words[4] --help | grep -v "^knife" ); do echo $line | grep "\-\-"; done )
|
||||
}
|
||||
|
||||
# The chef_x_remote functions use knife to get a list of objects of type x on the server
|
||||
_chef_roles_remote() {
|
||||
(knife role list --format json | grep \" | awk '{print $1}' | awk -F"," '{print $1}' | awk -F"\"" '{print $2}')
|
||||
}
|
||||
|
||||
_chef_clients_remote() {
|
||||
(knife client list --format json | grep \" | awk '{print $1}' | awk -F"," '{print $1}' | awk -F"\"" '{print $2}')
|
||||
}
|
||||
|
||||
_chef_nodes_remote() {
|
||||
(knife node list --format json | grep \" | awk '{print $1}' | awk -F"," '{print $1}' | awk -F"\"" '{print $2}')
|
||||
}
|
||||
|
||||
_chef_cookbooks_remote() {
|
||||
(knife cookbook list --format json | grep \" | awk '{print $1}' | awk -F"," '{print $1}' | awk -F"\"" '{print $2}')
|
||||
}
|
||||
|
||||
_chef_sitecookbooks_remote() {
|
||||
(knife cookbook site list --format json | grep \" | awk '{print $1}' | awk -F"," '{print $1}' | awk -F"\"" '{print $2}')
|
||||
}
|
||||
|
||||
_chef_data_bags_remote() {
|
||||
(knife data bag list --format json | grep \" | awk '{print $1}' | awk -F"," '{print $1}' | awk -F"\"" '{print $2}')
|
||||
}
|
||||
|
||||
_chef_environments_remote() {
|
||||
(knife environment list | awk '{print $1}')
|
||||
}
|
||||
|
||||
# The chef_x_local functions use the knife config to find the paths of relevant objects x to be uploaded to the server
|
||||
_chef_cookbooks_local() {
|
||||
(for i in $( grep cookbook_path $HOME/.chef/knife.rb | awk 'BEGIN {FS = "[" }; {print $2}' | sed 's/\,//g' | sed "s/'//g" | sed 's/\(.*\)]/\1/' ); do ls $i; done)
|
||||
}
|
||||
|
||||
# This function extracts the available cookbook versions on the chef server
|
||||
_cookbook_versions() {
|
||||
(knife cookbook show $words[4] | grep -v $words[4] | grep -v -E '\]|\[|\{|\}' | sed 's/ //g' | sed 's/"//g')
|
||||
}
|
||||
|
||||
_knife "$@"
|
||||
|
|
@ -8,19 +8,30 @@ alias rtfm='man'
|
|||
alias visible='echo'
|
||||
alias invisible='cat'
|
||||
alias moar='more'
|
||||
alias tldr='less'
|
||||
alias alwayz='tail -f'
|
||||
|
||||
alias icanhas='mkdir'
|
||||
alias gimmeh='touch'
|
||||
alias donotwant='rm'
|
||||
alias dowant='cp'
|
||||
alias gtfo='mv'
|
||||
alias nowai='chmod'
|
||||
|
||||
alias hai='cd'
|
||||
alias iz='ls'
|
||||
alias plz='pwd'
|
||||
alias ihasbucket='df -h'
|
||||
|
||||
alias inur='locate'
|
||||
alias iminurbase='finger'
|
||||
|
||||
alias btw='nice'
|
||||
alias obtw='nohup'
|
||||
|
||||
alias nomz='ps -aux'
|
||||
alias nomnom='killall'
|
||||
|
||||
alias byes='exit'
|
||||
alias cya='reboot'
|
||||
alias kthxbai='halt'
|
||||
|
|
|
|||
14
plugins/mercurial/mercurial.plugin.zsh
Normal file

@ -0,0 +1,14 @@

# Mercurial
alias hgc='hg commit'
alias hgb='hg branch'
alias hgba='hg branches'
alias hgco='hg checkout'
alias hgd='hg diff'
alias hged='hg diffmerge'
# pull and update
alias hgl='hg pull -u'
alias hgp='hg push'
alias hgs='hg status'
# this is the 'git commit --amend' equivalent
alias hgca='hg qimport -r tip ; hg qrefresh -e ; hg qfinish tip'
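# Note (illustrative, not from the plugin): hgca relies on the mq extension,
# since qimport, qrefresh and qfinish are mq commands; it assumes something
# like the following in ~/.hgrc:
#
#   [extensions]
#   mq =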
|
|
@ -1,19 +0,0 @@
|
|||
#compdef npm
|
||||
|
||||
# Node Package Manager 0.3.15 completion, letting npm do all the completion work
|
||||
|
||||
_npm() {
|
||||
compadd -- $(_npm_complete $words)
|
||||
}
|
||||
|
||||
# We want to show all errors of any substance, but never the "npm (not )ok" one.
|
||||
# (Also doesn't consider "ERR! no match found" worth breaking the terminal for.)
|
||||
_npm_complete() {
|
||||
local ask_npm
|
||||
ask_npm=(npm completion --color false --loglevel error -- $@)
|
||||
{ _call_program npm $ask_npm 2>&1 >&3 \
|
||||
| egrep -v '^(npm (not |)ok|ERR! no match found)$' >&2; \
|
||||
} 3>&1
|
||||
}
|
||||
|
||||
_npm "$@"
|
||||
1
plugins/npm/npm.plugin.zsh
Normal file

@ -0,0 +1 @@
eval "$(npm completion 2>/dev/null)"
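# Illustrative note (not part of the plugin): `npm completion` prints a shell
# completion script, so eval-ing it registers tab completion for npm
# subcommands, e.g. `npm ins<TAB>` -> `npm install`.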
5
plugins/nyan/nyan.plugin.zsh
Normal file

@ -0,0 +1,5 @@
if [[ -x `which nc` ]]; then
alias nyan='nc -v miku.acm.uiuc.edu 23' # nyan cat
fi

5
plugins/osx/_man-preview
Normal file

@ -0,0 +1,5 @@
#compdef man-preview
#autoload

_man

|
|
@ -1,63 +1,99 @@
|
|||
alias showfiles='defaults write com.apple.finder AppleShowAllFiles TRUE; killall Finder'
|
||||
alias hidefiles='defaults write com.apple.finder AppleShowAllFiles FALSE; killall Finder'
|
||||
# ------------------------------------------------------------------------------
|
||||
# FILE: osx.plugin.zsh
|
||||
# DESCRIPTION: oh-my-zsh plugin file.
|
||||
# AUTHOR: Sorin Ionescu (sorin.ionescu@gmail.com)
|
||||
# VERSION: 1.0.1
|
||||
# ------------------------------------------------------------------------------
|
||||
|
||||
# Recursively delete .DS_Store files
|
||||
alias rm-dsstore="find . -name '*.DS_Store' -type f -delete"
|
||||
|
||||
function savepath() {
|
||||
pwd > ~/.current_path~
|
||||
}
|
||||
|
||||
function tab() {
|
||||
savepath
|
||||
osascript >/dev/null <<EOF
|
||||
on do_submenu(app_name, menu_name, menu_item, submenu_item)
|
||||
-- bring the target application to the front
|
||||
tell application app_name
|
||||
activate
|
||||
end tell
|
||||
tell application "System Events"
|
||||
tell process app_name
|
||||
tell menu bar 1
|
||||
tell menu bar item menu_name
|
||||
tell menu menu_name
|
||||
tell menu item menu_item
|
||||
tell menu menu_item
|
||||
click menu item submenu_item
|
||||
end tell
|
||||
end tell
|
||||
end tell
|
||||
local command="cd \\\"$PWD\\\""
|
||||
(( $# > 0 )) && command="${command}; $*"
|
||||
|
||||
the_app=$(
|
||||
osascript 2>/dev/null <<EOF
|
||||
tell application "System Events"
|
||||
name of first item of (every process whose frontmost is true)
|
||||
end tell
|
||||
EOF
|
||||
)
|
||||
|
||||
[[ "$the_app" == 'Terminal' ]] && {
|
||||
osascript 2>/dev/null <<EOF
|
||||
tell application "System Events"
|
||||
tell process "Terminal" to keystroke "t" using command down
|
||||
tell application "Terminal" to do script "${command}" in front window
|
||||
end tell
|
||||
EOF
|
||||
}
|
||||
|
||||
[[ "$the_app" == 'iTerm' ]] && {
|
||||
osascript 2>/dev/null <<EOF
|
||||
tell application "iTerm"
|
||||
set current_terminal to current terminal
|
||||
tell current_terminal
|
||||
launch session "Default Session"
|
||||
set current_session to current session
|
||||
tell current_session
|
||||
write text "${command}"
|
||||
end tell
|
||||
end tell
|
||||
end tell
|
||||
end tell
|
||||
end do_submenu
|
||||
EOF
|
||||
}
|
||||
}
|
||||
|
||||
do_submenu("Terminal", "Shell", "New Tab", 1)
|
||||
function pfd() {
|
||||
osascript 2>/dev/null <<EOF
|
||||
tell application "Finder"
|
||||
return POSIX path of (target of window 1 as alias)
|
||||
end tell
|
||||
EOF
|
||||
}
|
||||
|
||||
function itab() {
|
||||
savepath
|
||||
osascript >/dev/null <<EOF
|
||||
on do_submenu(app_name, menu_name, menu_item)
|
||||
-- bring the target application to the front
|
||||
tell application app_name
|
||||
activate
|
||||
end tell
|
||||
tell application "System Events"
|
||||
tell process app_name
|
||||
tell menu bar 1
|
||||
tell menu bar item menu_name
|
||||
tell menu menu_name
|
||||
click menu item menu_item
|
||||
end tell
|
||||
end tell
|
||||
end tell
|
||||
end tell
|
||||
end tell
|
||||
end do_submenu
|
||||
|
||||
do_submenu("iTerm", "Shell", "New Tab")
|
||||
function pfs() {
|
||||
osascript 2>/dev/null <<EOF
|
||||
set output to ""
|
||||
tell application "Finder" to set the_selection to selection
|
||||
set item_count to count the_selection
|
||||
repeat with item_index from 1 to count the_selection
|
||||
if item_index is less than item_count then set the_delimiter to "\n"
|
||||
if item_index is item_count then set the_delimiter to ""
|
||||
set output to output & ((item item_index of the_selection as alias)'s POSIX path) & the_delimiter
|
||||
end repeat
|
||||
EOF
|
||||
}
|
||||
|
||||
function cdf() {
|
||||
cd "$(pfd)"
|
||||
}
|
||||
|
||||
function pushdf() {
|
||||
pushd "$(pfd)"
|
||||
}
|
||||
|
||||
function quick-look() {
|
||||
(( $# > 0 )) && qlmanage -p $* &>/dev/null &
|
||||
}
|
||||
|
||||
function man-preview() {
|
||||
man -t "$@" | open -f -a Preview
|
||||
}
|
||||
|
||||
function trash() {
|
||||
local trash_dir="${HOME}/.Trash"
|
||||
local temp_ifs=$IFS
|
||||
IFS=$'\n'
|
||||
for item in "$@"; do
|
||||
if [[ -e "$item" ]]; then
|
||||
item_name="$(basename $item)"
|
||||
if [[ -e "${trash_dir}/${item_name}" ]]; then
|
||||
mv -f "$item" "${trash_dir}/${item_name} $(date "+%H-%M-%S")"
|
||||
else
|
||||
mv -f "$item" "${trash_dir}/"
|
||||
fi
|
||||
fi
|
||||
done
|
||||
IFS=$temp_ifs
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@ -3,30 +3,43 @@
|
|||
|
||||
# pip zsh completion, based on homebrew completion
|
||||
|
||||
_pip_all() {
|
||||
# we cache the list of packages (originally from the macports plugin)
|
||||
if (( ! $+piplist )); then
|
||||
echo -n " (caching package index...)"
|
||||
piplist=($(pip search * | cut -d ' ' -f 1 | tr '[A-Z]' '[a-z]'))
|
||||
fi
|
||||
}
|
||||
|
||||
_pip_installed() {
|
||||
installed_pkgs=(`pip freeze`)
|
||||
installed_pkgs=(`pip freeze | cut -d '=' -f 1`)
|
||||
}
|
||||
|
||||
local -a _1st_arguments
|
||||
_1st_arguments=(
|
||||
'bundle:Create pybundles (archives containing multiple packages)'
|
||||
'freeze:Output all currently installed packages (exact versions) to stdout'
|
||||
'help:Show available commands'
|
||||
'install:Install packages'
|
||||
'search:Search PyPI'
|
||||
'uninstall:Uninstall packages'
|
||||
'unzip:Unzip individual packages'
|
||||
'zip:Zip individual packages'
|
||||
'bundle:create pybundles (archives containing multiple packages)'
|
||||
'freeze:output all currently installed packages (exact versions) to stdout'
|
||||
'help:show available commands'
|
||||
'install:install packages'
|
||||
'search:search PyPI'
|
||||
'uninstall:uninstall packages'
|
||||
'unzip:unzip individual packages'
|
||||
'zip:zip individual packages'
|
||||
)
|
||||
|
||||
local expl
|
||||
local -a pkgs installed_pkgs
|
||||
local -a all_pkgs installed_pkgs
|
||||
|
||||
_arguments \
|
||||
'(--version)--version[Show version number of program and exit]' \
|
||||
'(-v --verbose)'{-v,--verbose}'[Give more output]' \
|
||||
'(-q --quiet)'{-q,--quiet}'[Give less output]' \
|
||||
'(-h --help)'{-h,--help}'[Show help]' \
|
||||
'(--version)--version[show version number of program and exit]' \
|
||||
'(-h --help)'{-h,--help}'[show help]' \
|
||||
'(-E --environment)'{-E,--environment}'[virtualenv environment to run pip in]' \
|
||||
'(-s --enable-site-packages)'{-s,--enable-site-packages}'[include site-packages in virtualenv]' \
|
||||
'(-v --verbose)'{-v,--verbose}'[give more output]' \
|
||||
'(-q --quiet)'{-q,--quiet}'[give less output]' \
|
||||
'(--log)--log[log file location]' \
|
||||
'(--proxy)--proxy[proxy in form user:passwd@proxy.server:port]' \
|
||||
'(--timeout)--timeout[socket timeout (default 15s)]' \
|
||||
'*:: :->subcmds' && return 0
|
||||
|
||||
if (( CURRENT == 1 )); then
|
||||
|
|
@ -35,10 +48,25 @@ if (( CURRENT == 1 )); then
|
|||
fi
|
||||
|
||||
case "$words[1]" in
|
||||
list)
|
||||
if [[ "$state" == forms ]]; then
|
||||
_pip_installed
|
||||
_requested installed_pkgs expl 'installed packages' compadd -a installed_pkgs
|
||||
search)
|
||||
_arguments \
|
||||
'(--index)--index[base URL of Python Package Index]' ;;
|
||||
freeze)
|
||||
_arguments \
|
||||
'(-l --local)'{-l,--local}'[report only virtualenv packages]' ;;
|
||||
install)
|
||||
_arguments \
|
||||
'(-U --upgrade)'{-U,--upgrade}'[upgrade all packages to the newest available version]' \
|
||||
'(-f --find-links)'{-f,--find-links}'[URL for finding packages]' \
|
||||
'(--no-deps --no-dependencies)'{--no-deps,--no-dependencies}'[ignore package dependencies]' \
|
||||
'(--no-install)--no-install[only download packages]' \
|
||||
'(--no-download)--no-download[only install downloaded packages]' \
|
||||
'(--install-option)--install-option[extra arguments to be supplied to the setup.py]' \
|
||||
'1: :->packages' && return 0
|
||||
|
||||
if [[ "$state" == packages ]]; then
|
||||
_pip_all
|
||||
_wanted piplist expl 'packages' compadd -a piplist
|
||||
fi ;;
|
||||
uninstall)
|
||||
_pip_installed
|
||||
|
|
|
|||
|
|
@ -1,10 +1,66 @@
|
|||
# Thanks to Christopher Sexton
|
||||
# https://gist.github.com/965032
|
||||
function kapow {
|
||||
touch ~/.pow/$1/tmp/restart.txt
|
||||
if [ $? -eq 0 ]; then
|
||||
echo "$fg[yellow]Pow restarting $1...$reset_color"
|
||||
fi
|
||||
# Restart a rack app running under pow
|
||||
# http://pow.cx/
|
||||
#
|
||||
# Adds a kapow command that will restart an app
|
||||
#
|
||||
# $ kapow myapp
|
||||
#
|
||||
# Supports command completion.
|
||||
#
|
||||
# If you are not already using completion you might need to enable it with
|
||||
#
|
||||
# autoload -U compinit && compinit
|
||||
#
|
||||
# Changes:
|
||||
#
|
||||
# Defaults to the current application, and will walk up the tree to find
|
||||
# a config.ru file and restart the corresponding app
|
||||
#
|
||||
# Will detect if an app does not exist in pow and print a (slightly) helpful
|
||||
# error message
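#
# Illustrative example (names are hypothetical): from anywhere inside
# ~/code/myapp, which contains config.ru and is symlinked as ~/.pow/myapp,
#
#   $ kapow            # walks up to config.ru and restarts myapp
#   $ kapow otherapp   # restarts ~/.pow/otherapp explicitly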
|
||||
|
||||
rack_root_detect(){
|
||||
setopt chaselinks
|
||||
local orgdir=$(pwd)
|
||||
local basedir=$(pwd)
|
||||
|
||||
while [[ $basedir != '/' ]]; do
|
||||
test -e "$basedir/config.ru" && break
|
||||
builtin cd ".." 2>/dev/null
|
||||
basedir="$(pwd)"
|
||||
done
|
||||
|
||||
builtin cd $orgdir 2>/dev/null
|
||||
[[ ${basedir} == "/" ]] && return 1
|
||||
echo `basename $basedir | sed -E "s/\.(com|net|org)//"`
|
||||
}
|
||||
|
||||
kapow(){
|
||||
local vhost=$1
|
||||
[ ! -n "$vhost" ] && vhost=$(rack_root_detect)
|
||||
if [ ! -h ~/.pow/$vhost ]
|
||||
then
|
||||
echo "pow: This domain isn’t set up yet. Symlink your application to ${vhost} first."
|
||||
return 1
|
||||
fi
|
||||
|
||||
[ ! -d ~/.pow/${vhost}/tmp ] && mkdir -p ~/.pow/$vhost/tmp
|
||||
touch ~/.pow/$vhost/tmp/restart.txt;
|
||||
[ $? -eq 0 ] && echo "pow: restarting $vhost.dev"
|
||||
}
|
||||
compctl -W ~/.pow -/ kapow
|
||||
|
||||
powit(){
|
||||
local basedir=$(pwd)
|
||||
local vhost=$1
|
||||
[ ! -n "$vhost" ] && vhost=$(rack_root_detect)
|
||||
if [ ! -h ~/.pow/$vhost ]
|
||||
then
|
||||
echo "pow: Symlinking your app with pow. ${vhost}"
|
||||
[ ! -d ~/.pow/${vhost} ] && ln -s $basedir ~/.pow/$vhost
|
||||
return 1
|
||||
fi
|
||||
}
|
||||
|
||||
# View the standard out (puts) from any pow app
|
||||
alias kaput="tail -f ~/Library/Logs/Pow/apps/*"
|
||||
|
|
|
|||
4
plugins/powder/_powder
Normal file

@ -0,0 +1,4 @@
#compdef powder
#autoload

compadd `powder help | grep powder | cut -d " " -f 4`
5
plugins/python/python.plugin.zsh
Normal file

@ -0,0 +1,5 @@
# Find python file
alias pyfind='find . -name "*.py"'

# Remove python compiled byte-code
alias pyclean='find . -type f -name "*.py[co]" -exec rm -f \{\} \;'
|
|
@ -1,13 +1,5 @@
|
|||
# Rails 3 aliases, backwards-compatible with Rails 2.
|
||||
|
||||
function _bundle_command {
|
||||
if command -v bundle && [ -e "Gemfile" ]; then
|
||||
bundle exec $@
|
||||
else
|
||||
$@
|
||||
fi
|
||||
}
|
||||
|
||||
function _rails_command () {
|
||||
if [ -e "script/server" ]; then
|
||||
ruby script/$@
|
||||
|
|
@ -25,6 +17,5 @@ alias rp='_rails_command plugin'
|
|||
alias rs='_rails_command server'
|
||||
alias rsd='_rails_command server --debugger'
|
||||
alias devlog='tail -f log/development.log'
|
||||
|
||||
alias rspec='_bundle_command rspec'
|
||||
alias cuke='_bundle_command cucumber'
|
||||
alias rdm='rake db:migrate'
|
||||
alias rdr='rake db:rollback'
|
||||
|
|
|
|||
6
plugins/rake/rake.plugin.zsh
Normal file

@ -0,0 +1,6 @@
alias rake="noglob rake" # allows square brackets for rake task invocation
alias brake='noglob bundle exec rake' # execute the bundled rake gem
alias srake='noglob sudo rake' # noglob must come before sudo
alias sbrake='noglob sudo bundle exec rake' # altogether now ...

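# Illustrative example (task name is hypothetical): with noglob, arguments in
# square brackets no longer need quoting, e.g.
#
#   rake db:migrate
#   rake award:prize[winner]   # would otherwise trigger "zsh: no matches found"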
44
plugins/rbenv/rbenv.plugin.zsh
Normal file
|
|
@ -0,0 +1,44 @@
|
|||
FOUND_RBENV=0
|
||||
for rbenvdir in "$HOME/.rbenv" "/usr/local/rbenv" "/opt/rbenv" ; do
|
||||
if [ -d $rbenvdir/bin -a $FOUND_RBENV -eq 0 ] ; then
|
||||
FOUND_RBENV=1
|
||||
export RBENV_ROOT=$rbenvdir
|
||||
export PATH=${rbenvdir}/bin:$PATH
|
||||
eval "$(rbenv init -)"
|
||||
|
||||
alias rubies="rbenv versions"
|
||||
alias gemsets="rbenv gemset list"
|
||||
|
||||
function current_ruby() {
|
||||
echo "$(rbenv version-name)"
|
||||
}
|
||||
|
||||
function current_gemset() {
|
||||
echo "$(rbenv gemset active 2&>/dev/null | sed -e ":a" -e '$ s/\n/+/gp;N;b a' | head -n1)"
|
||||
}
|
||||
|
||||
function gems {
|
||||
local rbenv_path=$(rbenv prefix)
|
||||
gem list $@ | sed \
|
||||
-Ee "s/\([0-9\.]+( .+)?\)/$fg[blue]&$reset_color/g" \
|
||||
-Ee "s|$(echo $rbenv_path)|$fg[magenta]\$rbenv_path$reset_color|g" \
|
||||
-Ee "s/$current_ruby@global/$fg[yellow]&$reset_color/g" \
|
||||
-Ee "s/$current_ruby$current_gemset$/$fg[green]&$reset_color/g"
|
||||
}
|
||||
|
||||
function rbenv_prompt_info() {
|
||||
if [[ -n $(current_gemset) ]] ; then
|
||||
echo "$(current_ruby)@$(current_gemset)"
|
||||
else
|
||||
echo "$(current_ruby)"
|
||||
fi
|
||||
}
|
||||
fi
|
||||
done
|
||||
unset rbenvdir
|
||||
|
||||
if [ $FOUND_RBENV -eq 0 ] ; then
|
||||
alias rubies='ruby -v'
|
||||
function gemsets() { echo 'not supported' }
|
||||
function rbenv_prompt_info() { echo "system: $(ruby -v | cut -f-2 -d ' ')" }
|
||||
fi
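
# Illustrative only (version and gemset names are hypothetical): with Ruby
# 1.9.2-p290 and gemset "myapp" active, rbenv_prompt_info prints
# "1.9.2-p290@myapp"; with no active gemset it prints just "1.9.2-p290".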
|
||||
|
|
@ -51,7 +51,7 @@ _1st_arguments=(
|
|||
'keys:find all keys matching the given pattern'
|
||||
'lastsave:get the UNIX timestamp of the last successful save to disk'
|
||||
'lindex:get an element from a list by its index'
|
||||
'linset:insert an element before or after another element in a list'
|
||||
'linsert:insert an element before or after another element in a list'
|
||||
'llen:get the length of a list'
|
||||
'lpop:remove and get the first element in a list'
|
||||
'lpush:prepend a value to a list'
|
||||
|
|
|
|||
|
|
@ -3,4 +3,4 @@
|
|||
alias sgem='sudo gem'
|
||||
|
||||
# Find ruby file
|
||||
alias rfind='find . -name *.rb | xargs grep -n'
|
||||
alias rfind='find . -name "*.rb" | xargs grep -n'
|
||||
|
|
|
|||
|
|
@ -37,7 +37,7 @@ function gems {
|
|||
local current_gemset=`rvm-prompt g`
|
||||
|
||||
gem list $@ | sed \
|
||||
-Ee "s/\([0-9\.]+( .+)?\)/$fg[blue]&$reset_color/g" \
|
||||
-Ee "s/\([0-9, \.]+( .+)?\)/$fg[blue]&$reset_color/g" \
|
||||
-Ee "s|$(echo $rvm_path)|$fg[magenta]\$rvm_path$reset_color|g" \
|
||||
-Ee "s/$current_ruby@global/$fg[yellow]&$reset_color/g" \
|
||||
-Ee "s/$current_ruby$current_gemset$/$fg[green]&$reset_color/g"
|
||||
|
|
|
|||
64
plugins/sprunge/sprunge.plugin.zsh
Normal file
|
|
@ -0,0 +1,64 @@
|
|||
# Contributed and SLIGHTLY modded by Matt Parnell/ilikenwf <parwok -at- gmail>
|
||||
# Created by the blogger at the URL below...I don't know where to find his/her name
|
||||
# Original found at http://www.shellperson.net/sprunge-pastebin-script/
|
||||
|
||||
usage() {
|
||||
description | fmt -s >&2
|
||||
}
|
||||
|
||||
description() {
|
||||
cat << HERE
|
||||
|
||||
DESCRIPTION
|
||||
Upload data and fetch URL from the pastebin http://sprunge.us
|
||||
|
||||
USAGE
|
||||
$0 filename.txt
|
||||
$0 text string
|
||||
$0 < filename.txt
|
||||
piped_data | $0
|
||||
|
||||
NOTES
|
||||
--------------------------------------------------------------------------
|
||||
* INPUT METHODS *
|
||||
$0 can accept piped data, STDIN redirection [<filename.txt], text strings following the command as arguments, or filenames as arguments. Only one of these methods can be used at a time, so please see the note on precedence. Also, note that using a pipe or STDIN redirection will treat tabs as spaces, or disregard them entirely (if they appear at the beginning of a line). So I suggest using a filename as an argument if tabs are important either to the function or readability of the code.
|
||||
|
||||
* PRECEDENCE *
|
||||
STDIN redirection has precedence, then piped input, then a filename as an argument, and finally text strings as an arguments.
|
||||
|
||||
EXAMPLE:
|
||||
echo piped | "$0" arguments.txt < stdin_redirection.txt
|
||||
|
||||
In this example, the contents of stdin_redirection.txt would be uploaded. Both the piped text and arguments.txt are ignored. If there is piped input and arguments, the arguments will be ignored, and the piped input uploaded.
|
||||
|
||||
* FILENAMES *
|
||||
If a filename is misspelled or doesn't have the necessary path description, it will NOT generate an error, but will instead treat it as a text string and upload it.
|
||||
--------------------------------------------------------------------------
|
||||
|
||||
HERE
|
||||
exit
|
||||
}
|
||||
|
||||
sprunge() {
|
||||
if [ -t 0 ]; then
|
||||
echo Running interactively, checking for arguments... >&2
|
||||
if [ "$*" ]; then
|
||||
echo Arguments present... >&2
|
||||
if [ -f "$*" ]; then
|
||||
echo Uploading the contents of "$*"... >&2
|
||||
cat "$*"
|
||||
else
|
||||
echo Uploading the text: \""$*"\"... >&2
|
||||
echo "$*"
|
||||
fi | curl -F 'sprunge=<-' http://sprunge.us
|
||||
else
|
||||
echo No arguments found, printing USAGE and exiting. >&2
|
||||
usage
|
||||
fi
|
||||
else
|
||||
echo Using input from a pipe or STDIN redirection... >&2
|
||||
while read -r line ; do
|
||||
echo $line
|
||||
done | curl -F 'sprunge=<-' http://sprunge.us
|
||||
fi
|
||||
}
|
||||
|
|
@ -1,23 +1,62 @@
|
|||
# Based on code from Joseph M. Reagle
|
||||
# http://www.cygwin.com/ml/cygwin/2001-06/msg00537.html
|
||||
#
|
||||
# INSTRUCTIONS
|
||||
#
|
||||
# To enable agent forwarding support, add the following to
|
||||
# your .zshrc file:
|
||||
#
|
||||
# zstyle :omz:plugins:ssh-agent agent-forwarding on
|
||||
#
|
||||
# To load multiple identities, use the identities style. For
|
||||
# example:
|
||||
#
|
||||
# zstyle :omz:plugins:ssh-agent identities id_rsa id_rsa2 id_github
|
||||
#
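# A combined example for ~/.zshrc (illustrative; place it before oh-my-zsh
# is sourced):
#
#   zstyle :omz:plugins:ssh-agent agent-forwarding on
#   zstyle :omz:plugins:ssh-agent identities id_rsa id_rsa2 id_github
#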
|
||||
#
|
||||
# CREDITS
|
||||
#
|
||||
# Based on code from Joseph M. Reagle
|
||||
# http://www.cygwin.com/ml/cygwin/2001-06/msg00537.html
|
||||
#
|
||||
# Agent forwarding support based on ideas from
|
||||
# Florent Thoumie and Jonas Pfenniger
|
||||
#
|
||||
|
||||
local SSH_ENV=$HOME/.ssh/environment-$HOST
|
||||
local _plugin__ssh_env=$HOME/.ssh/environment-$HOST
|
||||
local _plugin__forwarding
|
||||
|
||||
function start_agent {
|
||||
/usr/bin/env ssh-agent | sed 's/^echo/#echo/' > ${SSH_ENV}
|
||||
chmod 600 ${SSH_ENV}
|
||||
. ${SSH_ENV} > /dev/null
|
||||
/usr/bin/ssh-add;
|
||||
function _plugin__start_agent()
|
||||
{
|
||||
local -a identities
|
||||
|
||||
# start ssh-agent and setup environment
|
||||
/usr/bin/env ssh-agent | sed 's/^echo/#echo/' > ${_plugin__ssh_env}
|
||||
chmod 600 ${_plugin__ssh_env}
|
||||
. ${_plugin__ssh_env} > /dev/null
|
||||
|
||||
# load identities
|
||||
zstyle -a :omz:plugins:ssh-agent identities identities
|
||||
echo starting...
|
||||
/usr/bin/ssh-add $HOME/.ssh/${^identities}
|
||||
}
|
||||
|
||||
# Source SSH settings, if applicable
|
||||
# test if agent-forwarding is enabled
|
||||
zstyle -b :omz:plugins:ssh-agent agent-forwarding _plugin__forwarding
|
||||
if [[ ${_plugin__forwarding} == "yes" && -n "$SSH_AUTH_SOCK" ]]; then
|
||||
# Add a nifty symlink for screen/tmux if agent forwarding
|
||||
[[ -L $SSH_AUTH_SOCK ]] || ln -sf "$SSH_AUTH_SOCK" /tmp/ssh-agent-$USER-screen
|
||||
|
||||
if [ -f "${SSH_ENV}" ]; then
|
||||
. ${SSH_ENV} > /dev/null
|
||||
elif [ -f "${_plugin__ssh_env}" ]; then
|
||||
# Source SSH settings, if applicable
|
||||
. ${_plugin__ssh_env} > /dev/null
|
||||
ps -ef | grep ${SSH_AGENT_PID} | grep ssh-agent$ > /dev/null || {
|
||||
start_agent;
|
||||
_plugin__start_agent;
|
||||
}
|
||||
else
|
||||
start_agent;
|
||||
_plugin__start_agent;
|
||||
fi
|
||||
|
||||
# tidy up after ourselves
|
||||
unfunction _plugin__start_agent
|
||||
unset _plugin__forwarding
|
||||
unset _plugin__ssh_env
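Pulling the instructions above together, a .zshrc might configure the plugin like this (identity file names are illustrative; the zstyle calls have to run before oh-my-zsh is sourced):

zstyle :omz:plugins:ssh-agent agent-forwarding on
zstyle :omz:plugins:ssh-agent identities id_rsa id_work_rsa
plugins=(git ssh-agent)
source $ZSH/oh-my-zsh.sh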

@ -1,5 +1,5 @@
function svn_prompt_info {
if [[ -d .svn ]]; then
if [ $(in_svn) ]; then
echo "$ZSH_PROMPT_BASE_COLOR$ZSH_THEME_SVN_PROMPT_PREFIX\
$ZSH_THEME_REPO_NAME_COLOR$(svn_get_repo_name)$ZSH_PROMPT_BASE_COLOR$ZSH_THEME_SVN_PROMPT_SUFFIX$ZSH_PROMPT_BASE_COLOR$(svn_dirty)$ZSH_PROMPT_BASE_COLOR"
fi

@ -13,7 +13,7 @@ function in_svn() {
}

function svn_get_repo_name {
if [ in_svn ]; then
if [ $(in_svn) ]; then
svn info | sed -n 's/Repository\ Root:\ .*\///p' | read SVN_ROOT

svn info | sed -n "s/URL:\ .*$SVN_ROOT\///p" | sed "s/\/.*$//"

@ -21,14 +21,14 @@ function svn_get_repo_name {
}

function svn_get_rev_nr {
if [ in_svn ]; then
if [ $(in_svn) ]; then
svn info 2> /dev/null | sed -n s/Revision:\ //p
fi
}

function svn_dirty_choose {
if [ in_svn ]; then
s=$(svn status 2>/dev/null)
if [ $(in_svn) ]; then
s=$(svn status|grep -E '^\s*[ACDIM!?L]' 2>/dev/null)
if [ $s ]; then
echo $1
else

13
plugins/symfony2/symfony2.plugin.zsh
Normal file

@ -0,0 +1,13 @@
# Symfony2 basic command completion

_symfony2_get_command_list () {
  app/console --no-ansi | sed "1,/Available commands/d" | awk '/^ [a-z]+/ { print $1 }'
}

_symfony2 () {
  if [ -f app/console ]; then
    compadd `_symfony2_get_command_list`
  fi
}

compdef _symfony2 app/console
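Because the completion is registered for the literal command app/console, it only kicks in from a Symfony2 project root; a quick way to try it (project path is illustrative):

cd ~/projects/my-symfony-app   # any project that ships an app/console script
app/console <TAB>              # completes against the command list parsed above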

248
plugins/taskwarrior/_task
Normal file

@ -0,0 +1,248 @@
#compdef task
# zsh completion for taskwarrior
#
# taskwarrior - a command line task list manager.
#
# Copyright 2010 - 2011 Johannes Schlatow
# Copyright 2009 P.C. Shyamshankar
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
#
# http://www.opensource.org/licenses/mit-license.php
#
typeset -g _task_cmds _task_projects _task_tags _task_config _task_modifiers
_task_projects=($(task _projects))
_task_tags=($(task _tags))
_task_ids=($(task _ids))
_task_config=($(task _config))
_task_columns=($(task _columns))
_task_modifiers=(
  'before' \
  'after' \
  'none' \
  'any' \
  'is' \
  'isnt' \
  'has' \
  'hasnt' \
  'startswith' \
  'endswith' \
  'word' \
  'noword'
)
_task_conjunctions=(
  'and' \
  'or' \
  'xor' \
  '\)'
  '\('
)
_task_cmds=($(task _commands))
_task_zshcmds=( ${(f)"$(task _zshcommands)"} )

_task() {
  _arguments -s -S \
    "*::task default:_task_default"
  return 0
}

local -a reply args word
word=$'[^\0]#\0'

# priorities
local -a task_priorities
_regex_words values 'task priorities' \
  'H:High' \
  'M:Middle' \
  'L:Low'
task_priorities=("$reply[@]")

# projects
local -a task_projects
task_projects=(
  /"$word"/
  ":values:task projects:compadd -a _task_projects"
)

local -a _task_dates
_regex_words values 'task dates' \
  'tod*ay:Today' \
  'yes*terday:Yesterday' \
  'tom*orrow:Tomorrow' \
  'sow:Start of week' \
  'soww:Start of work week' \
  'socw:Start of calendar week' \
  'som:Start of month' \
  'soy:Start of year' \
  'eow:End of week' \
  'eoww:End of work week' \
  'eocw:End of calendar week' \
  'eom:End of month' \
  'eoy:End of year' \
  'mon:Monday' \
  'tue:Tuesday'\
  'wed:Wednesday' \
  'thu:Thursday' \
  'fri:Friday' \
  'sat:Saturday' \
  'sun:Sunday'
_task_dates=("$reply[@]")

local -a _task_reldates
_regex_words values 'task reldates' \
  'hrs:n hours' \
  'day:n days' \
  '1st:first' \
  '2nd:second' \
  '3rd:third' \
  'th:4th, 5th, etc.' \
  'wks:weeks'
_task_reldates=("$reply[@]")

task_dates=(
  \( "$_task_dates[@]" \|
  \( /$'[0-9][0-9]#'/- \( "$_task_reldates[@]" \) \)
  \)
)

_regex_words values 'task frequencies' \
  'daily:Every day' \
  'day:Every day' \
  'weekdays:Every day skipping weekend days' \
  'weekly:Every week' \
  'biweekly:Every two weeks' \
  'fortnight:Every two weeks' \
  'monthly:Every month' \
  'quarterly:Every three months' \
  'semiannual:Every six months' \
  'annual:Every year' \
  'yearly:Every year' \
  'biannual:Every two years' \
  'biyearly:Every two years'
_task_freqs=("$reply[@]")

local -a _task_frequencies
_regex_words values 'task frequencies' \
  'd:days' \
  'w:weeks' \
  'q:quarters' \
  'y:years'
_task_frequencies=("$reply[@]")

task_freqs=(
  \( "$_task_freqs[@]" \|
  \( /$'[0-9][0-9]#'/- \( "$_task_frequencies[@]" \) \)
  \)
)

# attributes
local -a task_attributes
_regex_words -t ':' default 'task attributes' \
  'pro*ject:Project name:$task_projects' \
  'du*e:Due date:$task_dates' \
  'wa*it:Date until task becomes pending:$task_dates' \
  're*cur:Recurrence frequency:$task_freqs' \
  'pri*ority:priority:$task_priorities' \
  'un*til:Recurrence end date:$task_dates' \
  'fg:Foreground color' \
  'bg:Background color' \
  'li*mit:Desired number of rows in report'
task_attributes=("$reply[@]")

args=(
  \( "$task_attributes[@]" \|
  \( /'(project|due|wait|recur|priority|until|fg|bg|limit).'/- \( /$'[^:]#:'/ ":default:modifiers:compadd -S ':' -a _task_modifiers" \) \) \|
  \( /'(rc).'/- \( /$'[^:]#:'/ ":arguments:config:compadd -S ':' -a _task_config" \) \) \|
  \( /'(+|-)'/- \( /"$word"/ ":values:remove tag:compadd -a _task_tags" \) \) \|
  \( /"$word"/ \)
  \) \#
)
_regex_arguments _task_attributes "${args[@]}"

## task commands

# filter completion
(( $+functions[_task_filter] )) ||
_task_filter() {
  _task_attributes "$@"

  # TODO complete conjunctions only if the previous word is a filter expression, i.e. attribute, ID, any non-command
  _describe -t default 'task conjunctions' _task_conjunctions
}

# merge completion
(( $+functions[_task_merge] )) ||
_task_merge() {
  # TODO match URIs in .taskrc
  _files
}

# push completion
(( $+functions[_task_push] )) ||
_task_push() {
  # TODO match URIs in .taskrc
  _files
}

# pull completion
(( $+functions[_task_pull] )) ||
_task_pull() {
  # TODO match URIs in .taskrc
  _files
}

# execute completion
(( $+functions[_task_execute] )) ||
_task_execute() {
  _files
}

# id-only completion
(( $+functions[_task_id] )) ||
_task_id() {
  _describe -t values 'task IDs' _task_zshids
}

## first level completion => task sub-command completion
(( $+functions[_task_default] )) ||
_task_default() {
  local cmd ret=1

  integer i=1
  while (( i < $#words ))
  do
    cmd="${_task_cmds[(r)$words[$i]]}"
    if (( $#cmd )); then
      _call_function ret _task_${cmd} ||
        _call_function ret _task_filter ||
          _message "No command remaining."
      return ret
    fi
    (( i++ ))
  done

  # update IDs
  _task_zshids=( ${(f)"$(task _zshids)"} )

  _describe -t commands 'task command' _task_zshcmds
  _describe -t values 'task IDs' _task_zshids
  _call_function ret _task_filter

  return ret
}

21
plugins/taskwarrior/taskwarrior.plugin.zsh
Normal file

@ -0,0 +1,21 @@
################################################################################
# Author: Pete Clark
# Email: pete[dot]clark[at]gmail[dot]com
# Version: 0.1 (05/24/2011)
# License: WTFPL<http://sam.zoy.org/wtfpl/>
#
# This oh-my-zsh plugin adds smart tab completion for
# TaskWarrior<http://taskwarrior.org/>. It uses the zsh tab completion
# script (_task) distributed with TaskWarrior for the completion definitions.
#
# Typing task[tabtab] will give you a list of current tasks, task 66[tabtab]
# gives a list of available modifications for that task, etc.
################################################################################

zstyle ':completion:*:*:task:*' verbose yes
zstyle ':completion:*:*:task:*:descriptions' format '%U%B%d%b%u'

zstyle ':completion:*:*:task:*' group-name ''

alias t=task
compdef _task t=task
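As a rough usage sketch (the task data is made up), enable the plugin in ~/.zshrc and then rely on the completions the header describes:

plugins=(git taskwarrior)                      # before oh-my-zsh is sourced; git is just an example
t add project:home priority:H fix the gutters  # 't' is the alias defined above
t 66 <TAB>                                     # lists available modifications for task 66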

11
plugins/terminalapp/terminalapp.plugin.zsh
Normal file

@ -0,0 +1,11 @@
# Set Apple Terminal.app resume directory
# based on this answer: http://superuser.com/a/315029

function chpwd {
  local SEARCH=' '
  local REPLACE='%20'
  local PWD_URL="file://$HOSTNAME${PWD//$SEARCH/$REPLACE}"
  printf '\e]7;%s\a' "$PWD_URL"
}

chpwd
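As a concrete sketch of what this produces (hostname and path are hypothetical): with HOSTNAME=mac.local and a current directory of /Users/anna/My Code, the substitution yields

PWD_URL="file://mac.local/Users/anna/My%20Code"
printf '\e]7;%s\a' "$PWD_URL"   # the OSC 7 sequence Terminal.app reads to restore the directory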

38
plugins/terminitor/_terminitor
Normal file

@ -0,0 +1,38 @@
#compdef terminitor
#autoload

# terminitor zsh completion

_terminitor_available_scripts() {
  scripts=(`for SCRIPT in ~/.config/terminitor/*.term ; do basename $SCRIPT .term ; done`)
}

local -a _1st_arguments
_1st_arguments=(
  'create:create a Termfile in directory'
  'delete:delete terminitor script'
  'edit:open terminitor script'
  'fetch:clone the designated repo and run setup'
  'help:Describe available tasks or one specific task'
  'init:create initial root terminitor folder'
  'list:lists all terminitor scripts'
  'setup:execute setup in the terminitor script'
  'start:runs the terminitor script'
  'update:update Terminitor to new global path(.config/.terminitor)'
)

local expl

_arguments \
  '*:: :->subcmds' && return 0

if (( CURRENT == 1 )); then
  _describe -t commands "terminitor task" _1st_arguments
  return
fi

case "$words[1]" in
  start|edit|delete|setup)
    _terminitor_available_scripts
    _wanted scripts expl 'installed scripts' compadd -a scripts ;;
esac

@ -1,5 +1,5 @@
alias et='mate .'
alias ett='mate app config lib db public spec test Rakefile Capfile Todo'
alias ett='mate Gemfile app config features lib db public spec test Rakefile Capfile Todo'
alias etp='mate app config lib db public spec test vendor/plugins vendor/gems Rakefile Capfile Todo'
alias etts='mate app config lib db public script spec test vendor/plugins vendor/gems Rakefile Capfile Todo'

23
plugins/vundle/vundle.plugin.zsh
Normal file

@ -0,0 +1,23 @@
function vundle-init () {
  if [ ! -d ~/.vim/bundle/vundle/ ]
  then
    mkdir -p ~/.vim/bundle/vundle/
  fi

  if [ ! -d ~/.vim/bundle/vundle/.git/ ]
  then
    git clone http://github.com/gmarik/vundle.git ~/.vim/bundle/vundle
    echo "\n\tRead about vim configuration for vundle at https://github.com/gmarik/vundle\n"
  fi
}

function vundle () {
  vundle-init
  vim -c "execute \"BundleInstall\" | q | q"
}


function vundle-update () {
  vundle-init
  vim -c "execute \"BundleInstall!\" | q | q"
}
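In day-to-day use this boils down to two commands, assuming your ~/.vimrc already lists Bundle entries for vundle to install:

vundle          # clone vundle if needed, then run :BundleInstall in a vim instance that quits when done
vundle-update   # same, but with :BundleInstall! to force-update existing bundles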

29
plugins/wakeonlan/README
Normal file

@ -0,0 +1,29 @@
This plugin provides a wrapper around the "wakeonlan" tool available from most
distributions' package repositories, or from the following website:

http://gsd.di.uminho.pt/jpo/software/wakeonlan/

In order to use this wrapper, create the ~/.wakeonlan directory, and place in
that directory one file for each device you would like to be able to wake. Give
the file a name that describes the device, such as its hostname. Each file
should contain a line with the MAC address of the target device and the network
broadcast address.

For instance, there might be a file ~/.wakeonlan/leto with the following
contents:

00:11:22:33:44:55 192.168.0.255

To wake that device, use the following command:

# wake leto

The available device names will be autocompleted, so:

# wake <tab>

...will suggest "leto", along with any other configuration files that were
placed in the ~/.wakeonlan directory.

For more information regarding the configuration file format, check the
wakeonlan man page.
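Translated into commands, a minimal setup session could look like this (the MAC and broadcast address are placeholders for your own device):

mkdir -p ~/.wakeonlan
echo "00:11:22:33:44:55 192.168.0.255" > ~/.wakeonlan/leto
wake leto    # runs: wakeonlan -f ~/.wakeonlan/leto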

4
plugins/wakeonlan/_wake
Normal file

@ -0,0 +1,4 @@
#compdef wake
#autoload

_arguments "1:device to wake:_files -W '$HOME/.wakeonlan'" && return 0

14
plugins/wakeonlan/wakeonlan.plugin.zsh
Normal file

@ -0,0 +1,14 @@
function wake() {
  local config_file="$HOME/.wakeonlan/$1"
  if [[ ! -f "$config_file" ]]; then
    echo "ERROR: There is no configuration file at \"$config_file\"."
    return 1
  fi

  if (( ! $+commands[wakeonlan] )); then
    echo "ERROR: Can't find \"wakeonlan\". Are you sure it's installed?"
    return 1
  fi

  wakeonlan -f "$config_file"
}