Merge pull request #3 from robbyrussell/master

Updating
This commit is contained in:
Josh Price 2013-09-29 14:21:42 -07:00
commit a95f398875
143 changed files with 5133 additions and 436 deletions

5
.gitignore vendored
View file

@ -1,9 +1,8 @@
locals.zsh
log/.zsh_history
projects.zsh
custom/*
!custom/example
!custom/example.zsh
custom/example
custom/example.zsh
*.swp
!custom/example.zshcache
cache/

View file

@ -1,5 +1,6 @@
A handful of functions, auto-complete helpers, and stuff that makes you shout...
!https://s3.amazonaws.com/ohmyzsh/oh-my-zsh-logo.png!
oh-my-zsh is an open source, community-driven framework for managing your ZSH configuration. It comes bundled with a ton of helpful functions, helpers, plugins, themes, and a few things that make you shout...
bq. "OH MY ZSHELL!"
@ -63,6 +64,17 @@ If you want to override any of the default behavior, just add a new file (ending
If you have many functions which go well together you can put them as a *.plugin.zsh file in the @custom/plugins/@ directory and then enable this plugin.
If you would like to override the functionality of a plugin distributed with oh-my-zsh, create a plugin of the same name in the @custom/plugins/@ directory and it will be loaded instead of the one in @plugins/@.
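As a minimal sketch (the plugin name @mytools@ and its alias are hypothetical), such a plugin is just a file of ordinary zsh:

bc. # custom/plugins/mytools/mytools.plugin.zsh
alias gs='git status'

It is picked up once @mytools@ is added to the @plugins@ array in your ~/.zshrc.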
h3. Updates
By default you will be prompted to check for updates. If you would like oh-my-zsh to automatically update itself without prompting you, set the following in your ~/.zshrc
@DISABLE_UPDATE_PROMPT=true@
To disable updates entirely, put this in your ~/.zshrc
@DISABLE_AUTO_UPDATE=true@
To upgrade directly from the command line, just run @upgrade_oh_my_zsh@
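Taken together, the relevant ~/.zshrc lines are plain zsh assignments, used one at a time:

bc. DISABLE_UPDATE_PROMPT=true   # update automatically without asking
DISABLE_AUTO_UPDATE=true     # or: disable update checks entirely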
h3. Uninstalling
@ -76,9 +88,7 @@ h3. (Don't) Send us your theme! (for now)
-I'm hoping to collect a bunch of themes for our command prompts. You can see existing ones in the @themes/@ directory.-
We have enough themes for the time being. Please fork the project and add one in there; you can let people know how to grab it from there.
h2. Contributors

View file

@ -0,0 +1,28 @@
# ------------------------------------------------------------------------------
# FILE: sfffe.plugin.zsh
# DESCRIPTION: search files for front-end (FE) development
# AUTHOR: yleo77 (ylep77@gmail.com)
# VERSION: 0.1
# REQUIRE: ack
# ------------------------------------------------------------------------------
if [ ! -x "$(which ack)" ]; then
    echo "'ack' is not installed!"
    return 1
fi

ajs() {
    ack "$@" --type js
}

acss() {
    ack "$@" --type css
}

fjs() {
    find ./ -name "$@*" -type f | grep '\.js'
}

fcss() {
    find ./ -name "$@*" -type f | grep '\.css'
}
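A usage sketch for the helpers above (the search terms and file names are made-up examples):

ajs initCarousel     # ack for "initCarousel" across *.js files
acss '\.banner'      # ack for the .banner selector across *.css files
fjs jquery           # find *.js files whose names start with "jquery"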

View file

@ -17,7 +17,7 @@ alias history='fc -l 1'
# List directory contents
alias lsa='ls -lah'
alias l='ls -lA1'
alias l='ls -la'
alias ll='ls -l'
alias la='ls -lA'
alias sl=ls # often screw this up

View file

@ -34,8 +34,10 @@ cdpath=(.)
# use /etc/hosts and known_hosts for hostname completion
[ -r /etc/ssh/ssh_known_hosts ] && _global_ssh_hosts=(${${${${(f)"$(</etc/ssh/ssh_known_hosts)"}:#[\|]*}%%\ *}%%,*}) || _global_ssh_hosts=()
[ -r ~/.ssh/known_hosts ] && _ssh_hosts=(${${${${(f)"$(<$HOME/.ssh/known_hosts)"}:#[\|]*}%%\ *}%%,*}) || _ssh_hosts=()
[ -r ~/.ssh/config ] && _ssh_config=($(cat ~/.ssh/config | sed -ne 's/Host[=\t ]//p')) || _ssh_config=()
[ -r /etc/hosts ] && : ${(A)_etc_hosts:=${(s: :)${(ps:\t:)${${(f)~~"$(</etc/hosts)"}%%\#*}##[:blank:]#[^[:blank:]]#}}} || _etc_hosts=()
hosts=(
"$_ssh_config[@]"
"$_global_ssh_hosts[@]"
"$_ssh_hosts[@]"
"$_etc_hosts[@]"
@ -43,6 +45,7 @@ hosts=(
localhost
)
zstyle ':completion:*:hosts' hosts $hosts
zstyle ':completion:*' users off
# Use caching so that commands like apt and dpkg complete are usable
zstyle ':completion::complete:*' use-cache 1

View file

@ -1,10 +1,14 @@
setopt correct_all
alias man='nocorrect man'
alias mv='nocorrect mv'
alias mysql='nocorrect mysql'
alias mkdir='nocorrect mkdir'
alias gist='nocorrect gist'
alias heroku='nocorrect heroku'
alias ebuild='nocorrect ebuild'
alias hpodder='nocorrect hpodder'
if [[ "$DISABLE_CORRECTION" == "true" ]]; then
return
else
setopt correct_all
alias man='nocorrect man'
alias mv='nocorrect mv'
alias mysql='nocorrect mysql'
alias mkdir='nocorrect mkdir'
alias gist='nocorrect gist'
alias heroku='nocorrect heroku'
alias ebuild='nocorrect ebuild'
alias hpodder='nocorrect hpodder'
alias sudo='nocorrect sudo'
fi

View file

@ -2,6 +2,7 @@
setopt auto_name_dirs
setopt auto_pushd
setopt pushd_ignore_dups
setopt pushdminus
alias ..='cd ..'
alias cd..='cd ..'
@ -11,14 +12,14 @@ alias cd.....='cd ../../../..'
alias cd/='cd /'
alias 1='cd -'
alias 2='cd +2'
alias 3='cd +3'
alias 4='cd +4'
alias 5='cd +5'
alias 6='cd +6'
alias 7='cd +7'
alias 8='cd +8'
alias 9='cd +9'
alias 2='cd -2'
alias 3='cd -3'
alias 4='cd -4'
alias 5='cd -5'
alias 6='cd -6'
alias 7='cd -7'
alias 8='cd -8'
alias 9='cd -9'
cd () {
if [[ "x$*" == "x..." ]]; then
@ -29,6 +30,9 @@ cd () {
cd ../../../..
elif [[ "x$*" == "x......" ]]; then
cd ../../../../..
elif [ -d ~/.autoenv ]; then
source ~/.autoenv/activate.sh
autoenv_cd "$@"
else
builtin cd "$@"
fi

View file

@ -1,7 +1,7 @@
# get the name of the branch we are on
function git_prompt_info() {
ref=$(git symbolic-ref HEAD 2> /dev/null) || \
ref=$(git rev-parse --short HEAD 2> /dev/null) || return
ref=$(command git symbolic-ref HEAD 2> /dev/null) || \
ref=$(command git rev-parse --short HEAD 2> /dev/null) || return
echo "$ZSH_THEME_GIT_PROMPT_PREFIX${ref#refs/heads/}$(parse_git_dirty)$ZSH_THEME_GIT_PROMPT_SUFFIX"
}
@ -9,24 +9,33 @@ function git_prompt_info() {
# Checks if working tree is dirty
parse_git_dirty() {
local SUBMODULE_SYNTAX=''
if [[ "$(git config --get oh-my-zsh.hide-status)" != "1" ]]; then
local GIT_STATUS=''
local CLEAN_MESSAGE='nothing to commit (working directory clean)'
if [[ "$(command git config --get oh-my-zsh.hide-status)" != "1" ]]; then
if [[ $POST_1_7_2_GIT -gt 0 ]]; then
SUBMODULE_SYNTAX="--ignore-submodules=dirty"
fi
if [[ -n $(git status -s ${SUBMODULE_SYNTAX} 2> /dev/null) ]]; then
if [[ "$DISABLE_UNTRACKED_FILES_DIRTY" == "true" ]]; then
GIT_STATUS=$(command git status -s ${SUBMODULE_SYNTAX} -uno 2> /dev/null | tail -n1)
else
GIT_STATUS=$(command git status -s ${SUBMODULE_SYNTAX} 2> /dev/null | tail -n1)
fi
if [[ -n $GIT_STATUS ]]; then
echo "$ZSH_THEME_GIT_PROMPT_DIRTY"
else
echo "$ZSH_THEME_GIT_PROMPT_CLEAN"
fi
else
echo "$ZSH_THEME_GIT_PROMPT_CLEAN"
fi
}
# get the difference between the local and remote branches
git_remote_status() {
remote=${$(git rev-parse --verify ${hook_com[branch]}@{upstream} --symbolic-full-name 2>/dev/null)/refs\/remotes\/}
remote=${$(command git rev-parse --verify ${hook_com[branch]}@{upstream} --symbolic-full-name 2>/dev/null)/refs\/remotes\/}
if [[ -n ${remote} ]] ; then
ahead=$(git rev-list ${hook_com[branch]}@{upstream}..HEAD 2>/dev/null | wc -l)
behind=$(git rev-list HEAD..${hook_com[branch]}@{upstream} 2>/dev/null | wc -l)
ahead=$(command git rev-list ${hook_com[branch]}@{upstream}..HEAD 2>/dev/null | wc -l)
behind=$(command git rev-list HEAD..${hook_com[branch]}@{upstream} 2>/dev/null | wc -l)
if [ $ahead -eq 0 ] && [ $behind -gt 0 ]
then
@ -43,26 +52,26 @@ git_remote_status() {
# Checks if there are commits ahead from remote
function git_prompt_ahead() {
if $(echo "$(git log origin/$(current_branch)..HEAD 2> /dev/null)" | grep '^commit' &> /dev/null); then
if $(echo "$(command git log origin/$(current_branch)..HEAD 2> /dev/null)" | grep '^commit' &> /dev/null); then
echo "$ZSH_THEME_GIT_PROMPT_AHEAD"
fi
}
# Formats prompt string for current git commit short SHA
function git_prompt_short_sha() {
SHA=$(git rev-parse --short HEAD 2> /dev/null) && echo "$ZSH_THEME_GIT_PROMPT_SHA_BEFORE$SHA$ZSH_THEME_GIT_PROMPT_SHA_AFTER"
SHA=$(command git rev-parse --short HEAD 2> /dev/null) && echo "$ZSH_THEME_GIT_PROMPT_SHA_BEFORE$SHA$ZSH_THEME_GIT_PROMPT_SHA_AFTER"
}
# Formats prompt string for current git commit long SHA
function git_prompt_long_sha() {
SHA=$(git rev-parse HEAD 2> /dev/null) && echo "$ZSH_THEME_GIT_PROMPT_SHA_BEFORE$SHA$ZSH_THEME_GIT_PROMPT_SHA_AFTER"
SHA=$(command git rev-parse HEAD 2> /dev/null) && echo "$ZSH_THEME_GIT_PROMPT_SHA_BEFORE$SHA$ZSH_THEME_GIT_PROMPT_SHA_AFTER"
}
# Get the status of the working tree
git_prompt_status() {
INDEX=$(git status --porcelain -b 2> /dev/null)
INDEX=$(command git status --porcelain -b 2> /dev/null)
STATUS=""
if $(echo "$INDEX" | grep '^?? ' &> /dev/null); then
if $(echo "$INDEX" | grep -E '^\?\? ' &> /dev/null); then
STATUS="$ZSH_THEME_GIT_PROMPT_UNTRACKED$STATUS"
fi
if $(echo "$INDEX" | grep '^A ' &> /dev/null); then
@ -87,7 +96,7 @@ git_prompt_status() {
elif $(echo "$INDEX" | grep '^AD ' &> /dev/null); then
STATUS="$ZSH_THEME_GIT_PROMPT_DELETED$STATUS"
fi
if $(git rev-parse --verify refs/stash >/dev/null 2>&1); then
if $(command git rev-parse --verify refs/stash >/dev/null 2>&1); then
STATUS="$ZSH_THEME_GIT_PROMPT_STASHED$STATUS"
fi
if $(echo "$INDEX" | grep '^UU ' &> /dev/null); then
@ -112,7 +121,7 @@ function git_compare_version() {
local INPUT_GIT_VERSION=$1;
local INSTALLED_GIT_VERSION
INPUT_GIT_VERSION=(${(s/./)INPUT_GIT_VERSION});
INSTALLED_GIT_VERSION=($(git --version));
INSTALLED_GIT_VERSION=($(command git --version 2>/dev/null));
INSTALLED_GIT_VERSION=(${(s/./)INSTALLED_GIT_VERSION[3]});
for i in {1..3}; do

View file

@ -1,5 +1,7 @@
## Command history configuration
HISTFILE=$HOME/.zsh_history
if [ -z $HISTFILE ]; then
HISTFILE=$HOME/.zsh_history
fi
HISTSIZE=10000
SAVEHIST=10000

2
lib/rbenv.zsh Normal file
View file

@ -0,0 +1,2 @@
# using the rbenv plugin will override this with a real implementation
function rbenv_prompt_info() {}

View file

@ -8,7 +8,7 @@ function title {
fi
if [[ "$TERM" == screen* ]]; then
print -Pn "\ek$1:q\e\\" #set screen hardstatus, usually truncated at 20 chars
elif [[ "$TERM" == xterm* ]] || [[ $TERM == rxvt* ]] || [[ "$TERM_PROGRAM" == "iTerm.app" ]]; then
elif [[ "$TERM" == xterm* ]] || [[ $TERM == rxvt* ]] || [[ $TERM == ansi ]] || [[ "$TERM_PROGRAM" == "iTerm.app" ]]; then
print -Pn "\e]2;$2:q\a" #set window name
print -Pn "\e]1;$1:q\a" #set icon (=tab) name (will override window name on broken terminal)
fi
@ -27,7 +27,9 @@ function omz_termsupport_preexec {
emulate -L zsh
setopt extended_glob
local CMD=${1[(wr)^(*=*|sudo|ssh|rake|-*)]} #cmd name only, or if this is sudo or ssh, the next cmd
title "$CMD" "%100>...>${2:gs/%/%%}%<<"
local LINE="${2:gs/$/\\$}"
LINE="${LINE:gs/%/%%}"
title "$CMD" "%100>...>$LINE%<<"
}
autoload -U add-zsh-hook

View file

@ -7,7 +7,13 @@ export LSCOLORS="Gxfxcxdxbxegedabagacad"
if [ "$DISABLE_LS_COLORS" != "true" ]
then
# Find the option for using colors in ls, depending on the version: Linux or BSD
ls --color -d . &>/dev/null 2>&1 && alias ls='ls --color=tty' || alias ls='ls -G'
if [[ "$(uname -s)" == "NetBSD" ]]; then
# On NetBSD, test if "gls" (GNU ls) is installed (this one supports colors);
# otherwise, leave ls as is, because NetBSD's ls doesn't support -G
gls --color -d . &>/dev/null 2>&1 && alias ls='gls --color=tty'
else
ls --color -d . &>/dev/null 2>&1 && alias ls='ls --color=tty' || alias ls='ls -G'
fi
fi
#setopt no_beep

View file

@ -1,7 +1,7 @@
# Check for updates on initial load...
if [ "$DISABLE_AUTO_UPDATE" != "true" ]
then
/usr/bin/env ZSH=$ZSH zsh $ZSH/tools/check_for_upgrade.sh
/usr/bin/env ZSH=$ZSH DISABLE_UPDATE_PROMPT=$DISABLE_UPDATE_PROMPT zsh $ZSH/tools/check_for_upgrade.sh
fi
# Initializes Oh My Zsh
@ -38,10 +38,20 @@ for plugin ($plugins); do
fi
done
# Figure out the SHORT hostname
if [ -n "$commands[scutil]" ]; then
# OS X
SHORT_HOST=$(scutil --get ComputerName)
else
SHORT_HOST=${HOST/.*/}
fi
# Save the location of the current completion dump file.
ZSH_COMPDUMP="${ZDOTDIR:-${HOME}}/.zcompdump-${SHORT_HOST}-${ZSH_VERSION}"
# Load and run compinit
autoload -U compinit
compinit -i
compinit -i -d "${ZSH_COMPDUMP}"
# Load all of the plugins that were defined in ~/.zshrc
for plugin ($plugins); do
@ -73,6 +83,9 @@ else
if [ -f "$ZSH_CUSTOM/$ZSH_THEME.zsh-theme" ]
then
source "$ZSH_CUSTOM/$ZSH_THEME.zsh-theme"
elif [ -f "$ZSH_CUSTOM/themes/$ZSH_THEME.zsh-theme" ]
then
source "$ZSH_CUSTOM/themes/$ZSH_THEME.zsh-theme"
else
source "$ZSH/themes/$ZSH_THEME.zsh-theme"
fi

View file

@ -1,17 +1,7 @@
stat -f%m . > /dev/null 2>&1
if [ "$?" = 0 ]; then
stat_cmd=(stat -f%m)
else
stat_cmd=(stat -L --format=%Y)
fi
_ant_does_target_list_need_generating () {
if [ ! -f .ant_targets ]; then return 0;
else
accurate=$($stat_cmd .ant_targets)
changed=$($stat_cmd build.xml)
return $(expr $accurate '>=' $changed)
fi
[ ! -f .ant_targets ] && return 0;
[ .ant_targets -nt build.xml ] && return 0;
return 1;
}
_ant () {

View file

@ -8,7 +8,7 @@ if [[ -x `which yaourt` ]]; then
}
alias yaconf='yaourt -C' # Fix all configuration files with vimdiff
# Pacman - https://wiki.archlinux.org/index.php/Pacman_Tips
alias yaupg='yaourt -Syu' # Synchronize with repositories before upgrading packages that are out of date on the local system.
alias yaupg='yaourt -Syua' # Synchronize with repositories before upgrading packages (AUR packages too) that are out of date on the local system.
alias yasu='yaourt --sucre' # Same as yaupg, but without confirmation
alias yain='yaourt -S' # Install specific package(s) from the repositories
alias yains='yaourt -U' # Install specific package not from the repositories but from a file
@ -18,6 +18,8 @@ if [[ -x `which yaourt` ]]; then
alias yareps='yaourt -Ss' # Search for package(s) in the repositories
alias yaloc='yaourt -Qi' # Display information about a given package in the local database
alias yalocs='yaourt -Qs' # Search for package(s) in the local database
alias yalst='yaourt -Qe' # List installed packages, even those installed from AUR (they're tagged as "local")
alias yaorph='yaourt -Qtd' # Remove orphans using yaourt
# Additional yaourt alias examples
if [[ -x `which abs` ]]; then
alias yaupd='yaourt -Sy && sudo abs' # Update and refresh the local package and ABS databases against repositories

View file

@ -0,0 +1,18 @@
# The use_env call below is a reusable command to activate/create a new Python
# virtualenv, requiring only a single declarative line of code in your .env files.
# It only performs an action if the requested virtualenv is not the current one.
use_env() {
typeset venv
venv="$1"
if [[ "${VIRTUAL_ENV:t}" != "$venv" ]]; then
if workon | grep -q "$venv"; then
workon "$venv"
else
echo -n "Create virtualenv $venv now? (Yn) "
read answer
if [[ "$answer" == "Y" ]]; then
mkvirtualenv "$venv"
fi
fi
fi
}
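A usage sketch, assuming the autoenv-style setup the header comment refers to: a project's .env file needs only the single line

use_env myproject

where "myproject" is a hypothetical virtualenv name; entering the directory then activates that virtualenv via workon, or offers to create it with mkvirtualenv if it does not exist yet.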

View file

@ -3,6 +3,8 @@ if [ $commands[autojump] ]; then # check if autojump is installed
. /usr/share/autojump/autojump.zsh
elif [ -f /etc/profile.d/autojump.zsh ]; then # manual installation
. /etc/profile.d/autojump.zsh
elif [ -f /etc/profile.d/autojump.sh ]; then # gentoo installation
. /etc/profile.d/autojump.sh
elif [ -f $HOME/.autojump/etc/profile.d/autojump.zsh ]; then # manual user-local installation
. $HOME/.autojump/etc/profile.d/autojump.zsh
elif [ -f /opt/local/etc/profile.d/autojump.zsh ]; then # mac os x with ports

View file

@ -10,12 +10,16 @@
if [[ $(uname) == "Darwin" ]] ; then
function battery_pct() {
typeset -F maxcapacity=$(ioreg -rc "AppleSmartBattery"| grep '^.*"MaxCapacity"\ =\ ' | sed -e 's/^.*"MaxCapacity"\ =\ //')
typeset -F currentcapacity=$(ioreg -rc "AppleSmartBattery"| grep '^.*"CurrentCapacity"\ =\ ' | sed -e 's/^.*CurrentCapacity"\ =\ //')
integer i=$(((currentcapacity/maxcapacity) * 100))
echo $i
}
function battery_pct_remaining() {
if [[ $(ioreg -rc AppleSmartBattery | grep -c '^.*"ExternalConnected"\ =\ No') -eq 1 ]] ; then
typeset -F maxcapacity=$(ioreg -rc "AppleSmartBattery"| grep '^.*"MaxCapacity"\ =\ ' | sed -e 's/^.*"MaxCapacity"\ =\ //')
typeset -F currentcapacity=$(ioreg -rc "AppleSmartBattery"| grep '^.*"CurrentCapacity"\ =\ ' | sed -e 's/^.*CurrentCapacity"\ =\ //')
integer i=$(((currentcapacity/maxcapacity) * 100))
echo $i
battery_pct
else
echo "External Power"
fi
@ -42,17 +46,27 @@ if [[ $(uname) == "Darwin" ]] ; then
fi
echo "%{$fg[$color]%}[$(battery_pct_remaining)%%]%{$reset_color%}"
else
echo ""
echo ""
fi
}
elif [[ $(uname) == "Linux" ]] ; then
if [[ $(acpi 2&>/dev/null | grep -c '^Battery.*Discharging') -gt 0 ]] ; then
function battery_pct_remaining() { echo "$(acpi | cut -f2 -d ',' | tr -cd '[:digit:]')" }
function battery_time_remaining() { echo $(acpi | cut -f3 -d ',') }
function battery_pct_prompt() {
b=$(battery_pct_remaining)
function battery_pct_remaining() {
if [[ $(acpi 2&>/dev/null | grep -c '^Battery.*Discharging') -gt 0 ]] ; then
echo "$(acpi | cut -f2 -d ',' | tr -cd '[:digit:]')"
fi
}
function battery_time_remaining() {
if [[ $(acpi 2&>/dev/null | grep -c '^Battery.*Discharging') -gt 0 ]] ; then
echo $(acpi | cut -f3 -d ',')
fi
}
function battery_pct_prompt() {
b=$(battery_pct_remaining)
if [[ $(acpi 2&>/dev/null | grep -c '^Battery.*Discharging') -gt 0 ]] ; then
if [ $b -gt 50 ] ; then
color='green'
elif [ $b -gt 20 ] ; then
@ -61,11 +75,8 @@ elif [[ $(uname) == "Linux" ]] ; then
color='red'
fi
echo "%{$fg[$color]%}[$(battery_pct_remaining)%%]%{$reset_color%}"
}
else
error_msg='no battery'
function battery_pct_remaining() { echo $error_msg }
function battery_time_remaining() { echo $error_msg }
function battery_pct_prompt() { echo '' }
fi
else
echo "∞"
fi
}
fi

58
plugins/bower/_bower Executable file
View file

@ -0,0 +1,58 @@
# Credits to npm's awesome completion utility.
#
# Bower completion script, based on npm completion script.
###-begin-bower-completion-###
#
# Installation: bower completion >> ~/.bashrc (or ~/.zshrc)
# Or, maybe: bower completion > /usr/local/etc/bash_completion.d/bower
#
COMP_WORDBREAKS=${COMP_WORDBREAKS/=/}
COMP_WORDBREAKS=${COMP_WORDBREAKS/@/}
export COMP_WORDBREAKS
if type complete &>/dev/null; then
_bower_completion () {
local si="$IFS"
IFS=$'\n' COMPREPLY=($(COMP_CWORD="$COMP_CWORD" \
COMP_LINE="$COMP_LINE" \
COMP_POINT="$COMP_POINT" \
bower completion -- "${COMP_WORDS[@]}" \
2>/dev/null)) || return $?
IFS="$si"
}
complete -F _bower_completion bower
elif type compdef &>/dev/null; then
_bower_completion() {
si=$IFS
compadd -- $(COMP_CWORD=$((CURRENT-1)) \
COMP_LINE=$BUFFER \
COMP_POINT=0 \
bower completion -- "${words[@]}" \
2>/dev/null)
IFS=$si
}
compdef _bower_completion bower
elif type compctl &>/dev/null; then
_bower_completion () {
local cword line point words si
read -Ac words
read -cn cword
let cword-=1
read -l line
read -ln point
si="$IFS"
IFS=$'\n' reply=($(COMP_CWORD="$cword" \
COMP_LINE="$line" \
COMP_POINT="$point" \
bower completion -- "${words[@]}" \
2>/dev/null)) || return $?
IFS="$si"
}
compctl -K _bower_completion bower
fi
###-end-bower-completion-###

View file

@ -0,0 +1,81 @@
alias bi="bower install"
alias bl="bower list"
alias bs="bower search"
_bower_installed_packages () {
bower_package_list=$(bower ls --no-color 2>/dev/null| awk 'NR>3{print p}{p=$0}'| cut -d ' ' -f 2|sed 's/#.*//')
}
_bower ()
{
local -a _1st_arguments _no_color _dopts _save_dev _force_lastest _production
local expl
typeset -A opt_args
_no_color=('--no-color[Do not print colors (available in all commands)]')
_dopts=(
'(--save)--save[Save installed packages into the project"s bower.json dependencies]'
'(--force)--force[Force fetching remote resources even if a local copy exists on disk]'
)
_save_dev=('(--save-dev)--save-dev[Save installed packages into the project"s bower.json devDependencies]')
_force_lastest=('(--force-latest)--force-latest[Force latest version on conflict]')
_production=('(--production)--production[Do not install project devDependencies]')
_1st_arguments=(
'cache-clean:Clean the Bower cache, or the specified package caches' \
'help:Display help information about Bower' \
'info:Version info and description of a particular package' \
'init:Interactively create a bower.json file' \
'install:Install a package locally' \
'link:Symlink a package folder' \
'lookup:Look up a package URL by name' \
'register:Register a package' \
'search:Search for a package by name' \
'uninstall:Remove a package' \
'update:Update a package' \
{ls,list}:'[List all installed packages]'
)
_arguments \
$_no_color \
'*:: :->subcmds' && return 0
if (( CURRENT == 1 )); then
_describe -t commands "bower subcommand" _1st_arguments
return
fi
case "$words[1]" in
install)
_arguments \
$_dopts \
$_save_dev \
$_force_lastest \
$_no_color \
$_production
;;
update)
_arguments \
$_dopts \
$_no_color \
$_force_lastest
_bower_installed_packages
compadd "$@" $(echo $bower_package_list)
;;
uninstall)
_arguments \
$_no_color \
$_dopts
_bower_installed_packages
compadd "$@" $(echo $bower_package_list)
;;
*)
$_no_color \
;;
esac
}
compdef _bower bower

View file

@ -28,6 +28,7 @@ _1st_arguments=(
'missing:check all installed formulae for missing dependencies.'
'outdated:list formulas for which a newer version is available'
'prune:remove dead links'
'reinstall:reinstall a formula'
'remove:remove a formula'
'search:search for a formula (/regex/ or string)'
'server:start a local web app that lets you browse formulae (requires Sinatra)'
@ -35,6 +36,7 @@ _1st_arguments=(
'update:freshen up links'
'upgrade:upgrade outdated formulae'
'uses:show formulas which depend on a formula'
'versions:show all available formula versions'
)
local expl
@ -71,10 +73,10 @@ case "$words[1]" in
_brew_installed_formulae
_wanted installed_formulae expl 'installed formulae' compadd -a installed_formulae
fi ;;
install|home|homepage|log|info|abv|uses|cat|deps|edit|options)
install|home|homepage|log|info|abv|uses|cat|deps|edit|options|versions)
_brew_all_formulae
_wanted formulae expl 'all formulae' compadd -a formulae ;;
remove|rm|uninstall|unlink|cleanup|link|ln)
reinstall|remove|rm|uninstall|unlink|cleanup|link|ln)
_brew_installed_formulae
_wanted installed_formulae expl 'installed formulae' compadd -a installed_formulae ;;
esac

View file

@ -23,6 +23,7 @@ case $state in
"viz[Generate a visual representation of your dependencies]" \
"init[Generate a simple Gemfile, placed in the current directory]" \
"gem[Create a simple gem, suitable for development with bundler]" \
"clean[Cleans up unused gems in your bundler directory]" \
"help[Describe available tasks or one specific task]"
ret=0
;;
@ -62,6 +63,14 @@ case $state in
exec)
_normal && ret=0
;;
clean)
_arguments \
'(--force)--force[forces clean even if --path is not set]' \
'(--dry-run)--dry-run[only print out changes, do not actually clean gems]' \
'(--no-color)--no-color[Disable colorization in output]' \
'(--verbose)--verbose[Enable verbose output mode]'
ret=0
;;
(open|show)
_gems=( $(bundle show 2> /dev/null | sed -e '/^ \*/!d; s/^ \* \([^ ]*\) .*/\1/') )
if [[ $_gems != "" ]]; then

View file

@ -1,12 +1,20 @@
alias be="bundle exec"
alias bi="bundle install"
alias bl="bundle list"
alias bp="bundle package"
alias bo="bundle open"
alias bu="bundle update"
if [[ "$(uname)" == 'Darwin' ]]
then
local cores_num="$(sysctl hw.ncpu | awk '{print $2}')"
else
local cores_num="$(nproc)"
fi
eval "alias bi='bundle install --jobs=$cores_num'"
# The following is based on https://github.com/gma/bundler-exec
bundled_commands=(annotate cap capify cucumber foreman guard middleman nanoc rackup rainbows rake rspec ruby shotgun spec spork thin thor unicorn unicorn_rails puma zeus)
bundled_commands=(annotate berks cap capify cucumber foodcritic foreman guard jekyll kitchen knife middleman nanoc rackup rainbows rake rspec ruby shotgun spec spin spork strainer tailor taps thin thor unicorn unicorn_rails puma)
## Functions
@ -41,3 +49,4 @@ for cmd in $bundled_commands; do
compdef _$cmd bundled_$cmd=$cmd
fi
done

View file

@ -0,0 +1,36 @@
function _cabal_commands() {
local ret=1 state
_arguments ':subcommand:->subcommand' && ret=0
case $state in
subcommand)
subcommands=(
"bench:Run the benchmark, if any (configure with UserHooks)"
"build:Make this package ready for installation"
"check:Check the package for common mistakes"
"clean:Clean up after a build"
"copy:Copy teh files into the install locations"
"configure:Prepare to build the package"
"fetch:Downloads packages for later installation"
"haddock:Generate HAddock HTML documentation"
"help:Help about commands"
"hscolour:Generate HsColour colourised code, in HTML format"
"info:Display detailed information about a particular package"
"init:Interactively create a .cabal file"
"install:Installs a list of packages"
"list:List packages matching a search string"
"register:Register this package with the compiler"
"report:Upload build reports to a remote server"
"sdist:Generate a source distribution file (.tar.gz)"
"test:Run the test suite, if any (configure with UserHooks)"
"unpack:Unpacks packages for user inspection"
"update:Updates list of known packages"
"upload:Uploads source packages to Hackage"
)
_describe -t subcommands 'cabal subcommands' subcommands && ret=0
esac
return ret
}
compdef _cabal_commands cabal

View file

@ -14,12 +14,9 @@ _cake_does_target_list_need_generating () {
return 1;
fi
if [ ! -f ${_cake_task_cache_file} ]; then return 0;
else
accurate=$(stat -f%m $_cake_task_cache_file)
changed=$(stat -f%m Cakefile)
return $(expr $accurate '>=' $changed)
fi
[ ! -f ${_cake_task_cache_file} ] && return 0;
[ ${_cake_task_cache_file} -nt Cakefile ] && return 0;
return 1;
}
_cake () {
@ -33,4 +30,4 @@ _cake () {
fi
}
compdef _cake cake
compdef _cake cake

View file

@ -1,10 +1,10 @@
#compdef cap
#autoload
if [ -f config/deploy.rb ]; then
if [[ -f config/deploy.rb || -f Capfile ]]; then
if [[ ! -f .cap_tasks~ || config/deploy.rb -nt .cap_tasks~ ]]; then
echo "\nGenerating .cap_tasks~..." > /dev/stderr
cap --tasks | grep '#' | cut -d " " -f 2 > .cap_tasks~
cap -v --tasks | grep '#' | cut -d " " -f 2 > .cap_tasks~
fi
compadd `cat .cap_tasks~`
fi

129
plugins/celery/_celery Normal file
View file

@ -0,0 +1,129 @@
#compdef celery
#autoload
#celery zsh completion
_celery () {
local -a _1st_arguments ifargs dopts controlargs
typeset -A opt_args
_1st_arguments=('worker' 'events' 'beat' 'shell' 'multi' 'amqp' 'status' 'inspect' \
'control' 'purge' 'list' 'migrate' 'call' 'result' 'report')
ifargs=('--app=' '--broker=' '--loader=' '--config=' '--version')
dopts=('--detach' '--umask=' '--gid=' '--uid=' '--pidfile=' '--logfile=' '--loglevel=')
controlargs=('--timeout' '--destination')
_arguments \
'(-A --app=)'{-A,--app}'[app instance to use (e.g. module.attr_name):APP]' \
'(-b --broker=)'{-b,--broker}'[url to broker. default is "amqp://guest@localhost//":BROKER]' \
'(--loader)--loader[name of custom loader class to use.:LOADER]' \
'(--config)--config[Name of the configuration module:CONFIG]' \
'(--workdir)--workdir[Optional directory to change to after detaching.:WORKING_DIRECTORY]' \
'(-q --quiet)'{-q,--quiet}'[Don"t show as much output.]' \
'(-C --no-color)'{-C,--no-color}'[Don"t display colors.]' \
'(--version)--version[show program"s version number and exit]' \
'(- : *)'{-h,--help}'[show this help message and exit]' \
'*:: :->subcmds' && return 0
if (( CURRENT == 1 )); then
_describe -t commands "celery subcommand" _1st_arguments
return
fi
case "$words[1]" in
worker)
_arguments \
'(-C --concurrency=)'{-C,--concurrency=}'[Number of child processes processing the queue. The default is the number of CPUs.]' \
'(--pool)--pool=:::(processes eventlet gevent threads solo)' \
'(--purge --discard)'{--discard,--purge}'[Purges all waiting tasks before the daemon is started.]' \
'(-f --logfile=)'{-f,--logfile=}'[Path to log file. If no logfile is specified, stderr is used.]' \
'(--loglevel=)--loglevel=:::(critical error warning info debug)' \
'(-N --hostname=)'{-N,--hostname=}'[Set custom hostname, e.g. "foo.example.com".]' \
'(-B --beat)'{-B,--beat}'[Also run the celerybeat periodic task scheduler.]' \
'(-s --schedule=)'{-s,--schedule=}'[Path to the schedule database if running with the -B option. Defaults to celerybeat-schedule.]' \
'(-S --statedb=)'{-S,--statedb=}'[Path to the state database.Default: None]' \
'(-E --events)'{-E,--events}'[Send events that can be captured by monitors like celeryev, celerymon, and others.]' \
'(--time-limit=)--time-limit=[Enables a hard time limit (in seconds int/float) for tasks]' \
'(--soft-time-limit=)--soft-time-limit=[Enables a soft time limit (in seconds int/float) for tasks]' \
'(--maxtasksperchild=)--maxtasksperchild=[Maximum number of tasks a pool worker can execute before it"s terminated and replaced by a new worker.]' \
'(-Q --queues=)'{-Q,--queues=}'[List of queues to enable for this worker, separated by comma. By default all configured queues are enabled.]' \
'(-I --include=)'{-I,--include=}'[Comma separated list of additional modules to import.]' \
'(--pidfile=)--pidfile=[Optional file used to store the process pid.]' \
'(--autoscale=)--autoscale=[Enable autoscaling by providing max_concurrency, min_concurrency.]' \
'(--autoreload)--autoreload[Enable autoreloading.]' \
'(--no-execv)--no-execv[Don"t do execv after multiprocessing child fork.]'
compadd -a ifargs
;;
inspect)
_values -s \
'active[dump active tasks (being processed)]' \
'active_queues[dump queues being consumed from]' \
'ping[ping worker(s)]' \
'registered[dump of registered tasks]' \
'report[get bugreport info]' \
'reserved[dump reserved tasks (waiting to be processed)]' \
'revoked[dump of revoked task ids]' \
'scheduled[dump scheduled tasks (eta/countdown/retry)]' \
'stats[dump worker statistics]'
compadd -a controlargs ifargs
;;
control)
_values -s \
'add_consumer[tell worker(s) to start consuming a queue]' \
'autoscale[change autoscale settings]' \
'cancel_consumer[tell worker(s) to stop consuming a queue]' \
'disable_events[tell worker(s) to disable events]' \
'enable_events[tell worker(s) to enable events]' \
'pool_grow[start more pool processes]' \
'pool_shrink[use less pool processes]' \
'rate_limit[tell worker(s) to modify the rate limit for a task type]' \
'time_limit[tell worker(s) to modify the time limit for a task type.]'
compadd -a controlargs ifargs
;;
multi)
_values -s \
'--nosplash[Don"t display program info.]' \
'--verbose[Show more output.]' \
'--no-color[Don"t display colors.]' \
'--quiet[Don"t show as much output.]' \
'start' 'restart' 'stopwait' 'stop' 'show' \
'names' 'expand' 'get' 'kill'
compadd -a ifargs
;;
amqp)
_values -s \
'queue.declare' 'queue.purge' 'exchange.delete' 'basic.publish' \
'exchange.declare' 'queue.delete' 'queue.bind' 'basic.get'
;;
list)
_values -s, 'bindings'
;;
shell)
_values -s \
'--ipython[force iPython.]' \
'--bpython[force bpython.]' \
'--python[force default Python shell.]' \
'--without-tasks[don"t add tasks to locals.]' \
'--eventlet[use eventlet.]' \
'--gevent[use gevent.]'
compadd -a ifargs
;;
beat)
_arguments \
'(-s --schedule=)'{-s,--schedule=}'[Path to the schedule database. Defaults to celerybeat-schedule.]' \
'(-S --scheduler=)'{-S,--scheduler=}'[Scheduler class to use. Default is celery.beat.PersistentScheduler.]' \
'(--max-interval)--max-interval[]'
compadd -a dopts ifargs
;;
events)
_arguments \
'(-d --dump)'{-d,--dump}'[Dump events to stdout.]' \
'(-c --camera=)'{-c,--camera=}'[Take snapshots of events using this camera.]' \
'(-F --frequency=)'{-F,--frequency=}'[Camera: Shutter frequency. Default is every 1.0 seconds.]' \
'(-r --maxrate=)'{-r,--maxrate=}'[Camera: Optional shutter rate limit (e.g. 10/m).]'
compadd -a dopts ifargs
;;
*)
;;
esac
}

View file

@ -35,27 +35,37 @@
# -------
#
# * Mario Fernandez (https://github.com/sirech)
# * Dong Weiming (https://github.com/dongweiming)
#
# ------------------------------------------------------------------------------
local curcontext="$curcontext" state line ret=1
local curcontext="$curcontext" state line ret=1 version opts first second third
typeset -A opt_args
version=(${(f)"$(_call_program version $words[1] --version)"})
version=${${(z)${version[1]}}[3]}
first=$(echo $version|cut -d '.' -f 1)
second=$(echo $version|cut -d '.' -f 2)
third=$(echo $version|cut -d '.' -f 3)
if (( $first < 2 )) && (( $second < 7 )) && (( $third < 3 ));then
opts+=('(-l --lint)'{-l,--lint}'[pipe the compiled JavaScript through JavaScript Lint]'
'(-r --require)'{-r,--require}'[require a library before executing your script]:library')
fi
_arguments -C \
'(- *)'{-h,--help}'[display this help message]' \
'(- *)'{-v,--version}'[display the version number]' \
$opts \
'(-b --bare)'{-b,--bare}'[compile without a top-level function wrapper]' \
'(-e --eval)'{-e,--eval}'[pass a string from the command line as input]:Inline Script' \
'(-i --interactive)'{-i,--interactive}'[run an interactive CoffeeScript REPL]' \
'(-j --join)'{-j,--join}'[concatenate the source CoffeeScript before compiling]:Destination JS file:_files -g "*.js"' \
'(-l --lint)'{-l,--lint}'[pipe the compiled JavaScript through JavaScript Lint]' \
'(--nodejs)--nodejs[pass options directly to the "node" binary]' \
'(-c --compile)'{-c,--compile}'[compile to JavaScript and save as .js files]' \
'(-o --output)'{-o,--output}'[set the output directory for compiled JavaScript]:Output Directory:_files -/' \
'(-n -t -p)'{-n,--nodes}'[print out the parse tree that the parser produces]' \
'(-n -t -p)'{-p,--print}'[print out the compiled JavaScript]' \
'(-n -t -p)'{-t,--tokens}'[print out the tokens that the lexer/rewriter produce]' \
'(-r --require)'{-r,--require}'[require a library before executing your script]:library' \
'(-s --stdio)'{-s,--stdio}'[listen for and compile scripts over stdio]' \
'(-w --watch)'{-w,--watch}'[watch scripts for changes and rerun commands]' \
'*:script or directory:_files' && ret=0

View file

@ -0,0 +1,11 @@
man() {
env \
LESS_TERMCAP_mb=$(printf "\e[1;31m") \
LESS_TERMCAP_md=$(printf "\e[1;31m") \
LESS_TERMCAP_me=$(printf "\e[0m") \
LESS_TERMCAP_se=$(printf "\e[0m") \
LESS_TERMCAP_so=$(printf "\e[1;44;33m") \
LESS_TERMCAP_ue=$(printf "\e[0m") \
LESS_TERMCAP_us=$(printf "\e[1;32m") \
man "$@"
}

View file

@ -0,0 +1,28 @@
# Plugin for highlighting file content
# The plugin highlights file content based on the filename extension.
# If no highlighting method is supported for a given extension, it tries
# to guess one by looking at the file content.
alias colorize='colorize_via_pygmentize'
colorize_via_pygmentize() {
    if [ ! -x "$(which pygmentize)" ]; then
        echo "package 'pygmentize' is not installed!"
        return 1
    fi

    # with no arguments, read from stdin and let pygmentize guess the lexer
    if [ $# -eq 0 ]; then
        pygmentize -g
        return
    fi

    for FNAME in "$@"
    do
        filename=$(basename "$FNAME")
        lexer=$(pygmentize -N "$filename")
        if [ "$lexer" != "text" ]; then
            pygmentize -l "$lexer" "$FNAME"
        else
            pygmentize -g "$FNAME"
        fi
    done
}
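A quick usage sketch (the file names are hypothetical):

colorize setup.py        # lexer chosen from the file name via pygmentize -N
colorize notes.unknown   # unknown extension falls back to content guessing (-g)
colorize < some.log      # with no arguments, highlights stdin with a guessed lexer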

View file

@ -3,3 +3,7 @@
# this is installed in Ubuntu
[[ -e /etc/zsh_command_not_found ]] && source /etc/zsh_command_not_found
# Arch Linux command-not-found support, you must have package pkgfile installed
# https://wiki.archlinux.org/index.php/Pkgfile#.22Command_not_found.22_hook
[[ -e /usr/share/doc/pkgfile/command-not-found.zsh ]] && source /usr/share/doc/pkgfile/command-not-found.zsh

View file

@ -10,9 +10,27 @@ _composer_get_command_list () {
composer --no-ansi | sed "1,/Available commands/d" | awk '/^ [a-z]+/ { print $1 }'
}
_composer_get_required_list () {
composer show -s --no-ansi | sed '1,/requires/d' | awk 'NF > 0 && !/^requires \(dev\)/{ print $1 }'
}
_composer () {
local curcontext="$curcontext" state line
typeset -A opt_args
_arguments \
'1: :->command'\
'*: :->args'
if [ -f composer.json ]; then
compadd `_composer_get_command_list`
case $state in
command)
compadd `_composer_get_command_list`
;;
*)
compadd `_composer_get_required_list`
;;
esac
else
compadd create-project init search selfupdate show
fi
}
@ -24,6 +42,7 @@ alias csu='composer self-update'
alias cu='composer update'
alias ci='composer install'
alias ccp='composer create-project'
alias cdu='composer dump-autoload'
# install composer in the current directory
alias cget='curl -s https://getcomposer.org/installer | php'

View file

@ -0,0 +1,3 @@
function copydir {
pwd | tr -d "\r\n" | pbcopy
}

View file

@ -0,0 +1,5 @@
function copyfile {
[[ "$#" != 1 ]] && return 1
local file_to_copy=$1
cat $file_to_copy | pbcopy
}

View file

@ -6,14 +6,14 @@
# Use aptitude if installed, or apt-get if not.
# You can just set apt_pref='apt-get' to override it.
if [[ -e $( which aptitude 2>&1 ) ]]; then
if [[ -e $( which -p aptitude 2>&1 ) ]]; then
apt_pref='aptitude'
else
apt_pref='apt-get'
fi
# Use sudo by default if it's installed
if [[ -e $( which sudo 2>&1 ) ]]; then
if [[ -e $( which -p sudo 2>&1 ) ]]; then
use_sudo=1
fi

View file

@ -20,6 +20,19 @@ _managepy-createcachetable(){
$nul_args && ret=0
}
_managepy-collectstatic(){
_arguments -s : \
'--link=-[Create a symbolic link to each file instead of copying.]:' \
'--noinput=-[Do NOT prompt the user for input of any kind.]:' \
'--no-post-process=-[Do NOT post process collected files.]:' \
'--ignore=-[Ignore files or directories matching this glob-style pattern. Use multiple times to ignore more.]:' \
'--dry-run=-[Do everything except modify the filesystem.]:' \
'--clear=-[Clear the existing files using the storage before trying to copy or link the original file.]:' \
'--no-default-ignore=-[Do not ignore the common private glob-style patterns "CVS", ".*" and "*~".]:' \
$nul_args && ret=0
}
_managepy-dbshell(){
_arguments -s : \
$nul_args && ret=0
@ -163,6 +176,7 @@ _managepy-commands() {
commands=(
'adminindex:prints the admin-index template snippet for the given app name(s).'
'createcachetable:creates the table needed to use the SQL cache backend.'
'collectstatic:Collect static files in a single location.'
'dbshell:runs the command-line client for the current DATABASE_ENGINE.'
"diffsettings:displays differences between the current settings.py and Django's default settings."
'dumpdata:Output the contents of the database as a fixture of the given format.'
@ -220,4 +234,5 @@ _managepy() {
compdef _managepy manage.py
compdef _managepy django
compdef _managepy django-admin.py
compdef _managepy django-manage

19
plugins/docker/README.md Normal file
View file

@ -0,0 +1,19 @@
## Docker autocomplete plugin
- Adds autocomplete options for all docker commands.
- Will also show container IDs and image names where applicable
#### Shows help for all commands
![General Help](http://i.imgur.com/tUBO9jh.png "Help for all commands")
#### Shows your downloaded images where applicable
![Images](http://i.imgur.com/R8ZsWO1.png "Images")
#### Shows your running containers where applicable
![Containers](http://i.imgur.com/WQtbheg.png "Containers")
Maintainer : Ahmed Azaan ([@aeonazaan](https://twitter.com/aeonazaan))
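To try it, the usual oh-my-zsh plugin mechanism applies; a minimal `~/.zshrc` sketch (keep whatever other plugins you already list):

    plugins=(git docker)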

290
plugins/docker/_docker Normal file
View file

@ -0,0 +1,290 @@
#compdef docker
# Docker autocompletion for oh-my-zsh
# Requires: Docker installed
# Author : Azaan (@aeonazaan)
# ----- Helper functions
# Output a selectable list of all running docker containers
__docker_containers() {
declare -a cont_cmd
cont_cmd=($(docker ps | awk 'NR>1{print $1":[CON("$1")"$2"("$3")]"}'))
_describe 'containers' cont_cmd
}
# output a selectable list of all docker images
__docker_images() {
declare -a img_cmd
img_cmd=($(docker images | awk 'NR>1{print $1}'))
_describe 'images' img_cmd
}
# ----- Commands
# Separate function for each command, makes extension easier later
# ---------------------------
__attach() {
__docker_containers
}
__build() {
_arguments \
'-q=false[Suppress verbose build output]' \
'-t="[fuck to be applied to the resulting image in case of success]' \
'*:files:_files'
}
__commit() {
_arguments \
'-author="[Author]' \
'-m="[Commit message]' \
'-run="[Config automatically applied when the image is run.\n]'
__docker_containers
}
__diff() {
__docker_containers
}
__export() {
__docker_containers
}
__history() {
__docker_images
}
__images() {
_arguments \
'-a[show all images]' \
'-notrunc[dont truncate output]' \
'-q[only show numeric IDs]' \
'-viz[output graph in graphviz format]'
__docker_images
}
__import() {
_arguments '*:files:_files'
}
__info() {
# no arguments
}
__insert() {
__docker_images
_arguments '*:files:_files'
}
__inspect() {
__docker_images
__docker_containers
}
__kill() {
__docker_containers
}
__login() {
_arguments \
'-e="[email]' \
'-p="[password]' \
'-u="[username]' \
}
__logs() {
__docker_containers
}
__port() {
__docker_containers
}
__top() {
__docker_containers
}
__ps() {
_arguments \
'-a[Show all containers. Only running containers are shown by default.]' \
'-beforeId="[Show only container created before Id, include non-running ones.]' \
'-l[Show only the latest created container, include non-running ones.]' \
'-n=[Show n last created containers, include non-running ones.]' \
'-notrunc[Dont truncate output]' \
'-q[Only display numeric IDs]' \
'-s[Display sizes]' \
'-sinceId="[Show only containers created since Id, include non-running ones.]'
}
__pull() {
_arguments '-t="[Download tagged image in repository]'
}
__push() {
}
__restart() {
_arguments '-t=[number of seconds to try to stop before killing]'
__docker_containers
}
__rm() {
_arguments '-v[Remove the volumes associated to the container]'
__docker_containers
}
__rmi() {
__docker_images
}
__run() {
_arguments \
'-a=[Attach to stdin, stdout or stderr.]' \
'-c=[CPU shares (relative weight)]' \
'-d[Detached mode: leave the container running in the background]' \
'-dns=[Set custom dns servers]' \
'-e=[Set environment variables]' \
'-entrypoint="[Overwrite the default entrypoint of the image]' \
'-h="[Container host name]' \
'-i[Keep stdin open even if not attached]' \
'-m=[Memory limit (in bytes)]' \
'-p=[Expose a containers port to the host (use docker port to see the actual mapping)]' \
'-t[Allocate a pseudo-tty]' \
'-u="[Username or UID]' \
'-v=[Bind mount a volume (e.g. from the host: -v /host:/container, from docker: -v /container)]' \
'-volumes-from="[Mount volumes from the specified container]'
__docker_images
}
__search() {
_arguments '-notrunc[Dont truncate output]'
}
__start() {
__docker_containers
}
__stop() {
_arguments '-t=[number of seconds to try to stop before killing]'
__docker_containers
}
__tag() {
_arguments '-f[Force]'
__docker_images
}
__version() {
}
__wait() {
__docker_containers
}
# end commands ---------
# ----------------------
local -a _1st_arguments
_1st_arguments=(
"attach":"Attach to a running container"
"build":"Build a container from a Dockerfile"
"commit":"Create a new image from a container's changes"
"diff":"Inspect changes on a container's filesystem"
"export":"Stream the contents of a container as a tar archive"
"history":"Show the history of an image"
"images":"List images"
"import":"Create a new filesystem image from the contents of a tarball"
"info":"Display system-wide information"
"insert":"Insert a file in an image"
"inspect":"Return low-level information on a container"
"kill":"Kill a running container"
"login":"Register or Login to the docker registry server"
"logs":"Fetch the logs of a container"
"port":"Lookup the public-facing port which is NAT-ed to PRIVATE_PORT"
"top":"Lookup the running processes of a container"
"ps":"List containers"
"pull":"Pull an image or a repository from the docker registry server"
"push":"Push an image or a repository to the docker registry server"
"restart":"Restart a running container"
"rm":"Remove one or more containers"
"rmi":"Remove one or more images"
"run":"Run a command in a new container"
"search":"Search for an image in the docker index"
"start":"Start a stopped container"
"stop":"Stop a running container"
"tag":"Tag an image into a repository"
"version":"Show the docker version information"
"wait":"Block until a container stops, then print its exit code"
)
_arguments '*:: :->command'
if (( CURRENT == 1 )); then
_describe -t commands "docker command" _1st_arguments
return
fi
local -a _command_args
case "$words[1]" in
attach)
__docker_containers ;;
build)
__build ;;
commit)
__commit ;;
diff)
__diff ;;
export)
__export ;;
history)
__history ;;
images)
__images ;;
import)
__import ;;
info)
__info ;;
insert)
__insert ;;
inspect)
__inspect ;;
kill)
__kill ;;
login)
__login ;;
logs)
__logs ;;
port)
__port ;;
top)
__top ;;
ps)
__ps ;;
pull)
__pull ;;
push)
__push ;;
restart)
__restart ;;
rm)
__rm ;;
rmi)
__rmi ;;
run)
__run ;;
search)
__search ;;
start)
__start ;;
stop)
__stop ;;
tag)
__tag ;;
version)
__version ;;
wait)
__wait ;;
esac

View file

@ -0,0 +1,29 @@
# ------------------------------------------------------------------------------
# FILE: emoji-clock.plugin.zsh
# DESCRIPTION: The current time with half hour accuracy as an emoji symbol.
# Inspired by Andre Torrez' "Put A Burger In Your Shell"
# http://notes.torrez.org/2013/04/put-a-burger-in-your-shell.html
# AUTHOR: Alexis Hildebrandt (afh[at]surryhill.net)
# VERSION: 1.0.0
# -----------------------------------------------------------------------------
function emoji-clock() {
hour=$(date '+%I')
minutes=$(date '+%M')
case $hour in
01) clock="🕐"; [ $minutes -ge 30 ] && clock="🕜";;
02) clock="🕑"; [ $minutes -ge 30 ] && clock="🕝";;
03) clock="🕒"; [ $minutes -ge 30 ] && clock="🕞";;
04) clock="🕓"; [ $minutes -ge 30 ] && clock="🕟";;
05) clock="🕔"; [ $minutes -ge 30 ] && clock="🕠";;
06) clock="🕕"; [ $minutes -ge 30 ] && clock="🕡";;
07) clock="🕖"; [ $minutes -ge 30 ] && clock="🕢";;
08) clock="🕗"; [ $minutes -ge 30 ] && clock="🕣";;
09) clock="🕘"; [ $minutes -ge 30 ] && clock="🕤";;
10) clock="🕙"; [ $minutes -ge 30 ] && clock="🕥";;
11) clock="🕚"; [ $minutes -ge 30 ] && clock="🕦";;
12) clock="🕛"; [ $minutes -ge 30 ] && clock="🕧";;
*) clock="⌛";;
esac
echo $clock
}
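A prompt-integration sketch (assuming your theme builds RPROMPT itself and has prompt_subst enabled; exact placement is up to the theme):

RPROMPT='$(emoji-clock)'

The single quotes keep the command substitution unevaluated so the clock refreshes each time the prompt is redrawn.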

View file

@ -1,4 +1,4 @@
encode64(){ echo -n $1 | base64 }
decode64(){ echo -n $1 | base64 -D }
decode64(){ echo -n $1 | base64 --decode }
alias e64=encode64
alias d64=decode64

View file

@ -53,7 +53,7 @@ function extract() {
(*.lzma) unlzma "$1" ;;
(*.Z) uncompress "$1" ;;
(*.zip) unzip "$1" -d $extract_dir ;;
(*.rar) unrar e -ad "$1" ;;
(*.rar) unrar x -ad "$1" ;;
(*.7z) 7za x "$1" ;;
(*.deb)
mkdir -p "$extract_dir/control"

60
plugins/fabric/_fab Normal file
View file

@ -0,0 +1,60 @@
#compdef fab
#autoload
local curcontext=$curcontext state line
declare -A opt_args
declare target_list
target_list=(`fab --shortlist 2>/dev/null`)
_targets() {
_describe -t commands "fabric targets" target_list
}
output_levels=(
'status: Status messages, i.e. noting when Fabric is done running, if the user used a keyboard interrupt, or when servers are disconnected from. These messages are almost always relevant and rarely verbose.'
'aborts: Abort messages. Like status messages, these should really only be turned off when using Fabric as a library, and possibly not even then. Note that even if this output group is turned off, aborts will still occur there just wont be any output about why Fabric aborted!'
'warnings: Warning messages. These are often turned off when one expects a given operation to fail, such as when using grep to test existence of text in a file. If paired with setting env.warn_only to True, this can result in fully silent warnings when remote programs fail. As with aborts, this setting does not control actual warning behavior, only whether warning messages are printed or hidden.'
'running: Printouts of commands being executed or files transferred, e.g. [myserver] run: ls /var/www. Also controls printing of tasks being run, e.g. [myserver] Executing task ''foo''.'
'stdout: Local, or remote, stdout, i.e. non-error output from commands.'
'stderr: Local, or remote, stderr, i.e. error-related output from commands.'
'user: User-generated output, i.e. local output printed by fabfile code via use of the fastprint or puts functions.'
)
_arguments -w -S -C \
'(-)'{-h,--help}'[show this help message and exit]: :->noargs' \
'(-)'{-V,--version}'[show program''s version number and exit]: :->noargs' \
'(-)--list[print list of possible commands and exit]: :->noargs' \
'(-)--shortlist[print non-verbose list of possible commands and exit]: :->noargs' \
'(--reject-unknown-hosts)--reject-unknown-hosts[reject unknown hosts]' \
'(--no-pty)--no-pty[do not use pseudo-terminal in run/sudo]' \
"(-d+ --display=-)"{-d+,--display=-}"[print detailed info about a given command]: :_targets" \
'(-D --disable-known-hosts)'{-D,--disable-known-hosts}'[do not load user known_hosts file]' \
'(-r --reject-unknown-hosts)'{-r,--reject-unknown-hosts}'[reject unknown hosts]' \
'(-u+ --user=-)'{-u+,--user=-}'[username to use when connecting to remote hosts]: :' \
'(-p+ --password=-)'{-p+,--password=-}'[password for use with authentication and/or sudo]: :' \
'(-H+ --hosts=-)'{-H+,--hosts=-}'[comma separated list of hosts to operate on]: :' \
'(-R+ --roles=-)'{-R+,--roles=-}'[comma separated list of roles to operate on]: :' \
'(-a --no-agent)'{-a,--no-agent}'[don''t use the running SSH agent]' \
'(-k --no-keys)'{-k,--no-keys}'[don''t load private key files from ~/.ssh/]' \
'(-w --warn-only)'{-w,--warn-only}'[warn instead of abort, when commands fail]' \
'-i+[path to SSH private key file. May be repeated]: :_files' \
"(-f+ --fabfile=)"{-f+,--fabfile=}"[Python module file to import]: :_files -g *.py" \
'(-c+ --config=-)'{-c+,--config=-}'[specify location of config file to use]: :_files' \
'(-s+ --shell=-)'{-s+,--shell=-}'[specify a new shell, defaults to ''/bin/bash -l -c'']: :' \
'(--hide=-)--hide=-[comma-separated list of output levels to hide]: :->levels' \
'(--show=-)--show=-[comma-separated list of output levels to show]: :->levels' \
'*::: :->subcmds' && return 0
if [[ CURRENT -ge 1 ]]; then
case $state in
noargs)
_message "nothing to complete";;
levels)
_describe -t commands "output levels" output_levels;;
*)
_targets;;
esac
return
fi

View file

@ -0,0 +1 @@
# DECLARATION: This plugin was created by vhbit. All I did was port it from https://github.com/vhbit/fabric-zsh-autocomplete.

View file

@ -1,5 +1,10 @@
if [ $commands[fasd] ]; then # check if fasd is installed
eval "$(fasd --init auto)"
fasd_cache="$HOME/.fasd-init-cache"
if [ "$(command -v fasd)" -nt "$fasd_cache" -o ! -s "$fasd_cache" ]; then
fasd --init auto >| "$fasd_cache"
fi
source "$fasd_cache"
unset fasd_cache
alias v='f -e vim'
alias o='a -e open'
fi

View file

@ -0,0 +1,6 @@
# start fbterm automatically in /dev/tty*
if [[ $(tty|grep -o '/dev/tty') = /dev/tty ]] ; then
fbterm
exit
fi

View file

@ -9,8 +9,9 @@ _gem_installed() {
local -a _1st_arguments
_1st_arguments=(
'build:Build a gem from a gemspec'
'cert:Manage RubyGems certificates and signing settings'
'check:Check installed gems'
'check:Check a gem repository for added or missing files'
'cleanup:Clean up old versions of installed gems in the local repository'
'contents:Display the contents of the installed gems'
'dependency:Show the dependencies of an installed gem'
@ -21,7 +22,7 @@ _1st_arguments=(
'install:Install a gem into the local repository'
'list:Display gems whose name starts with STRING'
'lock:Generate a lockdown list of gems'
'mirror:Mirror a gem repository'
'mirror:Mirror all gem files (requires rubygems-mirror)'
'outdated:Display all gems that need updates'
'owner:Manage gem owners on RubyGems.org.'
'pristine:Restores installed gems to pristine condition from files located in the gem cache'
@ -35,8 +36,9 @@ _1st_arguments=(
'stale:List gems along with access times'
'uninstall:Uninstall gems from the local repository'
'unpack:Unpack an installed gem to the current directory'
'update:Update the named gems (or all installed gems) in the local repository'
'update:Update installed gems to the latest version'
'which:Find the location of a library file you can require'
'yank:Remove a specific gem version release from RubyGems.org'
)
local expl

View file

@ -0,0 +1,416 @@
#!zsh
#
# Installation
# ------------
#
# To achieve git-flow completion nirvana:
#
# 0. Update your zsh's git-completion module to the newest version.
# From here. http://zsh.git.sourceforge.net/git/gitweb.cgi?p=zsh/zsh;a=blob_plain;f=Completion/Unix/Command/_git;hb=HEAD
#
# 1. Install this file. Either:
#
# a. Place it in your .zshrc:
#
# b. Or, copy it somewhere (e.g. ~/.git-flow-completion.zsh) and put the following line in
# your .zshrc:
#
# source ~/.git-flow-completion.zsh
#
# c. Or, use this file as a oh-my-zsh plugin.
#
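# A minimal sketch of options (b) and (c) above (assuming the file was copied
# to ~/.git-flow-completion.zsh): add one of the following to your ~/.zshrc.
#
#     source ~/.git-flow-completion.zsh    # standalone install
#     plugins=(git git-flow)               # or enable it as an oh-my-zsh plugin
#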
_git-flow ()
{
local curcontext="$curcontext" state line
typeset -A opt_args
_arguments -C \
':command:->command' \
'*::options:->options'
case $state in
(command)
local -a subcommands
subcommands=(
'init:Initialize a new git repo with support for the branching model.'
'feature:Manage your feature branches.'
'config:Manage your configuration.'
'release:Manage your release branches.'
'hotfix:Manage your hotfix branches.'
'support:Manage your support branches.'
'version:Shows version information.'
)
_describe -t commands 'git flow' subcommands
;;
(options)
case $line[1] in
(init)
_arguments \
-f'[Force setting of gitflow branches, even if already configured]'
;;
(version)
;;
(hotfix)
__git-flow-hotfix
;;
(release)
__git-flow-release
;;
(feature)
__git-flow-feature
;;
(config)
__git-flow-config
;;
esac
;;
esac
}
__git-flow-release ()
{
local curcontext="$curcontext" state line
typeset -A opt_args
_arguments -C \
':command:->command' \
'*::options:->options'
case $state in
(command)
local -a subcommands
subcommands=(
'start:Start a new release branch.'
'finish:Finish a release branch.'
'list:List all your release branches. (Alias to `git flow release`)'
'publish:Publish release branch to remote.'
'track:Checkout remote release branch.'
'delete:Delete a release branch.'
)
_describe -t commands 'git flow release' subcommands
_arguments \
-v'[Verbose (more) output]'
;;
(options)
case $line[1] in
(start)
_arguments \
-F'[Fetch from origin before performing finish]'\
':version:__git_flow_version_list'
;;
(finish)
_arguments \
-F'[Fetch from origin before performing finish]' \
-s'[Sign the release tag cryptographically]'\
-u'[Use the given GPG-key for the digital signature (implies -s)]'\
-m'[Use the given tag message]'\
-p'[Push to $ORIGIN after performing finish]'\
':version:__git_flow_version_list'
;;
(delete)
_arguments \
-f'[Force deletion]' \
-r'[Delete remote branch]' \
':version:__git_flow_version_list'
;;
(publish)
_arguments \
':version:__git_flow_version_list'
;;
(track)
_arguments \
':version:__git_flow_version_list'
;;
*)
_arguments \
-v'[Verbose (more) output]'
;;
esac
;;
esac
}
__git-flow-hotfix ()
{
local curcontext="$curcontext" state line
typeset -A opt_args
_arguments -C \
':command:->command' \
'*::options:->options'
case $state in
(command)
local -a subcommands
subcommands=(
'start:Start a new hotfix branch.'
'finish:Finish a hotfix branch.'
'delete:Delete a hotfix branch.'
'list:List all your hotfix branches. (Alias to `git flow hotfix`)'
)
_describe -t commands 'git flow hotfix' subcommands
_arguments \
-v'[Verbose (more) output]'
;;
(options)
case $line[1] in
(start)
_arguments \
-F'[Fetch from origin before performing finish]'\
':hotfix:__git_flow_version_list'\
':branch-name:__git_branch_names'
;;
(finish)
_arguments \
-F'[Fetch from origin before performing finish]' \
-s'[Sign the release tag cryptographically]'\
-u'[Use the given GPG-key for the digital signature (implies -s)]'\
-m'[Use the given tag message]'\
-p'[Push to $ORIGIN after performing finish]'\
':hotfix:__git_flow_hotfix_list'
;;
(delete)
_arguments \
-f'[Force deletion]' \
-r'[Delete remote branch]' \
':hotfix:__git_flow_hotfix_list'
;;
*)
_arguments \
-v'[Verbose (more) output]'
;;
esac
;;
esac
}
__git-flow-feature ()
{
local curcontext="$curcontext" state line
typeset -A opt_args
_arguments -C \
':command:->command' \
'*::options:->options'
case $state in
(command)
local -a subcommands
subcommands=(
'start:Start a new feature branch.'
'finish:Finish a feature branch.'
'delete:Delete a feature branch.'
'list:List all your feature branches. (Alias to `git flow feature`)'
'publish:Publish feature branch to remote.'
'track:Checkout remote feature branch.'
'diff:Show all changes.'
'rebase:Rebase from integration branch.'
'checkout:Checkout local feature branch.'
'pull:Pull changes from remote.'
)
_describe -t commands 'git flow feature' subcommands
_arguments \
-v'[Verbose (more) output]'
;;
(options)
case $line[1] in
(start)
_arguments \
-F'[Fetch from origin before performing finish]'\
':feature:__git_flow_feature_list'\
':branch-name:__git_branch_names'
;;
(finish)
_arguments \
-F'[Fetch from origin before performing finish]' \
-r'[Rebase instead of merge]'\
':feature:__git_flow_feature_list'
;;
(delete)
_arguments \
-f'[Force deletion]' \
-r'[Delete remote branch]' \
':feature:__git_flow_feature_list'
;;
(publish)
_arguments \
':feature:__git_flow_feature_list'\
;;
(track)
_arguments \
':feature:__git_flow_feature_list'\
;;
(diff)
_arguments \
':branch:__git_branch_names'\
;;
(rebase)
_arguments \
-i'[Do an interactive rebase]' \
':branch:__git_branch_names'
;;
(checkout)
_arguments \
':branch:__git_flow_feature_list'\
;;
(pull)
_arguments \
':remote:__git_remotes'\
':branch:__git_branch_names'
;;
*)
_arguments \
-v'[Verbose (more) output]'
;;
esac
;;
esac
}
__git-flow-config ()
{
local curcontext="$curcontext" state line
typeset -A opt_args
_arguments -C \
':command:->command' \
'*::options:->options'
case $state in
(command)
local -a subcommands
subcommands=(
'list:List the configuration. (Alias to `git flow config`)'
'set:Set the configuration option'
)
_describe -t commands 'git flow config' subcommands
;;
(options)
case $line[1] in
(set)
_arguments \
--local'[Use repository config file]' \
--global'[Use global config file]'\
--system'[Use system config file]'\
--file'[Use given config file]'\
':option:(master develop feature hotfix release support versiontagprefix)'
;;
*)
_arguments \
--local'[Use repository config file]' \
--global'[Use global config file]'\
--system'[Use system config file]'\
--file'[Use given config file]'
;;
esac
;;
esac
}
__git_flow_version_list ()
{
local expl
declare -a versions
versions=(${${(f)"$(_call_program versions git flow release list 2> /dev/null | tr -d ' |*')"}})
__git_command_successful || return
_wanted versions expl 'version' compadd $versions
}
__git_flow_feature_list ()
{
local expl
declare -a features
features=(${${(f)"$(_call_program features git flow feature list 2> /dev/null | tr -d ' |*')"}})
__git_command_successful || return
_wanted features expl 'feature' compadd $features
}
__git_remotes () {
local expl gitdir remotes
gitdir=$(_call_program gitdir git rev-parse --git-dir 2>/dev/null)
__git_command_successful || return
remotes=(${${(f)"$(_call_program remotes git config --get-regexp '"^remote\..*\.url$"')"}//#(#b)remote.(*).url */$match[1]})
__git_command_successful || return
# TODO: Should combine the two instead of either or.
if (( $#remotes > 0 )); then
_wanted remotes expl remote compadd $* - $remotes
else
_wanted remotes expl remote _files $* - -W "($gitdir/remotes)" -g "$gitdir/remotes/*"
fi
}
__git_flow_hotfix_list ()
{
local expl
declare -a hotfixes
hotfixes=(${${(f)"$(_call_program hotfixes git flow hotfix list 2> /dev/null | tr -d ' |*')"}})
__git_command_successful || return
_wanted hotfixes expl 'hotfix' compadd $hotfixes
}
__git_branch_names () {
local expl
declare -a branch_names
branch_names=(${${(f)"$(_call_program branchrefs git for-each-ref --format='"%(refname)"' refs/heads 2>/dev/null)"}#refs/heads/})
__git_command_successful || return
_wanted branch-names expl branch-name compadd $* - $branch_names
}
__git_command_successful () {
if (( ${#pipestatus:#0} > 0 )); then
_message 'not a git repository'
return 1
fi
return 0
}
zstyle ':completion:*:*:git:*' user-commands flow:'provide high-level repository operations'

View file

@ -20,6 +20,12 @@
# c. Or, use this file as a oh-my-zsh plugin.
#
# Aliases
alias gf='git flow'
alias gcd='git checkout develop'
alias gch='git checkout hotfix'
alias gcr='git checkout release'
_git-flow ()
{
local curcontext="$curcontext" state line
@ -88,6 +94,8 @@ __git-flow-release ()
'start:Start a new release branch.'
'finish:Finish a release branch.'
'list:List all your release branches. (Alias to `git flow release`)'
'publish:Publish release branch to remote.'
'track:Checkout remote release branch.'
)
_describe -t commands 'git flow release' subcommands
_arguments \
@ -115,6 +123,16 @@ __git-flow-release ()
':version:__git_flow_version_list'
;;
(publish)
_arguments \
':version:__git_flow_version_list'\
;;
(track)
_arguments \
':version:__git_flow_version_list'\
;;
*)
_arguments \
-v'[Verbose (more) output]'

View file

@ -6,7 +6,8 @@ _git_remote_branch() {
compadd create publish rename delete track
elif (( CURRENT == 3 )); then
# second arg: remote branch name
compadd `git branch -r | grep -v HEAD | sed "s/.*\///" | sed "s/ //g"`
remotes=`git remote | tr '\n' '|' | sed "s/\|$//g"`
compadd `git branch -r | grep -v HEAD | sed "s/$remotes\///" | sed "s/ //g"`
elif (( CURRENT == 4 )); then
# third arg: remote name
compadd `git remote`

4
plugins/git/README.md Normal file
View file

@ -0,0 +1,4 @@
## git
**Maintainer:** [Stibbons](https://github.com/Stibbons)
This plugin adds several git aliases and increases the completion functions provided by zsh
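A small sample of the aliases it defines (see git.plugin.zsh in this changeset; the branch name below is only illustrative):

```zsh
gst              # git status
gd               # git diff
gco my-branch    # git checkout my-branch
```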

83
plugins/git/_git-branch Normal file
View file

@ -0,0 +1,83 @@
#compdef git-branch
_git-branch ()
{
declare l c m d
l='--color --no-color -r -a --all -v --verbose --abbrev --no-abbrev'
c='-l -f --force -t --track --no-track --set-upstream --contains --merged --no-merged'
m='-m -M'
d='-d -D'
declare -a dependent_creation_args
if (( words[(I)-r] == 0 )); then
dependent_creation_args=(
"($l $m $d): :__git_branch_names"
"::start-point:__git_revisions")
fi
declare -a dependent_deletion_args
if (( words[(I)-d] || words[(I)-D] )); then
dependent_creation_args=
dependent_deletion_args=(
'-r[delete only remote-tracking branches]')
if (( words[(I)-r] )); then
dependent_deletion_args+='*: :__git_ignore_line_inside_arguments __git_remote_branch_names'
else
dependent_deletion_args+='*: :__git_ignore_line_inside_arguments __git_branch_names'
fi
fi
declare -a dependent_modification_args
if (( words[(I)-m] || words[(I)-M] )); then
dependent_creation_args=
dependent_modification_args=(
':old or new branch name:__git_branch_names'
'::new branch name:__git_branch_names')
fi
_arguments -w -S -s \
"($c $m $d --no-color :)--color=-[turn on branch coloring]:: :__git_color_whens" \
"($c $m $d : --color)--no-color[turn off branch coloring]" \
"($c $m -a --all)-r[list or delete only remote-tracking branches]" \
"($c $m $d : -r)"{-a,--all}"[list both remote-tracking branches and local branches]" \
"($c $m $d : -v --verbose)"{-v,--verbose}'[show SHA1 and commit subject line for each head]' \
"($c $m $d :)--abbrev=[set minimum SHA1 display-length]: :__git_guard_number length" \
"($c $m $d :)--no-abbrev[do not abbreviate sha1s]" \
"($l $m $d)-l[create the branch's reflog]" \
"($l $m $d -f --force)"{-f,--force}"[force the creation of a new branch]" \
"($l $m $d -t --track)"{-t,--track}"[set up configuration so that pull merges from the start point]" \
"($l $m $d)--no-track[override the branch.autosetupmerge configuration variable]" \
"($l $m $d)--set-upstream[set up configuration so that pull merges]" \
"($l $m $d)--contains=[only list branches which contain the specified commit]: :__git_committishs" \
"($l $m $d)--merged=[only list branches which are fully contained by HEAD]: :__git_committishs" \
"($l $m $d)--no-merged=[do not list branches which are fully contained by HEAD]: :__git_committishs" \
$dependent_creation_args \
"($l $c $d -M)-m[rename a branch and the corresponding reflog]" \
"($l $c $d -m)-M[rename a branch even if the new branch-name already exists]" \
$dependent_modification_args \
"($l $c $m -D)-d[delete a fully merged branch]" \
"($l $c $m -d)-D[delete a branch]" \
$dependent_deletion_args
}
(( $+functions[__git_ignore_line] )) ||
__git_ignore_line () {
declare -a ignored
ignored=()
((CURRENT > 1)) &&
ignored+=(${line[1,CURRENT-1]//(#m)[\[\]()\\*?#<>~\^]/\\$MATCH})
((CURRENT < $#line)) &&
ignored+=(${line[CURRENT+1,-1]//(#m)[\[\]()\\*?#<>~\^]/\\$MATCH})
$* -F ignored
}
(( $+functions[__git_ignore_line_inside_arguments] )) ||
__git_ignore_line_inside_arguments () {
declare -a compadd_opts
zparseopts -D -E -a compadd_opts V: J: 1 2 n f X: M: P: S: r: R: q F:
__git_ignore_line $* $compadd_opts
}

74
plugins/git/_git-remote Normal file
View file

@ -0,0 +1,74 @@
#compdef git-remote
# NOTE: --track is undocumented.
# TODO: --track, -t, --master, and -m should take remote branches, I guess.
# NOTE: --master is undocumented.
# NOTE: --fetch is undocumented.
_git-remote () {
local curcontext=$curcontext state line
declare -A opt_args
_arguments -C \
':command:->command' \
'*::options:->options' && ret=0
case $state in
(command)
declare -a commands
commands=(
'add:add a new remote'
'show:show information about a given remote'
'prune:delete all stale tracking branches for a given remote'
'update:fetch updates for a set of remotes'
'rm:remove a remote from .git/config and all associated tracking branches'
'rename:rename a remote from .git/config and update all associated tracking branches'
'set-head:set or delete the default branch'
'set-branches:change the list of branches tracked by the named remote'
'set-url:change the URL a remote points to'
)
_describe -t commands 'sub-command' commands && ret=0
;;
(options)
case $line[1] in
(add)
_arguments \
'*'{--track,-t}'[track given branch instead of default glob refspec]:branch:__git_branch_names' \
'(--master -m)'{--master,-m}'[set the remote'\''s HEAD to point to given master branch]:branch:__git_branch_names' \
'(--fetch -f)'{--fetch,-f}'[run git-fetch on the new remote after it has been created]' \
':branch name:__git_remotes' \
':url:_urls' && ret=0
;;
(show)
_arguments \
'-n[do not contact the remote for a list of branches]' \
':remote:__git_remotes' && ret=0
;;
(prune)
_arguments \
'(--dry-run -n)'{-n,--dry-run}'[do not actually prune, only list what would be done]' \
':remote:__git_remotes' && ret=0
;;
(update)
__git_remote-groups && ret=0
;;
(rm)
__git_remotes && ret=0
;;
(rename)
__git_remotes && ret=0
;;
(set-url)
_arguments \
'*--push[manipulate push URLs]' \
'(--add)--add[add URL]' \
'(--delete)--delete[delete URLs]' \
':branch name:__git_remotes' \
':url:_urls' && ret=0
;;
esac
;;
esac
}

View file

@ -3,6 +3,10 @@ alias g='git'
compdef g=git
alias gst='git status'
compdef _git gst=git-status
alias gd='git diff'
compdef _git gd=git-diff
alias gdc='git diff --cached'
compdef _git gdc=git-diff
alias gl='git pull'
compdef _git gl=git-pull
alias gup='git pull --rebase'
@ -14,8 +18,14 @@ gdv() { git diff -w "$@" | view - }
compdef _git gdv=git-diff
alias gc='git commit -v'
compdef _git gc=git-commit
alias gc!='git commit -v --amend'
compdef _git gc!=git-commit
alias gca='git commit -v -a'
compdef _git gca=git-commit
compdef _git gc=git-commit
alias gca!='git commit -v -a --amend'
compdef _git gca!=git-commit
alias gcmsg='git commit -m'
compdef _git gcmsg=git-commit
alias gco='git checkout'
compdef _git gco=git-checkout
alias gcm='git checkout master'
@ -31,6 +41,12 @@ alias grset='git remote set-url'
compdef _git grset=git-remote
alias grup='git remote update'
compdef _git grup=git-remote
alias grbi='git rebase -i'
compdef _git grbi=git-rebase
alias grbc='git rebase --continue'
compdef _git grbc=git-rebase
alias grba='git rebase --abort'
compdef _git grba=git-rebase
alias gb='git branch'
compdef _git gb=git-branch
alias gba='git branch -a'
@ -40,12 +56,14 @@ compdef gcount=git
alias gcl='git config --list'
alias gcp='git cherry-pick'
compdef _git gcp=git-cherry-pick
alias glg='git log --stat --max-count=5'
alias glg='git log --stat --max-count=10'
compdef _git glg=git-log
alias glgg='git log --graph --max-count=5'
alias glgg='git log --graph --max-count=10'
compdef _git glgg=git-log
alias glgga='git log --graph --decorate --all'
compdef _git glgga=git-log
alias glo='git log --oneline'
compdef _git glo=git-log
alias gss='git status -s'
compdef _git gss=git-status
alias ga='git add'
@ -54,9 +72,20 @@ alias gm='git merge'
compdef _git gm=git-merge
alias grh='git reset HEAD'
alias grhh='git reset HEAD --hard'
alias gclean='git reset --hard && git clean -dfx'
alias gwc='git whatchanged -p --abbrev-commit --pretty=medium'
alias gf='git ls-files | grep'
#remove the gf alias
#alias gf='git ls-files | grep'
alias gpoat='git push origin --all && git push origin --tags'
alias gmt='git mergetool --no-prompt'
compdef _git gmt=git-mergetool
alias gg='git gui citool'
alias gga='git gui citool --amend'
alias gk='gitk --all --branches'
alias gsts='git stash show --text'
# Will cd into the top of the current repository
# or submodule.
@ -87,7 +116,32 @@ function current_repository() {
# these aliases take advantage of the previous function
alias ggpull='git pull origin $(current_branch)'
compdef ggpull=git
alias ggpur='git pull --rebase origin $(current_branch)'
compdef ggpur=git
alias ggpush='git push origin $(current_branch)'
compdef ggpush=git
alias ggpnp='git pull origin $(current_branch) && git push origin $(current_branch)'
compdef ggpnp=git
# Pretty log messages
function _git_log_prettily(){
if ! [ -z $1 ]; then
git log --pretty=$1
fi
}
alias glp="_git_log_prettily"
compdef _git glp=git-log
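# e.g. glp oneline         # git log --pretty=oneline
#      glp "format:%h %s"  # any format accepted by `git log --pretty=` works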
# Work In Progress (wip)
# These features let you pause work on a branch and switch to another one (wip)
# When you want to go back to work, just unwip it
#
# This function returns a warning if the current branch is a wip
function work_in_progress() {
if $(git log -n 1 | grep -q -c wip); then
echo "WIP!!"
fi
}
# These aliases commit and uncommit wip branches
alias gwip='git add -A; git ls-files --deleted -z | xargs -0 git rm; git commit -m "wip"'
alias gunwip='git log -n 1 | grep -q -c wip && git reset HEAD~1'
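# Example flow (sketch; the branch name is illustrative):
#   gwip                      # snapshot everything as a throwaway "wip" commit
#   git checkout other-work   # ...do something else, then come back...
#   gunwip                    # drop the wip commit, keeping the changes in the working tree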

View file

@ -60,6 +60,15 @@ __gitcomp_nl ()
compadd -Q -S "${4- }" -p "${2-}" -- ${=1} && _ret=0
}
__gitcomp_file ()
{
emulate -L zsh
local IFS=$'\n'
compset -P '*[=:]'
compadd -Q -p "${2-}" -f -- ${=1} && _ret=0
}
_git ()
{
local _ret=1

View file

@ -13,6 +13,7 @@
# *) .git/remotes file names
# *) git 'subcommands'
# *) tree paths within 'ref:path/to/file' expressions
# *) file paths within current working directory and index
# *) common --long-options
#
# To use these routines:
@ -23,10 +24,6 @@
# 3) Consider changing your PS1 to also show the current branch,
# see git-prompt.sh for details.
if [[ -n ${ZSH_VERSION-} ]]; then
autoload -U +X bashcompinit && bashcompinit
fi
case "$COMP_WORDBREAKS" in
*:*) : great ;;
*) COMP_WORDBREAKS="$COMP_WORDBREAKS:"
@ -169,7 +166,6 @@ __git_reassemble_comp_words_by_ref()
}
if ! type _get_comp_words_by_ref >/dev/null 2>&1; then
if [[ -z ${ZSH_VERSION:+set} ]]; then
_get_comp_words_by_ref ()
{
local exclude cur_ words_ cword_
@ -197,32 +193,6 @@ _get_comp_words_by_ref ()
shift
done
}
else
_get_comp_words_by_ref ()
{
while [ $# -gt 0 ]; do
case "$1" in
cur)
cur=${COMP_WORDS[COMP_CWORD]}
;;
prev)
prev=${COMP_WORDS[COMP_CWORD-1]}
;;
words)
words=("${COMP_WORDS[@]}")
;;
cword)
cword=$COMP_CWORD
;;
-n)
# assume COMP_WORDBREAKS is already set sanely
shift
;;
esac
shift
done
}
fi
fi
# Generates completion reply with compgen, appending a space to possible
@ -264,6 +234,124 @@ __gitcomp_nl ()
COMPREPLY=($(compgen -P "${2-}" -S "${4- }" -W "$1" -- "${3-$cur}"))
}
# Generates completion reply with compgen from newline-separated possible
# completion filenames.
# It accepts 1 to 3 arguments:
# 1: List of possible completion filenames, separated by a single newline.
# 2: A directory prefix to be added to each possible completion filename
# (optional).
# 3: Generate possible completion matches for this word (optional).
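# Example, mirroring the call sites further down in this file:
#   __gitcomp_file "$(__git_index_files "--cached")" "" "$cur"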
__gitcomp_file ()
{
local IFS=$'\n'
# XXX does not work when the directory prefix contains a tilde,
# since tilde expansion is not applied.
# This means that COMPREPLY will be empty and Bash default
# completion will be used.
COMPREPLY=($(compgen -P "${2-}" -W "$1" -- "${3-$cur}"))
# Tell Bash that compspec generates filenames.
compopt -o filenames 2>/dev/null
}
__git_index_file_list_filter_compat ()
{
local path
while read -r path; do
case "$path" in
?*/*) echo "${path%%/*}/" ;;
*) echo "$path" ;;
esac
done
}
__git_index_file_list_filter_bash ()
{
local path
while read -r path; do
case "$path" in
?*/*)
# XXX if we append a slash to directory names when using
# `compopt -o filenames`, Bash will append another slash.
# This is pretty stupid, and this is the reason why we have to
# define a compatible version for this function.
echo "${path%%/*}" ;;
*)
echo "$path" ;;
esac
done
}
# Process path list returned by "ls-files" and "diff-index --name-only"
# commands, in order to list only file names relative to a specified
# directory, and append a slash to directory names.
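# For example, a path "sub/dir/file.c" becomes "sub/" in the compat version
# and "sub" in the Bash >= 4 version, where `compopt -o filenames` appends
# the trailing slash itself.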
__git_index_file_list_filter ()
{
# Default to Bash >= 4.x
__git_index_file_list_filter_bash
}
# Execute git ls-files, returning paths relative to the directory
# specified in the first argument, and using the options specified in
# the second argument.
__git_ls_files_helper ()
{
(
test -n "${CDPATH+set}" && unset CDPATH
# NOTE: $2 is not quoted in order to support multiple options
cd "$1" && git ls-files --exclude-standard $2
) 2>/dev/null
}
# Execute git diff-index, returning paths relative to the directory
# specified in the first argument, and using the tree object id
# specified in the second argument.
__git_diff_index_helper ()
{
(
test -n "${CDPATH+set}" && unset CDPATH
cd "$1" && git diff-index --name-only --relative "$2"
) 2>/dev/null
}
# __git_index_files accepts 1 or 2 arguments:
# 1: Options to pass to ls-files (required).
# Supported options are --cached, --modified, --deleted, --others,
# and --directory.
# 2: A directory path (optional).
# If provided, only files within the specified directory are listed.
# Sub directories are never recursed. Path must have a trailing
# slash.
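# Illustrative call (the directory name is made up):
#   __git_index_files "--cached --others --directory" "some-dir/"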
__git_index_files ()
{
local dir="$(__gitdir)" root="${2-.}"
if [ -d "$dir" ]; then
__git_ls_files_helper "$root" "$1" | __git_index_file_list_filter |
sort | uniq
fi
}
# __git_diff_index_files accepts 1 or 2 arguments:
# 1) The id of a tree object.
# 2) A directory path (optional).
# If provided, only files within the specified directory are listed.
# Sub directories are never recursed. Path must have a trailing
# slash.
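# Illustrative call (the directory name is made up):
#   __git_diff_index_files HEAD "some-dir/"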
__git_diff_index_files ()
{
local dir="$(__gitdir)" root="${2-.}"
if [ -d "$dir" ]; then
__git_diff_index_helper "$root" "$1" | __git_index_file_list_filter |
sort | uniq
fi
}
__git_heads ()
{
local dir="$(__gitdir)"
@ -321,7 +409,7 @@ __git_refs ()
if [[ "$ref" == "$cur"* ]]; then
echo "$ref"
fi
done | uniq -u
done | sort | uniq -u
fi
return
fi
@ -428,7 +516,7 @@ __git_complete_revlist_file ()
*) pfx="$ref:$pfx" ;;
esac
__gitcomp_nl "$(git --git-dir="$(__gitdir)" ls-tree "$ls" \
__gitcomp_nl "$(git --git-dir="$(__gitdir)" ls-tree "$ls" 2>/dev/null \
| sed '/^100... blob /{
s,^.* ,,
s,$, ,
@ -461,6 +549,46 @@ __git_complete_revlist_file ()
}
# __git_complete_index_file requires 1 argument: the options to pass to
# ls-files
__git_complete_index_file ()
{
local pfx cur_="$cur"
case "$cur_" in
?*/*)
pfx="${cur_%/*}"
cur_="${cur_##*/}"
pfx="${pfx}/"
__gitcomp_file "$(__git_index_files "$1" "$pfx")" "$pfx" "$cur_"
;;
*)
__gitcomp_file "$(__git_index_files "$1")" "" "$cur_"
;;
esac
}
# __git_complete_diff_index_file requires 1 argument: the id of a tree
# object
__git_complete_diff_index_file ()
{
local pfx cur_="$cur"
case "$cur_" in
?*/*)
pfx="${cur_%/*}"
cur_="${cur_##*/}"
pfx="${pfx}/"
__gitcomp_file "$(__git_diff_index_files "$1" "$pfx")" "$pfx" "$cur_"
;;
*)
__gitcomp_file "$(__git_diff_index_files "$1")" "" "$cur_"
;;
esac
}
__git_complete_file ()
{
__git_complete_revlist_file
@ -562,10 +690,19 @@ __git_complete_strategy ()
return 1
}
__git_commands () {
if test -n "${GIT_TESTING_COMMAND_COMPLETION:-}"
then
printf "%s" "${GIT_TESTING_COMMAND_COMPLETION}"
else
git help -a|egrep '^ [a-zA-Z0-9]'
fi
}
__git_list_all_commands ()
{
local i IFS=" "$'\n'
for i in $(git help -a|egrep '^ [a-zA-Z0-9]')
for i in $(__git_commands)
do
case $i in
*--*) : helper pattern;;
@ -585,7 +722,7 @@ __git_list_porcelain_commands ()
{
local i IFS=" "$'\n'
__git_compute_all_commands
for i in "help" $__git_all_commands
for i in $__git_all_commands
do
case $i in
*--*) : helper pattern;;
@ -594,6 +731,7 @@ __git_list_porcelain_commands ()
archimport) : import;;
cat-file) : plumbing;;
check-attr) : plumbing;;
check-ignore) : plumbing;;
check-ref-format) : plumbing;;
checkout-index) : plumbing;;
commit-tree) : plumbing;;
@ -753,6 +891,43 @@ __git_has_doubledash ()
return 1
}
# Try to count non option arguments passed on the command line for the
# specified git command.
# When options are used, it is necessary to use the special -- option to
# tell the implementation where non option arguments begin.
# XXX this can not be improved, since options can appear everywhere, as
# an example:
# git mv x -n y
#
# __git_count_arguments requires 1 argument: the git command executed.
__git_count_arguments ()
{
local word i c=0
# Skip "git" (first argument)
for ((i=1; i < ${#words[@]}; i++)); do
word="${words[i]}"
case "$word" in
--)
# Good; we can assume that the following are only non
# option arguments.
((c = 0))
;;
"$1")
# Skip the specified git command and discard git
# main options
((c = 0))
;;
?*)
((c++))
;;
esac
done
printf "%d" $c
}
__git_whitespacelist="nowarn warn error error-all fix"
_git_am ()
@ -801,8 +976,6 @@ _git_apply ()
_git_add ()
{
__git_has_doubledash && return
case "$cur" in
--*)
__gitcomp "
@ -811,7 +984,9 @@ _git_add ()
"
return
esac
COMPREPLY=()
# XXX should we check for --update and --all options ?
__git_complete_index_file "--others --modified"
}
_git_archive ()
@ -961,15 +1136,15 @@ _git_cherry_pick ()
_git_clean ()
{
__git_has_doubledash && return
case "$cur" in
--*)
__gitcomp "--dry-run --quiet"
return
;;
esac
COMPREPLY=()
# XXX should we check for -x option ?
__git_complete_index_file "--others"
}
_git_clone ()
@ -989,6 +1164,8 @@ _git_clone ()
--upload-pack
--template=
--depth
--single-branch
--branch
"
return
;;
@ -998,7 +1175,19 @@ _git_clone ()
_git_commit ()
{
__git_has_doubledash && return
case "$prev" in
-c|-C)
__gitcomp_nl "$(__git_refs)" "" "${cur}"
return
;;
esac
case "$prev" in
-c|-C)
__gitcomp_nl "$(__git_refs)" "" "${cur}"
return
;;
esac
case "$cur" in
--cleanup=*)
@ -1027,7 +1216,13 @@ _git_commit ()
"
return
esac
COMPREPLY=()
if git rev-parse --verify --quiet HEAD >/dev/null; then
__git_complete_diff_index_file "HEAD"
else
# This is the first commit
__git_complete_index_file "--cached"
fi
}
_git_describe ()
@ -1043,6 +1238,8 @@ _git_describe ()
__gitcomp_nl "$(__git_refs)"
}
__git_diff_algorithms="myers minimal patience histogram"
__git_diff_common_options="--stat --numstat --shortstat --summary
--patch-with-stat --name-only --name-status --color
--no-color --color-words --no-renames --check
@ -1053,10 +1250,11 @@ __git_diff_common_options="--stat --numstat --shortstat --summary
--no-ext-diff
--no-prefix --src-prefix= --dst-prefix=
--inter-hunk-context=
--patience
--patience --histogram --minimal
--raw
--dirstat --dirstat= --dirstat-by-file
--dirstat-by-file= --cumulative
--diff-algorithm=
"
_git_diff ()
@ -1064,6 +1262,10 @@ _git_diff ()
__git_has_doubledash && return
case "$cur" in
--diff-algorithm=*)
__gitcomp "$__git_diff_algorithms" "" "${cur##--diff-algorithm=}"
return
;;
--*)
__gitcomp "--cached --staged --pickaxe-all --pickaxe-regex
--base --ours --theirs --no-index
@ -1116,6 +1318,14 @@ _git_fetch ()
__git_complete_remote_or_refspec
}
__git_format_patch_options="
--stdout --attach --no-attach --thread --thread= --output-directory
--numbered --start-number --numbered-files --keep-subject --signoff
--signature --no-signature --in-reply-to= --cc= --full-index --binary
--not --all --cover-letter --no-prefix --src-prefix= --dst-prefix=
--inline --suffix= --ignore-if-in-upstream --subject-prefix=
"
_git_format_patch ()
{
case "$cur" in
@ -1126,21 +1336,7 @@ _git_format_patch ()
return
;;
--*)
__gitcomp "
--stdout --attach --no-attach --thread --thread=
--output-directory
--numbered --start-number
--numbered-files
--keep-subject
--signoff --signature --no-signature
--in-reply-to= --cc=
--full-index --binary
--not --all
--cover-letter
--no-prefix --src-prefix= --dst-prefix=
--inline --suffix= --ignore-if-in-upstream
--subject-prefix=
"
__gitcomp "$__git_format_patch_options"
return
;;
esac
@ -1251,8 +1447,6 @@ _git_init ()
_git_ls_files ()
{
__git_has_doubledash && return
case "$cur" in
--*)
__gitcomp "--cached --deleted --modified --others --ignored
@ -1265,7 +1459,10 @@ _git_ls_files ()
return
;;
esac
COMPREPLY=()
# XXX ignore options like --modified and always suggest all cached
# files.
__git_complete_index_file "--cached"
}
_git_ls_remote ()
@ -1397,7 +1594,14 @@ _git_mv ()
return
;;
esac
COMPREPLY=()
if [ $(__git_count_arguments "mv") -gt 0 ]; then
# We need to show both cached and untracked files (including
# empty directories) since this may not be the last argument.
__git_complete_index_file "--cached --others --directory"
else
__git_complete_index_file "--cached"
fi
}
_git_name_rev ()
@ -1554,6 +1758,12 @@ _git_send_email ()
__gitcomp "ssl tls" "" "${cur##--smtp-encryption=}"
return
;;
--thread=*)
__gitcomp "
deep shallow
" "" "${cur##--thread=}"
return
;;
--*)
__gitcomp "--annotate --bcc --cc --cc-cmd --chain-reply-to
--compose --confirm= --dry-run --envelope-sender
@ -1563,11 +1773,12 @@ _git_send_email ()
--signed-off-by-cc --smtp-pass --smtp-server
--smtp-server-port --smtp-encryption= --smtp-user
--subject --suppress-cc= --suppress-from --thread --to
--validate --no-validate"
--validate --no-validate
$__git_format_patch_options"
return
;;
esac
COMPREPLY=()
__git_complete_revlist
}
_git_stage ()
@ -1581,7 +1792,7 @@ __git_config_get_set_variables ()
while [ $c -gt 1 ]; do
word="${words[c]}"
case "$word" in
--global|--system|--file=*)
--system|--global|--local|--file=*)
config_file="$word"
break
;;
@ -1687,7 +1898,7 @@ _git_config ()
case "$cur" in
--*)
__gitcomp "
--global --system --file=
--system --global --local --file=
--list --replace-all
--get --get-all --get-regexp
--add --unset --unset-all
@ -1860,6 +2071,7 @@ _git_config ()
diff.suppressBlankEmpty
diff.tool
diff.wordRegex
diff.algorithm
difftool.
difftool.prompt
fetch.recurseSubmodules
@ -2096,15 +2308,14 @@ _git_revert ()
_git_rm ()
{
__git_has_doubledash && return
case "$cur" in
--*)
__gitcomp "--cached --dry-run --ignore-unmatch --quiet"
return
;;
esac
COMPREPLY=()
__git_complete_index_file "--cached"
}
_git_shortlog ()
@ -2134,6 +2345,10 @@ _git_show ()
" "" "${cur#*=}"
return
;;
--diff-algorithm=*)
__gitcomp "$__git_diff_algorithms" "" "${cur##--diff-algorithm=}"
return
;;
--*)
__gitcomp "--pretty= --format= --abbrev-commit --oneline
$__git_diff_common_options
@ -2429,20 +2644,88 @@ __gitk_main ()
__git_complete_revlist
}
if [[ -n ${ZSH_VERSION-} ]]; then
echo "WARNING: this script is deprecated, please see git-completion.zsh" 1>&2
autoload -U +X compinit && compinit
__gitcomp ()
{
emulate -L zsh
local cur_="${3-$cur}"
case "$cur_" in
--*=)
;;
*)
local c IFS=$' \t\n'
local -a array
for c in ${=1}; do
c="$c${4-}"
case $c in
--*=*|*.) ;;
*) c="$c " ;;
esac
array[$#array+1]="$c"
done
compset -P '*[=:]'
compadd -Q -S '' -p "${2-}" -a -- array && _ret=0
;;
esac
}
__gitcomp_nl ()
{
emulate -L zsh
local IFS=$'\n'
compset -P '*[=:]'
compadd -Q -S "${4- }" -p "${2-}" -- ${=1} && _ret=0
}
__gitcomp_file ()
{
emulate -L zsh
local IFS=$'\n'
compset -P '*[=:]'
compadd -Q -p "${2-}" -f -- ${=1} && _ret=0
}
__git_zsh_helper ()
{
emulate -L ksh
local cur cword prev
cur=${words[CURRENT-1]}
prev=${words[CURRENT-2]}
let cword=CURRENT-1
__${service}_main
}
_git ()
{
emulate -L zsh
local _ret=1
__git_zsh_helper
let _ret && _default -S '' && _ret=0
return _ret
}
compdef _git git gitk
return
elif [[ -n ${BASH_VERSION-} ]]; then
if ((${BASH_VERSINFO[0]} < 4)); then
# compopt is not supported
__git_index_file_list_filter ()
{
__git_index_file_list_filter_compat
}
fi
fi
__git_func_wrap ()
{
if [[ -n ${ZSH_VERSION-} ]]; then
emulate -L bash
setopt KSH_TYPESET
# workaround zsh's bug that leaves 'words' as a special
# variable in versions < 4.3.12
typeset -h words
# workaround zsh's bug that quotes spaces in the COMPREPLY
# array if IFS doesn't contain spaces.
typeset -h IFS
fi
local cur words cword prev
_get_comp_words_by_ref -n =: cur words cword prev
$1

View file

@ -10,9 +10,22 @@
# 1) Copy this file to somewhere (e.g. ~/.git-prompt.sh).
# 2) Add the following line to your .bashrc/.zshrc:
# source ~/.git-prompt.sh
# 3) Change your PS1 to also show the current branch:
# Bash: PS1='[\u@\h \W$(__git_ps1 " (%s)")]\$ '
# ZSH: PS1='[%n@%m %c$(__git_ps1 " (%s)")]\$ '
# 3a) Change your PS1 to call __git_ps1 as
# command-substitution:
# Bash: PS1='[\u@\h \W$(__git_ps1 " (%s)")]\$ '
# ZSH: PS1='[%n@%m %c$(__git_ps1 " (%s)")]\$ '
# the optional argument will be used as format string.
# 3b) Alternatively, if you are using bash, __git_ps1 can be
# used for PROMPT_COMMAND with two parameters, <pre> and
# <post>, which are strings you would put in $PS1 before
# and after the status string generated by the git-prompt
# machinery. e.g.
# PROMPT_COMMAND='__git_ps1 "\u@\h:\w" "\\\$ "'
# will show username, at-sign, host, colon, cwd, then
# various status string, followed by dollar and SP, as
# your prompt.
# Optionally, you can supply a third argument with a printf
# format string to finetune the output of the branch status
#
# The argument to __git_ps1 will be displayed only if you are currently
# in a git repository. The %s token will be the name of the current
@ -30,7 +43,10 @@
#
# If you would like to see if there're untracked files, then you can set
# GIT_PS1_SHOWUNTRACKEDFILES to a nonempty value. If there're untracked
# files, then a '%' will be shown next to the branch name.
# files, then a '%' will be shown next to the branch name. You can
# configure this per-repository with the bash.showUntrackedFiles
# variable, which defaults to true once GIT_PS1_SHOWUNTRACKEDFILES is
# enabled.
#
# If you would like to see the difference between HEAD and its upstream,
# set GIT_PS1_SHOWUPSTREAM="auto". A "<" indicates you are behind, ">"
@ -49,6 +65,19 @@
# find one, or @{upstream} otherwise. Once you have set
# GIT_PS1_SHOWUPSTREAM, you can override it on a per-repository basis by
# setting the bash.showUpstream config variable.
#
# If you would like to see more information about the identity of
# commits checked out as a detached HEAD, set GIT_PS1_DESCRIBE_STYLE
# to one of these values:
#
# contains relative to newer annotated tag (v1.6.3.2~35)
# branch relative to newer tag or branch (master~4)
# describe relative to older annotated tag (v1.6.3.1-13-gdd42c2f)
# default exactly matching tag
#
# If you would like a colored hint about the current dirty state, set
# GIT_PS1_SHOWCOLORHINTS to a nonempty value. The colors are based on
# the colored output of "git status -sb".
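# A minimal combined setup might look like this (values are illustrative):
#   GIT_PS1_SHOWDIRTYSTATE=1
#   GIT_PS1_SHOWUNTRACKEDFILES=1
#   GIT_PS1_SHOWCOLORHINTS=1
#   PROMPT_COMMAND='__git_ps1 "\u@\h:\w" "\\\$ "'
# (color hints only take effect in the PROMPT_COMMAND mode described above)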
# __gitdir accepts 0 or 1 arguments (i.e., location)
# returns location of .git repo
@ -195,11 +224,43 @@ __git_ps1_show_upstream ()
# __git_ps1 accepts 0 or 1 arguments (i.e., format string)
# returns text to add to bash PS1 prompt (includes branch name)
# when called from PS1 using command substitution
# in this mode it prints text to add to bash PS1 prompt (includes branch name)
#
# __git_ps1 requires 2 or 3 arguments when called from PROMPT_COMMAND (pc)
# in that case it _sets_ PS1. The arguments are parts of a PS1 string.
# when two arguments are given, the first is prepended and the second appended
# to the state string when assigned to PS1.
# The optional third parameter will be used as printf format string to further
# customize the output of the git-status string.
# In this mode you can request colored hints using GIT_PS1_SHOWCOLORHINTS=true
__git_ps1 ()
{
local pcmode=no
local detached=no
local ps1pc_start='\u@\h:\w '
local ps1pc_end='\$ '
local printf_format=' (%s)'
case "$#" in
2|3) pcmode=yes
ps1pc_start="$1"
ps1pc_end="$2"
printf_format="${3:-$printf_format}"
;;
0|1) printf_format="${1:-$printf_format}"
;;
*) return
;;
esac
local g="$(__gitdir)"
if [ -n "$g" ]; then
if [ -z "$g" ]; then
if [ $pcmode = yes ]; then
#In PC mode PS1 always needs to be set
PS1="$ps1pc_start$ps1pc_end"
fi
else
local r=""
local b=""
if [ -f "$g/rebase-merge/interactive" ]; then
@ -226,7 +287,7 @@ __git_ps1 ()
fi
b="$(git symbolic-ref HEAD 2>/dev/null)" || {
detached=yes
b="$(
case "${GIT_PS1_DESCRIBE_STYLE-}" in
(contains)
@ -259,24 +320,25 @@ __git_ps1 ()
b="GIT_DIR!"
fi
elif [ "true" = "$(git rev-parse --is-inside-work-tree 2>/dev/null)" ]; then
if [ -n "${GIT_PS1_SHOWDIRTYSTATE-}" ]; then
if [ "$(git config --bool bash.showDirtyState)" != "false" ]; then
git diff --no-ext-diff --quiet --exit-code || w="*"
if git rev-parse --quiet --verify HEAD >/dev/null; then
git diff-index --cached --quiet HEAD -- || i="+"
else
i="#"
fi
if [ -n "${GIT_PS1_SHOWDIRTYSTATE-}" ] &&
[ "$(git config --bool bash.showDirtyState)" != "false" ]
then
git diff --no-ext-diff --quiet --exit-code || w="*"
if git rev-parse --quiet --verify HEAD >/dev/null; then
git diff-index --cached --quiet HEAD -- || i="+"
else
i="#"
fi
fi
if [ -n "${GIT_PS1_SHOWSTASHSTATE-}" ]; then
git rev-parse --verify refs/stash >/dev/null 2>&1 && s="$"
fi
if [ -n "${GIT_PS1_SHOWUNTRACKEDFILES-}" ]; then
if [ -n "$(git ls-files --others --exclude-standard)" ]; then
u="%"
fi
if [ -n "${GIT_PS1_SHOWUNTRACKEDFILES-}" ] &&
[ "$(git config --bool bash.showUntrackedFiles)" != "false" ] &&
[ -n "$(git ls-files --others --exclude-standard)" ]
then
u="%%"
fi
if [ -n "${GIT_PS1_SHOWUPSTREAM-}" ]; then
@ -285,6 +347,53 @@ __git_ps1 ()
fi
local f="$w$i$s$u"
printf -- "${1:- (%s)}" "$c${b##refs/heads/}${f:+ $f}$r$p"
if [ $pcmode = yes ]; then
local gitstring=
if [ -n "${GIT_PS1_SHOWCOLORHINTS-}" ]; then
local c_red='\e[31m'
local c_green='\e[32m'
local c_lblue='\e[1;34m'
local c_clear='\e[0m'
local bad_color=$c_red
local ok_color=$c_green
local branch_color="$c_clear"
local flags_color="$c_lblue"
local branchstring="$c${b##refs/heads/}"
if [ $detached = no ]; then
branch_color="$ok_color"
else
branch_color="$bad_color"
fi
# Setting gitstring directly with \[ and \] around colors
# is necessary to prevent wrapping issues!
gitstring="\[$branch_color\]$branchstring\[$c_clear\]"
if [ -n "$w$i$s$u$r$p" ]; then
gitstring="$gitstring "
fi
if [ "$w" = "*" ]; then
gitstring="$gitstring\[$bad_color\]$w"
fi
if [ -n "$i" ]; then
gitstring="$gitstring\[$ok_color\]$i"
fi
if [ -n "$s" ]; then
gitstring="$gitstring\[$flags_color\]$s"
fi
if [ -n "$u" ]; then
gitstring="$gitstring\[$bad_color\]$u"
fi
gitstring="$gitstring\[$c_clear\]$r$p"
else
gitstring="$c${b##refs/heads/}${f:+ $f}$r$p"
fi
gitstring=$(printf -- "$printf_format" "$gitstring")
PS1="$ps1pc_start$gitstring$ps1pc_end"
else
# NO color option unless in PROMPT_COMMAND mode
printf -- "$printf_format" "$c${b##refs/heads/}${f:+ $f}$r$p"
fi
fi
}

View file

@ -3,5 +3,6 @@ source $dir/../git/git.plugin.zsh
source $dir/git-prompt.sh
function git_prompt_info() {
__git_ps1 "${ZSH_THEME_GIT_PROMPT_PREFIX//\%/%%}%s${ZSH_THEME_GIT_PROMPT_SUFFIX//\%/%%}"
dirty="$(parse_git_dirty)"
__git_ps1 "${ZSH_THEME_GIT_PROMPT_PREFIX//\%/%%}%s${dirty//\%/%%}${ZSH_THEME_GIT_PROMPT_SUFFIX//\%/%%}"
}

View file

@ -1,5 +1,22 @@
# Setup hub function for git, if it is available; http://github.com/defunkt/hub
if [ "$commands[(I)hub]" ] && [ "$commands[(I)ruby]" ]; then
# Autoload _git completion functions
if declare -f _git > /dev/null; then
_git
fi
if declare -f _git_commands > /dev/null; then
_hub_commands=(
'alias:show shell instructions for wrapping git'
'pull-request:open a pull request on GitHub'
'fork:fork origin repo on GitHub'
'create:create new repo on GitHub for the current project'
'browse:browse the project on GitHub'
'compare:open GitHub compare view'
)
# Extend the '_git_commands' function with hub commands
eval "$(declare -f _git_commands | sed -e 's/base_commands=(/base_commands=(${_hub_commands} /')"
fi
# eval `hub alias -s zsh`
function git(){
if ! (( $+_has_working_hub )); then

151
plugins/go/go.plugin.zsh Normal file
View file

@ -0,0 +1,151 @@
# install in /etc/zsh/zshrc or your personal .zshrc
# gc
prefixes=(5 6 8)
for p in $prefixes; do
compctl -g "*.${p}" ${p}l
compctl -g "*.go" ${p}g
done
# standard go tools
compctl -g "*.go" gofmt
# gccgo
compctl -g "*.go" gccgo
# go tool
__go_tool_complete() {
typeset -a commands build_flags
commands+=(
'build[compile packages and dependencies]'
'clean[remove object files]'
'doc[run godoc on package sources]'
'fix[run go tool fix on packages]'
'fmt[run gofmt on package sources]'
'get[download and install packages and dependencies]'
'help[display help]'
'install[compile and install packages and dependencies]'
'list[list packages]'
'run[compile and run Go program]'
'test[test packages]'
'tool[run specified go tool]'
'version[print Go version]'
'vet[run go tool vet on packages]'
)
if (( CURRENT == 2 )); then
# explain go commands
_values 'go tool commands' ${commands[@]}
return
fi
build_flags=(
'-a[force reinstallation of packages that are already up-to-date]'
'-n[print the commands but do not run them]'
"-p[number of parallel builds]:number"
'-x[print the commands]'
"-work[print temporary directory name and keep it]"
"-gcflags[flags for 5g/6g/8g]:flags"
"-ldflags[flags for 5l/6l/8l]:flags"
"-gccgoflags[flags for gccgo]:flags"
)
__go_list() {
local expl importpaths
declare -a importpaths
importpaths=($(go list ${words[$CURRENT]}... 2>/dev/null))
_wanted importpaths expl 'import paths' compadd "$@" - "${importpaths[@]}"
}
case ${words[2]} in
clean|doc)
_arguments -s -w : '*:importpaths:__go_list'
;;
fix|fmt|list|vet)
_alternative ':importpaths:__go_list' ':files:_path_files -g "*.go"'
;;
install)
_arguments -s -w : ${build_flags[@]} \
"-v[show package names]" \
'*:importpaths:__go_list'
;;
get)
_arguments -s -w : \
${build_flags[@]}
;;
build)
_arguments -s -w : \
${build_flags[@]} \
"-v[show package names]" \
"-o[output file]:file:_files" \
"*:args:{ _alternative ':importpaths:__go_list' ':files:_path_files -g \"*.go\"' }"
;;
test)
_arguments -s -w : \
${build_flags[@]} \
"-c[do not run, compile the test binary]" \
"-i[do not run, install dependencies]" \
"-v[print test output]" \
"-x[print the commands]" \
"-short[use short mode]" \
"-parallel[number of parallel tests]:number" \
"-cpu[values of GOMAXPROCS to use]:number list" \
"-run[run tests and examples matching regexp]:regexp" \
"-bench[run benchmarks matching regexp]:regexp" \
"-benchtime[run each benchmark during n seconds]:duration" \
"-timeout[kill test after that duration]:duration" \
"-cpuprofile[write CPU profile to file]:file:_files" \
"-memprofile[write heap profile to file]:file:_files" \
"-memprofilerate[set heap profiling rate]:number" \
"*:args:{ _alternative ':importpaths:__go_list' ':files:_path_files -g \"*.go\"' }"
;;
help)
_values "${commands[@]}" \
'gopath[GOPATH environment variable]' \
'importpath[description of import paths]' \
'remote[remote import path syntax]' \
'testflag[description of testing flags]' \
'testfunc[description of testing functions]'
;;
run)
_arguments -s -w : \
${build_flags[@]} \
'*:file:_path_files -g "*.go"'
;;
tool)
if (( CURRENT == 3 )); then
_values "go tool" $(go tool)
return
fi
case ${words[3]} in
[568]g)
_arguments -s -w : \
'-I[search for packages in DIR]:includes:_path_files -/' \
'-L[show full path in file:line prints]' \
'-S[print the assembly language]' \
'-V[print the compiler version]' \
'-e[no limit on number of errors printed]' \
'-h[panic on an error]' \
'-l[disable inlining]' \
'-m[print optimization decisions]' \
'-o[file specify output file]:file' \
'-p[assumed import path for this code]:importpath' \
'-u[disable package unsafe]' \
"*:file:_files -g '*.go'"
;;
[568]l)
local O=${words[3]%l}
_arguments -s -w : \
'-o[file specify output file]:file' \
'-L[search for packages in DIR]:includes:_path_files -/' \
"*:file:_files -g '*.[ao$O]'"
;;
dist)
_values "dist tool" banner bootstrap clean env install version
;;
*)
# use files by default
_files
;;
esac
;;
esac
}
compdef __go_tool_complete go

View file

@ -0,0 +1,150 @@
# From : http://golang.org/misc/zsh/go?m=text
# gc
prefixes=(5 6 8)
for p in $prefixes; do
compctl -g "*.${p}" ${p}l
compctl -g "*.go" ${p}g
done
# standard go tools
compctl -g "*.go" gofmt
# gccgo
compctl -g "*.go" gccgo
# go tool
__go_tool_complete() {
typeset -a commands build_flags
commands+=(
'build[compile packages and dependencies]'
'clean[remove object files]'
'doc[run godoc on package sources]'
'fix[run go tool fix on packages]'
'fmt[run gofmt on package sources]'
'get[download and install packages and dependencies]'
'help[display help]'
'install[compile and install packages and dependencies]'
'list[list packages]'
'run[compile and run Go program]'
'test[test packages]'
'tool[run specified go tool]'
'version[print Go version]'
'vet[run go tool vet on packages]'
)
if (( CURRENT == 2 )); then
# explain go commands
_values 'go tool commands' ${commands[@]}
return
fi
build_flags=(
'-a[force reinstallation of packages that are already up-to-date]'
'-n[print the commands but do not run them]'
"-p[number of parallel builds]:number"
'-x[print the commands]'
"-work[print temporary directory name and keep it]"
"-gcflags[flags for 5g/6g/8g]:flags"
"-ldflags[flags for 5l/6l/8l]:flags"
"-gccgoflags[flags for gccgo]:flags"
)
__go_list() {
local expl importpaths
declare -a importpaths
importpaths=($(go list ${words[$CURRENT]}... 2>/dev/null))
_wanted importpaths expl 'import paths' compadd "$@" - "${importpaths[@]}"
}
case ${words[2]} in
clean|doc)
_arguments -s -w : '*:importpaths:__go_list'
;;
fix|fmt|list|vet)
_alternative ':importpaths:__go_list' ':files:_path_files -g "*.go"'
;;
install)
_arguments -s -w : ${build_flags[@]} \
"-v[show package names]" \
'*:importpaths:__go_list'
;;
get)
_arguments -s -w : \
${build_flags[@]}
;;
build)
_arguments -s -w : \
${build_flags[@]} \
"-v[show package names]" \
"-o[output file]:file:_files" \
"*:args:{ _alternative ':importpaths:__go_list' ':files:_path_files -g \"*.go\"' }"
;;
test)
_arguments -s -w : \
${build_flags[@]} \
"-c[do not run, compile the test binary]" \
"-i[do not run, install dependencies]" \
"-v[print test output]" \
"-x[print the commands]" \
"-short[use short mode]" \
"-parallel[number of parallel tests]:number" \
"-cpu[values of GOMAXPROCS to use]:number list" \
"-run[run tests and examples matching regexp]:regexp" \
"-bench[run benchmarks matching regexp]:regexp" \
"-benchtime[run each benchmark during n seconds]:duration" \
"-timeout[kill test after that duration]:duration" \
"-cpuprofile[write CPU profile to file]:file:_files" \
"-memprofile[write heap profile to file]:file:_files" \
"-memprofilerate[set heap profiling rate]:number" \
"*:args:{ _alternative ':importpaths:__go_list' ':files:_path_files -g \"*.go\"' }"
;;
help)
_values "${commands[@]}" \
'gopath[GOPATH environment variable]' \
'importpath[description of import paths]' \
'remote[remote import path syntax]' \
'testflag[description of testing flags]' \
'testfunc[description of testing functions]'
;;
run)
_arguments -s -w : \
${build_flags[@]} \
'*:file:_path_files -g "*.go"'
;;
tool)
if (( CURRENT == 3 )); then
_values "go tool" $(go tool)
return
fi
case ${words[3]} in
[568]g)
_arguments -s -w : \
'-I[search for packages in DIR]:includes:_path_files -/' \
'-L[show full path in file:line prints]' \
'-S[print the assembly language]' \
'-V[print the compiler version]' \
'-e[no limit on number of errors printed]' \
'-h[panic on an error]' \
'-l[disable inlining]' \
'-m[print optimization decisions]' \
'-o[file specify output file]:file' \
'-p[assumed import path for this code]:importpath' \
'-u[disable package unsafe]' \
"*:file:_files -g '*.go'"
;;
[568]l)
local O=${words[3]%l}
_arguments -s -w : \
'-o[file specify output file]:file' \
'-L[search for packages in DIR]:includes:_path_files -/' \
"*:file:_files -g '*.[ao$O]'"
;;
dist)
_values "dist tool" banner bootstrap clean env install version
;;
*)
# use files by default
_files
;;
esac
;;
esac
}
compdef __go_tool_complete go

View file

@ -1,26 +1,38 @@
# Based on ssh-agent code
local GPG_ENV=$HOME/.gnupg/gpg-agent.env
function start_agent {
/usr/bin/env gpg-agent --daemon --enable-ssh-support --write-env-file ${GPG_ENV} > /dev/null
chmod 600 ${GPG_ENV}
. ${GPG_ENV} > /dev/null
function start_agent_nossh {
eval $(/usr/bin/env gpg-agent --quiet --daemon --write-env-file ${GPG_ENV} 2> /dev/null)
chmod 600 ${GPG_ENV}
export GPG_AGENT_INFO
}
# Source GPG agent settings, if applicable
if [ -f "${GPG_ENV}" ]; then
. ${GPG_ENV} > /dev/null
ps -ef | grep ${SSH_AGENT_PID} | grep gpg-agent > /dev/null || {
start_agent;
}
else
start_agent;
fi
function start_agent_withssh {
eval $(/usr/bin/env gpg-agent --quiet --daemon --enable-ssh-support --write-env-file ${GPG_ENV} 2> /dev/null)
chmod 600 ${GPG_ENV}
export GPG_AGENT_INFO
export SSH_AUTH_SOCK
export SSH_AGENT_PID
}
export GPG_AGENT_INFO
export SSH_AUTH_SOCK
export SSH_AGENT_PID
# check if another agent is running
if ! gpg-connect-agent --quiet /bye > /dev/null 2> /dev/null; then
# source settings of old agent, if applicable
if [ -f "${GPG_ENV}" ]; then
. ${GPG_ENV} > /dev/null
fi
# check again if another agent is running using the newly sourced settings
if ! gpg-connect-agent --quiet /bye > /dev/null 2> /dev/null; then
# check for existing ssh-agent
if ssh-add -l > /dev/null 2> /dev/null; then
# ssh-agent running, start gpg-agent without ssh support
start_agent_nossh;
else
# otherwise start gpg-agent with ssh support
start_agent_withssh;
fi
fi
fi
GPG_TTY=$(tty)
export GPG_TTY

View file

@ -54,27 +54,14 @@ function in_gradle() {
fi
}
############################################################################
# Define the stat_cmd command based on platform behavior
##########################################################################
stat -f%m . > /dev/null 2>&1
if [ "$?" = 0 ]; then
stat_cmd=(stat -f%m)
else
stat_cmd=(stat -L --format=%Y)
fi
##############################################################################
# Examine the build.gradle file to see if its
# timestamp has changed, and if so, regen
# the .gradletasknamecache file
############################################################################
_gradle_does_task_list_need_generating () {
if [ ! -f .gradletasknamecache ]; then return 0;
else
accurate=$($stat_cmd .gradletasknamecache)
changed=$($stat_cmd build.gradle)
return $(expr $accurate '>=' $changed)
fi
[ ! -f .gradletasknamecache ] && return 0;
[ .gradletasknamecache -nt build.gradle ] && return 0;
return 1;
}

View file

@ -24,17 +24,23 @@ _enumerateGrailsScripts() {
return
fi
# - Strip the path
# - Remove all scripts with a leading '_'
# - PackagePlugin_.groovy -> PackagePlugin
# - PackagePlugin -> Package-Plugin
# - Package-Plugin -> package-plugin
basename $files \
| sed -E -e 's/^_?([^_]+)_?.groovy/\1/'\
-e 's/([a-z])([A-Z])/\1-\2/g' \
| tr "[:upper:]" "[:lower:]" \
| sort \
| uniq
scripts=()
for file in $files
do
# - Strip the path
# - Remove all scripts with a leading '_'
# - PackagePlugin_.groovy -> PackagePlugin
# - PackagePlugin -> Package-Plugin
# - Package-Plugin -> package-plugin
command=$(basename $file \
| sed -E -e 's/^_?([^_]+)_?.groovy/\1/'\
-e 's/([a-z])([A-Z])/\1-\2/g' \
| tr "[:upper:]" "[:lower:]" \
| sort \
| uniq)
scripts+=($command)
done
echo $scripts
}
_grails() {

View file

@ -3,7 +3,7 @@
# .jira-url in the current directory takes precedence
#
# If you use Rapid Board, set:
#JIRA_RAPID_BOARD="yes"
#JIRA_RAPID_BOARD="true"
# in your .zshrc
#
# Setup: cd to/my/project
@ -11,6 +11,13 @@
# Usage: jira # opens a new issue
# jira ABC-123 # Opens an existing issue
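# A .jira-url file simply contains the base URL, e.g. (hypothetical host):
#   echo "https://jira.example.com" > .jira-url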
open_jira_issue () {
local open_cmd
if [[ $(uname -s) == 'Darwin' ]]; then
open_cmd='open'
else
open_cmd='xdg-open'
fi
if [ -f .jira-url ]; then
jira_url=$(cat .jira-url)
elif [ -f ~/.jira-url ]; then
@ -27,10 +34,10 @@ open_jira_issue () {
`open $jira_url/secure/CreateIssue!default.jspa`
else
echo "Opening issue #$1"
if [[ "x$JIRA_RAPID_BOARD" = "yes" ]]; then
`open $jira_url/issues/$1`
if [[ "x$JIRA_RAPID_BOARD" = "xtrue" ]]; then
$open_cmd "$jira_url/issues/$1"
else
`open $jira_url/browse/$1`
$open_cmd "$jira_url/browse/$1"
fi
fi
}

View file

@ -0,0 +1,58 @@
# Easily jump around the file system by manually adding marks
# marks are stored as symbolic links in the directory $MARKPATH (default $HOME/.marks)
#
# jump FOO: jump to a mark named FOO
# mark FOO: create a mark named FOO
# unmark FOO: delete a mark
# marks: lists all marks
#
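# Example (paths and mark name are illustrative):
#   cd ~/work/my-project && mark proj   # remember this directory as "proj"
#   jump proj                           # later, jump back from anywhere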
export MARKPATH=$HOME/.marks
jump() {
cd -P "$MARKPATH/$1" 2>/dev/null || echo "No such mark: $1"
}
mark() {
if (( $# == 0 )); then
MARK=$(basename "$(pwd)")
else
MARK="$1"
fi
if read -q \?"Mark $(pwd) as ${MARK}? (y/n) "; then
mkdir -p "$MARKPATH"; ln -s "$(pwd)" "$MARKPATH/$MARK"
fi
}
unmark() {
rm -i "$MARKPATH/$1"
}
autoload colors
marks() {
for link in $MARKPATH/*(@); do
local markname="$fg[cyan]${link:t}$reset_color"
local markpath="$fg[blue]$(readlink $link)$reset_color"
printf "%s\t" $markname
printf "-> %s \t\n" $markpath
done
}
_completemarks() {
if [[ $(ls "${MARKPATH}" | wc -l) -gt 1 ]]; then
reply=($(ls $MARKPATH/**/*(-) | grep : | sed -E 's/(.*)\/([_\da-zA-Z\-]*):$/\2/g'))
else
if readlink -e "${MARKPATH}"/* &>/dev/null; then
reply=($(ls "${MARKPATH}"))
fi
fi
}
compctl -K _completemarks jump
compctl -K _completemarks unmark
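# Typing a mark name on the command line and pressing Ctrl-g (bound below)
# replaces it in place with the directory the mark points to.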
_mark_expansion() {
setopt extendedglob
autoload -U modify-current-argument
modify-current-argument '$(readlink "$MARKPATH/$ARG")'
}
zle -N _mark_expansion
bindkey "^g" _mark_expansion

View file

@ -1,5 +1,10 @@
#compdef knife
# You can override the path to knife.rb and your cookbooks by setting
# KNIFE_CONF_PATH=/path/to/my/.chef/knife.rb
# KNIFE_COOKBOOK_PATH=/path/to/my/chef/cookbooks
# Read around where these are used for more detail.
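# For example, in your zshrc (paths are illustrative):
#   export KNIFE_CONF_PATH=~/chef-repo/.chef/knife.rb
#   export KNIFE_COOKBOOK_PATH=~/chef-repo/cookbooks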
# These flags should be available everywhere according to man knife
knife_general_flags=( --help --server-url --key --config --editor --format --log_level --logfile --no-editor --user --print-after --version --yes )
@ -26,7 +31,7 @@ _knife() {
case $state in
knifecmd)
compadd -Q "$@" bootstrap client configure cookbook "cookbook site" "data bag" exec environment index node recipe role search ssh status windows $cloudproviders
compadd -Q "$@" bootstrap client configure cookbook "cookbook site" "data bag" diff exec environment index node recipe role search ssh status upload windows $cloudproviders
;;
knifesubcmd)
case $words[2] in
@ -42,9 +47,12 @@ _knife() {
cookbook)
compadd -Q "$@" test list create download delete "metadata from" show "bulk delete" metadata upload
;;
environment)
diff)
_arguments '*:file or directory:_files -g "*"'
;;
environment)
compadd -Q "$@" list create delete edit show "from file"
;;
;;
node)
compadd -Q "$@" "from file" create show edit delete list run_list "bulk delete"
;;
@ -54,6 +62,9 @@ _knife() {
role)
compadd -Q "$@" "bulk delete" create delete edit "from file" list show
;;
upload)
_arguments '*:file or directory:_files -g "*"'
;;
windows)
compadd "$@" bootstrap
;;
@ -170,11 +181,13 @@ _chef_environments_remote() {
# The chef_x_local functions use the knife config to find the paths of relevant objects x to be uploaded to the server
_chef_cookbooks_local() {
local knife_rb="$HOME/.chef/knife.rb"
if [ -f ./.chef/knife.rb ]; then
knife_rb="./.chef/knife.rb"
fi
(for i in $( grep cookbook_path $knife_rb | awk 'BEGIN {FS = "[" }; {print $2}' | sed 's/\,//g' | sed "s/'//g" | sed 's/\(.*\)]/\1/' ); do ls $i; done)
local knife_rb=${KNIFE_CONF_PATH:-${HOME}/.chef/knife.rb}
if [ -f ./.chef/knife.rb ]; then
knife_rb="./.chef/knife.rb"
fi
local cookbook_path=${KNIFE_COOKBOOK_PATH:-$(grep cookbook_path $knife_rb | awk 'BEGIN {FS = "[" }; {print $2}' | sed 's/\,//g' | sed "s/'//g" | sed 's/\(.*\)]/\1/' )}
(for i in $cookbook_path; do ls $i; done)
}
# This function extracts the available cookbook versions on the chef server

View file

@ -0,0 +1,18 @@
function knife_ssh() {
grep -q $1 ~/.knife_comp~ 2> /dev/null || rm -f ~/.knife_comp~;
ssh $(knife node show $1 | awk '/IP:/{print $2}')
}
_knife_ssh() {
if hash knife 2>/dev/null; then
if [[ ! -f ~/.knife_comp~ ]]; then
echo "\nGenerating ~/.knife_comp~..." >/dev/stderr
knife node list > ~/.knife_comp~
fi
compadd $(<~/.knife_comp~)
else
echo "Could not find knife" > /dev/stderr;
fi
}
compdef _knife_ssh knife_ssh

View file

@ -9,7 +9,8 @@ local cache_file="$ZSH/cache/last-working-dir"
# Updates the last directory once directory is changed.
function chpwd() {
echo "$PWD" > "$cache_file"
# Use >| in case noclobber is set to avoid "file exists" error
pwd >| "$cache_file"
}
# Changes directory to the last working directory.

View file

@ -1,20 +1,63 @@
# Mercurial
alias hgc='hg commit'
alias hgb='hg branch'
alias hgba='hg branches'
alias hgbk='hg bookmarks'
alias hgco='hg checkout'
alias hgd='hg diff'
alias hged='hg diffmerge'
# pull and update
alias hgi='hg incoming'
alias hgl='hg pull -u'
alias hglr='hg pull --rebase'
alias hgo='hg outgoing'
alias hgp='hg push'
alias hgs='hg status'
# this is the 'git commit --amend' equivalent
alias hgca='hg qimport -r tip ; hg qrefresh -e ; hg qfinish tip'
function hg_current_branch() {
if [ -d .hg ]; then
echo hg:$(hg branch)
function in_hg() {
if [[ -d .hg ]] || $(hg summary > /dev/null 2>&1); then
echo 1
fi
}
}
function hg_get_branch_name() {
if [ $(in_hg) ]; then
echo $(hg branch)
fi
}
function hg_prompt_info {
if [ $(in_hg) ]; then
_DISPLAY=$(hg_get_branch_name)
echo "$ZSH_PROMPT_BASE_COLOR$ZSH_THEME_HG_PROMPT_PREFIX\
$ZSH_THEME_REPO_NAME_COLOR$_DISPLAY$ZSH_PROMPT_BASE_COLOR$ZSH_THEME_HG_PROMPT_SUFFIX$ZSH_PROMPT_BASE_COLOR$(hg_dirty)$ZSH_PROMPT_BASE_COLOR"
unset _DISPLAY
fi
}
function hg_dirty_choose {
if [ $(in_hg) ]; then
hg status 2> /dev/null | grep -Eq '^\s*[ACDIM!?L]'
if [ $pipestatus[-1] -eq 0 ]; then
# Grep exits with 0 when "One or more lines were selected", return "dirty".
echo $1
else
# Otherwise, no lines were found, or an error occurred. Return clean.
echo $2
fi
fi
}
function hg_dirty {
hg_dirty_choose $ZSH_THEME_HG_PROMPT_DIRTY $ZSH_THEME_HG_PROMPT_CLEAN
}
function hgic() {
hg incoming "$@" | grep "changeset" | wc -l
}
function hgoc() {
hg outgoing "$@" | grep "changeset" | wc -l
}

63
plugins/mix/_mix Normal file
View file

@ -0,0 +1,63 @@
#compdef mix
#autoload
# Elixir mix zsh completion
local -a _1st_arguments
_1st_arguments=(
'archive:Archive this project into a .ez file'
'clean:Clean generated application files'
'compile:Compile source files'
'deps:List dependencies and their status'
"deps.clean:Remove dependencies' files"
'deps.compile:Compile dependencies'
'deps.get:Get all out of date dependencies'
'deps.unlock:Unlock the given dependencies'
'deps.update:Update dependencies'
'do:Executes the commands separated by comma'
'escriptize:Generates an escript for the project'
'help:Print help information for tasks'
'local:List local tasks'
'local.install:Install a task or an archive locally'
'local.rebar:Install rebar locally'
'local.uninstall:Uninstall local tasks or archives'
'new:Creates a new Elixir project'
'run:Run the given file or expression'
"test:Run a project's tests"
'--help:Describe available tasks'
'--version:Prints the Elixir version information'
)
__task_list ()
{
local expl
declare -a tasks
tasks=(archive clean compile deps deps.clean deps.compile deps.get deps.unlock deps.update do escriptize help local local.install local.rebar local.uninstall new run test)
_wanted tasks expl 'help' compadd $tasks
}
local expl
local curcontext="$curcontext" state line
typeset -A opt_args
_arguments -C \
':command:->command' \
'*::options:->options'
case $state in
(command)
_describe -t commands "mix subcommand" _1st_arguments
return
;;
(options)
case $line[1] in
(help)
_arguments ':feature:__task_list'
esac
;;
esac

View file

@ -0,0 +1,2 @@
# Allow SSH tab completion for mosh hostnames
compdef mosh=ssh

View file

@ -114,7 +114,7 @@ function listMavenCompletions {
# jboss
jboss:start jboss:stop jboss:deploy jboss:undeploy jboss:redeploy
# tomcat
tomcat:start tomcat:stop tomcat:deploy tomcat:undeploy tomcat:undeploy
tomcat:start tomcat:stop tomcat:deploy tomcat:undeploy tomcat:redeploy
# tomcat6
tomcat6:run tomcat6:run-war tomcat6:run-war-only tomcat6:stop tomcat6:deploy tomcat6:undeploy
# tomcat7
@ -163,7 +163,7 @@ function listMavenCompletions {
cli:execute cli:execute-phase
archetype:generate generate-sources
cobertura:cobertura
-Dtest= `if [ -d ./src ] ; then find ./src/test/java -type f -name '*.java' | grep -v svn | sed 's?.*/\([^/]*\)\..*?-Dtest=\1?' ; fi`
-Dtest= `if [ -d ./src/test/java ] ; then find ./src/test/java -type f -name '*.java' | grep -v svn | sed 's?.*/\([^/]*\)\..*?-Dtest=\1?' ; fi`
);
}

View file

@ -1,5 +1,13 @@
# Open the Node.js API docs for your current version, optionally jumping to the given section.
# TODO: Make the section part easier to use.
function node-docs {
open "http://nodejs.org/docs/$(node --version)/api/all.html#all_$1"
# get the open command
local open_cmd
if [[ $(uname -s) == 'Darwin' ]]; then
open_cmd='open'
else
open_cmd='xdg-open'
fi
$open_cmd "http://nodejs.org/docs/$(node --version)/api/all.html#all_$1"
}

26
plugins/nvm/_nvm Normal file
View file

@ -0,0 +1,26 @@
#compdef nvm
#autoload
[[ -s ~/.nvm/nvm.sh ]] || return 0
local -a _1st_arguments
_1st_arguments=(
'help:show help'
'install:download and install a version'
'uninstall:uninstall a version'
'use:modify PATH to use version'
'run:run version with given arguments'
'ls:list installed versions or versions matching a given description'
'ls-remote:list remote versions available for install'
'deactivate:undo effects of NVM on current shell'
'alias:show or set aliases'
'unalias:deletes an alias'
'copy-packages:install global NPM packages to current version'
)
_arguments -C '*:: :->subcmds' && return 0
if (( CURRENT == 1 )); then
_describe -t commands "nvm subcommand" _1st_arguments
return
fi

View file

@ -0,0 +1,3 @@
# Source nvm (the same line that 'nvm install' attempts to add to ~/.profile)
[[ -s ~/.nvm/nvm.sh ]] && . ~/.nvm/nvm.sh

View file

@ -6,7 +6,7 @@
# ------------------------------------------------------------------------------
function tab() {
local command="cd \\\"$PWD\\\""
local command="cd \\\"$PWD\\\"; clear; "
(( $# > 0 )) && command="${command}; $*"
the_app=$(
@ -34,7 +34,7 @@ EOF
launch session "Default Session"
set current_session to current session
tell current_session
write text "${command}; clear;"
write text "${command}"
end tell
end tell
end tell
@ -154,3 +154,6 @@ function trash() {
IFS=$temp_ifs
}
function vncviewer() {
open vnc://$@
}

View file

@ -1,16 +1,13 @@
_phing_does_target_list_need_generating () {
if [ ! -f .phing_targets ]; then return 0;
else
accurate=$(stat -f%m .phing_targets)
changed=$(stat -f%m build.xml)
return $(expr $accurate '>=' $changed)
fi
[ ! -f .phing_targets ] && return 0;
[ .phing_targets -nt build.xml ] && return 0;
return 1;
}
_phing () {
if [ -f build.xml ]; then
if _phing_does_target_list_need_generating; then
phing -l |grep -v ":" |grep -v "^$"|grep -v "\-" > .phing_targets
phing -l |grep -v ":$" |grep -v "^-*$" > .phing_targets
fi
compadd `cat .phing_targets`
fi

View file

@ -6,8 +6,8 @@
_pip_all() {
# we cache the list of packages (originally from the macports plugin)
if (( ! $+piplist )); then
echo -n " (caching package index...)"
piplist=($(pip search * | cut -d ' ' -f 1 | tr '[A-Z]' '[a-z]'))
echo -n " (caching package index...)"
piplist=($(pip search * | cut -d ' ' -f 1 | tr '[A-Z]' '[a-z]'))
fi
}
@ -62,8 +62,13 @@ case "$words[1]" in
'(--no-install)--no-install[only download packages]' \
'(--no-download)--no-download[only install downloaded packages]' \
'(--install-option)--install-option[extra arguments to be supplied to the setup.py]' \
'(--single-version-externally-managed)--single-version-externally-managed[do not download/install dependencies. requires --record or --root]'\
'(--root)--root[treat this path as a fake chroot, installing into it. implies --single-version-externally-managed]'\
'(--record)--record[file to record all installed files to.]'\
'(-r --requirement)'{-r,--requirement}'[requirements file]: :_files'\
'(-e --editable)'{-e,--editable}'[path of or url to source to link to instead of installing.]: :_files -/'\
'1: :->packages' && return 0
if [[ "$state" == packages ]]; then
_pip_all
_wanted piplist expl 'packages' compadd -a piplist

42
plugins/pj/pj.plugin.zsh Normal file
View file

@ -0,0 +1,42 @@
#!/bin/zsh
#
# Original idea by DefV (Jan De Poorter)
# Source: https://gist.github.com/pjaspers/368394#comment-1016
#
# Usage:
# - Set `$PROJECT_PATHS` in your ~/.zshrc
# e.g.: PROJECT_PATHS=(~/src ~/work)
# - In ZSH you now can open a project directory with the command: `pj my-project`
# the plugin will locate the `my-project` directory in one of the $PROJECT_PATHS
# Also tab completion is supported.
# - `pjo my-project` will open the directory in $EDITOR
#
function pj() {
  local cmd="cd"
  local file=$1
  if [[ "open" == "$file" ]]; then
    file=$2
    cmd=(${(s: :)EDITOR})
  fi
  for project in $PROJECT_PATHS; do
    if [[ -d $project/$file ]]; then
      $cmd "$project/$file"
      unset project # clean up the loop variable
      return
    fi
  done
  echo "No such project $1"
}
alias pjo="pj open"
function _pj () {
compadd `/bin/ls -l $PROJECT_PATHS 2>/dev/null | awk '{ print $9 }'`
}
compdef _pj pj
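A minimal usage sketch for the plugin above; the paths and the project name are placeholders:

# in ~/.zshrc, before oh-my-zsh loads the plugin
PROJECT_PATHS=(~/src ~/work)

# then, from anywhere:
pj my-project     # cd into ~/src/my-project or ~/work/my-project, whichever exists first
pjo my-project    # open the same directory in $EDITOR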

381
plugins/pod/_pod Normal file
View file

@ -0,0 +1,381 @@
#compdef pod
#autoload
# -----------------------------------------------------------------------------
# FILE: _pod
# DESCRIPTION: Cocoapods autocomplete plugin for Oh-My-Zsh
# http://cocoapods.org
# AUTHOR: Alexandre Joly (alexandre.joly@mekanics.ch)
# GITHUB: https://github.com/mekanics
# TWITTER: @jolyAlexandre
# VERSION: 0.0.1
# LICENSE: MIT
# -----------------------------------------------------------------------------
local -a _1st_arguments
_1st_arguments=(
'help:Show help for the given command.'
'install:Install project dependencies'
'ipc:Inter-process communication'
'list:List pods'
'outdated:Show outdated project dependencies'
'podfile-info:Shows information on installed Pods'
'push:Push new specifications to a spec-repo'
'repo:Manage spec-repositories'
'search:Searches for pods'
'setup:Setup the CocoaPods environment'
'spec:Manage pod specs'
'update:Update outdated project dependencies'
)
local -a _repo_arguments
_repo_arguments=(
'add:Add a spec repo'
'lint:Validates all specs in a repo'
'update:Update a spec repo'
)
local -a _spec_arguments
_spec_arguments=(
'cat:Prints a spec file'
'create:Create spec file stub'
'edit:Edit a spec file'
'lint:Validates a spec file'
'which:Prints the path of the given spec'
)
local -a _ipc_arguments
_ipc_arguments=(
'list:Lists the specifications known to CocoaPods'
'podfile:Converts a Podfile to YAML'
'repl:The repl listens to commands on standard input'
'spec:Converts a podspec to YAML'
'update-search-index:Updates the search index'
)
local -a _list_arguments
_list_arguments=(
'new:Lists pods introduced in the master spec-repo since the last check'
)
local -a _inherited_options
_inherited_options=(
'(--silent)--silent[Show nothing]' \
'(--version)--version[Show the version of CocoaPods]' \
'(--no-color)--no-color[Show output without color]' \
'(--verbose)--verbose[Show more debugging information]' \
'(--help)--help[Show help banner of specified command]'
)
local -a _install_options
_install_options=(
'(--no-clean)--no-clean[Leave SCM dirs like `.git` and `.svn` intact after downloading]' \
'(--no-integrate)--no-integrate[Skip integration of the Pods libraries in the Xcode project(s)]' \
'(--no-repo-update)--no-repo-update[Skip running `pod repo update` before install]'
)
local -a _update_options
_update_options=(
'(--no-clean)--no-clean[Leave SCM dirs like `.git` and `.svn` intact after downloading]' \
'(--no-integrate)--no-integrate[Skip integration of the Pods libraries in the Xcode project(s)]' \
'(--no-repo-update)--no-repo-update[Skip running `pod repo update` before install]'
)
local -a _outdated_options
_outdated_options=(
'(--no-repo-update)--no-repo-update[Skip running `pod repo update` before install]'
)
local -a _search_options
_search_options=(
'(--full)--full[Search by name, summary, and description]' \
'(--stats)--stats[Show additional stats (like GitHub watchers and forks)]' \
'(--ios)--ios[Restricts the search to Pods supported on iOS]' \
'(--osx)--osx[Restricts the search to Pods supported on OS X]'
)
local -a _list_options
_list_options=(
'(--update)--update[Run `pod repo update` before listing]'
)
local -a _podfile_info_options
_podfile_info_options=(
'(--all)--all[Show information about all Pods with dependencies that are used in a project]' \
'(--md)--md[Output information in Markdown format]' \
'*:script or directory:_files'
)
local -a _push_options
_push_options=(
'(--allow-warnings)--allow-warnings[Allows pushing even if there are warnings]' \
'(--local-only)--local-only[Does not perform the step of pushing REPO to its remote]' \
'*:script or directory:_files'
)
local -a _repo_lint_options
_repo_lint_options=(
'(--only-errors)--only-errors[Lint presents only the errors]'
)
local -a _setup_options
_setup_options=(
'(--push)--push[Use this option to enable push access once granted]'
)
local -a _spec_lint_options
_spec_lint_options=(
'(--quick)--quick[Lint skips checks that would require to download and build the spec]' \
'(--only-errors)--only-errors[Lint validates even if warnings are present]' \
'(--no-clean)--no-clean[Lint leaves the build directory intact for inspection]' \
'*:script or directory:_files'
)
local -a _spec_cat_options
_spec_cat_options=(
'(--show-all)--show-all[Pick from all versions of the given podspec]'
)
local -a _spec_which_options
_spec_which_options=(
'(--show-all)--show-all[Print all versions of the given podspec]'
)
local -a _spec_edit_options
_spec_edit_options=(
'(--show-all)--show-all[Pick which spec to edit from all available versions of the given podspec]'
)
__first_command_list ()
{
local expl
declare -a tasks
tasks=(install ipc list outdated podfile-info push repo search setup spec update)
_wanted tasks expl 'help' compadd $tasks
}
__repo_list() {
_wanted application expl 'repo' compadd $(command ls -1 ~/.cocoapods 2>/dev/null | sed -e 's/ /\\ /g')
}
__pod-repo() {
local curcontext="$curcontext" state line
typeset -A opt_args
_arguments -C \
':command:->command' \
'*::options:->options'
case $state in
(command)
_describe -t commands "pod repo" _repo_arguments
return
;;
(options)
case $line[1] in
(lint)
_arguments \
$_inherited_options \
$_repo_lint_options \
':feature:__repo_list'
;;
(update)
_arguments \
$_inherited_options \
':feature:__repo_list'
;;
(add)
_arguments \
$_inherited_options
esac
;;
esac
}
__pod-spec() {
local curcontext="$curcontext" state line
typeset -A opt_args
_arguments -C \
':command:->command' \
'*::options:->options'
case $state in
(command)
_describe -t commands "pod spec" _spec_arguments
return
;;
(options)
case $line[1] in
(create)
_arguments \
$_inherited_options
;;
(lint)
_arguments \
$_inherited_options \
$_spec_lint_options
;;
(cat)
_arguments \
$_inherited_options \
$_spec_cat_options
;;
(which)
_arguments \
$_inherited_options \
$_spec_which_options
;;
(edit)
_arguments \
$_inherited_options \
$_spec_edit_options
;;
esac
return
;;
esac
}
__pod-ipc() {
local curcontext="$curcontext" state line
typeset -A opt_args
_arguments -C \
':command:->command' \
'*::options:->options'
case $state in
(command)
_describe -t commands "pod ipc" _ipc_arguments
return
;;
(options)
_arguments -C \
$_inherited_options
return
;;
esac
}
__pod-list() {
local curcontext="$curcontext" state line
typeset -A opt_args
_arguments -C \
$_inherited_options \
$_list_options \
':command:->command' \
'*::options:->options'
case $state in
(command)
_describe -t commands "pod list" _list_arguments
return
;;
(options)
_arguments -C \
$_inherited_options \
$_list_options
return
;;
esac
}
local curcontext="$curcontext" state line
typeset -A opt_args
_arguments -C \
$_inherited_options \
':command:->command' \
'*::options:->options'
case $state in
(command)
_describe -t commands "pod" _1st_arguments
return
;;
(options)
case $line[1] in
(help)
_arguments \
$_inherited_options \
':help:__first_command_list'
;;
(push)
_arguments \
$_inherited_options \
$_push_options \
':repo:__repo_list'
;;
(repo)
__pod-repo
;;
(spec)
__pod-spec
;;
(ipc)
__pod-ipc
;;
(list)
__pod-list
;;
(install)
_arguments \
$_inherited_options \
$_install_options
;;
(update)
_arguments \
$_inherited_options \
$_update_options
;;
(outdated)
_arguments \
$_inherited_options \
$_outdated_options
;;
(search)
_arguments \
$_inherited_options \
$_search_options
;;
(podfile-info)
_arguments \
$_inherited_options \
$_podfile_info_options
;;
(setup)
_arguments \
$_inherited_options \
$_setup_options
;;
esac
;;
esac

View file

@ -0,0 +1,6 @@
# Aliases to stop, start and restart Postgres
# The paths below are for Postgres installed via Homebrew on OS X
alias startpost='pg_ctl -D /usr/local/var/postgres -l /usr/local/var/postgres/server.log start'
alias stoppost='pg_ctl -D /usr/local/var/postgres stop -s -m fast'
alias restartpost='stoppost && sleep 1 && startpost'
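If Homebrew lives in a different prefix, only the data-directory path changes; a hedged sketch of the same aliases built around a single variable (the name PGDATA_DIR is ours, not part of the plugin):

# adjust to your installation; the value below is only an example
PGDATA_DIR=/usr/local/var/postgres
alias startpost="pg_ctl -D $PGDATA_DIR -l $PGDATA_DIR/server.log start"
alias stoppost="pg_ctl -D $PGDATA_DIR stop -s -m fast"
alias restartpost='stoppost && sleep 1 && startpost'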

55
plugins/powify/_powify Normal file
View file

@ -0,0 +1,55 @@
#compdef powify
_powify_all_servers() {
all_servers=(`ls $HOME/.pow/`)
}
local -a all_servers
local -a _1st_arguments
_1st_arguments=(
'server:server specific commands'
'utils:manage powify'
'create:creates a pow app from the current directory (append a name as an argument to override the default name)'
'destroy:destroys the pow app linked to the current directory'
'restart:restarts the pow app linked to the current directory'
'always_restart:reload the pow app after each request'
'always_restart_off:do not reload the pow app after each request'
'rename:rename the current pow app to [NAME] or rename [OLD] to [NEW]'
'environment:run this pow app in a different environment (aliased `env`)'
'browse:open this app in the default browser'
'logs:tail the application logs'
)
_arguments '*:: :->command'
if (( CURRENT == 1 )); then
_describe -t commands "powify command" _1st_arguments
return
fi
case "$words[1]" in
server)
_values \
'install[install pow server]' \
'reinstall[reinstall pow server]' \
'update[update pow server]' \
'uninstall[uninstall pow server]' \
'list[list all pow apps]' \
'start[start the pow server]' \
'stop[stop the pow server]' \
'restart[restart the pow server]' \
'host[adds all pow apps to /etc/hosts file]' \
'unhost[removes all pow apps from /etc/hosts file]' \
'status[print the current server status]' \
'config[print the current server configuration]' \
'logs[tails the pow server logs]' ;;
utils)
_values \
'install[install powify.dev server management tool]' \
'reinstall[reinstall powify.dev server management tool]' \
'uninstall[uninstall powify.dev server management tool]' ;;
destroy|restart|always_restart|always_restart_off|rename|browse|logs)
_powify_all_servers
_wanted all_servers expl 'all pow servers' compadd -a all_servers ;;
esac

View file

@ -0,0 +1,12 @@
# You will probably want to list this plugin as the first in your .zshrc.
# This will look for a custom profile for the local machine and each domain or
# subdomain it belongs to. (e.g. com, example.com and foo.example.com)
parts=(${(s:.:)$(hostname)})
for i in {${#parts}..1}; do
profile=${(j:.:)${parts[$i,${#parts}]}}
file=$ZSH_CUSTOM/profiles/$profile
if [ -f $file ]; then
source $file
fi
done
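A worked example of the loop above, assuming the machine's hostname is web1.example.com (hostname and files are illustrative):

# parts=(web1 example com), so the loop tries to source, in this order:
#   $ZSH_CUSTOM/profiles/com
#   $ZSH_CUSTOM/profiles/example.com
#   $ZSH_CUSTOM/profiles/web1.example.com
# Each file is sourced only if it exists.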

View file

@ -10,6 +10,11 @@ alias migrate='rake db:migrate && rake db:test:prepare'
alias sc='ruby script/console'
alias sd='ruby script/server --debugger'
alias devlog='tail -f log/development.log'
alias testlog='tail -f log/test.log'
alias prodlog='tail -f log/production.log'
alias -g RET='RAILS_ENV=test'
alias -g REP='RAILS_ENV=production'
alias -g RED='RAILS_ENV=development'
function remote_console() {
/usr/bin/env ssh $1 "( cd $2 && ruby script/console production )"

56
plugins/rails3/_rails3 Normal file
View file

@ -0,0 +1,56 @@
#compdef rails
#autoload
# rails 3 zsh completion, based on homebrew completion
# Extracted from https://github.com/robbyrussell/oh-my-zsh/blob/30620d463850c17f86e7a56fbf6a8b5e793a4e07/plugins/rails3/_rails3
# Published by Christopher Chow
local -a _1st_arguments
_1st_arguments=(
'generate:Generate new code (short-cut alias: "g")'
'console:Start the Rails console (short-cut alias: "c")'
'server:Start the Rails server (short-cut alias: "s")'
'dbconsole:Start a console for the database specified in config/database.yml (short-cut alias: "db")'
'new:Create a new Rails application. "rails new my_app" creates a new application called MyApp in "./my_app"'
'application:Generate the Rails application code'
'destroy:Undo code generated with "generate"'
'benchmarker:See how fast a piece of code runs'
'profiler:Get profile information from a piece of code'
'plugin:Install a plugin'
)
_rails_generate_arguments() {
generate_arguments=(
controller
generator
helper
integration_test
mailer
migration
model
observer
performance_test
plugin
resource
scaffold
scaffold_controller
session_migration
stylesheets
)
}
_arguments \
'(--version)--version[show version]' \
'(--help)--help[show help]' \
'*:: :->subcmds' && return 0
if (( CURRENT == 1 )); then
_describe -t commands "rails subcommand" _1st_arguments
return
fi
case "$words[1]" in
generate)
_rails_generate_arguments
_wanted generate_arguments expl 'all generate' compadd -a generate_arguments ;;
esac

View file

@ -4,7 +4,11 @@ function _rails_command () {
if [ -e "script/server" ]; then
ruby script/$@
else
ruby script/rails $@
if [ -e "bin/rails" ]; then
bin/rails $@
else
rails $@
fi
fi
}
@ -19,5 +23,10 @@ alias ru='_rails_command runner'
alias rs='_rails_command server'
alias rsd='_rails_command server --debugger'
alias devlog='tail -f log/development.log'
alias testlog='tail -f log/test.log'
alias prodlog='tail -f log/production.log'
alias rdm='rake db:migrate'
alias rdr='rake db:rollback'
alias -g RET='RAILS_ENV=test'
alias -g REP='RAILS_ENV=production'
alias -g RED='RAILS_ENV=development'
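The `-g` aliases just above are global: zsh expands them anywhere on the command line, not only in command position. Two illustrative invocations:

RET rake db:create      # expands to: RAILS_ENV=test rake db:create
rake db:migrate REP     # expands to: rake db:migrate RAILS_ENV=production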

View file

@ -0,0 +1,32 @@
# Rails 4 aliases
function _rails_command () {
if [ -e "script/server" ]; then
ruby script/$@
elif [ -e "script/rails" ]; then
ruby script/rails $@
else
ruby bin/rails $@
fi
}
alias rc='_rails_command console'
alias rd='_rails_command destroy'
alias rdb='_rails_command dbconsole'
alias rdbm='rake db:migrate db:test:clone'
alias rg='_rails_command generate'
alias rgm='_rails_command generate migration'
alias rp='_rails_command plugin'
alias ru='_rails_command runner'
alias rs='_rails_command server'
alias rsd='_rails_command server --debugger'
alias devlog='tail -f log/development.log'
alias testlog='tail -f log/test.log'
alias prodlog='tail -f log/production.log'
alias rdm='rake db:migrate'
alias rdc='rake db:create'
alias rdr='rake db:rollback'
alias rds='rake db:seed'
alias rlc='rake log:clear'
alias rn='rake notes'
alias rr='rake routes'

View file

@ -0,0 +1,17 @@
# Get a random quote from the site http://www.quotationspage.com/random.php3
# Created by Eduardo San Martin Morote aka Posva
# http://posva.github.io
# Sun Jun 09 10:59:36 CEST 2013
# Don't remove this header, thank you
# Usage: quote
if [[ -x `which curl` ]]; then
function quote()
{
Q=$(curl -s --connect-timeout 2 "http://www.quotationspage.com/random.php3" | grep -m 1 "dt ")
TXT=$(echo "$Q" | sed -e 's/<\/dt>.*//g' -e 's/.*html//g' -e 's/^[^a-zA-Z]*//' -e 's/<\/a..*$//g')
W=$(echo "$Q" | sed -e 's/.*\/quotes\///g' -e 's/<.*//g' -e 's/.*">//g')
echo "\e[0;33m${W}\e[0;30m: \e[0;35m“${TXT}”\e[m"
}
#quote
fi

View file

@ -17,7 +17,7 @@ for rbenvdir in "${rbenvdirs[@]}" ; do
FOUND_RBENV=1
export RBENV_ROOT=$rbenvdir
export PATH=${rbenvdir}/bin:$PATH
eval "$(rbenv init - zsh)"
eval "$(rbenv init --no-rehash - zsh)"
alias rubies="rbenv versions"
alias gemsets="rbenv gemset list"
@ -32,11 +32,11 @@ for rbenvdir in "${rbenvdirs[@]}" ; do
function gems {
local rbenv_path=$(rbenv prefix)
gem list $@ | sed \
-Ee "s/\([0-9\.]+( .+)?\)/$fg[blue]&$reset_color/g" \
-Ee "s|$(echo $rbenv_path)|$fg[magenta]\$rbenv_path$reset_color|g" \
-Ee "s/$current_ruby@global/$fg[yellow]&$reset_color/g" \
-Ee "s/$current_ruby$current_gemset$/$fg[green]&$reset_color/g"
gem list $@ | sed -E \
-e "s/\([0-9a-z, \.]+( .+)?\)/$fg[blue]&$reset_color/g" \
-e "s|$(echo $rbenv_path)|$fg[magenta]\$rbenv_path$reset_color|g" \
-e "s/$current_ruby@global/$fg[yellow]&$reset_color/g" \
-e "s/$current_ruby$current_gemset$/$fg[green]&$reset_color/g"
}
function rbenv_prompt_info() {

79
plugins/rebar/_rebar Normal file
View file

@ -0,0 +1,79 @@
#compdef rebar
local curcontext=$curcontext state ret=1
typeset -ga _rebar_global_opts
_rebar_global_opts=(
'(--help -h)'{--help,-h}'[Show the program options]'
'(--commands -c)'{--commands,-c}'[Show available commands]'
'(--version -V)'{--version,-V}'[Show version information]'
'(-vvv -vv -v)'--verbose+'[Verbosity level. Default: 0]:verbosity level:(0 1 2 3)'
'(-vvv)-v[Slightly more verbose output]'
'(-vvv)-vv[More verbose output]'
'(-v -vv)-vvv[Most verbose output]'
'(--force -f)'{--force,-f}'[Force]'
'-D+[Define compiler macro]'
'(--jobs -j)'{--jobs+,-j+}'[Number of concurrent workers a command may use. Default: 3]:workers:(1 2 3 4 5 6 7 8 9)'
'(--config -C)'{--config,-C}'[Rebar config file to use]:files:_files'
'(--profile -p)'{--profile,-p}'[Profile this run of rebar]'
'(--keep-going -k)'{--keep-going,-k}'[Keep running after a command fails]'
)
_rebar () {
_arguments -C $_rebar_global_opts \
'*::command and variable:->cmd_and_var' \
&& return
case $state in
cmd_and_var)
_values -S = 'variables' \
'clean[Clean]' \
'compile[Compile sources]' \
'create[Create skel based on template and vars]' \
'create-app[Create simple app skel]' \
'create-node[Create simple node skel]' \
'list-template[List available templates]' \
'doc[Generate Erlang program documentation]' \
'check-deps[Display to be fetched dependencies]' \
'get-deps[Fetch dependencies]' \
'update-deps[Update fetched dependencies]' \
'delete-deps[Delete fetched dependencies]' \
'list-deps[List dependencies]' \
'generate[Build release with reltool]' \
'overlay[Run reltool overlays only]' \
'generate-appups[Generate appup files]' \
'generate-upgrade[Build an upgrade package]' \
'eunit[Run eunit tests]' \
'ct[Run common_test suites]' \
'qc[Test QuickCheck properties]' \
'xref[Run cross reference analysis]' \
'help[Show the program options]' \
'version[Show version information]' \
'apps[Application names to process]:' \
'case[Common Test case]:' \
'dump_spec[Dump reltool spec]:' \
'jobs[Number of workers]::workers:(0 1 2 3 4 5 6 7 8 9)' \
'suites[Common Test suites]::suite name:_path_files -W "(src test)" -g "*.erl(:r)"' \
'verbose[Verbosity level]::verbosity level:(0 1 2 3)' \
'appid[Application id]:' \
'previous_release[Previous release path]:' \
'nodeid[Node id]:' \
'root_dir[Reltool config root directory]::directory:_files -/' \
'skip_deps[Skip deps]::flag:(true false)' \
'skip_apps[Application names to not process]::flag:(true false)' \
'template[Template name]:' \
'template_dir[Template directory]::directory:_files -/' \
&& ret=0
;;
esac
}
_rebar
# Local variables:
# mode: shell-script
# sh-basic-offset: 2
# sh-indent-comment: t
# indent-tabs-mode: nil
# End:
# ex: sw=2 ts=2 et filetype=sh

7
plugins/repo/README.md Normal file
View file

@ -0,0 +1,7 @@
## repo
**Maintainer:** [Stibbons](https://github.com/Stibbons)
This plugin mainly adds automatic completion for the `repo` command-line tool:
http://code.google.com/p/git-repo/
* `r` aliases `repo`
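For example, with the plugin enabled (project names come from whatever `repo list` reports in your checkout):

    r sync -j 4      # same as: repo sync -j 4
    r upload <TAB>   # completion offers the projects from `repo list`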

272
plugins/repo/_repo Normal file
View file

@ -0,0 +1,272 @@
#compdef repo
__git_apply_whitespace_strategies ()
{
declare -a strategies
strategies=(
'nowarn:turn off the trailing-whitespace warning'
'warn:output trailing-whitespace warning, but apply patch'
'fix:output trailing-whitespace warning and strip trailing whitespace'
'error:output trailing-whitespace warning and refuse to apply patch'
'error-all:same as "error", but output warnings for all files')
_describe -t strategies 'trailing-whitespace resolution strategy' strategies $*
}
_repo()
{
local context state state_descr line curcontext="$curcontext"
typeset -A opt_args
local ret=1
_arguments -C \
'(- 1 *)--help[show usage]'\
'1:command:->command'\
'*::args:->args' && ret=0
case $state in
(command)
repo list 2> /dev/null > /dev/null
if [[ $? == 0 ]]; then
local commands;
commands=(
'abandon:Permanently abandon a development branch'
'branch:View current topic branches'
'branches:View current topic branches'
'checkout:Checkout a branch for development'
'cherry-pick:Cherry-pick a change.'
'diff:Show changes between commit and working tree'
'download:Download and checkout a change'
'forall:Execute a command on several projects'
'grep:Print lines matching a pattern'
'help:Display detailed help on a command'
'init:Initialize repo in the current directory'
'list:List projects and their associated directories'
'manifest:Manifest inspection utility'
'overview:Display overview of unmerged project branches'
'prune:Prune (delete) already merged topics'
'rebase:Rebase local branches on upstream branch'
'selfupdate:Update repo to the latest version'
'smartsync:Update working tree to the latest known good revision'
'stage:Stage file(s) for commit'
'start:Start a new branch for development'
'status:Show the working tree status'
'sync:Update working tree to the latest revision'
'upload:Upload changes for code review'
'version:Display the version of repo'
)
_describe -t commands 'command' commands && ret=0
else
local commands;
commands=(
'init:Install repo in the current working directory'
'help:Display detailed help on a command'
)
_describe -t commands 'command' commands && ret=0
fi
;;
(args)
case $words[1] in
(branch | branches)
# TODO : list available projects and add them in list to feed compadd with
_arguments : \
"(-h --help)"{-h,--help}"[Show help]" \
': :__repo_projects' \
&& ret=0
;;
(abandon)
# TODO : list available projects and add them in list to feed compadd with
_arguments : \
"(-h --help)"{-h,--help}"[Show help]" \
':branch name:__repo_branch' \
': :__repo_projects'\
&& ret=0
;;
(checkout)
# TODO : list available projects and add them in list to feed compadd with
_arguments : \
"(-h --help)"{-h,--help}"[Show help]" \
':branch name:__repo_branch' \
': :__repo_projects'\
&& ret=0
;;
(init)
_arguments : \
"(-h --help)"{-h,--help}"[Show help]" \
"(-q --quiet)"{-q,--quiet}"[be quiet]" \
"(-u --manifest-url)"{-u,--manifest-url=}"[manifest repository location]":url:__repo_url_prompt \
"(-b --manifest-branch)"{-b,--manifest-branch=}"[manifest branch or revision]":branch:__repo_branch\
"(-m --manifest-name)"{-m,--manifest-name=}"[initial manifest file]":manifest_name:__repo_manifest_name\
"(--mirror)--mirror[mirror the forrest]"\
"(--reference)--reference=[location of mirror directory]":dir:_dirs\
"(--depth)--depth=[create a shallow clone with given depth; see git clone]":depth:__repo_depth_prompt\
"(-g --group=)"{-g,--group=}"[restrict manifest projects to ones with a specified group]":group:_group\
"(-p --platform=)"{-p,--platform=}"[restrict manifest projects to ones with a specified platform group(auto|all|none|linux|darwin|...)]":platform:"(auto all none linux darwin)"\
"(--repo-url)--repo-url=[repo repository location]":url:__repo_url_prompt\
"(--repo-branch)--repo-branch[repo branch or revision]":branch_or_rev:__repo__repo_branch_or_rev\
"(--no-repo-verify)--no-repo-verify[do not verify repo source code]"\
"(--config-name)--config-name[Always prompt for name/e-mail]"\
&& ret=0
;;
(start)
_arguments : \
"(-h --help)"{-h,--help}"[Show help]" \
"(--all)--all=[begin branch in all projects]"\
':branch name:__repo_new__repo_branch_name' \
':projects:__repo_projects_or_all' \
&& ret=0
;;
(rebase)
_arguments : \
"(-h --help)"{-h,--help}"[Show help]" \
"(-i --interactive)"{-i,--interactive}"[interactive rebase (single project only)]: :__repo_projects" \
"(-f --force-rebase)"{-f,--force-rebase}"[Pass --force-rebase to git rebase]" \
"(--no-ff)--no-ff=[Pass --no-ff to git rebase]"\
"(-q --quiet)"{-q,--quiet}"[Pass --quiet to git rebase]" \
"(--autosquash)--no-ff[Pass --autosquash to git rebase]"\
"(--whitespace=)--whitespace=[Pass --whitespace to git rebase]: :__git_apply_whitespace_strategies"\
"(--auto-stash)--auto-stash[Stash local modifications before starting]"\
&& ret=0
;;
(checkout)
_arguments : \
"(-h --help)"{-h,--help}"[Show help]" \
':branch name:__git_branch_names' \
':projects:__repo_projects' \
&& ret=0
;;
(list)
_arguments : \
"(-h --help)"{-h,--help}"[Show help]" \
&& ret=0
;;
(status)
_arguments : \
"(-h --help)"{-h,--help}"[Show help]" \
"(-j --jobs)"{-j,--jobs}"[number of projects to check simultaneously]" \
':projects:__repo_projects' \
&& ret=0
;;
(sync)
_arguments : \
"(-h --help)"{-h,--help}"[Show help]" \
"(--no-force-broken)--no-force-broken[stop sync if a project fails to sync (probably because of permissions)]" \
"(-l --local-only)"{-l,--local-only}"[only update working tree, don't fetch]" \
"(-n --network-only)"{-n,--network-branch}"[fetch only, don't update working tree]" \
"(-d --detach)"{-d,--detach}"[detach projects back to manifest revision]" \
"(-c --current-branch)"{-c,--current-branch}"[fetch only current branch from server]" \
"(-q --quiet)"{-q,--quiet}"[be more quiet]" \
"(-j --jobs=)"{-j,--jobs=}"[projects to fetch simultaneously (default 1) (limited to 5)]:projects to fetch simultaneously (default 1) (limited to 5)" \
"(-m --manifest-name=)"{-m,--manifest-name=}"[temporary manifest to use for this sync]:manifest xml file:_files -g *.xml" \
"(--no-clone-bundle)--no-clone-bundle[disable use of /clone.bundle on HTTP/HTTPS]" \
"(-s --smart-sync)"{-s,--smart-sync=}"[smart sync using manifest from a known tag]:tag:" \
'(--no-repo-verify)--no-repo-verify[do not verify repo source code]' \
': :__repo_projects' \
&& ret=0
;;
(upload)
_arguments : \
"(-h --help)"{-h,--help}"[Show help]" \
"(-t)-t[Send local branch name to Gerrit Code Review]" \
"(--re= --reviewers=)"{--re=,--reviewers=}"[Request reviews from these people]:Request reviews from these people:" \
"(--cc=)--cc=[Also send email to these email addresses.]:email addresses:_email_addresses" \
"(--br=)--br=[Branch to upload.]:branch:__repo_branch" \
"(--cbr --current-branch)"{--cbr,--current-branch}"[Upload current git branch]" \
"(-d --draft)"{-d,--draft}"[If specified, upload as a draft.]" \
"(--verify --no-verify)--no-verify[Do not run the upload hook.]" \
'(--verify --no-verify)--verify[Run the upload hook without prompting]' \
': :__repo_projects' \
&& ret=0
;;
(forall)
_arguments : \
"(-h --help)"{-h,--help}"[Show help]" \
"(-v --verbose)"{-v,--verbose}"[Show command error messages]" \
'(-p)-p[Show project headers before output]' \
': :__repo_projects_mandatory' \
"(-c --command -h --help -v --verbose -p)"{-c,--command}"[Command (and arguments) to execute]" \
&& ret=0
;;
*)
ret=0
esac
;;
esac
return $ret
}
__repo_reviewers()
{
# _message -e url 'reviewers'
}
__repo_url_prompt()
{
_message -e url 'url'
}
__repo_manifest_name()
{
_message -e manifest_name 'manifest name'
}
_group()
{
_message -e group 'group'
}
__repo_branch()
{
#_message -e branch 'Repo branch'
branches=($(repo branches| cut -c4- | grep '|' | cut -d' ' -f1))
_describe -t branches 'Select repo branch' branches
}
__repo__repo_branch_or_rev()
{
_message -e branch_or_rev 'repo branch or revision'
}
__repo_depth_prompt()
{
_message -e depth 'depth'
}
__repo_projects()
{
_message -e depth 'Optional option : <projects>...'
projects=($(repo list | cut -d' ' -f1))
_describe -t projects 'Select projects (keep empty for selecting all projects)' projects
}
__repo_projects_mandatory()
{
projects=($(repo list | cut -d' ' -f1))
#_describe -t projects 'Select projects to apply commands' projects
_values -s ' ' "Select projects to apply commands" $projects
}
__repo_new__repo_branch_name()
{
branches=($(repo branches| cut -c4- | grep '|' | cut -d' ' -f1))
_describe "" branches
_message -e "branch name" 'Enter new branch name or select an existing repo branch'
}
__repo_projects_or_all()
{
#_message -e depth '[--all | <project>...]'
projects=(--all $(repo list | cut -d' ' -f1))
_describe -t projects 'Select projects or --all' projects
_describe -t --all 'All projects'
}
_repo "$@"
return $?

View file

@ -0,0 +1,2 @@
# Aliases
alias r='repo'

View file

@ -3,8 +3,9 @@ fpath=($rvm_path/scripts/zsh/Completion $fpath)
alias rubies='rvm list rubies'
alias gemsets='rvm gemset list'
local ruby18='ruby-1.8.7-p334'
local ruby19='ruby-1.9.3-p194'
local ruby18='ruby-1.8.7'
local ruby19='ruby-1.9.3'
local ruby20='ruby-2.0.0'
function rb18 {
if [ -z "$1" ]; then
@ -28,9 +29,19 @@ function rb19 {
_rb19() {compadd `ls -1 $rvm_path/gems | grep "^$ruby19@" | sed -e "s/^$ruby19@//" | awk '{print $1}'`}
compdef _rb19 rb19
function rb20 {
if [ -z "$1" ]; then
rvm use "$ruby20"
else
rvm use "$ruby20@$1"
fi
}
_rb20() {compadd `ls -1 $rvm_path/gems | grep "^$ruby20@" | sed -e "s/^$ruby20@//" | awk '{print $1}'`}
compdef _rb20 rb20
function rvm-update {
rvm get head
rvm reload # TODO: Reload rvm completion?
}
# TODO: Make this usable w/o rvm.

View file

@ -0,0 +1,54 @@
# Code from Mikael Magnusson: http://www.zsh.org/mla/users/2011/msg00367.html
#
# Requires xterm, urxvt, iTerm2 or any other terminal that supports bracketed
# paste mode as documented: http://www.xfree86.org/current/ctlseqs.html
# create a new keymap to use while pasting
bindkey -N paste
# make everything in this keymap call our custom widget
bindkey -R -M paste "^@"-"\M-^?" paste-insert
# these are the codes sent around the pasted text in bracketed
# paste mode.
# do the first one with both -M viins and -M vicmd in vi mode
bindkey '^[[200~' _start_paste
bindkey -M paste '^[[201~' _end_paste
# insert newlines rather than carriage returns when pasting newlines
bindkey -M paste -s '^M' '^J'
zle -N _start_paste
zle -N _end_paste
zle -N zle-line-init _zle_line_init
zle -N zle-line-finish _zle_line_finish
zle -N paste-insert _paste_insert
# switch the active keymap to paste mode
function _start_paste() {
bindkey -A paste main
}
# go back to our normal keymap, and insert all the pasted text in the
# command line. this has the nice effect of making the whole paste be
# a single undo/redo event.
function _end_paste() {
#use bindkey -v here with vi mode probably. maybe you want to track
#if you were in ins or cmd mode and restore the right one.
bindkey -e
LBUFFER+=$_paste_content
unset _paste_content
}
function _paste_insert() {
_paste_content+=$KEYS
}
function _zle_line_init() {
# Tell terminal to send escape codes around pastes.
[[ $TERM == rxvt-unicode || $TERM == xterm || $TERM = xterm-256color || $TERM = screen || $TERM = screen-256color ]] && printf '\e[?2004h'
}
function _zle_line_finish() {
# Tell it to stop when we leave zle, so pasting in other programs
# doesn't get the ^[[200~ codes around the pasted text.
[[ $TERM == rxvt-unicode || $TERM == xterm || $TERM = xterm-256color || $TERM = screen || $TERM = screen-256color ]] && printf '\e[?2004l'
}
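A small sanity check that the plugin is active in a running shell (the widget and keymap names are the ones defined above):

zle -la | grep -E '_(start|end)_paste|paste-insert'   # custom widgets are registered
bindkey -l | grep '^paste$'                           # the extra 'paste' keymap exists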

55
plugins/sbt/_sbt Normal file
View file

@ -0,0 +1,55 @@
#compdef sbt
#autoload
local -a _sbt_commands
_sbt_commands=(
'clean:delete files produced by the build'
'compile:compile sources'
'console:start the Scala REPL with project classes on the classpath'
'console-quick:start the Scala REPL with project deps on the classpath'
'console-project:start the Scala REPL w/sbt+build-def on the classpath'
'dist:generate distribution artifacts'
'dist\:clean:clean distribution artifacts'
'doc:generate API documentation'
'gen-idea:generate Intellij Idea project files'
'package:produce the main artifact, such as a binary jar'
'package-doc:produce a doc artifact, such as a jar containing API docs'
'package-src:produce a source artifact, such as a jar containing sources'
'publish:publish artifacts to a repository'
'publish-local:publish artifacts to the local repository'
'run:run a main class'
'run-main:run the main class selected by the first argument'
'test:execute all tests'
'test-only:execute the tests provided as arguments'
'test-quick:execute previously failed tests'
'update:resolve and optionally retrieve dependencies'
)
local expl
_arguments \
'(-help)-h[prints a help message]' \
'(-h)-help[prints a help message]' \
'(-verbose)-v[this runner is chattier]' \
'(-v)-verbose[this runner is chattier]' \
'(-debug)-d[set sbt log level to debug]' \
'(-d)-debug[set sbt log level to debug]' \
'-no-colors[disable ANSI color codes]' \
'-sbt-create[start even if current dir contains no sbt project]' \
'-sbt-dir[path to global settings/plugins dir (default: ~/.sbt)]' \
'-sbt-boot[path to shared boot dir (default: ~/.sbt/boot)]' \
'-ivy[path to local Ivy repository (default: ~/.ivy2)]' \
'-mem[set memory options]' \
'-no-share[use all local caches; no sharing]' \
'-no-global[use global caches, but do not use global ~/.sbt dir]' \
'-jvm-debug[turn on JVM debugging, open at the given port]' \
'-batch[disable interactive mode]' \
'-sbt-version[use the specified version of sbt]' \
'-sbt-jar[use the specified jar as the sbt launcher]' \
'(-sbt-snapshot)-sbt-rc[use an RC version of sbt]' \
'(-sbt-rc)-sbt-snapshot[use a snapshot version of sbt]' \
'-java-home[alternate JAVA_HOME]' \
'*:: :->subcmds' && return 0
_describe -t commands "sbt subcommand" _sbt_commands
return

View file

@ -0,0 +1,23 @@
# ------------------------------------------------------------------------------
# FILE: sbt.plugin.zsh
# DESCRIPTION: oh-my-zsh plugin file.
# AUTHOR: Mirko Caserta (mirko.caserta@gmail.com)
# VERSION: 1.0.2
# ------------------------------------------------------------------------------
# aliases - mnemonic: prefix is 'sb'
alias sbc='sbt compile'
alias sbco='sbt console'
alias sbcq='sbt console-quick'
alias sbcl='sbt clean'
alias sbcp='sbt console-project'
alias sbd='sbt doc'
alias sbdc='sbt dist:clean'
alias sbdi='sbt dist'
alias sbgi='sbt gen-idea'
alias sbp='sbt publish'
alias sbpl='sbt publish-local'
alias sbr='sbt run'
alias sbrm='sbt run-main'
alias sbu='sbt update'
alias sbx='sbt test'
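Mnemonic usage of the aliases above; the main class name is only an example:

sbcl && sbx               # sbt clean, then sbt test
sbrm com.example.Main     # sbt run-main com.example.Main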

249
plugins/scala/_scala Normal file
View file

@ -0,0 +1,249 @@
#compdef scala scalac
# ------------------------------------------------------------------------------
# Copyright (c) 2012 Github zsh-users - http://github.com/zsh-users
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the zsh-users nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL ZSH-USERS BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# ------------------------------------------------------------------------------
# Description
# -----------
#
# Completion script for scala and scalac (http://www.scala-lang.org/).
#
# ------------------------------------------------------------------------------
# Authors
# -------
#
# * Tony Sloane <inkytonik@gmail.com>
#
# ------------------------------------------------------------------------------
typeset -A opt_args
local context state line
_scala_features () {
compadd "postfixOps" "reflectiveCalls" "implicitConversions" "higherKinds" \
"existentials" "experimental.macros" "_"
}
_scala_phases () {
compadd "parser" "namer" "packageobjects" "typer" "patmat" "superaccessors" \
"extmethods" "pickler" "refchecks" "selectiveanf" "selectivecps" "uncurry" \
"tailcalls" "specialize" "explicitouter" "erasure" "posterasure" "lazyvals" \
"lambdalift" "constructors" "flatten" "mixin" "cleanup" "icode" "inliner" \
"inlineExceptionHandlers" "closelim" "dce" "jvm" "terminal"
}
local -a shared_opts
shared_opts=(
"-bootclasspath+[Override location of bootstrap class files]:bootstrap class directory:_files -/"
"-classpath+[Specify where to find user class files]:directory:_files -/"
"-D-[Pass -Dproperty=value directly to the runtime system]"
"-d+[Destination for generated classfiles]: directory or jar file:_files"
"-dependencyfile+[Set dependency tracking file]:dependency tracking file:_files"
"-deprecation[Emit warning and location for usages of deprecated APIs]"
"-encoding+[Specify character encoding used by source files]:encoding:"
"-explaintypes[Explain type errors in more detail]"
"-extdirs+[Override location of installed extensions]:extensions directory:_files -/"
"-g\:-[Set level of generated debugging info (default\: vars)]:debugging info level:(none source line vars notailcalls)"
"-help[Print a synopsis of standard options]"
"-J-[pass argument directly to Java runtime system]:JVM argument:"
"-javabootclasspath+[Override java boot classpath]:Java boot class path directory]:_files -/"
"-javaextdirs+[Override java extdirs classpath]:Java extdirs directory:_files -/"
"-language\:-[Enable one or more language features]:feature:_scala_features"
"-no-specialization[Ignore @specialize annotations]"
"-nobootcp[Do not use the boot classpath for the scala jars]"
"-nowarn[Generate no warnings]"
"-optimise[Generate faster bytecode by applying optimisations to the program]"
"-P\:-[Pass an option to a plugin (written plugin\:opt)]:plugin option:"
"-print[Print program with Scala-specific features removed]"
"-sourcepath+[Specify location(s) of source files]:source file directory:_files -/"
"-target\:-[Target platform for object files (default\: jvm-1.5)]:platform name:(jvm-1.5 msil)"
"-toolcp+[Add to the runner classpath]:directory:_files -/"
"-unchecked[Enable detailed unchecked (erasure) warnings]"
"-uniqid[Uniquely tag all identifiers in debugging output]"
"-usejavacp[Utilize the java.class.path in classpath resolution]"
"-verbose[Output messages about what the compiler is doing]"
"-version[Print product version and exit]"
"-X[Print a synopsis of advanced options]"
"-Y[Print a synopsis of private options]"
)
local -a X_opts
X_opts=(
"-Xcheck-null[Warn upon selection of nullable reference]"
"-Xcheckinit[Wrap field accessors to throw an exception on uninitialized access]"
"-Xdisable-assertions[Generate no assertions or assumptions]"
"-Xelide-below+[Calls to @elidable methods are omitted if method priority is lower than integer argument]"
"-Xexperimental[Enable experimental extensions]"
"-Xfatal-warnings[Fail the compilation if there are any warnings]"
"-Xfull-lubs[Retains pre 2.10 behavior of less aggressive truncation of least upper bounds]"
"-Xfuture[Turn on future language features]"
"-Xgenerate-phase-graph+[Generate the phase graphs (outputs .dot files) to fileX.dot]:output file:_files"
"-Xlint[Enable recommended additional warnings]"
"-Xlog-free-terms[Print a message when reification creates a free term]"
"-Xlog-free-types[Print a message when reification resorts to generating a free type]"
"-Xlog-implicits[Show more detail on why some implicits are not applicable]"
"-Xlog-implicit-conversions[Print a message whenever an implicit conversion is inserted]"
"-Xlog-reflective-calls[Print a message when a reflective method call is generated]"
"-Xmacro-settings\:-[Custom settings for macros]:option"
"-Xmain-class+[Class for manifest's Main-Class entry (only useful with -d jar)]:path:"
"-Xmax-classfile-name+[Maximum filename length for generated classes]"
"-Xmigration[Warn about constructs whose behavior may have changed]"
"-Xno-forwarders[Do not generate static forwarders in mirror classes]"
"-Xno-patmat-analysis[Don't perform exhaustivity/unreachability analysis. Also, ignore @switch annotation]"
"-Xno-uescape[Disable handling of \u unicode escapes]"
"-Xnojline[Do not use JLine for editing]"
"-Xoldpatmat[Use the pre-2.10 pattern matcher. Otherwise, the 'virtualizing' pattern matcher is used in 2.10]"
"-Xprint\:-[Print out program after <phase>]:phase name:_scala_phases"
"-Xprint-icode\:-[Log internal icode to *.icode files after phase (default\: icode)]:phase name:_scala_phases"
"-Xprint-pos[Print tree positions, as offsets]"
"-Xprint-types[Print tree types (debugging option)]"
"-Xprompt[Display a prompt after each error (debugging option)]"
"-Xresident[Compiler stays resident: read source filenames from standard input]"
"-Xscript+[Treat the source file as a script and wrap it in a main method]:main object name"
"-Xshow-class+[Show internal representation of class]:class name"
"-Xshow-object+[Show internal representation of object]:object name"
"-Xshow-phases[Print a synopsis of compiler phases]"
"-Xsource-reader+[Specify a class name for a custom method of reading source files]:class name"
"-Xverify[Verify generic signatures in generated bytecode]"
"-Xassem-extdirs+[List of directories containing assemblies (requires -target:msil) (default\: lib)]:assembly directory:_files -/"
"-Xassem-name+[Name of the output assembly (requires -target:msil)]:assembly name:_files"
"-Xassem-path+[List of assemblies referenced by the program (requires -target:msil)]:assembly path:_files"
"-Xsourcedir+[Mirror source folder structure in output directory (requires -target:msil)]:source directory:_files -/"
"-Xplugin\:-[Load one or more plugins from file]:plugin file:_files"
"-Xpluginsdir+[Path to search compiler plugins]:plugin directory:_files -/"
"-Xplugin-list[Print a synopsis of loaded plugins]"
"-Xplugin-disable\:-[Disable the given plugin(s)]"
"-Xplugin-require\:-[Abort unless the given plugin(s) are available]"
)
local -a Y_opts
Y_opts=(
"-Y[Print a synopsis of private options]"
"-Ybuild-manager-debug[Generate debug information for the Refined Build Manager compiler]"
"-Ybuilder-debug\:-[Compile using the specified build manager (default\: none)]:build manager:(none refined simple)"
"-Yclosure-elim[Perform closure elimination]"
"-Ycompact-trees[Use compact tree printer when displaying trees]"
"-Ydead-code[Perform dead code elimination]"
"-Ydependent-method-types[Allow dependent method types]"
"-Ydump-classes+[Dump the generated bytecode to .class files (useful for reflective compilation that utilizes in-memory classloaders)]:output directory:_files -/"
"-Yeta-expand-keeps-star[Eta-expand varargs methods to T* rather than Seq[T]. This is a temporary option to ease transition.]"
"-Ygen-javap+[Generate a parallel output directory of .javap files]:output directory:_files -/"
"-Yinfer-argument-types[Infer types for arguments of overriden methods]"
"-Yinline[Perform inlining when possible]"
"-Yinline-handlers[Perform exception handler inlining when possible]"
"-Yinline-warnings[Emit inlining warnings (normally surpressed due to high volume)]"
"-Yinvalidate+[Invalidate classpath entry before run]:classpath entry"
"-Ylinearizer\:-[Linearizer to use (default\: rpo)]:linearizer:(normal dfs rpo dump)"
"-Ylog-classpath[Output information about what classpath is being applied]"
"-Yno-adapted-args[Do not adapt an argument list (either by inserting unit or creating a tuple) to match the receiver]"
"-Ymacro-debug-lite[Trace essential macro-related activities]"
"-Ymacro-debug-verbose[Trace all macro-related activities: compilation, generation of synthetics, classloading, expansion, exceptions]"
"-Yno-completion[Disable tab-completion in the REPL]"
"-Yno-generic-signatures[Suppress generation of generic signatures for Java]"
"-Yno-imports[Compile without any implicit imports]"
"-Yno-predef[Compile without importing Predef]"
"-Yno-self-type-checks[Suppress check for self-type conformance among inherited members]"
"-Yno-squeeze[Disable creation of compact code in matching]"
"-Ynotnull[Enable (experimental and incomplete) scala.NotNull]"
"-Yoverride-objects[Allow member objects to be overridden]"
"-Yoverride-vars[Allow vars to be overridden]"
"-Ypmat-naive[Desugar matches as naively as possible]"
"-Ypresentation-delay+[Wait number of ms after typing before starting typechecking]"
"-Ypresentation-log+[Log presentation compiler events into file]:log file:_files"
"-Ypresentation-replay+[Replay presentation compiler events from file]:log file:_files"
"-Ypresentation-strict[Do not report type errors in sources with syntax errors]"
"-Ypresentation-verbose[Print information about presentation compiler tasks]"
"-Yprofile-class+[Specify name of profiler class]:profiler class name"
"-Yprofile-memory[Heap snapshot after compiler run (requires jgpagent on JVM -agentpath)]"
"-Yrangepos[Use range positions for syntax trees]"
"-Yrecursion+[Set recursion depth used when locking symbols]"
"-Yreify-copypaste[Dump the reified trees in copypasteable representation]"
"-Yrepl-sync[Do not use asynchronous code for REPL startup]"
"-Yresolve-term-conflict\:-[Resolve term conflicts (default\: error)]:resolution strategy:(package object error)"
"-Yself-in-annots[Include a \"self\" identifier inside of annotations]"
"-Yshow\:-[Show after <phase> (requires -Xshow-class or -Xshow-object)]:phase name:_scala_phases"
"-Yshow-syms[Print the AST symbol hierarchy after each phase]"
"-Yshow-symkinds[Print abbreviated symbol kinds next to symbol names]"
"-Yshow-trees[Print detailed ASTs (requires -Xprint\:phase)]"
"-Yshow-trees-compact[Print detailed ASTs in compact form (requires -Xprint\:)]"
"-Yshow-trees-stringified[Print stringifications along with detailed ASTs (requires -Xprint\:)]"
"-Ystatistics[Print compiler statistics]"
"-Ystruct-dispatch\:-[Structural method dispatch policy (default\: poly-cache)]:policy name:(no-cache mono-cache poly-cache invoke-dynamic)"
"-Ybrowse\:-[Browse the abstract syntax tree after <phase>]:phase name:_scala_phases"
"-Ycheck\:-[Check the tree at the end of <phase>]:phase name:_scala_phases"
"-Ylog\:-[Log operations during <phase>]:phase name:_scala_phases"
"-Yprofile\:-[Profile CPU usage of given phases (requires jgpagent on JVM -agentpath)]:phase name:_scala_phases"
"-Yskip\:-[Skip <phase>]:phase name:_scala_phases"
"-Ystop-after\:-[Stop after given phase <phase>]:phase name:_scala_phases"
"-Ystop-before\:-[Stop before given phase <phase>]:phase name:_scala_phases"
"-Ywarn-adapted-args[Warn if an argument list is modified to match the receiver]"
"-Ywarn-all[Enable all -Y warnings]"
"-Ywarn-dead-code[Warn when dead code is identified]"
"-Ywarn-inaccessible[Warn about inaccessible types in method signatures]"
"-Ywarn-nullary-override[Warn when non-nullary overrides nullary, e.g. def foo() over def foo]"
"-Ywarn-nullary-unit[Warn when nullary methods return Unit]"
"-Ywarn-numeric-widen[Warn when numerics are widened]"
"-Ywarn-value-discard[Warn when non-Unit expression results are unused]"
"-Ybuild-manager-debug[Generate debug information for the Refined Build Manager compiler]"
"-Ybuilder-debug\:-[Compile using the specified build manager (default\: none)]:manager:(none refined simple)"
"-Ycompletion-debug[Trace all tab completion activity]"
"-Ydebug[Increase the quantity of debugging output]"
"-Ydoc-debug[Trace all scaladoc activity]"
"-Yide-debug[Generate, validate and output trees using the interactive compiler]"
"-Yinfer-debug[Trace type inference and implicit search]"
"-Yissue-debug[Print stack traces when a context issues an error]"
"-Ypatmat-debug[Trace pattern matching translation]"
"-Ypmat-debug[Trace all pattern matcher activity]"
"-Ypos-debug[Trace position validation]"
"-Ypresentation-debug[Enable debugging output for the presentation compiler]"
"-Yreify-debug[Trace reification]"
"-Yrepl-debug[Trace all REPL activity]"
"-Ytyper-debug[Trace all type assignments]"
)
local -a scala_opts
scala_opts=(
"-e+[execute <string> as if entered in the repl]:string" \
"-howtorun+[what to run (default\: guess)]:execution mode:(script object jar guess)" \
"-i+[preload <file> before starting the repl]:file to preload:_files" \
"-nc[no compilation daemon\: do not use the fsc offline compiler]" \
"-save[save the compiled script in a jar for future use]"
)
case $words[$CURRENT] in
-X*) _arguments $X_opts;;
-Y*) _arguments $Y_opts;;
*) case $service in
scala) _arguments $scala_opts $shared_opts "*::filename:_files";;
scalac) _arguments $shared_opts "*::filename:_files";;
esac
esac
return 0

View file

@ -6,11 +6,17 @@
#
# zstyle :omz:plugins:ssh-agent agent-forwarding on
#
# To load multiple identies use the identities style, For
# To load multiple identities use the identities style, For
# example:
#
# zstyle :omz:plugins:ssh-agent id_rsa id_rsa2 id_github
# zstyle :omz:plugins:ssh-agent identities id_rsa id_rsa2 id_github
#
# To set the maximum lifetime of the identities, use the
# lifetime style. The lifetime may be specified in seconds
# or as described in sshd_config(5) (see TIME FORMATS).
# If left unspecified, the default lifetime is forever.
#
# zstyle :omz:plugins:ssh-agent lifetime 4h
#
# CREDITS
#
@ -27,15 +33,18 @@ local _plugin__forwarding
function _plugin__start_agent()
{
local -a identities
local lifetime
zstyle -s :omz:plugins:ssh-agent lifetime lifetime
# start ssh-agent and setup environment
/usr/bin/env ssh-agent | sed 's/^echo/#echo/' > ${_plugin__ssh_env}
/usr/bin/env ssh-agent ${lifetime:+-t} ${lifetime} | sed 's/^echo/#echo/' > ${_plugin__ssh_env}
chmod 600 ${_plugin__ssh_env}
. ${_plugin__ssh_env} > /dev/null
# load identities
zstyle -a :omz:plugins:ssh-agent identities identities
echo starting...
echo starting ssh-agent...
/usr/bin/ssh-add $HOME/.ssh/${^identities}
}

View file

@ -1,7 +1,13 @@
# Sublime Text 2 Aliases
#unamestr = 'uname'
local _sublime_darwin_subl=/Applications/Sublime\ Text\ 2.app/Contents/SharedSupport/bin/subl
local _sublime_darwin_paths
_sublime_darwin_paths=(
"/usr/local/bin/subl"
"$HOME/Applications/Sublime Text 2.app/Contents/SharedSupport/bin/subl"
"$HOME/Applications/Sublime Text.app/Contents/SharedSupport/bin/subl"
"/Applications/Sublime Text 2.app/Contents/SharedSupport/bin/subl"
"/Applications/Sublime Text.app/Contents/SharedSupport/bin/subl"
)
if [[ $('uname') == 'Linux' ]]; then
if [ -f '/usr/bin/sublime_text' ]; then
@ -9,13 +15,17 @@ if [[ $('uname') == 'Linux' ]]; then
else
st_run() { nohup /usr/bin/sublime-text $@ > /dev/null & }
fi
alias st=st_run
alias st=st_run
elif [[ $('uname') == 'Darwin' ]]; then
# Check if Sublime is installed in user's home application directory
if [[ -a $HOME/${_sublime_darwin_subl} ]]; then
alias st='$HOME/${_sublime_darwin_subl}'
else
alias st='${_sublime_darwin_subl}'
fi
for _sublime_path in $_sublime_darwin_paths; do
if [[ -a $_sublime_path ]]; then
alias subl="'$_sublime_path'"
alias st=subl
break
fi
done
fi
alias stt='st .'

Some files were not shown because too many files have changed in this diff.