Merge updates from upstream

ncanceill 2014-04-01 15:10:49 +02:00
commit 52a532bdf9
12 changed files with 324 additions and 95 deletions

View file

@@ -46,8 +46,8 @@ bindkey ' ' magic-space # [Space] - do history exp
 bindkey '^[[1;5C' forward-word    # [Ctrl-RightArrow] - move forward one word
 bindkey '^[[1;5D' backward-word   # [Ctrl-LeftArrow] - move backward one word

-if [[ "${terminfo[kdch1]}" != "" ]]; then
-  bindkey "${terminfo[kdch1]}" reverse-menu-complete   # [Shift-Tab] - move through the completion menu backwards
+if [[ "${terminfo[kcbt]}" != "" ]]; then
+  bindkey "${terminfo[kcbt]}" reverse-menu-complete    # [Shift-Tab] - move through the completion menu backwards
 fi

 bindkey '^?' backward-delete-char   # [Backspace] - delete backward

View file

@@ -26,8 +26,11 @@ function omz_termsupport_precmd {
 function omz_termsupport_preexec {
   emulate -L zsh
   setopt extended_glob
-  local CMD=${1[(wr)^(*=*|sudo|ssh|rake|-*)]} #cmd name only, or if this is sudo or ssh, the next cmd
+  # cmd name only, or if this is sudo or ssh, the next cmd
+  local CMD=${1[(wr)^(*=*|sudo|ssh|rake|-*)]:gs/%/%%}
   local LINE="${2:gs/%/%%}"
   title '$CMD' '%100>...>$LINE%<<'
 }

View file

@@ -0,0 +1,84 @@
# Autocompletion for homebrew-cask.
#
# This script intercepts calls to the brew plugin and adds autocompletion
# for the cask subcommand.
#
# Author: https://github.com/pstadler
compdef _brew-cask brew
_brew-cask()
{
  local curcontext="$curcontext" state line
  typeset -A opt_args

  _arguments -C \
    ':command:->command' \
    ':subcmd:->subcmd' \
    '*::options:->options'

  case $state in
    (command)
      __call_original_brew
      cask_commands=(
        'cask:manage casks'
      )
      _describe -t commands 'brew cask command' cask_commands ;;

    (subcmd)
      case "$line[1]" in
        cask)
          if (( CURRENT == 3 )); then
            local -a subcommands
            subcommands=(
              "alfred:used to modify Alfred's scope to include the Caskroom"
              'audit:verifies installability of casks'
              'checklinks:checks for bad cask links'
              'cleanup:cleans up cached downloads'
              'create:creates a cask of the given name and opens it in an editor'
              'doctor:checks for configuration issues'
              'edit:edits the cask of the given name'
              'fetch:downloads Cask resources to local cache'
              'home:opens the homepage of the cask of the given name'
              'info:displays information about the cask of the given name'
              'install:installs the cask of the given name'
              'list:with no args, lists installed casks; given installed casks, lists installed files'
              'search:searches all known casks'
              'uninstall:uninstalls the cask of the given name'
              "update:a synonym for 'brew update'"
            )
            _describe -t commands "brew cask subcommand" subcommands
          fi ;;
        *)
          __call_original_brew ;;
      esac ;;

    (options)
      local -a casks installed_casks
      local expl
      case "$line[2]" in
        list|uninstall)
          __brew_installed_casks
          _wanted installed_casks expl 'installed casks' compadd -a installed_casks ;;
        audit|edit|home|info|install)
          __brew_all_casks
          _wanted casks expl 'all casks' compadd -a casks ;;
      esac ;;
  esac
}

__brew_all_casks() {
  casks=(`brew cask search`)
}

__brew_installed_casks() {
  installed_casks=(`brew cask list`)
}

__call_original_brew()
{
  local ret=1
  _call_function ret _brew
  compdef _brew-cask brew
}
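# Example (hypothetical session, for illustration only; not part of the plugin
# file): once the brew plugin's _brew completion and this script are loaded,
#   brew cask <TAB>           offers the cask subcommands listed above
#   brew cask install <TAB>   offers cask names gathered via `brew cask search`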

View file

@@ -21,8 +21,8 @@ fi
 # Aliases ###################################################################
 # These are for more obscure uses of apt-get and aptitude that aren't covered
 # below.
-alias ag='apt-get'
-alias ap='aptitude'
+alias age='apt-get'
+alias api='aptitude'
 # Some self-explanatory aliases
 alias acs="apt-cache search"

View file

@@ -2,7 +2,8 @@
 # Docker autocompletion for oh-my-zsh
 # Requires: Docker installed
-# Author : Azaan (@aeonazaan)
+# Author: Azaan (@aeonazaan)
+# Updates: Bob Maerten (@bobmaerten) for Docker v0.9+

 # ----- Helper functions
@@ -24,21 +25,30 @@ __docker_images() {
 # Seperate function for each command, makes extension easier later
 # ---------------------------
 __attach() {
+  _arguments \
+    '--no-stdin[Do not attach stdin]' \
+    '--sig-proxy[Proxify all received signal to the process (even in non-tty mode)]'
   __docker_containers
 }

 __build() {
   _arguments \
-    '-q=false[Suppress verbose build output]' \
-    '-t="[Tag to be applied to the resulting image in case of success]' \
+    '--no-cache[Do not use cache when building the image]' \
+    '(-q,--quiet)'{-q,--quiet}'[Suppress the verbose output generated by the containers]' \
+    '--rm[Remove intermediate containers after a successful build]' \
+    '(-t,--tag=)'{-t,--tag=}'[Repository name (and optionally a tag) to be applied to the resulting image in case of success]' \
    '*:files:_files'
 }

 __commit() {
   _arguments \
-    '-author="[Author]' \
-    '-m="[Commit message]' \
-    '-run="[Config automatically applied when the image is run.\n]'
+    '(-a,--author=)'{-a,--author=}'[Author (eg. "John Hannibal Smith <hannibal@a-team.com>"]' \
+    '(-m,--message=)'{-m,--message=}'[Commit message]' \
+    '--run=[Config automatically applied when the image is run.]'
+  __docker_containers
+}
+
+__cp() {
   __docker_containers
 }
@@ -46,21 +56,29 @@ __diff() {
   __docker_containers
 }

+__events() {
+  _arguments \
+    '--since=[Show previously created events and then stream.]'
+}
+
 __export() {
   __docker_containers
 }

 __history() {
+  _arguments \
+    '--no-trunc=[Don''t truncate output]' \
+    '(-q,--quiet)'{-q,--quiet}'[Only show numeric IDs]'
   __docker_images
 }

 __images() {
   _arguments \
-    '-a[show all images]' \
-    '-notrunc[dont truncate output]' \
-    '-q[only show numeric IDs]' \
-    '-viz[output graph in graphviz format]'
+    '(-a,--all)'{-a,--all}'[Show all images (by default filter out the intermediate images used to build)]' \
+    '--no-trunc[Don''t truncate output]' \
+    '(-q,--quiet=)'{-q,--quiet=}'[Only show numeric IDs]' \
+    '(-t,--tree=)'{-t,--tree=}'[Output graph in tree format]' \
+    '(-v,--viz=)'{-v,--viz=}'[Output graph in graphviz format]'
   __docker_images
 }
@@ -86,14 +104,20 @@ __kill() {
   __docker_containers
 }

+__load() {
+  _arguments '*:files:_files'
+}
+
 __login() {
   _arguments \
-    '-e="[email]' \
-    '-p="[password]' \
-    '-u="[username]' \
+    '(-e,--email=)'{-e,-email=}'[Email]' \
+    '(-p,--password=)'{-p,-password=}'[Password]' \
+    '(-u,--username=)'{-u,-username=}'[Username]'
 }

 __logs() {
+  _arguments \
+    '(-f,--follow)'{-f,-follow}'[Follow log output]'
   __docker_containers
 }
@@ -107,77 +131,107 @@ __top() {
 __ps() {
   _arguments \
-    '-a[Show all containers. Only running containers are shown by default.]' \
-    '-beforeId="[Show only container created before Id, include non-running ones.]' \
-    '-l[Show only the latest created container, include non-running ones.]' \
-    '-n=[Show n last created containers, include non-running ones.]' \
-    '-notrunc[Dont truncate output]' \
-    '-q[Only display numeric IDs]' \
-    '-s[Display sizes]' \
-    '-sinceId="[Show only containers created since Id, include non-running ones.]'
+    '(-a,--all)'{-a,--all}'[Show all containers. Only running containers are shown by default.]' \
+    '--before-id=[Show only container created before Id, include non-running ones.]' \
+    '(-l,--latest)'{-l,--latest}'[Show only the latest created container, include non-running ones.]' \
+    '-n=[Show n last created containers, include non-running ones. default=-1.]' \
+    '--no-trunc[Don''t truncate output]' \
+    '(-q,--quiet)'{-q,--quiet}'[Only display numeric IDs]' \
+    '(-s,--size)'{-s,--size}'[Display sizes]' \
+    '--since-id=[Show only containers created since Id, include non-running ones.]'
 }

 __pull() {
-  _arguments '-t="[Download tagged image in repository]'
+  _arguments \
+    '(-t,--tag=)'{-t,--tag=}'[Download tagged image in repository]'
 }

 __push() {
+  # no arguments
 }

 __restart() {
-  _arguments '-t=[number of seconds to try to stop before killing]'
+  _arguments \
+    '(-t,--time=)'{-t,--time=}'[Number of seconds to try to stop for before killing the container. Once killed it will then be restarted. Default=10]'
   __docker_containers
 }

 __rm() {
-  _arguments '-v[Remove the volumes associated to the container]'
+  _arguments \
+    '(-f,--force=)'{-f,--force=}'[Force removal of running container]' \
+    '(-l,--link=)'{-l,--link=}'[Remove the specified link and not the underlying container]' \
+    '(-v,--volumes=)'{-v,--volumes=}'[Remove the volumes associated to the container]'
   __docker_containers
 }

 __rmi() {
+  _arguments \
+    '(-f,--force=)'{-f,--force=}'[Force]' \
   __docker_images
 }

 __run() {
   _arguments \
-    '-a=[Attach to stdin, stdout or stderr.]' \
-    '-c=[CPU shares (relative weight)]' \
-    '-d[Detached mode: leave the container running in the background]' \
-    '-dns=[Set custom dns servers]' \
-    '-e=[Set environment variables]' \
-    '-entrypoint="[Overwrite the default entrypoint of the image]' \
-    '-h="[Container host name]' \
-    '-i[Keep stdin open even if not attached]' \
-    '-m=[Memory limit (in bytes)]' \
-    '-p=[Expose a containers port to the host (use docker port to see the actual mapping)]' \
-    '-t[Allocate a pseudo-tty]' \
-    '-u="[Username or UID]' \
-    '-v=[Bind mount a volume (e.g. from the host: -v /host:/container, from docker: -v /container)]' \
-    '-volumes-from="[Mount volumes from the specified container]'
+    '(-P,--publish-all=)'{-P,--publish-all=}'[Publish all exposed ports to the host interfaces]' \
+    '(-a,--attach=)'{-a,--attach=}'[Attach to stdin, stdout or stderr.]' \
+    '(-c,--cpu-shares=)'{-c,--cpu-shares=}': CPU shares (relative weight)]' \
+    '--cidfile=[Write the container ID to the file]' \
+    '(-d,--detach=)'{-d,--detach=}'[Detached mode: Run container in the background, print new container id]' \
+    '--dns=[Set custom dns servers]' \
+    '(-e,--env=)'{-e,--env=}'[Set environment variables]' \
+    '--entrypoint=[Overwrite the default entrypoint of the image]' \
+    '--expose=[Expose a port from the container without publishing it to your host]' \
+    '(-h,--hostname=)'{-h,--hostname=}'[Container host name]' \
+    '(-i,--interactive=)'{-i,--interactive=}'[Keep stdin open even if not attached]' \
+    '--link=[Add link to another container (name:alias)]' \
+    '--lxc-conf=[Add custom lxc options -lxc-conf="lxc.cgroup.cpuset.cpus = 0,1"]' \
+    '(-m,--memory=)'{-m,--memory=}'[Memory limit (format: <number><optional unit>, where unit = b, k, m or g)]' \
+    '(-n,--networking=)'{-n,--networking=}'[Enable networking for this container]' \
+    '--name=[Assign a name to the container]' \
+    '(-p,--publish=)'{-p,--publish=}'[Publish a container''s port to the host (format: ip:hostPort:containerPort | ip::containerPort | hostPort:containerPort) (use "docker port" to see the actual mapping)]' \
+    '--privileged=[Give extended privileges to this container]' \
+    '--rm=[Automatically remove the container when it exits (incompatible with -d)]' \
+    '--sig-proxy=[Proxify all received signal to the process (even in non-tty mode)]' \
+    '(-t,--tty=)'{-t,--tty=}'[Allocate a pseudo-tty]' \
+    '(-u,--user=)'{-u,--user=}'[Username or UID]' \
+    '(-v,--volume=)'{-v,--volume=}'[Bind mount a volume (e.g. from the host: -v /host:/container, from docker: -v /container)]' \
+    '--volumes-from=[Mount volumes from the specified container(s)]' \
+    '(-w,--workdir=)'{-w,--workdir=}'[Working directory inside the container]'
   __docker_images
 }

 __search() {
-  _arguments '-notrunc[Dont truncate output]'
+  _arguments \
+    '--no-trunc=[Don''t truncate output]' \
+    '-s,--stars=)'{-s,--stars=}'[Only displays with at least xxx stars]' \
+    '-t,--trusted=)'{-t,--trusted=}'[Only show trusted builds]'
+}
+
+__save() {
+  __docker_images
 }

 __start() {
+  _arguments \
+    '(-a,--attach=)'{-a,--attach=}'[Attach container''s stdout/stderr and forward all signals to the process]' \
+    '(-i,--interactive=)'{-i, --interactive=}'[Attach container''s stdin]'
   __docker_containers
 }

 __stop() {
-  _arguments '-t=[number of seconds to try to stop before killing]'
+  _arguments \
+    '(-t,--time=)'{-t,--time=}'[Number of seconds to wait for the container to stop before killing it.]'
   __docker_containers
 }

 __tag() {
-  _arguments '-f[Force]'
+  _arguments \
+    '(-f,--force=)'{-f,--force=}'[Force]'
   __docker_images
 }

 __version() {
+  # no arguments
 }

 __wait() {
@@ -192,7 +246,9 @@ _1st_arguments=(
   "attach":"Attach to a running container"
   "build":"Build a container from a Dockerfile"
   "commit":"Create a new image from a container's changes"
+  "cp":"Copy files/folders from the containers filesystem to the host path"
   "diff":"Inspect changes on a container's filesystem"
+  "events":"Get real time events from the server"
   "export":"Stream the contents of a container as a tar archive"
   "history":"Show the history of an image"
   "images":"List images"
@@ -201,10 +257,10 @@ _1st_arguments=(
   "insert":"Insert a file in an image"
   "inspect":"Return low-level information on a container"
   "kill":"Kill a running container"
+  "load":"Load an image from a tar archive"
   "login":"Register or Login to the docker registry server"
   "logs":"Fetch the logs of a container"
   "port":"Lookup the public-facing port which is NAT-ed to PRIVATE_PORT"
-  "top":"Lookup the running processes of a container"
   "ps":"List containers"
   "pull":"Pull an image or a repository from the docker registry server"
   "push":"Push an image or a repository to the docker registry server"
@@ -212,10 +268,12 @@ _1st_arguments=(
   "rm":"Remove one or more containers"
   "rmi":"Remove one or more images"
   "run":"Run a command in a new container"
+  "save":"Save an image to a tar archive"
   "search":"Search for an image in the docker index"
   "start":"Start a stopped container"
   "stop":"Stop a running container"
   "tag":"Tag an image into a repository"
+  "top":"Lookup the running processes of a container"
   "version":"Show the docker version information"
   "wait":"Block until a container stops, then print its exit code"
 )
@@ -230,13 +288,17 @@ fi
 local -a _command_args
 case "$words[1]" in
   attach)
-    __docker_containers ;;
+    __attach ;;
   build)
     __build ;;
   commit)
     __commit ;;
+  cp)
+    __cp ;;
   diff)
     __diff ;;
+  events)
+    __events ;;
   export)
     __export ;;
   history)
@@ -253,14 +315,14 @@ case "$words[1]" in
     __inspect ;;
   kill)
     __kill ;;
+  load)
+    __load ;;
   login)
     __login ;;
   logs)
     __logs ;;
   port)
     __port ;;
-  top)
-    __top ;;
   ps)
     __ps ;;
   pull)
@@ -275,6 +337,8 @@ case "$words[1]" in
     __rmi ;;
   run)
     __run ;;
+  save)
+    __save ;;
   search)
     __search ;;
   start)
@@ -283,6 +347,8 @@ case "$words[1]" in
     __stop ;;
   tag)
     __tag ;;
+  top)
+    __top ;;
   version)
     __version ;;
   wait)

View file

@@ -3,7 +3,7 @@
 # get list of available X windows.
 x=`emacsclient --alternate-editor '' --eval '(x-display-list)' 2>/dev/null`

-if [ -z "$x" ] ;then
+if [ -z "$x" ] || [ "$x" = "nil" ] ;then
     # Create one if there is no X window yet.
     emacsclient --alternate-editor "" --create-frame "$@"
 else

View file

@@ -0,0 +1,42 @@
# jsontools
Handy command line tools for dealing with json data.
## Tools
- **pp_json** - pretty prints json
- **is_json** - returns true if valid json; false otherwise
- **urlencode_json** - returns a url encoded string for the given json
- **urldecode_json** - returns decoded json for the given url encoded string
## Usage
Usage is simple...just take your json data and pipe it into the appropriate jsontool.
```sh
<json data> | <jsontools tool>
```
## Examples
##### pp_json
```sh
# curl json data and pretty print the results
curl https://coderwall.com/bobwilliams.json | pp_json
```
##### is_json
```sh
# check whether the contents of an existing json file are valid json
less data.json | is_json
```
##### urlencode_json
```sh
# json data directly from the command line
echo '{"b":2, "a":1}' | urlencode_json
```
##### urldecode_json
```sh
# url encoded string to decode
echo '%7B%22b%22:2,%20%22a%22:1%7D%0A' | urldecode_json
```
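##### combining tools
The tools also compose in a pipeline; for example, reusing the sample data above:
```sh
# url decode, then pretty print the result
echo '%7B%22b%22:2,%20%22a%22:1%7D%0A' | urldecode_json | pp_json
```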

View file

@@ -2,15 +2,38 @@
 # Adds command line aliases useful for dealing with JSON

 if [[ $(whence $JSONTOOLS_METHOD) = "" ]]; then
   JSONTOOLS_METHOD=""
 fi

 if [[ $(whence node) != "" && ( "x$JSONTOOLS_METHOD" = "x" || "x$JSONTOOLS_METHOD" = "xnode" ) ]]; then
-  alias pp_json='node -e "console.log(JSON.stringify(process.argv[1]), null, 4)"'
+  alias pp_json='xargs -0 node -e "console.log(JSON.stringify(JSON.parse(process.argv[1]), null, 4));"'
+  alias is_json='xargs -0 node -e "try {json = JSON.parse(process.argv[1]);} catch (e) { console.log(false); json = null; } if(json) { console.log(true); }"'
+  alias urlencode_json='xargs -0 node -e "console.log(encodeURIComponent(process.argv[1]))"'
+  alias urldecode_json='xargs -0 node -e "console.log(decodeURIComponent(process.argv[1]))"'
 elif [[ $(whence python) != "" && ( "x$JSONTOOLS_METHOD" = "x" || "x$JSONTOOLS_METHOD" = "xpython" ) ]]; then
   alias pp_json='python -mjson.tool'
+  alias is_json='python -c "
+import json, sys;
+try:
+  json.loads(sys.stdin.read())
+except ValueError, e:
+  print False
+else:
+  print True
+sys.exit(0)"'
+  alias urlencode_json='python -c "
+import urllib, json, sys;
+print urllib.quote_plus(sys.stdin.read())
+sys.exit(0)"'
+  alias urldecode_json='python -c "
+import urllib, json, sys;
+print urllib.unquote_plus(sys.stdin.read())
+sys.exit(0)"'
 elif [[ $(whence ruby) != "" && ( "x$JSONTOOLS_METHOD" = "x" || "x$JSONTOOLS_METHOD" = "xruby" ) ]]; then
   alias pp_json='ruby -e "require \"json\"; require \"yaml\"; puts JSON.parse(STDIN.read).to_yaml"'
+  alias is_json='ruby -e "require \"json\"; begin; JSON.parse(STDIN.read); puts true; rescue Exception => e; puts false; end"'
+  alias urlencode_json='ruby -e "require \"uri\"; puts URI.escape(STDIN.read)"'
+  alias urldecode_json='ruby -e "require \"uri\"; puts URI.unescape(STDIN.read)"'
 fi

 unset JSONTOOLS_METHOD

View file

@@ -21,8 +21,8 @@
 rack_root(){
   setopt chaselinks
-  local orgdir=$(pwd)
-  local basedir=$(pwd)
+  local orgdir="$(pwd)"
+  local basedir="$(pwd)"

   while [[ $basedir != '/' ]]; do
     test -e "$basedir/config.ru" && break
@@ -30,7 +30,7 @@ rack_root(){
     basedir="$(pwd)"
   done

-  builtin cd $orgdir 2>/dev/null
+  builtin cd "$orgdir" 2>/dev/null
   [[ ${basedir} == "/" ]] && return 1
   echo $basedir
 }
@@ -56,19 +56,19 @@ kapow(){
 compctl -W ~/.pow -/ kapow

 powit(){
-  local basedir=$(pwd)
+  local basedir="$(pwd)"
   local vhost=$1
   [ ! -n "$vhost" ] && vhost=$(rack_root_detect)
   if [ ! -h ~/.pow/$vhost ]
   then
     echo "pow: Symlinking your app with pow. ${vhost}"
-    [ ! -d ~/.pow/${vhost} ] && ln -s $basedir ~/.pow/$vhost
+    [ ! -d ~/.pow/${vhost} ] && ln -s "$basedir" ~/.pow/$vhost
     return 1
   fi
 }

 powed(){
-  local basedir=$(rack_root)
+  local basedir="$(rack_root)"
   find ~/.pow/ -type l -lname "*$basedir*" -exec basename {}'.dev' \;
 }
@@ -82,4 +82,4 @@ repow(){
 }
 # View the standard out (puts) from any pow app
 alias kaput="tail -f ~/Library/Logs/Pow/apps/*"

View file

@@ -0,0 +1,23 @@
# rake-fast
Fast rake autocompletion plugin.
This script caches the output for later usage and significantly speeds it up. It generates a .rake_tasks cache file in parallel to the Rakefile. It also checks the file modification dates to see if it needs to regenerate the cache file.
This is entirely based on [this pull request by Ullrich Schäfer](https://github.com/robb/.dotfiles/pull/10/), which is inspired by [this Ruby on Rails trick from 2006](http://weblog.rubyonrails.org/2006/3/9/fast-rake-task-completion-for-zsh/).
Think about that. 2006.
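In rough terms, the approach looks like this (an illustrative sketch only; `_rake_cached` is a made-up name, and the real functions live in `rake-fast.plugin.zsh`):
```zsh
# Sketch of the caching idea: rebuild .rake_tasks whenever the Rakefile is
# newer than the cache, then complete from the cached task names.
_rake_cached() {
  if [[ ! -f .rake_tasks || Rakefile -nt .rake_tasks ]]; then
    rake --silent --tasks | cut -d " " -f 2 > .rake_tasks
  fi
  compadd $(cat .rake_tasks)
}
compdef _rake_cached rake
```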
## Installation
Just add the plugin to your `.zshrc`:
```bash
plugins=(foo bar rake-fast)
```
You might consider adding `.rake_tasks` to your [global .gitignore](https://help.github.com/articles/ignoring-files#global-gitignore)
## Usage
`rake`, then press tab
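For example, with a Rails-style Rakefile the completion menu might look like this (task names are illustrative):
```
$ rake <TAB>
db:migrate    db:schema:load    db:seed    test
```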

View file

@@ -1,20 +1,3 @@
-# rake-fast
-# Fast rake autocompletion plugin for oh-my-zsh
-# This script caches the output for later usage and significantly speeds it up.
-# It generates a .rake_tasks file in parallel to the Rakefile.
-# You'll want to add `.rake_tasks` to your global .git_ignore file:
-# https://help.github.com/articles/ignoring-files#global-gitignore
-# You can force .rake_tasks to refresh with:
-# $ rake_refresh
-# This is entirely based on Ullrich Schäfer's work
-# (https://github.com/robb/.dotfiles/pull/10/),
-# which is inspired by this Ruby on Rails trick from 2006:
-# http://weblog.rubyonrails.org/2006/3/9/fast-rake-task-completion-for-zsh/
 _rake_refresh () {
   if [ -f .rake_tasks ]; then
     rm .rake_tasks
@@ -27,8 +10,13 @@ _rake_refresh () {
 _rake_does_task_list_need_generating () {
   if [ ! -f .rake_tasks ]; then return 0;
   else
-    accurate=$(stat -f%m .rake_tasks)
-    changed=$(stat -f%m Rakefile)
+    if [[ $(uname -s) == 'Darwin' ]]; then
+      accurate=$(stat -f%m .rake_tasks)
+      changed=$(stat -f%m Rakefile)
+    else
+      accurate=$(stat -c%Y .rake_tasks)
+      changed=$(stat -c%Y Rakefile)
+    fi
     return $(expr $accurate '>=' $changed)
   fi
 }

View file

@@ -11,34 +11,34 @@ ZSH_THEME="robbyrussell"
 # alias zshconfig="mate ~/.zshrc"
 # alias ohmyzsh="mate ~/.oh-my-zsh"

-# Set this to use case-sensitive completion
+# Uncomment the following line to use case-sensitive completion.
 # CASE_SENSITIVE="true"

-# Uncomment this to disable bi-weekly auto-update checks
+# Uncomment the following line to disable bi-weekly auto-update checks.
 # DISABLE_AUTO_UPDATE="true"

-# Uncomment to change how often to auto-update? (in days)
+# Uncomment the following line to change how often to auto-update (in days).
 # export UPDATE_ZSH_DAYS=13

-# Uncomment following line if you want to disable colors in ls
+# Uncomment the following line to disable colors in ls.
 # DISABLE_LS_COLORS="true"

-# Uncomment following line if you want to disable autosetting terminal title.
+# Uncomment the following line to disable auto-setting terminal title.
 # DISABLE_AUTO_TITLE="true"

-# Uncomment following line if you want to disable command autocorrection
+# Uncomment the following line to disable command auto-correction.
 # DISABLE_CORRECTION="true"

-# Uncomment following line if you want red dots to be displayed while waiting for completion
+# Uncomment the following line to display red dots whilst waiting for completion.
 # COMPLETION_WAITING_DOTS="true"

-# Uncomment following line if you want to disable marking untracked files under
-# VCS as dirty. This makes repository status check for large repositories much,
-# much faster.
+# Uncomment the following line if you want to disable marking untracked files
+# under VCS as dirty. This makes repository status check for large repositories
+# much, much faster.
 # DISABLE_UNTRACKED_FILES_DIRTY="true"

-# Uncomment following line if you want to the command execution time stamp shown
-# in the history command output.
+# Uncomment the following line if you want to change the command execution time
+# stamp shown in the history command output.
 # The optional three formats: "mm/dd/yyyy"|"dd.mm.yyyy"|"yyyy-mm-dd"
 # HIST_STAMPS="mm/dd/yyyy"
@@ -60,7 +60,7 @@ export PATH=$HOME/bin:/usr/local/bin:$PATH
 # You may need to manually set your language environment
 # export LANG=en_US.UTF-8

-# # Preferred editor for local and remote sessions
+# Preferred editor for local and remote sessions
 # if [[ -n $SSH_CONNECTION ]]; then
 #   export EDITOR='vim'
 # else