Merge branch 'ohmyzsh:master' into master

Santhosh Janardhanan 2023-07-08 00:04:37 -04:00 committed by GitHub
commit 1b5f65fadd
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
533 changed files with 50452 additions and 12577 deletions


@ -0,0 +1,9 @@
# Do nothing if op is not installed
(( ${+commands[op]} )) || return
# Load op completion
eval "$(op completion zsh)"
compdef _op op
# Load opswd function
autoload -Uz opswd


@ -0,0 +1,40 @@
# 1Password
This plugin adds 1Password functionality to oh-my-zsh.
To use, add `1password` to the list of plugins in your `.zshrc` file:
```zsh
plugins=(... 1password)
```
Then, you can use the command `opswd` to copy passwords for services into your
clipboard.
## `opswd`
The `opswd` command is a wrapper around the `op` command. It takes a service
name as an argument and copies the username, then the password for that service,
to the clipboard, waiting for user confirmation between each step.
If the service also has a TOTP, it is copied to the clipboard after that confirmation.
Finally, after 20 seconds, the clipboard is cleared.
For example, `opswd github.com` will put your GitHub username into your clipboard. Then,
it will ask for confirmation to continue, and copy the password to your clipboard. Finally,
if a TOTP is available, it will be copied to the clipboard after your confirmation.
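Based on the messages printed by the bundled `opswd` function, that run looks roughly like this:
```
$ opswd github.com
✔ username for service github.com copied to the clipboard. Press Enter to continue
✔ password for github.com copied to clipboard. Press Enter to continue
✔ TOTP for github.com copied to clipboard
```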
This function has completion support, so you can use tab completion to select which
service you want to get.
> NOTE: you need to be signed in for `opswd` to work. If you are using biometric unlock,
> 1Password CLI will automatically prompt you to sign in. See:
>
> - [Get started with 1Password CLI 2: Sign in](https://developer.1password.com/docs/cli/get-started#sign-in)
> - [Sign in to your 1Password account manually](https://developer.1password.com/docs/cli/sign-in-manually)
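For manual sign-in, the linked guide amounts to evaluating the session export that `op signin` prints (a minimal sketch; see the docs above for adding your account first):
```zsh
eval "$(op signin)"
```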
## Requirements
- [1Password CLI 2](https://developer.1password.com/docs/cli/get-started#install)
> NOTE: if you're using 1Password CLI 1, [see how to upgrade to CLI 2](https://developer.1password.com/docs/cli/upgrade).

19
plugins/1password/_opswd Normal file

@ -0,0 +1,19 @@
#compdef opswd
function _opswd() {
local -a services
services=("${(@f)$(op item list --categories Login --cache 2>/dev/null | awk 'NR != 1 { print $2 }')}")
[[ -z "$services" ]] || compadd -a -- services
}
# TODO: 2022-03-26: Remove support for op CLI 1
autoload -Uz is-at-least
is-at-least 2.0.0 $(op --version) || {
function _opswd() {
local -a services
services=("${(@f)$(op list items --categories Login 2>/dev/null | op get item - --fields title 2>/dev/null)}")
[[ -z "$services" ]] || compadd -a -- services
}
}
_opswd "$@"

90
plugins/1password/opswd Normal file

@ -0,0 +1,90 @@
#autoload
# opswd copies the username, then the password, then the one-time password (if
# any) of the named service to the clipboard, prompting for Enter between steps.
# The clipboard is cleared 20 seconds afterwards.
function opswd() {
if [[ $# -lt 1 ]]; then
echo "Usage: opswd <service>"
return 1
fi
local service=$1
# If not logged in, print error and return
op user list > /dev/null || return
local username
# Copy the username to the clipboard
if ! username=$(op item get "$service" --fields username 2>/dev/null); then
echo "error: could not obtain username for $service"
return 1
fi
echo -n "$username" | clipcopy
echo "✔ username for service $service copied to the clipboard. Press Enter to continue"
read
local password
# Copy the password to the clipboard
if ! password=$(op item get "$service" --fields password 2>/dev/null); then
echo "error: could not obtain password for $service"
return 1
fi
echo -n "$password" | clipcopy
echo "✔ password for $service copied to clipboard. Press Enter to continue"
read
# If there's a one time password, copy it to the clipboard
local totp
if totp=$(op item get --otp "$service" 2>/dev/null) && [[ -n "$totp" ]]; then
echo -n "$totp" | clipcopy
echo "✔ TOTP for $service copied to clipboard"
fi
(sleep 20 && clipcopy </dev/null 2>/dev/null) &!
}
# TODO: 2022-03-26: Remove support for op CLI 1
autoload -Uz is-at-least
is-at-least 2.0.0 $(op --version) || {
print -ru2 ${(%):-"%F{yellow}opswd: usage with op version $(op --version) is deprecated. Upgrade to CLI 2 and reload zsh.
For instructions, see https://developer.1password.com/docs/cli/upgrade.%f"}
# opswd puts the password of the named service into the clipboard. If there's a
# one time password, it will be copied into the clipboard after 10 seconds. The
# clipboard is cleared after another 20 seconds.
function opswd() {
if [[ $# -lt 1 ]]; then
echo "Usage: opswd <service>"
return 1
fi
local service=$1
# If not logged in, print error and return
op list users > /dev/null || return
local password
# Copy the password to the clipboard
if ! password=$(op get item "$service" --fields password 2>/dev/null); then
echo "error: could not obtain password for $service"
return 1
fi
echo -n "$password" | clipcopy
echo "✔ password for $service copied to clipboard"
# If there's a one time password, copy it to the clipboard after 5 seconds
local totp
if totp=$(op get totp "$service" 2>/dev/null) && [[ -n "$totp" ]]; then
sleep 10 && echo -n "$totp" | clipcopy
echo "✔ TOTP for $service copied to clipboard"
fi
(sleep 20 && clipcopy </dev/null 2>/dev/null) &!
}
}
opswd "$@"

13
plugins/ag/README.md Normal file

@ -0,0 +1,13 @@
# The Silver Searcher
This plugin provides completion support for [`ag`](https://github.com/ggreer/the_silver_searcher).
To use it, add ag to the plugins array in your zshrc file.
```zsh
plugins=(... ag)
```
## INSTALLATION NOTES
Besides oh-my-zsh, `ag` needs to be installed by following these steps: https://github.com/ggreer/the_silver_searcher#installing.

66
plugins/ag/_ag Normal file

@ -0,0 +1,66 @@
#compdef ag
#autoload
typeset -A opt_args
# Took the liberty of not listing every option… especially aliases and -D
_ag () {
local -a _1st_arguments
_1st_arguments=(
'--ackmate:Print results in AckMate-parseable format'
{'-A','--after'}':[LINES] Print lines after match (Default: 2)'
{'-B','--before'}':[LINES] Print lines before match (Default: 2)'
'--break:Print newlines between matches in different files'
'--nobreak:Do not print newlines between matches in different files'
{'-c','--count'}':Only print the number of matches in each file'
'--color:Print color codes in results (Default: On)'
'--nocolor:Do not print color codes in results'
'--color-line-number:Color codes for line numbers (Default: 1;33)'
'--color-match:Color codes for result match numbers (Default: 30;43)'
'--color-path:Color codes for path names (Default: 1;32)'
'--column:Print column numbers in results'
{'-H','--heading'}':Print file names (On unless searching a single file)'
'--noheading:Do not print file names (On unless searching a single file)'
'--line-numbers:Print line numbers even for streams'
{'-C','--context'}':[LINES] Print lines before and after matches (Default: 2)'
'-g:[PATTERN] Print filenames matching PATTERN'
{'-l','--files-with-matches'}':Only print filenames that contain matches'
{'-L','--files-without-matches'}':Only print filenames that do not contain matches'
'--no-numbers:Do not print line numbers'
{'-o','--only-matching'}':Prints only the matching part of the lines'
'--print-long-lines:Print matches on very long lines (Default: 2k characters)'
'--passthrough:When searching a stream, print all lines even if they do not match'
'--silent:Suppress all log messages, including errors'
'--stats:Print stats (files scanned, time taken, etc.)'
'--vimgrep:Print results like vim :vimgrep /pattern/g would'
{'-0','--null'}':Separate filenames with null (for "xargs -0")'
{'-a','--all-types'}':Search all files (does not include hidden files / .gitignore)'
'--depth:[NUM] Search up to NUM directories deep (Default: 25)'
{'-f','--follow'}':Follow symlinks'
{'-G','--file-search-regex'}':[PATTERN] Limit search to filenames matching PATTERN'
'--hidden:Search hidden files (obeys .*ignore files)'
{'-i','--ignore-case'}':Match case insensitively'
'--ignore:[PATTERN] Ignore files/directories matching PATTERN'
{'-m','--max-count'}':[NUM] Skip the rest of a file after NUM matches (Default: 10k)'
{'-p','--path-to-agignore'}':[PATH] Use .agignore file at PATH'
{'-Q','--literal'}':Do not parse PATTERN as a regular expression'
{'-s','--case-sensitive'}':Match case'
{'-S','--smart-case'}':Insensitive match unless PATTERN has uppercase (Default: On)'
'--search-binary:Search binary files for matches'
{'-t','--all-text'}':Search all text files (Hidden files not included)'
{'-u','--unrestricted'}':Search all files (ignore .agignore and _all_)'
{'-U','--skip-vcs-ignores'}':Ignore VCS files (still obey .agignore)'
{'-v','--invert-match'}':Invert match'
{'-w','--word-regexp'}':Only match whole words'
{'-z','--search-zip'}':Search contents of compressed (e.g., gzip) files'
'--list-file-types:list of supported file types'
)
if [[ $words[-1] =~ "^-" ]]; then
_describe -t commands "ag options" _1st_arguments && ret=0
else
_files && ret=0
fi
}

1
plugins/aliases/.gitignore vendored Normal file

@ -0,0 +1 @@
__pycache__

28
plugins/aliases/README.md Normal file

@ -0,0 +1,28 @@
# Aliases cheatsheet
**Maintainer:** [@hqingyi](https://github.com/hqingyi)
With lots of amazing third-party aliases installed, this plugin helps you list the
shortcuts that are currently available based on the plugins you have enabled.
To use it, add `aliases` to the plugins array in your zshrc file:
```zsh
plugins=(aliases)
```
Requirements: Python needs to be installed.
## Usage
- `acs`: show all aliases by group
- `acs -h/--help`: print help message
- `acs <keyword(s)>`: filter and highlight aliases by `<keyword>`
- `acs -g <group>/--group <group>`: show only aliases for group `<group>`. The flag can be passed multiple times to show several groups
- `acs --groups`: show only group names
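For example, a couple of typical invocations (the keyword here is arbitrary):
```zsh
acs git       # filter and highlight aliases whose name or expansion contains "git"
acs --groups  # print only the group names
```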
![screenshot](https://cloud.githubusercontent.com/assets/3602957/11581913/cb54fb8a-9a82-11e5-846b-5a67f67ad9ad.png)


@ -0,0 +1,14 @@
# Handle $0 according to the standard:
# https://zdharma-continuum.github.io/Zsh-100-Commits-Club/Zsh-Plugin-Standard.html
0="${${ZERO:-${0:#$ZSH_ARGZERO}}:-${(%):-%N}}"
0="${${(M)0:#/*}:-$PWD/$0}"
eval '
function acs(){
(( $+commands[python3] )) || {
echo "[error] No python executable detected"
return
}
alias | python3 "'"${0:h}"'/cheatsheet.py" "$@"
}
'


@ -0,0 +1,69 @@
#!/usr/bin/env python3
import sys
import itertools
import termcolor
import argparse
def parse(line):
left = line[0:line.find('=')].strip()
right = line[line.find('=')+1:].strip('\'"\n ')
try:
cmd = next(part for part in right.split() if len([char for char in '=<>' if char in part])==0)
except StopIteration:
cmd = right
return (left, right, cmd)
def cheatsheet(lines):
exps = [ parse(line) for line in lines ]
exps.sort(key=lambda exp:exp[2])
cheatsheet = {'_default': []}
for key, group in itertools.groupby(exps, lambda exp:exp[2]):
group_list = [ item for item in group ]
if len(group_list)==1:
target_aliases = cheatsheet['_default']
else:
if key not in cheatsheet:
cheatsheet[key] = []
target_aliases = cheatsheet[key]
target_aliases.extend(group_list)
return cheatsheet
def pretty_print_group(key, aliases, highlight=None, only_groupname=False):
if len(aliases) == 0:
return
group_hl_formatter = lambda g, hl: termcolor.colored(hl, 'yellow').join([termcolor.colored(part, 'red') for part in ('[%s]' % g).split(hl)])
alias_hl_formatter = lambda alias, hl: termcolor.colored(hl, 'yellow').join([termcolor.colored(part, 'green') for part in ('\t%s = %s' % alias[0:2]).split(hl)])
group_formatter = lambda g: termcolor.colored('[%s]' % g, 'red')
alias_formatter = lambda alias: termcolor.colored('\t%s = %s' % alias[0:2], 'green')
if highlight and len(highlight)>0:
print (group_hl_formatter(key, highlight))
if not only_groupname:
print ('\n'.join([alias_hl_formatter(alias, highlight) for alias in aliases]))
else:
print (group_formatter(key))
if not only_groupname:
print ('\n'.join([alias_formatter(alias) for alias in aliases]))
print ('')
def pretty_print(cheatsheet, wfilter, group_list=None, groups_only=False):
sorted_key = sorted(cheatsheet.keys())
for key in sorted_key:
if group_list and key not in group_list:
continue
aliases = cheatsheet.get(key)
if not wfilter:
pretty_print_group(key, aliases, wfilter, groups_only)
else:
pretty_print_group(key, [ alias for alias in aliases if alias[0].find(wfilter)>-1 or alias[1].find(wfilter)>-1], wfilter)
if __name__ == '__main__':
parser = argparse.ArgumentParser(description="Pretty print aliases.", prog="acs")
parser.add_argument('filter', nargs="*", metavar="<keyword>", help="search aliases matching keywords")
parser.add_argument('-g', '--group', dest="group_list", action='append', help="only print aliases in given groups")
parser.add_argument('--groups', dest='groups_only', action='store_true', help="only print alias groups")
args = parser.parse_args()
lines = sys.stdin.readlines()
group_list = args.group_list or None
wfilter = " ".join(args.filter) or None
pretty_print(cheatsheet(lines), wfilter, group_list, args.groups_only)


@ -0,0 +1,168 @@
# coding: utf-8
# Copyright (c) 2008-2011 Volvox Development Team
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
# Author: Konstantin Lepa <konstantin.lepa@gmail.com>
"""ANSI Color formatting for output in terminal."""
from __future__ import print_function
import os
__ALL__ = [ 'colored', 'cprint' ]
VERSION = (1, 1, 0)
ATTRIBUTES = dict(
list(zip([
'bold',
'dark',
'',
'underline',
'blink',
'',
'reverse',
'concealed'
],
list(range(1, 9))
))
)
del ATTRIBUTES['']
HIGHLIGHTS = dict(
list(zip([
'on_grey',
'on_red',
'on_green',
'on_yellow',
'on_blue',
'on_magenta',
'on_cyan',
'on_white'
],
list(range(40, 48))
))
)
COLORS = dict(
list(zip([
'grey',
'red',
'green',
'yellow',
'blue',
'magenta',
'cyan',
'white',
],
list(range(30, 38))
))
)
RESET = '\033[0m'
def colored(text, color=None, on_color=None, attrs=None):
"""Colorize text.
Available text colors:
red, green, yellow, blue, magenta, cyan, white.
Available text highlights:
on_red, on_green, on_yellow, on_blue, on_magenta, on_cyan, on_white.
Available attributes:
bold, dark, underline, blink, reverse, concealed.
Example:
colored('Hello, World!', 'red', 'on_grey', ['blue', 'blink'])
colored('Hello, World!', 'green')
"""
if os.getenv('ANSI_COLORS_DISABLED') is None:
fmt_str = '\033[%dm%s'
if color is not None:
text = fmt_str % (COLORS[color], text)
if on_color is not None:
text = fmt_str % (HIGHLIGHTS[on_color], text)
if attrs is not None:
for attr in attrs:
text = fmt_str % (ATTRIBUTES[attr], text)
text += RESET
return text
def cprint(text, color=None, on_color=None, attrs=None, **kwargs):
"""Print colorize text.
It accepts arguments of print function.
"""
print((colored(text, color, on_color, attrs)), **kwargs)
if __name__ == '__main__':
print('Current terminal type: %s' % os.getenv('TERM'))
print('Test basic colors:')
cprint('Grey color', 'grey')
cprint('Red color', 'red')
cprint('Green color', 'green')
cprint('Yellow color', 'yellow')
cprint('Blue color', 'blue')
cprint('Magenta color', 'magenta')
cprint('Cyan color', 'cyan')
cprint('White color', 'white')
print(('-' * 78))
print('Test highlights:')
cprint('On grey color', on_color='on_grey')
cprint('On red color', on_color='on_red')
cprint('On green color', on_color='on_green')
cprint('On yellow color', on_color='on_yellow')
cprint('On blue color', on_color='on_blue')
cprint('On magenta color', on_color='on_magenta')
cprint('On cyan color', on_color='on_cyan')
cprint('On white color', color='grey', on_color='on_white')
print('-' * 78)
print('Test attributes:')
cprint('Bold grey color', 'grey', attrs=['bold'])
cprint('Dark red color', 'red', attrs=['dark'])
cprint('Underline green color', 'green', attrs=['underline'])
cprint('Blink yellow color', 'yellow', attrs=['blink'])
cprint('Reversed blue color', 'blue', attrs=['reverse'])
cprint('Concealed Magenta color', 'magenta', attrs=['concealed'])
cprint('Bold underline reverse cyan color', 'cyan',
attrs=['bold', 'underline', 'reverse'])
cprint('Dark blink concealed white color', 'white',
attrs=['dark', 'blink', 'concealed'])
print(('-' * 78))
print('Test mixing:')
cprint('Underline red on grey color', 'red', 'on_grey',
['underline'])
cprint('Reversed green on red color', 'green', 'on_red', ['reverse'])


@ -2,9 +2,9 @@
This plugin provides completion for [Ant](https://ant.apache.org/).
To use it add ant to the plugins array in your zshrc file.
To use it, add `ant` to the plugins array in your zshrc file:
```bash
```zsh
plugins=(... ant)
```

22
plugins/ant/_ant Normal file

@ -0,0 +1,22 @@
#compdef ant
_ant_does_target_list_need_generating () {
[[ ! -f .ant_targets ]] && return 0
[[ build.xml -nt .ant_targets ]] && return 0
return 1
}
_ant () {
if [[ ! -f build.xml ]]; then
return
fi
if ! _ant_does_target_list_need_generating; then
return
fi
ant -p | awk -F " " 'NR > 5 { print lastTarget } { lastTarget = $1 }' >| .ant_targets
compadd -- "$(cat .ant_targets)"
}
_ant "$@"


@ -1,16 +1,2 @@
_ant_does_target_list_need_generating () {
[ ! -f .ant_targets ] && return 0;
[ build.xml -nt .ant_targets ] && return 0;
return 1;
}
_ant () {
if [ -f build.xml ]; then
if _ant_does_target_list_need_generating; then
ant -p | awk -F " " 'NR > 5 { print lastTarget }{lastTarget = $1}' > .ant_targets
fi
compadd -- `cat .ant_targets`
fi
}
compdef _ant ant
# Default to colored output
export ANT_ARGS='-logger org.apache.tools.ant.listener.AnsiColorLogger'


@ -14,6 +14,7 @@ plugins=(... arcanist)
| ------- | ---------------------------------- |
| ara | `arc amend` |
| arb | `arc branch` |
| arbl | `arc bland` |
| arco | `arc cover` |
| arci | `arc commit` |
| ard | `arc diff` |
@ -24,6 +25,7 @@ plugins=(... arcanist)
| ardpc | `arc diff --plan-changes` |
| are | `arc export` |
| arh | `arc help` |
| arho | `arc hotfix` |
| arl | `arc land` |
| arli | `arc lint` |
| arls | `arc list` |


@ -5,6 +5,7 @@
alias ara='arc amend'
alias arb='arc branch'
alias arbl='arc bland'
alias arco='arc cover'
alias arci='arc commit'
@ -17,6 +18,7 @@ alias ardp='arc diff --preview' # creates a new diff in the phab interface
alias are='arc export'
alias arh='arc help'
alias arho='arc hotfix'
alias arl='arc land'
alias arli='arc lint'
alias arls='arc list'


@ -17,8 +17,10 @@ plugins=(... archlinux)
| pacin | `sudo pacman -S` | Install packages from the repositories |
| pacins | `sudo pacman -U` | Install a package from a local file |
| pacinsd | `sudo pacman -S --asdeps` | Install packages as dependencies of another package |
| paclean | `sudo pacman -Sc` | Clean out old and unused caches and packages |
| pacloc | `pacman -Qi` | Display information about a package in the local database |
| paclocs | `pacman -Qs` | Search for packages in the local database |
| paclr | `sudo pacman -Scc` | Remove all files from the cache |
| paclsorphans | `sudo pacman -Qdt` | List all orphaned packages |
| pacmir | `sudo pacman -Syy` | Force refresh of all package lists after updating mirrorlist |
| pacre | `sudo pacman -R` | Remove packages, keeping its settings and dependencies |
@ -32,7 +34,7 @@ plugins=(... archlinux)
| pacfiles | `pacman -F` | Search package file names for matching strings |
| pacls | `pacman -Ql` | List files in a package |
| pacown | `pacman -Qo` | Show which package owns a file |
| upgrade[²](#f2) | `sudo pacman -Syu` | Sync with repositories before upgrading packages |
| upgrade[¹](#f1) | `sudo pacman -Syu` | Sync with repositories before upgrading packages |
| Function | Description |
|----------------|-----------------------------------------------------------|
@ -52,6 +54,8 @@ upgrades were available. Use `pacman -Que` instead.
| Alias | Command | Description |
|---------|-------------------------------------------------|-------------------------------------------------------------------------|
| auclean | `sudo aura -Sc` | Clean out old and unused caches and packages |
| auclr | `sudo aura -Scc` | Remove all files from the cache |
| auin | `sudo aura -S` | Install packages from the repositories |
| aurin | `sudo aura -A` | Install packages from the repositories |
| auins | `sudo aura -U` | Install a package from a local file |
@ -73,7 +77,7 @@ upgrades were available. Use `pacman -Que` instead.
| auupd | `sudo aura -Sy` | Update and refresh local package, ABS and AUR databases |
| auupg | `sudo sh -c "aura -Syu && aura -Au"` | Sync with repositories before upgrading all packages (from AUR too) |
| ausu | `sudo sh -c "aura -Syu --no-confirm && aura -Au --no-confirm"` | Same as `auupg`, but without confirmation |
| upgrade[²](#f2) | `sudo aura -Syu` | Sync with repositories before upgrading packages |
| upgrade[¹](#f1) | `sudo aura -Syu` | Sync with repositories before upgrading packages |
| Function | Description |
|-----------------|---------------------------------------------------------------------|
@ -84,6 +88,8 @@ upgrades were available. Use `pacman -Que` instead.
| Alias | Command | Description |
|---------|-----------------------------------|---------------------------------------------------------------------|
| pacclean| `pacaur -Sc` | Clean out old and unused caches and packages |
| pacclr | `pacaur -Scc` | Remove all files from the cache |
| pain | `pacaur -S` | Install packages from the repositories |
| pains | `pacaur -U` | Install a package from a local file |
| painsd | `pacaur -S --asdeps` | Install packages as dependencies of another package |
@ -99,13 +105,15 @@ upgrades were available. Use `pacman -Que` instead.
| paupd | `pacaur -Sy` | Update and refresh local package, ABS and AUR databases |
| paupg | `pacaur -Syua` | Sync with repositories before upgrading all packages (from AUR too) |
| pasu | `pacaur -Syua --no-confirm` | Same as `paupg`, but without confirmation |
| upgrade[²](#f2) | `pacaur -Syu` | Sync with repositories before upgrading packages |
| upgrade[¹](#f1) | `pacaur -Syu` | Sync with repositories before upgrading packages |
#### Trizen
| Alias | Command | Description |
|---------|-----------------------------------|---------------------------------------------------------------------|
| trconf | `trizen -C` | Fix all configuration files with vimdiff |
| trclean | `trizen -Sc` | Clean out old and unused caches and packages |
| trclr | `trizen -Scc` | Remove all files from the cache |
| trin | `trizen -S` | Install packages from the repositories |
| trins | `trizen -U` | Install a package from a local file |
| trinsd | `trizen -S --asdeps` | Install packages as dependencies of another package |
@ -121,35 +129,15 @@ upgrades were available. Use `pacman -Que` instead.
| trupd | `trizen -Sy` | Update and refresh local package, ABS and AUR databases |
| trupg | `trizen -Syua` | Sync with repositories before upgrading all packages (from AUR too) |
| trsu | `trizen -Syua --no-confirm` | Same as `trupg`, but without confirmation |
| upgrade[²](#f2) | `trizen -Syu` | Sync with repositories before upgrading packages |
| upgrade[¹](#f1) | `trizen -Syu` | Sync with repositories before upgrading packages |
#### Yaourt[¹](#f1)
| Alias | Command | Description |
|---------|-----------------------------------|---------------------------------------------------------------------|
| yaconf | `yaourt -C` | Fix all configuration files with vimdiff |
| yain | `yaourt -S` | Install packages from the repositories |
| yains | `yaourt -U` | Install a package from a local file |
| yainsd | `yaourt -S --asdeps` | Install packages as dependencies of another package |
| yaloc | `yaourt -Qi` | Display information about a package in the local database |
| yalocs | `yaourt -Qs` | Search for packages in the local database |
| yalst | `yaourt -Qe` | List installed packages including from AUR (tagged as "local") |
| yamir | `yaourt -Syy` | Force refresh of all package lists after updating mirrorlist |
| yaorph | `yaourt -Qtd` | Remove orphans using yaourt |
| yare | `yaourt -R` | Remove packages, keeping its settings and dependencies |
| yarem | `yaourt -Rns` | Remove packages, including its settings and unneeded dependencies |
| yarep | `yaourt -Si` | Display information about a package in the repositories |
| yareps | `yaourt -Ss` | Search for packages in the repositories |
| yaupd | `yaourt -Sy` | Update and refresh local package, ABS and AUR databases |
| yaupg | `yaourt -Syua` | Sync with repositories before upgrading all packages (from AUR too) |
| yasu | `yaourt -Syua --no-confirm` | Same as `yaupg`, but without confirmation |
| upgrade[²](#f2) | `yaourt -Syu` | Sync with repositories before upgrading packages |
#### Yay[¹](#f1)
#### Yay
| Alias | Command | Description |
|---------|--------------------------------|-------------------------------------------------------------------|
| yaconf | `yay -Pg` | Print current configuration |
| yaclean | `yay -Sc` | Clean out old and unused caches and packages |
| yaclr | `yay -Scc` | Remove all files from the cache |
| yain | `yay -S` | Install packages from the repositories |
| yains | `yay -U` | Install a package from a local file |
| yainsd | `yay -S --asdeps` | Install packages as dependencies of another package |
@ -165,23 +153,19 @@ upgrades were available. Use `pacman -Que` instead.
| yaupd | `yay -Sy` | Update and refresh local package, ABS and AUR databases |
| yaupg | `yay -Syu` | Sync with repositories before upgrading packages |
| yasu | `yay -Syu --no-confirm` | Same as `yaupg`, but without confirmation |
| upgrade[²](#f2) | `yay -Syu` | Sync with repositories before upgrading packages |
| upgrade[¹](#f1) | `yay -Syu` | Sync with repositories before upgrading packages |
---
<span id="f1">¹</span>
Yay and Yaourt aliases overlap. If both are installed, yay will take precedence.
<span id="f2">²</span>
The `upgrade` alias is set for all package managers. Its value will depend on
whether the package manager is installed, checked in the following order:
1. `yay`
2. `yaourt`
3. `trizen`
4. `pacaur`
5. `aura`
6. `pacman`
2. `trizen`
3. `pacaur`
4. `aura`
5. `pacman`
## Contributors
@ -196,3 +180,5 @@ whether the package manager is installed, checked in the following order:
- ornicar - thibault.duplessis@gmail.com
- Ybalrid (Arthur Brainville) - ybalrid@ybalrid.info
- Jeff M. Hubbard - jeffmhubbard@gmail.com
- K. Harishankar(harishnkr) - hari2menon1234@gmail.com
- WH-2099 - wh2099@outlook.com


@ -5,7 +5,9 @@
# Pacman - https://wiki.archlinux.org/index.php/Pacman_Tips
alias pacupg='sudo pacman -Syu'
alias pacin='sudo pacman -S'
alias paclean='sudo pacman -Sc'
alias pacins='sudo pacman -U'
alias paclr='sudo pacman -Scc'
alias pacre='sudo pacman -R'
alias pacrem='sudo pacman -Rns'
alias pacrep='pacman -Si'
@ -21,30 +23,27 @@ alias pacfiles='pacman -F'
alias pacls='pacman -Ql'
alias pacown='pacman -Qo'
alias pacupd="sudo pacman -Sy"
alias upgrade='sudo pacman -Syu'
function paclist() {
# Based on https://bbs.archlinux.org/viewtopic.php?id=93683
pacman -Qqe | \
xargs -I '{}' \
expac "${bold_color}% 20n ${fg_no_bold[white]}%d${reset_color}" '{}'
pacman -Qqe | xargs -I{} -P0 --no-run-if-empty pacman -Qs --color=auto "^{}\$"
}
function pacdisowned() {
local tmp db fs
tmp=${TMPDIR-/tmp}/pacman-disowned-$UID-$$
db=$tmp/db
fs=$tmp/fs
local tmp_dir db fs
tmp_dir=$(mktemp --directory)
db=$tmp_dir/db
fs=$tmp_dir/fs
mkdir "$tmp"
trap 'rm -rf "$tmp"' EXIT
trap "rm -rf $tmp_dir" EXIT
pacman -Qlq | sort -u > "$db"
find /bin /etc /lib /sbin /usr ! -name lost+found \
find /etc /usr ! -name lost+found \
\( -type d -printf '%p/\n' -o -print \) | sort > "$fs"
comm -23 "$fs" "$db"
rm -rf $tmp_dir
}
alias pacmanallkeys='sudo pacman-key --refresh-keys'
@ -88,6 +87,8 @@ fi
if (( $+commands[aura] )); then
alias auin='sudo aura -S'
alias aurin='sudo aura -A'
alias auclean='sudo aura -Sc'
alias auclr='sudo aura -Scc'
alias auins='sudo aura -U'
alias auinsd='sudo aura -S --asdeps'
alias aurinsd='sudo aura -A --asdeps'
@ -104,8 +105,7 @@ if (( $+commands[aura] )); then
alias auras='aura -As --both'
alias auupd="sudo aura -Sy"
alias auupg='sudo sh -c "aura -Syu && aura -Au"'
alias ausu='sudo sh -c "aura -Syu --no-confirm && aura -Au --no-confirm"'
alias upgrade='sudo aura -Syu'
alias ausu='sudo sh -c "aura -Syu --no-confirm && aura -Au --no-confirm"'
# extra bonus specially for aura
alias auown="aura -Qqo"
@ -115,6 +115,8 @@ if (( $+commands[aura] )); then
fi
if (( $+commands[pacaur] )); then
alias pacclean='pacaur -Sc'
alias pacclr='pacaur -Scc'
alias paupg='pacaur -Syu'
alias pasu='pacaur -Syu --noconfirm'
alias pain='pacaur -S'
@ -130,7 +132,6 @@ if (( $+commands[pacaur] )); then
alias painsd='pacaur -S --asdeps'
alias pamir='pacaur -Syy'
alias paupd="pacaur -Sy"
alias upgrade='pacaur -Syu'
fi
if (( $+commands[trizen] )); then
@ -138,6 +139,8 @@ if (( $+commands[trizen] )); then
alias trupg='trizen -Syua'
alias trsu='trizen -Syua --noconfirm'
alias trin='trizen -S'
alias trclean='trizen -Sc'
alias trclr='trizen -Scc'
alias trins='trizen -U'
alias trre='trizen -R'
alias trrem='trizen -Rns'
@ -150,31 +153,12 @@ if (( $+commands[trizen] )); then
alias trinsd='trizen -S --asdeps'
alias trmir='trizen -Syy'
alias trupd="trizen -Sy"
alias upgrade='trizen -Syu'
fi
if (( $+commands[yaourt] )); then
alias yaconf='yaourt -C'
alias yaupg='yaourt -Syua'
alias yasu='yaourt -Syua --noconfirm'
alias yain='yaourt -S'
alias yains='yaourt -U'
alias yare='yaourt -R'
alias yarem='yaourt -Rns'
alias yarep='yaourt -Si'
alias yareps='yaourt -Ss'
alias yaloc='yaourt -Qi'
alias yalocs='yaourt -Qs'
alias yalst='yaourt -Qe'
alias yaorph='yaourt -Qtd'
alias yainsd='yaourt -S --asdeps'
alias yamir='yaourt -Syy'
alias yaupd="yaourt -Sy"
alias upgrade='yaourt -Syu'
fi
if (( $+commands[yay] )); then
alias yaconf='yay -Pg'
alias yaclean='yay -Sc'
alias yaclr='yay -Scc'
alias yaupg='yay -Syu'
alias yasu='yay -Syu --noconfirm'
alias yain='yay -S'
@ -190,6 +174,30 @@ if (( $+commands[yay] )); then
alias yainsd='yay -S --asdeps'
alias yamir='yay -Syy'
alias yaupd="yay -Sy"
alias upgrade='yay -Syu'
fi
# Check Arch Linux PGP Keyring before System Upgrade to prevent failure.
function upgrade() {
echo ":: Checking Arch Linux PGP Keyring..."
local installedver="$(sudo pacman -Qi archlinux-keyring | grep -Po '(?<=Version : ).*')"
local currentver="$(sudo pacman -Si archlinux-keyring | grep -Po '(?<=Version : ).*')"
if [ $installedver != $currentver ]; then
echo " Arch Linux PGP Keyring is out of date."
echo " Updating before full system upgrade."
sudo pacman -Sy --needed --noconfirm archlinux-keyring
else
echo " Arch Linux PGP Keyring is up to date."
echo " Proceeding with full system upgrade."
fi
if (( $+commands[yay] )); then
yay -Syu
elif (( $+commands[trizen] )); then
trizen -Syu
elif (( $+commands[pacaur] )); then
pacaur -Syu
elif (( $+commands[aura] )); then
sudo aura -Syu
else
sudo pacman -Syu
fi
}

20
plugins/argocd/README.md Normal file

@ -0,0 +1,20 @@
# Argo CD plugin
This plugin adds completion for the [Argo CD](https://argoproj.github.io/cd/) CLI.
To use it, add `argocd` to the plugins array in your zshrc file:
```zsh
plugins=(... argocd)
```
This plugin does not add any aliases.
## Cache
This plugin caches the completion script, which is updated asynchronously whenever the plugin is
loaded (usually when you start up a new terminal emulator).
The cache is stored at:
- `$ZSH_CACHE_DIR/completions/_argocd`: completion script
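If you ever want to refresh the completions by hand, you can run the same command the plugin runs in the background (assuming `$ZSH_CACHE_DIR` is set, as oh-my-zsh does):
```zsh
argocd completion zsh >| "$ZSH_CACHE_DIR/completions/_argocd"
```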


@ -0,0 +1,14 @@
# Autocompletion for argocd.
if (( ! $+commands[argocd] )); then
return
fi
# If the completion file doesn't exist yet, we need to autoload it and
# bind it to `argocd`. Otherwise, compinit will have already done that.
if [[ ! -f "$ZSH_CACHE_DIR/completions/_argocd" ]]; then
typeset -g -A _comps
autoload -Uz _argocd
_comps[argocd]=_argocd
fi
argocd completion zsh >| "$ZSH_CACHE_DIR/completions/_argocd" &|


@ -6,22 +6,25 @@ Adds integration with [asdf](https://github.com/asdf-vm/asdf), the extendable ve
### Installation
1. Enable the plugin by adding it to your `plugins` definition in `~/.zshrc`.
1. [Download asdf](https://asdf-vm.com/guide/getting-started.html#_2-download-asdf) by running the following:
```
git clone https://github.com/asdf-vm/asdf.git ~/.asdf
```
2. [Enable asdf](https://asdf-vm.com/guide/getting-started.html#_3-install-asdf) by adding it to your `plugins` definition in `~/.zshrc`.
```
plugins=(asdf)
```
2. [Install asdf](https://github.com/asdf-vm/asdf#setup) by running the following:
```
git clone https://github.com/asdf-vm/asdf.git ~/.asdf
```
### Usage
See the [asdf usage documentation](https://github.com/asdf-vm/asdf#usage) for information on how to use asdf:
See the [asdf documentation](https://asdf-vm.com/guide/getting-started.html#_4-install-a-plugin) for information on how to use asdf:
```
asdf plugin-add nodejs git@github.com:asdf-vm/asdf-nodejs.git
asdf install nodejs 5.9.1
asdf plugin add nodejs https://github.com/asdf-vm/asdf-nodejs.git
asdf install nodejs latest
asdf global nodejs latest
asdf local nodejs latest
```


@ -2,18 +2,26 @@
ASDF_DIR="${ASDF_DIR:-$HOME/.asdf}"
ASDF_COMPLETIONS="$ASDF_DIR/completions"
# If not found, check for archlinux/AUR package (/opt/asdf-vm/)
if [[ ! -f "$ASDF_DIR/asdf.sh" || ! -f "$ASDF_COMPLETIONS/asdf.bash" ]] && [[ -f "/opt/asdf-vm/asdf.sh" ]]; then
ASDF_DIR="/opt/asdf-vm"
ASDF_COMPLETIONS="$ASDF_DIR"
fi
# If not found, check for Homebrew package
if [[ ! -f "$ASDF_DIR/asdf.sh" || ! -f "$ASDF_COMPLETIONS/asdf.bash" ]] && (( $+commands[brew] )); then
ASDF_DIR="$(brew --prefix asdf)"
ASDF_COMPLETIONS="$ASDF_DIR/etc/bash_completion.d"
brew_prefix="$(brew --prefix asdf)"
ASDF_DIR="${brew_prefix}/libexec"
ASDF_COMPLETIONS="${brew_prefix}/etc/bash_completion.d"
unset brew_prefix
fi
# Load command
if [[ -f "$ASDF_DIR/asdf.sh" ]]; then
. "$ASDF_DIR/asdf.sh"
. "$ASDF_DIR/asdf.sh"
# Load completions
if [[ -f "$ASDF_COMPLETIONS/asdf.bash" ]]; then
. "$ASDF_COMPLETIONS/asdf.bash"
fi
# Load completions
if [[ -f "$ASDF_COMPLETIONS/asdf.bash" ]]; then
. "$ASDF_COMPLETIONS/asdf.bash"
fi
fi


@ -13,12 +13,17 @@ if ! type autoenv_init >/dev/null; then
~/.autoenv
~/.local/bin
/usr/local/opt/autoenv
/opt/homebrew/opt/autoenv
/usr/local/bin
/usr/share/autoenv-git
~/Library/Python/bin
.venv/bin
venv/bin
env/bin
.env/bin
)
for d ( $install_locations ); do
if [[ -e $d/activate.sh ]]; then
if [[ -e $d/activate || -e $d/activate.sh ]]; then
autoenv_dir=$d
break
fi
@ -28,13 +33,13 @@ if ! type autoenv_init >/dev/null; then
# Look for Homebrew path as a last resort
if [[ -z "$autoenv_dir" ]] && (( $+commands[brew] )); then
d=$(brew --prefix)/opt/autoenv
if [[ -e $d/activate.sh ]]; then
if [[ -e $d/activate || -e $d/activate.sh ]]; then
autoenv_dir=$d
fi
fi
# Complain if autoenv is not installed
if [[ -z $autoenv_dir ]]; then
if [[ -z $autoenv_dir ]]; then
cat <<END >&2
-------- AUTOENV ---------
Could not locate autoenv installation.
@ -45,7 +50,11 @@ END
return 1
fi
# Load autoenv
source $autoenv_dir/activate.sh
if [[ -e $autoenv_dir/activate ]]; then
source $autoenv_dir/activate
else
source $autoenv_dir/activate.sh
fi
fi
}
[[ $? != 0 ]] && return $?


@ -1,15 +1,18 @@
declare -a autojump_paths
autojump_paths=(
$HOME/.autojump/etc/profile.d/autojump.zsh # manual installation
$HOME/.autojump/share/autojump/autojump.zsh # manual installation
$HOME/.nix-profile/etc/profile.d/autojump.sh # NixOS installation
/run/current-system/sw/share/autojump/autojump.zsh # NixOS installation
/usr/share/autojump/autojump.zsh # Debian and Ubuntu package
/etc/profile.d/autojump.zsh # manual installation
/etc/profile.d/autojump.sh # Gentoo installation
/usr/local/share/autojump/autojump.zsh # FreeBSD installation
/opt/local/etc/profile.d/autojump.sh # macOS with MacPorts
/usr/local/etc/profile.d/autojump.sh # macOS with Homebrew (default)
$HOME/.autojump/etc/profile.d/autojump.zsh # manual installation
$HOME/.autojump/share/autojump/autojump.zsh # manual installation
$HOME/.nix-profile/etc/profile.d/autojump.sh # NixOS installation
/run/current-system/sw/share/autojump/autojump.zsh # NixOS installation
/usr/share/autojump/autojump.zsh # Debian and Ubuntu package
/etc/profile.d/autojump.zsh # manual installation
/etc/profile.d/autojump.sh # Gentoo installation
/usr/local/share/autojump/autojump.zsh # FreeBSD installation
/usr/pkg/share/autojump/autojump.zsh # NetBSD installation
/opt/local/etc/profile.d/autojump.sh # macOS with MacPorts
/usr/local/etc/profile.d/autojump.sh # macOS with Homebrew (default)
/opt/homebrew/etc/profile.d/autojump.sh # macOS with Homebrew (default on M1 macs)
/etc/profiles/per-user/$USER/etc/profile.d/autojump.sh # macOS Nix, Home Manager and flakes
)
for file in $autojump_paths; do


@ -1,7 +1,8 @@
# aws
This plugin provides completion support for [awscli](https://docs.aws.amazon.com/cli/latest/reference/index.html)
and a few utilities to manage AWS profiles and display them in the prompt.
This plugin provides completion support for [awscli v2](https://awscli.amazonaws.com/v2/documentation/api/latest/reference/index.html)
and a few utilities to manage AWS profiles/regions and display them in the prompt.
[awscli v1](https://docs.aws.amazon.com/cli/latest/userguide/cliv2-migration.html) is no longer supported.
To use it, add `aws` to the plugins array in your zshrc file.
@ -12,37 +13,52 @@ plugins=(... aws)
## Plugin commands
* `asp [<profile>]`: sets `$AWS_PROFILE` and `$AWS_DEFAULT_PROFILE` (legacy) to `<profile>`.
It also sets `$AWS_EB_PROFILE` to `<profile>` for the Elastic Beanstalk CLI.
It also sets `$AWS_EB_PROFILE` to `<profile>` for the Elastic Beanstalk CLI. It sets `$AWS_PROFILE_REGION` for display in `aws_prompt_info`.
Run `asp` without arguments to clear the profile.
* `asp [<profile>] login`: If AWS SSO has been configured in your aws profile, it will run the `aws sso login` command following profile selection.
* `acp [<profile>]`: in addition to `asp` functionality, it actually changes the profile by
assuming the role specified in the `<profile>` configuration. It supports MFA and sets
`$AWS_ACCESS_KEY_ID`, `$AWS_SECRET_ACCESS_KEY` and `$AWS_SESSION_TOKEN`, if obtained. It
requires the roles to be configured as per the
* `asr [<region>]`: sets `$AWS_REGION` and `$AWS_DEFAULT_REGION` (legacy) to `<region>`.
Run `asr` without arguments to clear the region.
* `acp [<profile>] [<mfa_token>]`: in addition to `asp` functionality, it actually changes
the profile by assuming the role specified in the `<profile>` configuration. It supports
MFA and sets `$AWS_ACCESS_KEY_ID`, `$AWS_SECRET_ACCESS_KEY` and `$AWS_SESSION_TOKEN`, if
obtained. It requires the roles to be configured as per the
[official guide](https://docs.aws.amazon.com/cli/latest/userguide/cli-configure-role.html).
Run `acp` without arguments to clear the profile.
* `agp`: gets the current value of `$AWS_PROFILE`.
* `agr`: gets the current value of `$AWS_REGION`.
* `aws_change_access_key`: changes the AWS access key of a profile.
* `aws_profiles`: lists the available profiles in the `$AWS_CONFIG_FILE` (default: `~/.aws/config`).
Used to provide completion for the `asp` function.
* `aws_regions`: lists the available regions.
Used to provide completion for the `asr` function.
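As a quick sketch, switching credentials with `acp` and checking the result might look like this (the profile name and MFA token are placeholders):
```zsh
acp prod-admin 123456   # assume the role configured for the "prod-admin" profile, passing an MFA token
agp                     # prints: prod-admin
```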
## Plugin options
* Set `SHOW_AWS_PROMPT=false` in your zshrc file if you want to prevent the plugin from modifying your RPROMPT.
Some themes might overwrite the value of RPROMPT instead of appending to it, so they'll need to be fixed to
see the AWS profile prompt.
see the AWS profile/region prompt.
## Theme
The plugin creates an `aws_prompt_info` function that you can use in your theme, which displays
the current `$AWS_PROFILE`. It uses two variables to control how that is shown:
the current `$AWS_PROFILE` and `$AWS_REGION`. It uses four variables to control how that is shown:
* ZSH_THEME_AWS_PREFIX: sets the prefix of the AWS_PROFILE. Defaults to `<aws:`.
* ZSH_THEME_AWS_PROFILE_PREFIX: sets the prefix of the AWS_PROFILE. Defaults to `<aws:`.
* ZSH_THEME_AWS_SUFFIX: sets the suffix of the AWS_PROFILE. Defaults to `>`.
* ZSH_THEME_AWS_PROFILE_SUFFIX: sets the suffix of the AWS_PROFILE. Defaults to `>`.
* ZSH_THEME_AWS_REGION_PREFIX: sets the prefix of the AWS_REGION. Defaults to `<region:`.
* ZSH_THEME_AWS_REGION_SUFFIX: sets the suffix of the AWS_REGION. Defaults to `>`.
* ZSH_THEME_AWS_DIVIDER: sets the divider between ZSH_THEME_AWS_PROFILE_SUFFIX and ZSH_THEME_AWS_REGION_PREFIX. Defaults to ` ` (single space).
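Putting the pieces together, a minimal sketch (the profile and region values are placeholders):
```zsh
asp work-profile              # sets AWS_PROFILE, AWS_DEFAULT_PROFILE and AWS_EB_PROFILE
asr eu-west-1                 # sets AWS_REGION and AWS_DEFAULT_REGION
ZSH_THEME_AWS_DIVIDER=" | "
RPROMPT='$(aws_prompt_info)'  # renders e.g. <aws:work-profile> | <region:eu-west-1>
```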
## Configuration
@ -52,7 +68,7 @@ the current `$AWS_PROFILE`. It uses two variables to control how that is shown:
Source profile credentials in `~/.aws/credentials`:
```
```ini
[source-profile-name]
aws_access_key_id = ...
aws_secret_access_key = ...
@ -60,7 +76,7 @@ aws_secret_access_key = ...
Role configuration in `~/.aws/config`:
```
```ini
[profile source-profile-name]
mfa_serial = arn:aws:iam::111111111111:mfa/myuser
region = us-east-1


@ -2,10 +2,14 @@ function agp() {
echo $AWS_PROFILE
}
function agr() {
echo $AWS_REGION
}
# AWS profile selection
function asp() {
if [[ -z "$1" ]]; then
unset AWS_DEFAULT_PROFILE AWS_PROFILE AWS_EB_PROFILE
unset AWS_DEFAULT_PROFILE AWS_PROFILE AWS_EB_PROFILE AWS_PROFILE_REGION
echo AWS profile cleared.
return
fi
@ -21,6 +25,31 @@ function asp() {
export AWS_DEFAULT_PROFILE=$1
export AWS_PROFILE=$1
export AWS_EB_PROFILE=$1
export AWS_PROFILE_REGION=$(aws configure get region)
if [[ "$2" == "login" ]]; then
aws sso login
fi
}
# AWS region selection
function asr() {
if [[ -z "$1" ]]; then
unset AWS_DEFAULT_REGION AWS_REGION
echo AWS region cleared.
return
fi
local -a available_regions
available_regions=($(aws_regions))
if [[ -z "${available_regions[(r)$1]}" ]]; then
echo "${fg[red]}Available regions: \n$(aws_regions)"
return 1
fi
export AWS_REGION=$1
export AWS_DEFAULT_REGION=$1
}
# AWS profile switch
@ -41,6 +70,7 @@ function acp() {
fi
local profile="$1"
local mfa_token="$2"
# Get fallback credentials for if the aws command fails or no command is run
local aws_access_key_id="$(aws configure get aws_access_key_id --profile $profile)"
@ -54,55 +84,56 @@ function acp() {
if [[ -n "$mfa_serial" ]]; then
local -a mfa_opt
local mfa_token
echo -n "Please enter your MFA token for $mfa_serial: "
read -r mfa_token
if [[ -z "$mfa_token" ]]; then
echo -n "Please enter your MFA token for $mfa_serial: "
read -r mfa_token
fi
if [[ -z "$sess_duration" ]]; then
echo -n "Please enter the session duration in seconds (900-43200; default: 3600, which is the default maximum for a role): "
read -r sess_duration
fi
mfa_opt=(--serial-number "$mfa_serial" --token-code "$mfa_token" --duration-seconds "${sess_duration:-3600}")
fi
# Now see whether we need to just MFA for the current role, or assume a different one
local role_arn="$(aws configure get role_arn --profile $profile)"
local sess_name="$(aws configure get role_session_name --profile $profile)"
# Now see whether we need to just MFA for the current role, or assume a different one
local role_arn="$(aws configure get role_arn --profile $profile)"
local sess_name="$(aws configure get role_session_name --profile $profile)"
if [[ -n "$role_arn" ]]; then
# Means we need to assume a specified role
aws_command=(aws sts assume-role --role-arn "$role_arn" "${mfa_opt[@]}")
if [[ -n "$role_arn" ]]; then
# Means we need to assume a specified role
aws_command=(aws sts assume-role --role-arn "$role_arn" "${mfa_opt[@]}")
# Check whether external_id is configured to use while assuming the role
local external_id="$(aws configure get external_id --profile $profile)"
if [[ -n "$external_id" ]]; then
aws_command+=(--external-id "$external_id")
fi
# Get source profile to use to assume role
local source_profile="$(aws configure get source_profile --profile $profile)"
if [[ -z "$sess_name" ]]; then
sess_name="${source_profile:-profile}"
fi
aws_command+=(--profile="${source_profile:-profile}" --role-session-name "${sess_name}")
echo "Assuming role $role_arn using profile ${source_profile:-profile}"
else
# Means we only need to do MFA
aws_command=(aws sts get-session-token --profile="$profile" "${mfa_opt[@]}")
echo "Obtaining session token for profile $profile"
# Check whether external_id is configured to use while assuming the role
local external_id="$(aws configure get external_id --profile $profile)"
if [[ -n "$external_id" ]]; then
aws_command+=(--external-id "$external_id")
fi
# Format output of aws command for easier processing
aws_command+=(--query '[Credentials.AccessKeyId,Credentials.SecretAccessKey,Credentials.SessionToken]' --output text)
# Run the aws command to obtain credentials
local -a credentials
credentials=(${(ps:\t:)"$(${aws_command[@]})"})
if [[ -n "$credentials" ]]; then
aws_access_key_id="${credentials[1]}"
aws_secret_access_key="${credentials[2]}"
aws_session_token="${credentials[3]}"
# Get source profile to use to assume role
local source_profile="$(aws configure get source_profile --profile $profile)"
if [[ -z "$sess_name" ]]; then
sess_name="${source_profile:-profile}"
fi
aws_command+=(--profile="${source_profile:-profile}" --role-session-name "${sess_name}")
echo "Assuming role $role_arn using profile ${source_profile:-profile}"
else
# Means we only need to do MFA
aws_command=(aws sts get-session-token --profile="$profile" "${mfa_opt[@]}")
echo "Obtaining session token for profile $profile"
fi
# Format output of aws command for easier processing
aws_command+=(--query '[Credentials.AccessKeyId,Credentials.SecretAccessKey,Credentials.SessionToken]' --output text)
# Run the aws command to obtain credentials
local -a credentials
credentials=(${(ps:\t:)"$(${aws_command[@]})"})
if [[ -n "$credentials" ]]; then
aws_access_key_id="${credentials[1]}"
aws_secret_access_key="${credentials[2]}"
aws_session_token="${credentials[3]}"
fi
# Switch to AWS profile
@ -129,21 +160,60 @@ function aws_change_access_key() {
return 1
fi
echo "Insert the credentials when asked."
asp "$1" || return 1
AWS_PAGER="" aws iam create-access-key
AWS_PAGER="" aws configure --profile "$1"
local profile="$1"
# Get current access key
local original_aws_access_key_id="$(aws configure get aws_access_key_id --profile $profile)"
echo "You can now safely delete the old access key running \`aws iam delete-access-key --access-key-id ID\`"
asp "$profile" || return 1
echo "Generating a new access key pair for you now."
if aws --no-cli-pager iam create-access-key; then
echo "Insert the newly generated credentials when asked."
aws --no-cli-pager configure --profile $profile
else
echo "Current access keys:"
aws --no-cli-pager iam list-access-keys
echo "Profile \"${profile}\" is currently using the $original_aws_access_key_id key. You can delete an old access key by running \`aws --profile $profile iam delete-access-key --access-key-id AccessKeyId\`"
return 1
fi
read -q "yn?Would you like to disable your previous access key (${original_aws_access_key_id}) now? "
case $yn in
[Yy]*)
echo -n "\nDisabling access key ${original_aws_access_key_id}..."
if aws --no-cli-pager iam update-access-key --access-key-id ${original_aws_access_key_id} --status Inactive; then
echo "done."
else
echo "\nFailed to disable ${original_aws_access_key_id} key."
fi
;;
*)
echo ""
;;
esac
echo "You can now safely delete the old access key by running \`aws --profile $profile iam delete-access-key --access-key-id ${original_aws_access_key_id}\`"
echo "Your current keys are:"
AWS_PAGER="" aws iam list-access-keys
aws --no-cli-pager iam list-access-keys
}
function aws_regions() {
if [[ $AWS_DEFAULT_PROFILE || $AWS_PROFILE ]];then
aws ec2 describe-regions |grep RegionName | awk -F ':' '{gsub(/"/, "", $2);gsub(/,/, "", $2);gsub(/ /, "", $2); print $2}'
else
echo "You must specify a AWS profile."
fi
}
function aws_profiles() {
aws --no-cli-pager configure list-profiles 2> /dev/null && return
[[ -r "${AWS_CONFIG_FILE:-$HOME/.aws/config}" ]] || return 1
grep --color=never -Eo '\[.*\]' "${AWS_CONFIG_FILE:-$HOME/.aws/config}" | sed -E 's/^[[:space:]]*\[(profile)?[[:space:]]*([-_[:alnum:]\.@]+)\][[:space:]]*$/\2/g'
grep --color=never -Eo '\[.*\]' "${AWS_CONFIG_FILE:-$HOME/.aws/config}" | sed -E 's/^[[:space:]]*\[(profile)?[[:space:]]*([^[:space:]]+)\][[:space:]]*$/\2/g'
}
function _aws_regions() {
reply=($(aws_regions))
}
compctl -K _aws_regions asr
function _aws_profiles() {
reply=($(aws_profiles))
}
@ -151,15 +221,25 @@ compctl -K _aws_profiles asp acp aws_change_access_key
# AWS prompt
function aws_prompt_info() {
[[ -z $AWS_PROFILE ]] && return
echo "${ZSH_THEME_AWS_PREFIX:=<aws:}${AWS_PROFILE}${ZSH_THEME_AWS_SUFFIX:=>}"
local _aws_to_show
local region="${AWS_REGION:-${AWS_DEFAULT_REGION:-$AWS_PROFILE_REGION}}"
if [[ -n "$AWS_PROFILE" ]];then
_aws_to_show+="${ZSH_THEME_AWS_PROFILE_PREFIX="<aws:"}${AWS_PROFILE}${ZSH_THEME_AWS_PROFILE_SUFFIX=">"}"
fi
if [[ -n "$region" ]]; then
[[ -n "$_aws_to_show" ]] && _aws_to_show+="${ZSH_THEME_AWS_DIVIDER=" "}"
_aws_to_show+="${ZSH_THEME_AWS_REGION_PREFIX="<region:"}${region}${ZSH_THEME_AWS_REGION_SUFFIX=">"}"
fi
echo "$_aws_to_show"
}
if [[ "$SHOW_AWS_PROMPT" != false && "$RPROMPT" != *'$(aws_prompt_info)'* ]]; then
RPROMPT='$(aws_prompt_info)'"$RPROMPT"
fi
# Load awscli completions
# AWS CLI v2 comes with its own autocompletion. Check if that is there, otherwise fall back
@ -204,3 +284,4 @@ else
[[ -r $_aws_zsh_completer_path ]] && source $_aws_zsh_completer_path
unset _aws_zsh_completer_path _brew_prefix
fi

49
plugins/azure/README.md Normal file

@ -0,0 +1,49 @@
# azure
This plugin provides completion support for [azure cli](https://docs.microsoft.com/en-us/cli/azure/)
and a few utilities to manage azure subscriptions and display them in the prompt.
To use it, add `azure` to the plugins array in your zshrc file.
```zsh
plugins=(... azure)
```
## Plugin commands
* `az_subscriptions`: lists the available subscriptions in the `AZURE_CONFIG_DIR` (default: `~/.azure/`).
Used to provide completion for the `azss` function.
* `azgs`: gets the current value of `$azure_subscription`.
* `azss [<subscription>]`: sets the `$azure_subscription`.
NOTE: because azure keeps the state of the active subscription in `${AZURE_CONFIG_DIR:-$HOME/.azure}/azureProfile.json`, the prompt function requires `jq` to parse that file. If `jq` is not in the path, the prompt will show nothing.
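For example (the subscription name is just a placeholder):
```zsh
az_subscriptions            # list the names of all available subscriptions
azss "My Dev Subscription"  # set the active subscription
azgs                        # print the now-active subscription
```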
## Theme
The plugin creates an `azure_prompt_info` function that you can use in your theme, which displays
the current `$azure_subscription`. It uses two variables to control how that is shown:
- ZSH_THEME_AZURE_PREFIX: sets the prefix of the azure_subscription. Defaults to `<az:`.
- ZSH_THEME_AZURE_SUFFIX: sets the suffix of the azure_subscription. Defaults to `>`.
```
RPROMPT='$(azure_prompt_info)'
```
## Develop
On Ubuntu, get a working environment with:
`docker run -it -v $(pwd):/mnt -w /mnt ubuntu bash`
```
apt install -y curl jq zsh git vim
sh -c "$(curl -fsSL https://raw.github.com/ohmyzsh/ohmyzsh/master/tools/install.sh)"
curl -sL https://aka.ms/InstallAzureCLIDeb | bash
```


@ -0,0 +1,60 @@
# AZ Get Subscriptions
function azgs() {
az account show --output tsv --query 'name' 2>/dev/null
}
# AZ Subscription Selection
alias azss="az account set --subscription"
function az_subscriptions() {
az account list --all --output tsv --query '[*].name' 2> /dev/null
}
function _az_subscriptions() {
reply=($(az_subscriptions))
}
compctl -K _az_subscriptions azss
# Azure prompt
function azure_prompt_info() {
[[ ! -f "${AZURE_CONFIG_DIR:-$HOME/.azure}/azureProfile.json" ]] && return
# azgs is too expensive, if we have jq, we enable the prompt
(( $+commands[jq] )) || return 1
azgs=$(jq -r '.subscriptions[] | select(.isDefault==true) .name' "${AZURE_CONFIG_DIR:-$HOME/.azure}/azureProfile.json")
echo "${ZSH_THEME_AZURE_PREFIX:=<az:}${azgs}${ZSH_THEME_AZURE_SUFFIX:=>}"
}
# Load az completions
function _az-homebrew-installed() {
# check if Homebrew is installed
(( $+commands[brew] )) || return 1
# if so, we assume it's default way to install brew
if [[ ${commands[brew]:t2} == bin/brew ]]; then
_brew_prefix="${commands[brew]:h:h}" # remove trailing /bin/brew
else
# ok, it is not in the default prefix
# this call to brew is expensive (about 400 ms), so at least let's make it only once
_brew_prefix=$(brew --prefix)
fi
}
# get az.completion.sh location from $PATH
_az_zsh_completer_path="$commands[az_zsh_completer.sh]"
# otherwise check common locations
if [[ -z $_az_zsh_completer_path ]]; then
# Homebrew
if _az-homebrew-installed; then
_az_zsh_completer_path=$_brew_prefix/etc/bash_completion.d/az
# Linux
else
_az_zsh_completer_path=/etc/bash_completion.d/azure-cli
fi
fi
[[ -r $_az_zsh_completer_path ]] && autoload -U +X bashcompinit && bashcompinit && source $_az_zsh_completer_path
unset _az_zsh_completer_path _brew_prefix


@ -8,15 +8,22 @@ To use, add `battery` to the list of plugins in your `.zshrc` file:
Then, add the `battery_pct_prompt` function to your custom theme. For example:
```
```zsh
RPROMPT='$(battery_pct_prompt) ...'
```
## Requirements
On Linux, you must have the `acpi` tool installed on your operating system.
- On Linux, you must have the `acpi` or `acpitool` commands installed on your operating system.
On Debian/Ubuntu, you can do that with `sudo apt install acpi` or `sudo apt install acpitool`.
Here's an example of how to install with apt:
```
sudo apt-get install acpi
```
- On Android (via [Termux](https://play.google.com/store/apps/details?id=com.termux)), you must have:
1. The `Termux:API` addon app installed:
[Google Play](https://play.google.com/store/apps/details?id=com.termux.api) | [F-Droid](https://f-droid.org/packages/com.termux.api/)
2. The `termux-api` package installed within termux:
```sh
pkg install termux-api
```


@ -10,17 +10,17 @@
# Author: J (927589452) #
# Modified to add support for FreeBSD #
###########################################
# Author: Avneet Singh (kalsi-avneet) #
# Modified to add support for Android #
###########################################
if [[ "$OSTYPE" = darwin* ]]; then
function battery_is_charging() {
ioreg -rc AppleSmartBattery | command grep -q '^.*"ExternalConnected"\ =\ Yes'
}
function battery_pct() {
pmset -g batt | grep -Eo "\d+%" | cut -d% -f1
}
function battery_pct_remaining() {
if battery_is_charging; then
echo "External Power"
@ -28,7 +28,6 @@ if [[ "$OSTYPE" = darwin* ]]; then
battery_pct
fi
}
function battery_time_remaining() {
local smart_battery_status="$(ioreg -rc "AppleSmartBattery")"
if [[ $(echo $smart_battery_status | command grep -c '^.*"ExternalConnected"\ =\ No') -eq 1 ]]; then
@ -42,7 +41,6 @@ if [[ "$OSTYPE" = darwin* ]]; then
echo "∞"
fi
}
function battery_pct_prompt () {
local battery_pct color
if ioreg -rc AppleSmartBattery | command grep -q '^.*"ExternalConnected"\ =\ No'; then
@ -61,17 +59,14 @@ if [[ "$OSTYPE" = darwin* ]]; then
}
elif [[ "$OSTYPE" = freebsd* ]]; then
function battery_is_charging() {
[[ $(sysctl -n hw.acpi.battery.state) -eq 2 ]]
}
function battery_pct() {
if (( $+commands[sysctl] )); then
sysctl -n hw.acpi.battery.life
fi
}
function battery_pct_remaining() {
if ! battery_is_charging; then
battery_pct
@ -79,7 +74,6 @@ elif [[ "$OSTYPE" = freebsd* ]]; then
echo "External Power"
fi
}
function battery_time_remaining() {
local remaining_time
remaining_time=$(sysctl -n hw.acpi.battery.time)
@ -89,7 +83,6 @@ elif [[ "$OSTYPE" = freebsd* ]]; then
printf %02d:%02d $hour $minute
fi
}
function battery_pct_prompt() {
local battery_pct color
battery_pct=$(battery_pct_remaining)
@ -106,19 +99,22 @@ elif [[ "$OSTYPE" = freebsd* ]]; then
echo "%{$fg[$color]%}${battery_pct}%%%{$reset_color%}"
fi
}
elif [[ "$OSTYPE" = linux* ]]; then
elif [[ "$OSTYPE" = linux-android ]] && (( ${+commands[termux-battery-status]} )); then
function battery_is_charging() {
! acpi 2>/dev/null | command grep -v "rate information unavailable" | command grep -q '^Battery.*Discharging'
termux-battery-status 2>/dev/null | command awk '/status/ { exit ($0 ~ /DISCHARGING/) }'
}
function battery_pct() {
if (( $+commands[acpi] )); then
acpi 2>/dev/null | command grep -v "rate information unavailable" | command grep -E '^Battery.*(Full|(Disc|C)harging)' | cut -f2 -d ',' | tr -cd '[:digit:]'
fi
# Sample output:
# {
# "health": "GOOD",
# "percentage": 93,
# "plugged": "UNPLUGGED",
# "status": "DISCHARGING",
# "temperature": 29.0,
# "current": 361816
# }
termux-battery-status 2>/dev/null | command awk '/percentage/ { gsub(/[,]/,""); print $2}'
}
function battery_pct_remaining() {
if ! battery_is_charging; then
battery_pct
@ -126,13 +122,72 @@ elif [[ "$OSTYPE" = linux* ]]; then
echo "External Power"
fi
}
function battery_time_remaining() {
if ! battery_is_charging; then
acpi 2>/dev/null | command grep -v "rate information unavailable" | cut -f3 -d ','
fi
}
function battery_time_remaining() { } # Not available on android
function battery_pct_prompt() {
local battery_pct color
battery_pct=$(battery_pct_remaining)
if battery_is_charging; then
echo "∞"
else
if [[ $battery_pct -gt 50 ]]; then
color='green'
elif [[ $battery_pct -gt 20 ]]; then
color='yellow'
else
color='red'
fi
echo "%{$fg[$color]%}${battery_pct}%%%{$reset_color%}"
fi
}
elif [[ "$OSTYPE" = linux* ]]; then
function battery_is_charging() {
if (( $+commands[acpitool] )); then
! acpitool 2>/dev/null | command grep -qE '^\s+Battery.*Discharging'
elif (( $+commands[acpi] )); then
! acpi 2>/dev/null | command grep -v "rate information unavailable" | command grep -q '^Battery.*Discharging'
fi
}
function battery_pct() {
if (( $+commands[acpitool] )); then
# Sample output:
# Battery #1 : Unknown, 99.55%
# Battery #2 : Discharging, 49.58%, 01:12:05
# All batteries : 62.60%, 02:03:03
local -i pct=$(acpitool 2>/dev/null | command awk -F, '
/^\s+All batteries/ {
gsub(/[^0-9.]/, "", $1)
pct=$1
exit
}
!pct && /^\s+Battery/ {
gsub(/[^0-9.]/, "", $2)
pct=$2
}
END { print pct }
')
echo $pct
elif (( $+commands[acpi] )); then
# Sample output:
# Battery 0: Discharging, 0%, rate information unavailable
# Battery 1: Full, 100%
acpi 2>/dev/null | command awk -F, '
/rate information unavailable/ { next }
/^Battery.*: /{ gsub(/[^0-9]/, "", $2); print $2; exit }
'
fi
}
function battery_pct_remaining() {
if ! battery_is_charging; then
battery_pct
else
echo "External Power"
fi
}
function battery_time_remaining() {
if ! battery_is_charging; then
acpi 2>/dev/null | command grep -v "rate information unavailable" | cut -f3 -d ','
fi
}
function battery_pct_prompt() {
local battery_pct color
battery_pct=$(battery_pct_remaining)
@ -149,7 +204,6 @@ elif [[ "$OSTYPE" = linux* ]]; then
echo "%{$fg[$color]%}${battery_pct}%%%{$reset_color%}"
fi
}
else
# Empty functions so we don't cause errors in prompts
function battery_is_charging { false }
@ -174,7 +228,7 @@ function battery_level_gauge() {
local charging_color=${BATTERY_CHARGING_COLOR:-$color_yellow}
local charging_symbol=${BATTERY_CHARGING_SYMBOL:-'⚡'}
local battery_remaining_percentage=$(battery_pct)
local -i battery_remaining_percentage=$(battery_pct)
local filled empty gauge_color
if [[ $battery_remaining_percentage =~ [0-9]+ ]]; then

View file

@ -1,5 +1,14 @@
## Bazel autocomplete plugin
# Bazel plugin
A copy of the completion script from the
[bazelbuild/bazel](https://github.com/bazelbuild/bazel/master/scripts/zsh_completion/_bazel)
git repo.
This plugin adds completion for [bazel](https://bazel.build), an open-source build and
test tool that scalably supports multi-language and multi-platform projects.
To use it, add `bazel` to the plugins array in your zshrc file:
```zsh
plugins=(... bazel)
```
The plugin has a copy of [the completion script from the git repository][1].
[1]: https://github.com/bazelbuild/bazel/blob/master/scripts/zsh_completion/_bazel

View file

@ -164,7 +164,7 @@ _get_build_targets() {
;;
esac
completions=(${$(_bazel_b query "kind(\"${rule_re}\", ${pkg}:all)" 2>/dev/null)##*:})
if ( (( ${#completions} > 0 )) && [[ $target_type != run ]] ); then
if ( (( ${#completions} > 0 )) && [[ $target_type != bin ]] ); then
completions+=(all)
fi
echo ${completions[*]}

View file

@ -0,0 +1,5 @@
# Bedtools plugin
This plugin adds support for the [bedtools suite](http://bedtools.readthedocs.org/en/latest/):
* Adds autocomplete options for all bedtools subcommands.
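To use it, add `bedtools` to the plugins array in your zshrc file, as with the other plugins in this repository:

```zsh
plugins=(... bedtools)
```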

View file

@ -0,0 +1,64 @@
#compdef bedtools
#autoload
local curcontext="$curcontext" state line ret=1
local -a _files
_arguments -C \
'1: :->cmds' \
'2:: :->args' && ret=0
case $state in
cmds)
_values "bedtools command" \
"--contact[Feature requests, bugs, mailing lists, etc.]" \
"--help[Print this help menu.]" \
"--version[What version of bedtools are you using?.]" \
"annotate[Annotate coverage of features from multiple files.]" \
"bamtobed[Convert BAM alignments to BED (& other) formats.]" \
"bamtofastq[Convert BAM records to FASTQ records.]" \
"bed12tobed6[Breaks BED12 intervals into discrete BED6 intervals.]" \
"bedpetobam[Convert BEDPE intervals to BAM records.]" \
"bedtobam[Convert intervals to BAM records.]" \
"closest[Find the closest, potentially non-overlapping interval.]" \
"cluster[Cluster (but don't merge) overlapping/nearby intervals.]" \
"complement[Extract intervals _not_ represented by an interval file.]" \
"coverage[Compute the coverage over defined intervals.]" \
"expand[Replicate lines based on lists of values in columns.]" \
"fisher[Calculate Fisher statistic b/w two feature files.]" \
"flank[Create new intervals from the flanks of existing intervals.]" \
"genomecov[Compute the coverage over an entire genome.]" \
"getfasta[Use intervals to extract sequences from a FASTA file.]" \
"groupby[Group by common cols. & summarize oth. cols. (~ SQL "groupBy")]" \
"igv[Create an IGV snapshot batch script.]" \
"intersect[Find overlapping intervals in various ways.]" \
"jaccard[Calculate the Jaccard statistic b/w two sets of intervals.]" \
"links[Create a HTML page of links to UCSC locations.]" \
"makewindows[Make interval "windows" across a genome.]" \
"map[Apply a function to a column for each overlapping interval.]" \
"maskfasta[Use intervals to mask sequences from a FASTA file.]" \
"merge[Combine overlapping/nearby intervals into a single interval.]" \
"multicov[Counts coverage from multiple BAMs at specific intervals.]" \
"multiinter[Identifies common intervals among multiple interval files.]" \
"nuc[Profile the nucleotide content of intervals in a FASTA file.]" \
"overlap[Computes the amount of overlap from two intervals.]" \
"pairtobed[Find pairs that overlap intervals in various ways.]" \
"pairtopair[Find pairs that overlap other pairs in various ways.]" \
"random[Generate random intervals in a genome.]" \
"reldist[Calculate the distribution of relative distances b/w two files.]" \
"sample[Sample random records from file using reservoir sampling.]" \
"shuffle[Randomly redistrubute intervals in a genome.]" \
"slop[Adjust the size of intervals.]" \
"sort[Order the intervals in a file.]" \
"subtract[Remove intervals based on overlaps b/w two files.]" \
"tag[Tag BAM alignments based on overlaps with interval files.]" \
"unionbedg[Combines coverage intervals from multiple BEDGRAPH files.]" \
"window[Find overlapping intervals within a window around an interval.]" \
ret=0
;;
*)
_files
;;
esac
return ret

View file

@ -1,77 +1,106 @@
#!/usr/bin/env zsh
## setup ##
## Setup
[[ -o interactive ]] || return #interactive only!
zmodload zsh/datetime || { print "can't load zsh/datetime"; return } # faster than date()
autoload -Uz add-zsh-hook || { print "can't add zsh hook!"; return }
[[ -o interactive ]] || return # don't load on non-interactive shells
[[ -z "$SSH_CLIENT" && -z "$SSH_TTY" ]] || return # don't load on a SSH connection
(( ${+bgnotify_threshold} )) || bgnotify_threshold=5 #default 10 seconds
zmodload zsh/datetime # faster than `date`
## definitions ##
## Zsh Hooks
if ! (type bgnotify_formatted | grep -q 'function'); then ## allow custom function override
function bgnotify_formatted { ## args: (exit_status, command, elapsed_seconds)
elapsed="$(( $3 % 60 ))s"
(( $3 >= 60 )) && elapsed="$((( $3 % 3600) / 60 ))m $elapsed"
(( $3 >= 3600 )) && elapsed="$(( $3 / 3600 ))h $elapsed"
[ $1 -eq 0 ] && bgnotify "#win (took $elapsed)" "$2" || bgnotify "#fail (took $elapsed)" "$2"
function bgnotify_begin {
bgnotify_timestamp=$EPOCHSECONDS
bgnotify_lastcmd="${1:-$2}"
}
function bgnotify_end {
{
local exit_status=$?
local elapsed=$(( EPOCHSECONDS - bgnotify_timestamp ))
# check time elapsed
[[ $bgnotify_timestamp -gt 0 ]] || return
[[ $elapsed -ge $bgnotify_threshold ]] || return
# check if Terminal app is not active
[[ $(bgnotify_appid) != "$bgnotify_termid" ]] || return
printf '\a' # beep sound
bgnotify_formatted "$exit_status" "$bgnotify_lastcmd" "$elapsed"
} always {
bgnotify_timestamp=0
}
fi
}
currentWindowId () {
if hash osascript 2>/dev/null; then #osx
osascript -e 'tell application (path to frontmost application as text) to id of front window' 2&> /dev/null || echo "0"
elif (hash notify-send 2>/dev/null || hash kdialog 2>/dev/null); then #ubuntu!
xprop -root 2> /dev/null | awk '/NET_ACTIVE_WINDOW/{print $5;exit} END{exit !$5}' || echo "0"
autoload -Uz add-zsh-hook
add-zsh-hook preexec bgnotify_begin
add-zsh-hook precmd bgnotify_end
## Functions
# allow custom function override
(( ${+functions[bgnotify_formatted]} )) || \
function bgnotify_formatted {
local exit_status=$1
local cmd="$2"
# humanly readable elapsed time
local elapsed="$(( $3 % 60 ))s"
(( $3 < 60 )) || elapsed="$((( $3 % 3600) / 60 ))m $elapsed"
(( $3 < 3600 )) || elapsed="$(( $3 / 3600 ))h $elapsed"
if [[ $1 -eq 0 ]]; then
bgnotify "#win (took $elapsed)" "$2"
else
echo $EPOCHSECONDS #fallback for windows
bgnotify "#fail (took $elapsed)" "$2"
fi
}
bgnotify () { ## args: (title, subtitle)
if hash terminal-notifier 2>/dev/null; then #osx
[[ "$TERM_PROGRAM" == 'iTerm.app' ]] && term_id='com.googlecode.iterm2';
[[ "$TERM_PROGRAM" == 'Apple_Terminal' ]] && term_id='com.apple.terminal';
## now call terminal-notifier, (hopefully with $term_id!)
[ -z "$term_id" ] && terminal-notifier -message "$2" -title "$1" >/dev/null ||
terminal-notifier -message "$2" -title "$1" -activate "$term_id" -sender "$term_id" >/dev/null
elif hash growlnotify 2>/dev/null; then #osx growl
# for macOS, output is "app ID, window ID" (com.googlecode.iterm2, 116)
function bgnotify_appid {
if (( ${+commands[osascript]} )); then
osascript -e 'tell application (path to frontmost application as text) to get the {id, id of front window}' 2>/dev/null
elif (( ${+commands[xprop]} )); then
xprop -root _NET_ACTIVE_WINDOW 2>/dev/null | cut -d' ' -f5
else
echo $EPOCHSECONDS
fi
}
function bgnotify {
# $1: title, $2: message
if (( ${+commands[terminal-notifier]} )); then # macOS
local term_id="${bgnotify_termid%%,*}" # remove window id
if [[ -z "$term_id" ]]; then
case "$TERM_PROGRAM" in
iTerm.app) term_id='com.googlecode.iterm2' ;;
Apple_Terminal) term_id='com.apple.terminal' ;;
esac
fi
if [[ -z "$term_id" ]]; then
terminal-notifier -message "$2" -title "$1" &>/dev/null
else
terminal-notifier -message "$2" -title "$1" -activate "$term_id" -sender "$term_id" &>/dev/null
fi
elif (( ${+commands[growlnotify]} )); then # macOS growl
growlnotify -m "$1" "$2"
elif hash notify-send 2>/dev/null; then #ubuntu gnome!
elif (( ${+commands[notify-send]} )); then # GNOME
notify-send "$1" "$2"
elif hash kdialog 2>/dev/null; then #ubuntu kde!
elif (( ${+commands[kdialog]} )); then # KDE
kdialog --title "$1" --passivepopup "$2" 5
elif hash notifu 2>/dev/null; then #cygwyn support!
elif (( ${+commands[notifu]} )); then # cygwin
notifu /m "$2" /p "$1"
fi
}
## Defaults
## Zsh hooks ##
# notify if command took longer than 5s by default
bgnotify_threshold=${bgnotify_threshold:-5}
bgnotify_begin() {
bgnotify_timestamp=$EPOCHSECONDS
bgnotify_lastcmd="${1:-$2}"
bgnotify_windowid=$(currentWindowId)
}
bgnotify_end() {
didexit=$?
elapsed=$(( EPOCHSECONDS - bgnotify_timestamp ))
past_threshold=$(( elapsed >= bgnotify_threshold ))
if (( bgnotify_timestamp > 0 )) && (( past_threshold )); then
if [ $(currentWindowId) != "$bgnotify_windowid" ]; then
print -n "\a"
bgnotify_formatted "$didexit" "$bgnotify_lastcmd" "$elapsed"
fi
fi
bgnotify_timestamp=0 #reset it to 0!
}
## only enable if a local (non-ssh) connection
if [ -z "$SSH_CLIENT" ] && [ -z "$SSH_TTY" ]; then
add-zsh-hook preexec bgnotify_begin
add-zsh-hook precmd bgnotify_end
fi
# bgnotify_appid is slow in macOS and the terminal ID won't change, so cache it at startup
bgnotify_termid="$(bgnotify_appid)"
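As a usage sketch (not part of the plugin itself): because the plugin only defines `bgnotify_formatted` when no function of that name exists, and only applies the default `bgnotify_threshold` when the variable is unset, a user can customize both from `.zshrc` before Oh My Zsh loads the plugin. The icon choices below are purely illustrative:

```zsh
# Example ~/.zshrc customization (define before Oh My Zsh loads the plugin)
bgnotify_threshold=10  # only notify for commands that ran 10s or longer

# Custom notification text; args: exit_status, command, elapsed_seconds
function bgnotify_formatted {
  local icon='✔'
  (( $1 == 0 )) || icon='✘'
  bgnotify "$icon done in ${3}s" "$2"
}
```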

View file

@ -1,6 +0,0 @@
## Boot2docker autocomplete plugin
- Adds autocomplete options for all boot2docker commands.
Maintainer : Manfred Touron ([@moul](https://github.com/moul))

View file

@ -1,73 +0,0 @@
#compdef boot2docker
# Boot2docker autocompletion for oh-my-zsh
# Requires: Boot2docker installed
# Author: Manfred Touron (@moul)
local -a _1st_arguments
_1st_arguments=(
"init":"Create a new Boot2Docker VM."
"up":"Start VM from any states."
"start":"Start VM from any states."
"boot":"Start VM from any states."
"ssh":"[ssh-command] Login to VM via SSH."
"save":"Suspend VM and save state to disk."
"suspend":"Suspend VM and save state to disk."
"down":"Gracefully shutdown the VM."
"stop":"Gracefully shutdown the VM."
"halt":"Gracefully shutdown the VM."
"restart":"Gracefully reboot the VM."
"poweroff":"Forcefully power off the VM (may corrupt disk image)."
"reset":"Forcefully power cycle the VM (may corrupt disk image)."
"delete":"Delete Boot2Docker VM and its disk image."
"destroy":"Delete Boot2Docker VM and its disk image."
"config":"Show selected profile file settings."
"cfg":"Show selected profile file settings."
"info":"Display detailed information of VM."
"ip":"Display the IP address of the VM's Host-only network."
"socket":"Display the DOCKER_HOST socket to connect to."
"shellinit":"Display the shell command to set up the Docker client."
"status":"Display current state of VM."
"download":"Download Boot2Docker ISO image."
"upgrade":"Upgrade the Boot2Docker ISO image (restart if running)."
"version":"Display version information."
)
_arguments \
'(--basevmdk)--basevmdk[Path to VMDK to use as base for persistent partition]' \
'(--cpus)'{-c,--cpus}'[number of CPUs for boot2docker.]' \
'(--clobber)--clobber[overwrite Docker client binary on boot2docker upgrade]' \
'(--dhcp)--dhcp[enable VirtualBox host-only network DHCP.]' \
'(--dhcpip)--dhcpip[VirtualBox host-only network DHCP server address.]' \
'(-s --disksize)'{-s,--disksize}'[boot2docker disk image size (in MB).]' \
'(--dockerport)--dockerport[host Docker port (forward to port 2376 in VM). (deprecated - use with care)]' \
'(--driver)--driver[hypervisor driver.]' \
'(--force-upgrade-download)--force-upgrade-download[always download on boot2docker upgrade, never skip.]' \
'(--hostip)--hostip[VirtualBox host-only network IP address.]' \
'(--iso)--iso[path to boot2docker ISO image.]' \
'(--iso-url)--iso-url[/api.github.com/repos/boot2docker/boot2docker/releases": source URL to provision the boot2docker ISO image.]' \
'(--lowerip)--lowerip[VirtualBox host-only network DHCP lower bound.]' \
'(--memory)'{-m,--memory}'[virtual machine memory size (in MB).]' \
'(--netmask)--netmask[VirtualBox host-only network mask.]' \
'(--no-dummy)--no-dummy[Example parameter for the dummy driver.]' \
'(--retries)--retries[number of port knocking retries during 'start']' \
'(--serial)--serial[try serial console to get IP address (experimental)]' \
'(--serialfile)--serialfile[path to the serial socket/pipe.]' \
'(--ssh)--ssh[path to SSH client utility.]' \
'(--ssh-keygen)--ssh-keygen[path to ssh-keygen utility.]' \
'(--sshkey)--sshkey[path to SSH key to use.]' \
'(--sshport)--sshport[host SSH port (forward to port 22 in VM).]' \
'(--upperip)--upperip[VirtualBox host-only network DHCP upper bound.]' \
'(--vbm)--vbm[path to VirtualBox management utility.]' \
'(--vbox-share)--vbox-share[(defaults to "/Users=Users" if no shares are specified; use "disable" to explicitly prevent any shares from being created) List of directories to share during "up|start|boot" via VirtualBox Guest Additions, with optional labels]' \
'(--verbose)'{-v,--verbose}'[display verbose command invocations.]' \
'(--vm)--vm[virtual machine name.]' \
'(--waittime)--waittime[Time in milliseconds to wait between port knocking retries during 'start']' \
'*:: :->subcmds' && return 0
#_arguments '*:: :->command'
if (( CURRENT == 1 )); then
_describe -t commands "boot2docker command" _1st_arguments
return
fi

View file

@ -1,4 +1,6 @@
alias bi="bower install"
alias bisd="bower install --save-dev"
alias bis="bower install --save"
alias bl="bower list"
alias bs="bower search"
@ -7,7 +9,7 @@ _bower_installed_packages () {
}
_bower ()
{
local -a _1st_arguments _no_color _dopts _save_dev _force_lastest _production
local -a _1st_arguments _no_color _dopts _save_dev _force_latest _production
local expl
typeset -A opt_args
@ -20,7 +22,7 @@ _bower ()
_save_dev=('(--save-dev)--save-dev[Save installed packages into the project"s bower.json devDependencies]')
_force_lastest=('(--force-latest)--force-latest[Force latest version on conflict]')
_force_latest=('(--force-latest)--force-latest[Force latest version on conflict]')
_production=('(--production)--production[Do not install project devDependencies]')
@ -52,7 +54,7 @@ _bower ()
_arguments \
$_dopts \
$_save_dev \
$_force_lastest \
$_force_latest \
$_no_color \
$_production
;;
@ -60,7 +62,7 @@ _bower ()
_arguments \
$_dopts \
$_no_color \
$_force_lastest
$_force_latest
_bower_installed_packages
compadd "$@" $(echo $bower_package_list)
;;

View file

@ -1,31 +1,47 @@
# Branch
# Branch plugin
Displays the current Git or Mercurial branch fast.
This plugin displays the current Git or Mercurial branch, fast. If in a Mercurial repository,
it also displays the current bookmark, if present.
To use it, add `branch` to the plugins array in your zshrc file:
```zsh
plugins=(... branch)
```
## Speed test
### Mercurial
- `hg branch`:
```shell
$ time hg branch
0.11s user 0.14s system 70% cpu 0.355 total
```
```console
$ time hg branch
0.11s user 0.14s system 70% cpu 0.355 total
```
### Branch plugin
- branch plugin:
```shell
$ time zsh /tmp/branch_prompt_info_test.zsh
0.00s user 0.01s system 78% cpu 0.014 total
```
```console
$ time zsh /tmp/branch_prompt_info_test.zsh
0.00s user 0.01s system 78% cpu 0.014 total
```
## Usage
Edit your theme file (eg.: `~/.oh-my-zsh/theme/robbyrussell.zsh-theme`)
adding `$(branch_prompt_info)` in your prompt like this:
Copy your theme to `$ZSH_CUSTOM/themes/` and modify it to add `$(branch_prompt_info)` in your prompt.
This example is for the `robbyrussell` theme:
```diff
- PROMPT='${ret_status}%{$fg_bold[green]%}%p %{$fg[cyan]%}%c %{$fg_bold[blue]%}$(git_prompt_info)%{$fg_bold[blue]%} % %{$reset_color%}'
+ PROMPT='${ret_status}%{$fg_bold[green]%}%p %{$fg[cyan]%}%c %{$fg_bold[blue]%}$(git_prompt_info)$(branch_prompt_info)%{$fg_bold[blue]%} % %{$reset_color%}'
diff --git a/themes/robbyrussell.zsh-theme b/themes/robbyrussell.zsh-theme
index 2fd5f2cd..9d89a464 100644
--- a/themes/robbyrussell.zsh-theme
+++ b/themes/robbyrussell.zsh-theme
@@ -1,5 +1,5 @@
PROMPT="%(?:%{$fg_bold[green]%}➜ :%{$fg_bold[red]%}➜ )"
-PROMPT+=' %{$fg[cyan]%}%c%{$reset_color%} $(git_prompt_info)'
+PROMPT+=' %{$fg[cyan]%}%c%{$reset_color%} $(branch_prompt_info)'
ZSH_THEME_GIT_PROMPT_PREFIX="%{$fg_bold[blue]%}git:(%{$fg[red]%}"
ZSH_THEME_GIT_PROMPT_SUFFIX="%{$reset_color%} "
```
## Maintainer

View file

@ -3,29 +3,33 @@
# Oct 2, 2015
function branch_prompt_info() {
# Defines path as current directory
local current_dir=$PWD
# While current path is not root path
while [[ $current_dir != '/' ]]
do
# Git repository
if [[ -d "${current_dir}/.git" ]]
then
echo '±' ${"$(<"$current_dir/.git/HEAD")"##*/}
return;
# Start checking in current working directory
local branch="" dir="$PWD"
while [[ "$dir" != '/' ]]; do
# Found .git directory
if [[ -d "${dir}/.git" ]]; then
branch="${"$(<"${dir}/.git/HEAD")"##*/}"
echo '±' "${branch:gs/%/%%}"
return
fi
# Mercurial repository
if [[ -d "${current_dir}/.hg" ]]
then
if [[ -f "$current_dir/.hg/branch" ]]
then
echo '☿' $(<"$current_dir/.hg/branch")
# Found .hg directory
if [[ -d "${dir}/.hg" ]]; then
if [[ -f "${dir}/.hg/branch" ]]; then
branch="$(<"${dir}/.hg/branch")"
else
echo '☿ default'
branch="default"
fi
return;
if [[ -f "${dir}/.hg/bookmarks.current" ]]; then
branch="${branch}/$(<"${dir}/.hg/bookmarks.current")"
fi
echo '☿' "${branch:gs/%/%%}"
return
fi
# Defines path as parent directory and keeps looking for :)
current_dir="${current_dir:h}"
# Check parent directory
dir="${dir:h}"
done
}

View file

@ -8,22 +8,33 @@ To use it, add `brew` to the plugins array of your zshrc file:
plugins=(... brew)
```
## Shellenv
If `brew` is not found in the PATH, this plugin will attempt to find it in common
locations, and execute `brew shellenv` to set the environment appropriately.
This plugin will also export `HOMEBREW_PREFIX="$(brew --prefix)"` if not previously
defined for convenience.
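For reference, this fallback is roughly equivalent to doing the following yourself; the path shown is the Apple Silicon macOS default and is only an example, as other installs live under `/usr/local` or a Linuxbrew prefix:

```zsh
# Roughly what the plugin does when brew isn't already in $PATH
eval "$(/opt/homebrew/bin/brew shellenv)"
# And the convenience export, if shellenv didn't already provide it
export HOMEBREW_PREFIX="${HOMEBREW_PREFIX:-$(brew --prefix)}"
```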
## Aliases
| Alias | Command | Description |
|----------|---------------------------------------|---------------------------------------------------------------------|
| `brewp` | `brew pin` | Pin a specified formula so that it's not upgraded. |
| `brews` | `brew list -1` | List installed formulae or the installed files for a given formula. |
| `brewsp` | `brew list --pinned` | List pinned formulae, or show the version of a given formula. |
| `bubo` | `brew update && brew outdated` | Update Homebrew data, then list outdated formulae and casks. |
| `bubc` | `brew upgrade && brew cleanup` | Upgrade outdated formulae and casks, then run cleanup. |
| `bubu` | `bubo && bubc` | Do the last two operations above. |
| `buf` | `brew upgrade --formula` | Upgrade only formulas (not casks). |
| `bcubo` | `brew update && brew outdated --cask` | Update Homebrew data, then list outdated casks. |
| `bcubc` | `brew upgrade --cask && brew cleanup` | Update outdated casks, then run cleanup. |
| Alias | Command | Description |
| -------- | --------------------------------------- | ------------------------------------------------------------------- |
| `bcubc` | `brew upgrade --cask && brew cleanup` | Update outdated casks, then run cleanup. |
| `bcubo` | `brew update && brew outdated --cask` | Update Homebrew data, then list outdated casks. |
| `brewp` | `brew pin` | Pin a specified formula so that it's not upgraded. |
| `brews` | `brew list -1` | List installed formulae or the installed files for a given formula. |
| `brewsp` | `brew list --pinned` | List pinned formulae, or show the version of a given formula. |
| `bubc` | `brew upgrade && brew cleanup` | Upgrade outdated formulae and casks, then run cleanup. |
| `bugbc` | `brew upgrade --greedy && brew cleanup` | Upgrade outdated formulae and casks (greedy), then run cleanup. |
| `bubo` | `brew update && brew outdated` | Update Homebrew data, then list outdated formulae and casks. |
| `bubu` | `bubo && bubc` | Do the last two operations above. |
| `bfu` | `brew upgrade --formula` | Upgrade only formulas (not casks). |
| `buz` | `brew uninstall --zap` | Remove all files associated with a cask. |
## Completion
This plugin configures paths with Homebrew's completion functions automatically, so you don't need to do it manually. See: https://docs.brew.sh/Shell-Completion#configuring-completions-in-zsh.
With the release of Homebrew 1.0, they decided to bundle the zsh completion as part of the
brew installation, so we no longer ship it with the brew plugin; now it only has brew
aliases. If you find that brew completion no longer works, make sure you have your Homebrew

View file

@ -1,9 +1,56 @@
alias brewp='brew pin'
alias brews='brew list -1'
alias brewsp='brew list --pinned'
alias bubo='brew update && brew outdated'
alias bubc='brew upgrade && brew cleanup'
alias bubu='bubo && bubc'
alias buf='brew upgrade --formula'
if (( ! $+commands[brew] )); then
if [[ -x /opt/homebrew/bin/brew ]]; then
BREW_LOCATION="/opt/homebrew/bin/brew"
elif [[ -x /usr/local/bin/brew ]]; then
BREW_LOCATION="/usr/local/bin/brew"
elif [[ -x /home/linuxbrew/.linuxbrew/bin/brew ]]; then
BREW_LOCATION="/home/linuxbrew/.linuxbrew/bin/brew"
elif [[ -x "$HOME/.linuxbrew/bin/brew" ]]; then
BREW_LOCATION="$HOME/.linuxbrew/bin/brew"
else
return
fi
# Only add Homebrew installation to PATH, MANPATH, and INFOPATH if brew is
# not already on the path, to prevent duplicate entries. This aligns with
# the behavior of the brew installer.sh post-install steps.
eval "$("$BREW_LOCATION" shellenv)"
unset BREW_LOCATION
fi
if [[ -z "$HOMEBREW_PREFIX" ]]; then
# Maintain compatibility with potential custom user profiles, where we had
# previously relied on always sourcing shellenv. OMZ plugins should not rely
# on this to be defined due to out of order processing.
export HOMEBREW_PREFIX="$(brew --prefix)"
fi
if [[ -d "$HOMEBREW_PREFIX/share/zsh/site-functions" ]]; then
fpath+=("$HOMEBREW_PREFIX/share/zsh/site-functions")
fi
alias bcubc='brew upgrade --cask && brew cleanup'
alias bcubo='brew update && brew outdated --cask'
alias bcubc='brew upgrade --cask && brew cleanup'
alias brewp='brew pin'
alias brewsp='brew list --pinned'
alias bubc='brew upgrade && brew cleanup'
alias bugbc='brew upgrade --greedy && brew cleanup'
alias bubo='brew update && brew outdated'
alias bubu='bubo && bubc'
alias bubug='bubo && bugbc'
alias bfu='brew upgrade --formula'
alias buz='brew uninstall --zap'
function brews() {
local formulae="$(brew leaves | xargs brew deps --installed --for-each)"
local casks="$(brew list --cask 2>/dev/null)"
local blue="$(tput setaf 4)"
local bold="$(tput bold)"
local off="$(tput sgr0)"
echo "${blue}==>${off} ${bold}Formulae${off}"
echo "${formulae}" | sed "s/^\(.*\):\(.*\)$/\1${blue}\2${off}/"
echo "\n${blue}==>${off} ${bold}Casks${off}\n${casks}"
}

View file

@ -0,0 +1,26 @@
# Bridgetown plugin
This plugin adds some aliases and autocompletion for common [Bridgetown](https://bridgetownrb.com/) commands.
To use it, add `bridgetown` to the plugins array in your zshrc file:
```zsh
plugins=(... bridgetown)
```
## Aliases
| Alias | Command |
|-------|----------------------------|
| br | `bridgetown` |
| bra | `bin/bridgetown apply` |
| brb | `bin/bridgetown build` |
| brc | `bin/bridgetown console` |
| brclean | `bin/bridgetown clean` |
| brd | `bin/bridgetown deploy` |
| brdoc | `bin/bridgetown doctor` |
| brh | `bin/bridgetown help` |
| brn | `bridgetown new` |
| brp | `bridgetown plugins` |
| brpl | `bridgetown plugins list` |
| brs | `bin/bridgetown start` |
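As a quick usage sketch (assuming a freshly scaffolded project so the `bin/bridgetown` binstub exists; `my_site` is just an example name):

```zsh
brn my_site && cd my_site   # bridgetown new my_site
brs                         # bin/bridgetown start
```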

View file

@ -0,0 +1,12 @@
alias br='bridgetown'
alias bra='bin/bridgetown apply'
alias brb='bin/bridgetown build'
alias brc='bin/bridgetown console'
alias brclean='bin/bridgetown clean'
alias brd='bin/bridgetown deploy'
alias brdoc='bin/bridgetown doctor'
alias brh='bin/bridgetown help'
alias brn='bridgetown new'
alias brp='bridgetown plugins'
alias brpl='bridgetown plugins list'
alias brs='bin/bridgetown start'

View file

@ -18,6 +18,7 @@ case $state in
"check[Determine whether the requirements for your application are installed]" \
"list[Show all of the gems in the current bundle]" \
"show[Show the source location of a particular gem in the bundle]" \
"info[Show details of a particular gem in the bundle]" \
"outdated[Show all of the outdated gems in the current bundle]" \
"console[Start an IRB session in the context of the current bundle]" \
"open[Open an installed gem in the editor]" \
@ -84,7 +85,7 @@ case $state in
'(--verbose)--verbose[Enable verbose output mode]'
ret=0
;;
(open|show)
(open|show|info)
_gems=( $(bundle show 2> /dev/null | sed -e '/^ \*/!d; s/^ \* \([^ ]*\) .*/\1/') )
if [[ $_gems != "" ]]; then
_values 'gems' $_gems && ret=0

View file

@ -40,7 +40,7 @@ bundle_install() {
else
local cores_num="$(nproc)"
fi
bundle install --jobs="$cores_num" "$@"
BUNDLE_JOBS="$cores_num" bundle install "$@"
}
## Gem wrapper
@ -81,14 +81,12 @@ bundled_commands=(
)
# Remove $UNBUNDLED_COMMANDS from the bundled_commands list
for cmd in $UNBUNDLED_COMMANDS; do
bundled_commands=(${bundled_commands#$cmd});
done
bundled_commands=(${bundled_commands:|UNBUNDLED_COMMANDS})
unset UNBUNDLED_COMMANDS
# Add $BUNDLED_COMMANDS to the bundled_commands list
for cmd in $BUNDLED_COMMANDS; do
bundled_commands+=($cmd);
done
bundled_commands+=($BUNDLED_COMMANDS)
unset BUNDLED_COMMANDS
# Check if in the root or a subdirectory of a bundled project
_within-bundled-project() {
@ -126,5 +124,4 @@ for cmd in $bundled_commands; do
compdef "_$cmd" "bundled_$cmd"="$cmd"
fi
done
unset cmd bundled_commands
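An aside on the `${array:|OTHER}` expansion used above: it is zsh's array subtraction, which keeps the elements of the first array that are not present in the array named after `:|`. A tiny standalone illustration with hypothetical arrays:

```zsh
# zsh array subtraction: keep elements of $a that are not in $b
a=(rails rake rspec)
b=(rake)
print -r -- ${a:|b}   # prints: rails rspec
```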

View file

@ -1,10 +1,10 @@
# CakePHP 3 basic command completion
_cakephp3_get_command_list () {
bin/cake Completion commands
bin/cake completion commands
}
_cakephp3_get_sub_command_list () {
bin/cake Completion subcommands ${words[2]}
bin/cake completion subcommands ${words[2]}
}
_cakephp3_get_3rd_argument () {
@ -34,5 +34,5 @@ compdef _cakephp3 cake
#Alias
alias c3='bin/cake'
alias c3cache='bin/cake orm_cache clear'
alias c3cache='bin/cake schema_cache clear'
alias c3migrate='bin/cake migrations migrate'

View file

@ -1,11 +0,0 @@
# cargo
This plugin adds completion for the Rust build tool [`Cargo`](https://github.com/rust-lang/cargo).
To use it, add `cargo` to the plugins array in your zshrc file:
```zsh
plugins=(... cargo)
```
Updated on March 3rd, 2019, from [Cargo 0.34.0](https://github.com/rust-lang/cargo/releases/tag/0.34.0).

View file

@ -1,407 +0,0 @@
#compdef cargo
autoload -U regexp-replace
_cargo() {
local curcontext="$curcontext" ret=1
local -a command_scope_spec common parallel features msgfmt triple target registry
local -a state line state_descr # These are set by _arguments
typeset -A opt_args
common=(
'(-q --quiet)*'{-v,--verbose}'[use verbose output]'
'(-q --quiet -v --verbose)'{-q,--quiet}'[no output printed to stdout]'
'-Z+[pass unstable (nightly-only) flags to cargo]: :_cargo_unstable_flags'
'--frozen[require that Cargo.lock and cache are up to date]'
'--locked[require that Cargo.lock is up to date]'
'--color=[specify colorization option]:coloring:(auto always never)'
'(- 1 *)'{-h,--help}'[show help message]'
)
# leading items in parentheses are an exclusion list for the arguments following that arg
# See: http://zsh.sourceforge.net/Doc/Release/Completion-System.html#Completion-Functions
# - => exclude all other options
# 1 => exclude positional arg 1
# * => exclude all other args
# +blah => exclude +blah
_arguments -s -S -C $common \
'(- 1 *)--list[list installed commands]' \
'(- 1 *)--explain=[provide a detailed explanation of an error message]:error code' \
'(- 1 *)'{-V,--version}'[show version information]' \
'(+beta +nightly)+stable[use the stable toolchain]' \
'(+stable +nightly)+beta[use the beta toolchain]' \
'(+stable +beta)+nightly[use the nightly toolchain]' \
'1: :_cargo_cmds' \
'*:: :->args'
# These flags are mutually exclusive specifiers for the scope of a command; as
# they are used in multiple places without change, they are expanded into the
# appropriate command's `_arguments` where appropriate.
command_scope_spec=(
'(--bin --example --test --lib)--bench=[specify benchmark name]: :_cargo_benchmark_names'
'(--bench --bin --test --lib)--example=[specify example name]:example name'
'(--bench --example --test --lib)--bin=[specify binary name]:binary name'
'(--bench --bin --example --test)--lib=[specify library name]:library name'
'(--bench --bin --example --lib)--test=[specify test name]:test name'
)
parallel=(
'(-j --jobs)'{-j+,--jobs=}'[specify number of parallel jobs]:jobs [# of CPUs]'
)
features=(
'(--all-features)--features=[specify features to activate]:feature'
'(--features)--all-features[activate all available features]'
"--no-default-features[don't build the default features]"
)
msgfmt='--message-format=[specify error format]:error format [human]:(human json short)'
triple='--target=[specify target triple]:target triple'
target='--target-dir=[specify directory for all generated artifacts]:directory:_directories'
manifest='--manifest-path=[specify path to manifest]:path:_directories'
registry='--registry=[specify registry to use]:registry'
case $state in
args)
curcontext="${curcontext%:*}-${words[1]}:"
case ${words[1]} in
bench)
_arguments -s -A "^--" $common $parallel $features $msgfmt $triple $target $manifest \
"${command_scope_spec[@]}" \
'--all-targets[benchmark all targets]' \
"--no-run[compile but don't run]" \
'(-p --package)'{-p+,--package=}'[specify package to run benchmarks for]:package:_cargo_package_names' \
'--exclude=[exclude packages from the benchmark]:spec' \
'--no-fail-fast[run all benchmarks regardless of failure]' \
'1: :_guard "^-*" "bench name"' \
'*:args:_default'
;;
build|b)
_arguments -s -S $common $parallel $features $msgfmt $triple $target $manifest \
'--all-targets[equivalent to specifying --lib --bins --tests --benches --examples]' \
"${command_scope_spec[@]}" \
'(-p --package)'{-p+,--package=}'[specify package to build]:package:_cargo_package_names' \
'--release[build in release mode]' \
'--build-plan[output the build plan in JSON]' \
;;
check|c)
_arguments -s -S $common $parallel $features $msgfmt $triple $target $manifest \
'--all-targets[equivalent to specifying --lib --bins --tests --benches --examples]' \
"${command_scope_spec[@]}" \
'(-p --package)'{-p+,--package=}'[specify package to check]:package:_cargo_package_names' \
'--release[check in release mode]' \
;;
clean)
_arguments -s -S $common $triple $target $manifest \
'(-p --package)'{-p+,--package=}'[specify package to clean]:package:_cargo_package_names' \
'--release[clean release artifacts]' \
'--doc[clean just the documentation directory]'
;;
doc)
_arguments -s -S $common $parallel $features $msgfmt $triple $target $manifest \
'--no-deps[do not build docs for dependencies]' \
'--document-private-items[include non-public items in the documentation]' \
'--open[open docs in browser after the build]' \
'(-p --package)'{-p+,--package=}'[specify package to document]:package:_cargo_package_names' \
'--release[build artifacts in release mode, with optimizations]' \
;;
fetch)
_arguments -s -S $common $triple $manifest
;;
fix)
_arguments -s -S $common $parallel $features $msgfmt $triple $target $manifest \
"${command_scope_spec[@]}" \
'--broken-code[fix code even if it already has compiler errors]' \
'--edition[fix in preparation for the next edition]' \
'--edition-idioms[fix warnings to migrate to the idioms of an edition]' \
'--allow-no-vcs[fix code even if a VCS was not detected]' \
'--allow-dirty[fix code even if the working directory is dirty]' \
'--allow-staged[fix code even if the working directory has staged changes]'
;;
generate-lockfile)
_arguments -s -S $common $manifest
;;
git-checkout)
_arguments -s -S $common \
'--reference=:reference' \
'--url=:url:_urls'
;;
help)
_cargo_cmds
;;
init)
_arguments -s -S $common $registry \
'--lib[use library template]' \
'--edition=[specify edition to set for the crate generated]:edition:(2015 2018)' \
'--vcs=[initialize a new repo with a given VCS]:vcs:(git hg pijul fossil none)' \
'--name=[set the resulting package name]:name' \
'1:path:_directories'
;;
install)
_arguments -s -S $common $parallel $features $triple $registry \
'(-f --force)'{-f,--force}'[force overwriting of existing crates or binaries]' \
'--bin=[only install the specified binary]:binary' \
'--branch=[branch to use when installing from git]:branch' \
'--debug[build in debug mode instead of release mode]' \
'--example=[install the specified example instead of binaries]:example' \
'--git=[specify URL from which to install the crate]:url:_urls' \
'--path=[local filesystem path to crate to install]: :_directories' \
'--rev=[specific commit to use when installing from git]:commit' \
'--root=[directory to install packages into]: :_directories' \
'--tag=[tag to use when installing from git]:tag' \
'--vers=[version to install from crates.io]:version' \
'--list[list all installed packages and their versions]' \
'*: :_guard "^-*" "crate"'
;;
locate-project)
_arguments -s -S $common $manifest
;;
login)
_arguments -s -S $common $registry \
'*: :_guard "^-*" "token"'
;;
metadata)
_arguments -s -S $common $features $manifest \
"--no-deps[output information only about the root package and don't fetch dependencies]" \
'--format-version=[specify format version]:version [1]:(1)'
;;
new)
_arguments -s -S $common $registry \
'--lib[use library template]' \
'--vcs:initialize a new repo with a given VCS:(git hg none)' \
'--name=[set the resulting package name]'
;;
owner)
_arguments -s -S $common $registry \
'(-a --add)'{-a,--add}'[specify name of a user or team to invite as an owner]:name' \
'--index=[specify registry index]:index' \
'(-l --list)'{-l,--list}'[list owners of a crate]' \
'(-r --remove)'{-r,--remove}'[specify name of a user or team to remove as an owner]:name' \
'--token=[specify API token to use when authenticating]:token' \
'*: :_guard "^-*" "crate"'
;;
package)
_arguments -s -S $common $parallel $features $triple $target $manifest \
'(-l --list)'{-l,--list}'[print files included in a package without making one]' \
'--no-metadata[ignore warnings about a lack of human-usable metadata]' \
'--allow-dirty[allow dirty working directories to be packaged]' \
"--no-verify[don't build to verify contents]"
;;
pkgid)
_arguments -s -S $common $manifest \
'(-p --package)'{-p+,--package=}'[specify package to get ID specifier for]:package:_cargo_package_names' \
'*: :_guard "^-*" "spec"'
;;
publish)
_arguments -s -S $common $parallel $features $triple $target $manifest $registry \
'--index=[specify registry index]:index' \
'--allow-dirty[allow dirty working directories to be packaged]' \
"--no-verify[don't verify the contents by building them]" \
'--token=[specify token to use when uploading]:token' \
'--dry-run[perform all checks without uploading]'
;;
read-manifest)
_arguments -s -S $common $manifest
;;
run|r)
_arguments -s -S $common $parallel $features $msgfmt $triple $target $manifest \
'--example=[name of the bin target]:name' \
'--bin=[name of the bin target]:name' \
'(-p --package)'{-p+,--package=}'[specify package with the target to run]:package:_cargo_package_names' \
'--release[build in release mode]' \
'*: :_default'
;;
rustc)
_arguments -s -S $common $parallel $features $msgfmt $triple $target $manifest \
'(-p --package)'{-p+,--package=}'[specify package to build]:package:_cargo_package_names' \
'--profile=[specify profile to build the selected target for]:profile' \
'--release[build artifacts in release mode, with optimizations]' \
"${command_scope_spec[@]}" \
'*: : _dispatch rustc rustc -default-'
;;
rustdoc)
_arguments -s -S $common $parallel $features $msgfmt $triple $target $manifest \
'--document-private-items[include non-public items in the documentation]' \
'--open[open the docs in a browser after the operation]' \
'(-p --package)'{-p+,--package=}'[specify package to document]:package:_cargo_package_names' \
'--release[build artifacts in release mode, with optimizations]' \
"${command_scope_spec[@]}" \
'*: : _dispatch rustdoc rustdoc -default-'
;;
search)
_arguments -s -S $common $registry \
'--index=[specify registry index]:index' \
'--limit=[limit the number of results]:results [10]' \
'*: :_guard "^-*" "query"'
;;
test|t)
_arguments -s -S $common $parallel $features $msgfmt $triple $target $manifest \
'--test=[test name]: :_cargo_test_names' \
'--no-fail-fast[run all tests regardless of failure]' \
'--no-run[compile but do not run]' \
'(-p --package)'{-p+,--package=}'[package to run tests for]:package:_cargo_package_names' \
'--all[test all packages in the workspace]' \
'--release[build artifacts in release mode, with optimizations]' \
'1: :_cargo_test_names' \
'(--doc --bin --example --test --bench)--lib[only test library]' \
'(--lib --bin --example --test --bench)--doc[only test documentation]' \
'(--lib --doc --example --test --bench)--bin=[binary name]' \
'(--lib --doc --bin --test --bench)--example=[example name]' \
'(--lib --doc --bin --example --bench)--test=[test name]' \
'(--lib --doc --bin --example --test)--bench=[benchmark name]' \
'*: :_default'
;;
uninstall)
_arguments -s -S $common \
'(-p --package)'{-p+,--package=}'[specify package to uninstall]:package:_cargo_package_names' \
'--bin=[only uninstall the specified binary]:name' \
'--root=[directory to uninstall packages from]: :_files -/' \
'*:crate:_cargo_installed_crates -F line'
;;
update)
_arguments -s -S $common $manifest \
'--aggressive=[force dependency update]' \
"--dry-run[don't actually write the lockfile]" \
'(-p --package)'{-p+,--package=}'[specify package to update]:package:_cargo_package_names' \
'--precise=[update single dependency to precise release]:release'
;;
verify-project)
_arguments -s -S $common $manifest
;;
version)
_arguments -s -S $common
;;
yank)
_arguments -s -S $common $registry \
'--vers=[specify yank version]:version' \
'--undo[undo a yank, putting a version back into the index]' \
'--index=[specify registry index to yank from]:registry index' \
'--token=[specify API token to use when authenticating]:token' \
'*: :_guard "^-*" "crate"'
;;
*)
# allow plugins to define their own functions
if ! _call_function ret _cargo-${words[1]}; then
# fallback on default completion for unknown commands
_default && ret=0
fi
(( ! ret ))
;;
esac
;;
esac
}
_cargo_unstable_flags() {
local flags
flags=( help ${${${(M)${(f)"$(_call_program flags cargo -Z help)"}:#*--*}/ #-- #/:}##*-Z } )
_describe -t flags 'unstable flag' flags
}
_cargo_installed_crates() {
local expl
_description crates expl 'crate'
compadd "$@" "$expl[@]" - ${${${(f)"$(cargo install --list)"}:# *}%% *}
}
_cargo_cmds() {
local -a commands
# This uses Parameter Expansion Flags, which are a built-in Zsh feature.
# See more: http://zsh.sourceforge.net/Doc/Release/Expansion.html#Parameter-Expansion-Flags
# and http://zsh.sourceforge.net/Doc/Release/Expansion.html#Parameter-Expansion
#
# # How this work?
#
# First it splits the result of `cargo --list` at newline, then it removes the first line.
# Then it removes indentation (4 whitespaces) before each items. (Note the x## pattern [1]).
# Then it replaces those spaces between item and description with a `:`
#
# [1]: https://github.com/zsh-users/zsh-completions/blob/master/zsh-completions-howto.org#patterns
commands=( ${${${(M)"${(f)$(_call_program commands cargo --list)}":# *}/ ##/}/ ##/:} )
_describe -t commands 'command' commands
}
#FIXME: Disabled until fixed
#gets package names from the manifest file
_cargo_package_names() {
_message -e packages package
}
# Extracts the values of "name" from the array given in $1 and shows them as
# command line options for completion
_cargo_names_from_array() {
# strip json from the path
local manifest=${${${"$(cargo locate-project)"}%\"\}}##*\"}
if [[ -z $manifest ]]; then
return 0
fi
local last_line
local -a names;
local in_block=false
local block_name=$1
names=()
while read -r line; do
if [[ $last_line == "[[$block_name]]" ]]; then
in_block=true
else
if [[ $last_line =~ '\s*\[\[.*' ]]; then
in_block=false
fi
fi
if [[ $in_block == true ]]; then
if [[ $line =~ '\s*name\s*=' ]]; then
regexp-replace line '^\s*name\s*=\s*|"' ''
names+=( "$line" )
fi
fi
last_line=$line
done < "$manifest"
_describe "$block_name" names
}
#Gets the test names from the manifest file
_cargo_test_names() {
_cargo_names_from_array "test"
}
#Gets the bench names from the manifest file
_cargo_benchmark_names() {
_cargo_names_from_array "bench"
}
_cargo

View file

@ -2,7 +2,7 @@
# catimg script by Eduardo San Martin Morote aka Posva #
# https://posva.net #
# #
# Ouput the content of an image to the stdout using the 256 colors of the #
# Output the content of an image to the stdout using the 256 colors of the #
# terminal. #
# GitHub: https://github.com/posva/catimg #
################################################################################

View file

@ -2,7 +2,7 @@
# catimg script by Eduardo San Martin Morote aka Posva #
# https://posva.net #
# #
# Ouput the content of an image to the stdout using the 256 colors of the #
# Output the content of an image to the stdout using the 256 colors of the #
# terminal. #
# GitHub: https://github.com/posva/catimg #
################################################################################

9
plugins/charm/README.md Normal file
View file

@ -0,0 +1,9 @@
# Charm plugin
This plugin adds completion for the [charm](https://github.com/charmbracelet/charm) CLI.
To use it, add `charm` to the plugins array in your zshrc file:
```zsh
plugins=(... charm)
```

View file

@ -0,0 +1,14 @@
# Autocompletion for the Charm CLI (charm).
if (( ! $+commands[charm] )); then
return
fi
# If the completion file doesn't exist yet, we need to autoload it and
# bind it to `charm`. Otherwise, compinit will have already done that.
if [[ ! -f "$ZSH_CACHE_DIR/completions/_charm" ]]; then
typeset -g -A _comps
autoload -Uz _charm
_comps[charm]=_charm
fi
charm completion zsh >| "$ZSH_CACHE_DIR/completions/_charm" &|
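The same lazy completion-caching pattern can be reused for any CLI that can emit its own zsh completion script. A hypothetical sketch for a tool named `mytool` (the tool name and its `completion zsh` subcommand are assumptions, not something this plugin provides):

```zsh
if (( $+commands[mytool] )); then
  # Bind a placeholder completion function for this session if the cache is missing
  if [[ ! -f "$ZSH_CACHE_DIR/completions/_mytool" ]]; then
    typeset -g -A _comps
    autoload -Uz _mytool
    _comps[mytool]=_mytool
  fi
  # Regenerate the cached completion file in the background
  mytool completion zsh >| "$ZSH_CACHE_DIR/completions/_mytool" &|
fi
```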

View file

@ -5,6 +5,7 @@ current Ruby version, and completion and a prompt function to display the Ruby v
Supports brew and manual installation of chruby.
To use it, add `chruby` to the plugins array in your zshrc file:
```zsh
plugins=(... chruby)
```
@ -14,7 +15,7 @@ plugins=(... chruby)
If you'd prefer to specify an explicit path to load chruby from
you can set variables like so:
```
```zsh
zstyle :omz:plugins:chruby path /local/path/to/chruby.sh
zstyle :omz:plugins:chruby auto /local/path/to/auto.sh
```

View file

@ -1,121 +1,94 @@
#
# INSTRUCTIONS
#
# With either a manual or brew installed chruby things should just work.
#
# If you'd prefer to specify an explicit path to load chruby from
# you can set variables like so:
#
# zstyle :omz:plugins:chruby path /local/path/to/chruby.sh
# zstyle :omz:plugins:chruby auto /local/path/to/auto.sh
#
# TODO
# - autodetermine correct source path on non OS X systems
# - completion if ruby-install exists
## load chruby from different locations
_source-from-omz-settings() {
local _chruby_path _chruby_auto
zstyle -s :omz:plugins:chruby path _chruby_path || return 1
zstyle -s :omz:plugins:chruby auto _chruby_auto || return 1
if [[ -r ${_chruby_path} ]]; then
source ${_chruby_path}
fi
if [[ -r ${_chruby_auto} ]]; then
source ${_chruby_auto}
fi
}
_source-from-homebrew() {
(( $+commands[brew] )) || return 1
local _brew_prefix
# check default brew prefix
if [[ -h /usr/local/opt/chruby ]];then
_brew_prefix="/usr/local/opt/chruby"
else
# ok , it is not default prefix
# this call to brew is expensive ( about 400 ms ), so at least let's make it only once
_brew_prefix=$(brew --prefix chruby)
fi
[[ -r "$_brew_prefix" ]] || return 1
source $_brew_prefix/share/chruby/chruby.sh
source $_brew_prefix/share/chruby/auto.sh
}
_load-chruby-dirs() {
local dir
for dir in "$HOME/.rubies" "$PREFIX/opt/rubies"; do
if [[ -d "$dir" ]]; then
RUBIES+=("$dir")
fi
done
}
# Load chruby
if _source-from-omz-settings; then
_load-chruby-dirs
elif [[ -r "/usr/local/share/chruby/chruby.sh" ]] ; then
source /usr/local/share/chruby/chruby.sh
source /usr/local/share/chruby/auto.sh
_load-chruby-dirs
elif _source-from-homebrew; then
_load-chruby-dirs
fi
unfunction _source-from-homebrew _source-from-omz-settings _load-chruby-dirs
## chruby utility functions and aliases
# rvm and rbenv plugins also provide this alias
alias rubies='chruby'
_homebrew-installed() {
whence brew &> /dev/null
_xit=$?
if [ $_xit -eq 0 ];then
# ok , we have brew installed
# speculatively we check default brew prefix
if [ -h /usr/local/opt/chruby ];then
_brew_prefix="/usr/local/opt/chruby"
else
# ok , it is not default prefix
# this call to brew is expensive ( about 400 ms ), so at least let's make it only once
_brew_prefix=$(brew --prefix chruby)
fi
return 0
else
return $_xit
fi
}
_chruby-from-homebrew-installed() {
[ -r _brew_prefix ] &> /dev/null
}
_ruby-build_installed() {
whence ruby-build &> /dev/null
}
_ruby-install-installed() {
whence ruby-install &> /dev/null
}
# Simple definition completer for ruby-build
if _ruby-build_installed; then
_ruby-build() { compadd $(ruby-build --definitions) }
compdef _ruby-build ruby-build
fi
_source_from_omz_settings() {
local _chruby_path
local _chruby_auto
zstyle -s :omz:plugins:chruby path _chruby_path
zstyle -s :omz:plugins:chruby auto _chruby_auto
if [[ -r ${_chruby_path} ]]; then
source ${_chruby_path}
fi
if [[ -r ${_chruby_auto} ]]; then
source ${_chruby_auto}
fi
}
_chruby_dirs() {
chrubydirs=($HOME/.rubies/ $PREFIX/opt/rubies)
for dir in chrubydirs; do
if [[ -d $dir ]]; then
RUBIES+=$dir
fi
done
}
if _homebrew-installed && _chruby-from-homebrew-installed ; then
source $_brew_prefix/share/chruby/chruby.sh
source $_brew_prefix/share/chruby/auto.sh
_chruby_dirs
elif [[ -r "/usr/local/share/chruby/chruby.sh" ]] ; then
source /usr/local/share/chruby/chruby.sh
source /usr/local/share/chruby/auto.sh
_chruby_dirs
else
_source_from_omz_settings
_chruby_dirs
fi
function ensure_chruby() {
$(whence chruby)
}
function current_ruby() {
local _ruby
_ruby="$(chruby |grep \* |tr -d '* ')"
if [[ $(chruby |grep -c \*) -eq 1 ]]; then
echo ${_ruby}
else
echo "system"
fi
local ruby
ruby="$(chruby | grep \* | tr -d '* ')"
if [[ $(chruby | grep -c \*) -eq 1 ]]; then
echo ${ruby}
else
echo "system"
fi
}
function chruby_prompt_info() {
echo "$(current_ruby)"
echo "${$(current_ruby):gs/%/%%}"
}
# complete on installed rubies
# Complete chruby command with installed rubies
_chruby() {
compadd $(chruby | tr -d '* ')
local default_path='/usr/local/bin:/usr/bin:/bin:/usr/sbin:/sbin'
if PATH=${default_path} type ruby &> /dev/null; then
compadd system
fi
compadd $(chruby | tr -d '* ')
if PATH="/usr/local/bin:/usr/bin:/bin:/usr/sbin:/sbin" command ruby &>/dev/null; then
compadd system
fi
}
compdef _chruby chruby
# Simple definition completer for ruby-build
if command ruby-build &> /dev/null; then
_ruby-build() { compadd $(ruby-build --definitions) }
compdef _ruby-build ruby-build
fi

View file

@ -1,8 +1,6 @@
# chucknorris
Chuck Norris fortunes plugin for oh-my-zsh. Perfectly suitable as MOTD.
**Maintainers**: [apjanke](https://github.com/apjanke) [maff](https://github.com/maff)
Chuck Norris fortunes plugin for Oh My Zsh. Perfectly suitable as MOTD.
To use it, add `chucknorris` to the plugins array in your zshrc file.
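For example, following the same pattern as the other plugin readmes:

```zsh
plugins=(... chucknorris)
```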

View file

@ -1,28 +1,24 @@
# chucknorris: Chuck Norris fortunes
# Automatically generate or update Chuck's compiled fortune data file
# $0 must be used outside a local function. This variable name is unlikly to collide.
CHUCKNORRIS_PLUGIN_DIR=${0:h}
() {
local DIR=$CHUCKNORRIS_PLUGIN_DIR/fortunes
if [[ ! -f $DIR/chucknorris.dat ]] || [[ $DIR/chucknorris.dat -ot $DIR/chucknorris ]]; then
# For some reason, Cygwin puts strfile in /usr/sbin, which is not on the path by default
local strfile=strfile
if ! which strfile &>/dev/null && [[ -f /usr/sbin/strfile ]]; then
strfile=/usr/sbin/strfile
# %x: name of file containing code being executed
local fortunes_dir="${${(%):-%x}:h}/fortunes"
# Aliases
alias chuck="fortune -a $fortunes_dir"
alias chuck_cow="chuck | cowthink"
# Automatically generate or update Chuck's compiled fortune data file
if [[ "$fortunes_dir/chucknorris" -ot "$fortunes_dir/chucknorris.dat" ]]; then
return
fi
if which $strfile &> /dev/null; then
$strfile $DIR/chucknorris $DIR/chucknorris.dat >/dev/null
else
# For some reason, Cygwin puts strfile in /usr/sbin, which is not on the path by default
local strfile="${commands[strfile]:-/usr/sbin/strfile}"
if [[ ! -x "$strfile" ]]; then
echo "[oh-my-zsh] chucknorris depends on strfile, which is not installed" >&2
echo "[oh-my-zsh] strfile is often provided as part of the 'fortune' package" >&2
return
fi
fi
# Aliases
alias chuck="fortune -a $DIR"
alias chuck_cow="chuck | cowthink"
# Generate the compiled fortune data file
$strfile "$fortunes_dir/chucknorris" "$fortunes_dir/chucknorris.dat" >/dev/null
}
unset CHUCKNORRIS_PLUGIN_DIR

View file

@ -228,7 +228,7 @@ Chuck Norris once punched the ground to stop an earthquake. The resulting afters
%
Chuck Norris once round-house kicked a salesman. Over the phone.
%
Chuck Norris once rounhouse kicked a football. The astronomical society now considers it a planet.
Chuck Norris once roundhouse kicked a football. The astronomical society now considers it a planet.
%
Chuck Norris once thought he was wrong. He was, however, mistaken.
%
@ -342,7 +342,7 @@ Every time there's an earthquake, you know Chuck Norris is hungry. The earthquak
%
Evolution's driving mechanism is nature's desperate attempt to escape Chuck Norris.
%
Fear of spiders is arachnaphobia. Fear of tight spaces is claustrophobia. Fear of Chuck Norris is called Logic.
Fear of spiders is arachnophobia. Fear of tight spaces is claustrophobia. Fear of Chuck Norris is called Logic.
%
Fool me once, shame on you. Fool Chuck Norris once and he will roundhouse you in the face.
%
@ -426,7 +426,7 @@ Some people ask for a Kleenex when they sneeze, Chuck Norris asks for a body bag
%
Someone once videotaped Chuck Norris getting pissed off. It was called Walker: Texas Chain Saw Massacre.
%
Staring at Chuck Norris for extended periods of time without proper eye protection will cause blindess, and possibly foot sized brusies on the face.
Staring at Chuck Norris for extended periods of time without proper eye protection will cause blindness, and possibly foot sized bruises on the face.
%
Taking Karate Lessons = $100, Buying MMA DVD's = $150, Subscribing to a UFC event = $50, Getting a Roundhouse Kick from Chuck Norris = PRICELESS.
%
@ -452,7 +452,7 @@ The best part of waking up is not Folgers in your cup. it's knowing that Chuck N
%
The chief export of Chuck Norris is pain.
%
The dictionary references Chuck Norris several times, he is metioned under Fear, Law, Order and Chucktatorship.
The dictionary references Chuck Norris several times, he is mentioned under Fear, Law, Order and Chucktatorship.
%
The leading causes of death in the United States are: 1. Heart Disease 2. Chuck Norris 3. Cancer.
%
@ -468,7 +468,7 @@ The only way sharks will come near CN underwater is when CN is inside of a cage.
%
The only word that rhymes with orange is Chuck Norris.
%
The producers of the movie "The Last Airbender" are now in talks with Chuck Norris in Order to star him in their next sequal "The Last Skull Bender".
The producers of the movie "The Last Airbender" are now in talks with Chuck Norris in Order to star him in their next sequel "The Last Skull Bender".
%
The quickest way to a man's heart is with Chuck Norris' fist.
%
@ -558,3 +558,11 @@ You know Chuck Norris' pet lizard, right? Last I heard, he was in the movie "God
%
http://chucknorrisfacts.com/ is built in Drupal because Chuck Norris knows a good CMS when he sees one.
%
Chuck Norris made the first Giraffe by uppercutting a horse.
%
Chuck Norris can hear sign language.
%
Chuck Norris makes onions cry.
%
Chuck Norris doesn't shake hands, he makes them tremble.
%

View file

@ -1,26 +0,0 @@
# CloudApp plugin
## The CloudApp API is deprecated, so the plugin will be removed shortly
[CloudApp](https://www.getcloudapp.com) brings screen recording, screenshots, and GIF creation to the cloud, in an easy-to-use enterprise-level app. The CloudApp plugin allows you to upload a file to your CloadApp account from the command line.
To use it, add `cloudapp` to the plugins array of your `~/.zshrc` file:
```zsh
plugins=(... cloudapp)
```
## Requirements
1. [Aaron Russell's `cloudapp_api` gem](https://github.com/aaronrussell/cloudapp_api#installation)
2. That you set your CloudApp credentials in `~/.cloudapp` as a simple text file like below:
```
email
password
```
## Usage
- `cloudapp <filename>`: uploads `<filename>` to your CloudApp account, and if you're using
macOS, copies the URL to your clipboard.

View file

@ -1,4 +0,0 @@
print -Pn "%F{yellow}"
print "[oh-my-zsh] The CloudApp API no longer works, so the cloudapp plugin will"
print "[oh-my-zsh] be removed shortly. Please remove it from your plugins list."
print -Pn "%f"

View file

@ -1,6 +1,6 @@
#compdef coffee
# ------------------------------------------------------------------------------
# Copyright (c) 2011 Github zsh-users - https://github.com/zsh-users
# Copyright (c) 2011 GitHub zsh-users - https://github.com/zsh-users
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
@ -39,14 +39,14 @@
#
# ------------------------------------------------------------------------------
local curcontext="$curcontext" state line ret=1 version opts first second third
local curcontext="$curcontext" state line ret=1 version
local -a opts
typeset -A opt_args
version=(${(f)"$(_call_program version $words[1] --version)"})
version=(${(f)"$(_call_program version $words[1] --version)"}) || return ret
version=${${(z)${version[1]}}[3]}
first=$(echo $version|cut -d '.' -f 1)
second=$(echo $version|cut -d '.' -f 2)
third=$(echo $version|cut -d '.' -f 3)
if (( $first < 2 )) && (( $second < 7 )) && (( $third < 3 ));then
autoload -Uz is-at-least
if ! is-at-least 1.6.3 "$version"; then
opts+=('(-l --lint)'{-l,--lint}'[pipe the compiled JavaScript through JavaScript Lint]'
'(-r --require)'{-r,--require}'[require a library before executing your script]:library')
fi

1
plugins/colemak/.gitignore vendored Normal file
View file

@ -0,0 +1 @@
.less

View file

@ -19,4 +19,20 @@ bindkey -a 'N' vi-join
bindkey -a 'j' vi-forward-word-end
bindkey -a 'J' vi-forward-blank-word-end
lesskey $ZSH/plugins/colemak/colemak-less
# Handle $0 according to the standard:
# https://zdharma-continuum.github.io/Zsh-100-Commits-Club/Zsh-Plugin-Standard.html
0="${${ZERO:-${0:#$ZSH_ARGZERO}}:-${(%):-%N}}"
0="${${(M)0:#/*}:-$PWD/$0}"
# New less versions will read this file directly
export LESSKEYIN="${0:h:A}/colemak-less"
# Only run lesskey if less version is older than v582
less_ver=$(less --version | awk '{print $2;exit}')
autoload -Uz is-at-least
if ! is-at-least 582 $less_ver; then
# Old less versions will read this transformed file
export LESSKEY="${0:h:A}/.less"
lesskey -o "$LESSKEY" "$LESSKEYIN" 2>/dev/null
fi
unset less_ver

View file

@ -16,8 +16,13 @@ less_termcap[se]="${reset_color}"
less_termcap[us]="${fg_bold[green]}"
less_termcap[ue]="${reset_color}"
# Handle $0 according to the standard:
# https://zdharma-continuum.github.io/Zsh-100-Commits-Club/Zsh-Plugin-Standard.html
0="${${ZERO:-${0:#$ZSH_ARGZERO}}:-${(%):-%N}}"
0="${${(M)0:#/*}:-$PWD/$0}"
# Absolute path to this file's directory.
typeset __colored_man_pages_dir="${0:A:h}"
typeset -g __colored_man_pages_dir="${0:A:h}"
function colored() {
local -a environment

View file

@ -23,7 +23,7 @@ colorize_check_requirements() {
if [[ ${available_tools[(Ie)$ZSH_COLORIZE_TOOL]} -eq 0 ]]; then
echo "ZSH_COLORIZE_TOOL '$ZSH_COLORIZE_TOOL' not recognized. Available options are 'pygmentize' and 'chroma'." >&2
return 1
elif (( $+commands["$ZSH_COLORIZE_TOOL"] )); then
elif ! (( $+commands[$ZSH_COLORIZE_TOOL] )); then
echo "Package '$ZSH_COLORIZE_TOOL' is not installed!" >&2
return 1
fi
@ -82,7 +82,7 @@ colorize_less() {
# This variable tells less to pipe every file through the specified command
# (see the man page of less INPUT PREPROCESSOR).
# 'zsh -ic "colorize_cat %s 2> /dev/null"' would not work for huge files like
# the ~/.zsh_history. For such files the tty of the preprocessor will be supended.
# the ~/.zsh_history. For such files the tty of the preprocessor will be suspended.
# Therefore we must source this file to make colorize_cat available in the
# preprocessor without the interactive mode.
# `2>/dev/null` will suppress the error for large files 'broken pipe' of the python

View file

@ -29,5 +29,6 @@ It works out of the box with the command-not-found packages for:
- [Fedora](https://fedoraproject.org/wiki/Features/PackageKitCommandNotFound)
- [NixOS](https://github.com/NixOS/nixpkgs/tree/master/nixos/modules/programs/command-not-found)
- [Termux](https://github.com/termux/command-not-found)
- [SUSE](https://www.unix.com/man-page/suse/1/command-not-found/)
You can add support for other platforms by submitting a Pull Request.

View file

@ -50,13 +50,20 @@ fi
# NixOS: https://github.com/NixOS/nixpkgs/tree/master/nixos/modules/programs/command-not-found
if [[ -x /run/current-system/sw/bin/command-not-found ]]; then
command_not_found_handler() {
/run/current-system/sw/bin/command-not-found -- "$@"
/run/current-system/sw/bin/command-not-found "$@"
}
fi
# Termux: https://github.com/termux/command-not-found
if [[ -x /data/data/com.termux/files/usr/libexec/termux/command-not-found ]]; then
command_not_found_handler() {
/data/data/com.termux/files/usr/libexec/termux/command-not-found -- "$1"
/data/data/com.termux/files/usr/libexec/termux/command-not-found "$1"
}
fi
# SUSE and derivates: https://www.unix.com/man-page/suse/1/command-not-found/
if [[ -x /usr/bin/command-not-found ]]; then
command_not_found_handler() {
/usr/bin/command-not-found "$1"
}
fi

View file

@ -12,51 +12,55 @@ plugins=(... common-aliases)
### ls command
| Alias | Command | Description |
|-------|---------------|--------------------------------------------------------------------------------|
| l | `ls -lFh` | List files as a long list, show size, type, human-readable |
| la | `ls -lAFh` | List almost all files as a long list show size, type, human-readable |
| lr | `ls -tRFh` | List files recursively sorted by date, show type, human-readable |
| lt | `ls -ltFh` | List files as a long list sorted by date, show type, human-readable |
| ll | `ls -l` | List files as a long list |
| ldot | `ls -ld .*` | List dot files as a long list |
| lS | `ls -1FSsh` | List files showing only size and name sorted by size |
| lart | `ls -1Fcart` | List all files sorted in reverse of create/modification time (oldest first) |
| lrt | `ls -1Fcrt` | List files sorted in reverse of create/modification time(oldest first) |
| Alias | Command | Description |
| ----- | ------------ | --------------------------------------------------------------------------- |
| l | `ls -lFh` | List files as a long list, show size, type, human-readable |
| la | `ls -lAFh` | List almost all files as a long list show size, type, human-readable |
| lr | `ls -tRFh` | List files recursively sorted by date, show type, human-readable |
| lt | `ls -ltFh` | List files as a long list sorted by date, show type, human-readable |
| ll | `ls -l` | List files as a long list |
| ldot | `ls -ld .*` | List dot files as a long list |
| lS | `ls -1FSsh` | List files showing only size and name sorted by size |
| lart | `ls -1Fcart` | List all files sorted in reverse of create/modification time (oldest first) |
| lrt   | `ls -1Fcrt`  | List files sorted in reverse of create/modification time (oldest first)     |
| lsr | `ls -lARFh` | List all files and directories recursively |
| lsn | `ls -1` | List files and directories in a single column |
### File handling
| Alias | Command | Description |
|-------|-----------------------|------------------------------------------------------------------------------------|
| rm | `rm -i` | Remove a file |
| cp | `cp -i` | Copy a file |
| mv | `mv -i` | Move a file |
| zshrc | `${=EDITOR} ~/.zshrc` | Quickly access the ~/.zshrc file |
| dud | `du -d 1 -h` | Display the size of files at depth 1 in current location in human-readable form |
| duf | `du -sh` | Display the size of files in current location in human-readable form |
| t | `tail -f` | Shorthand for tail which outputs the last part of a file |
| Alias | Command | Description |
| ----- | --------------------- | ------------------------------------------------------------------------------- |
| rm | `rm -i` | Remove a file |
| cp | `cp -i` | Copy a file |
| mv | `mv -i` | Move a file |
| zshrc | `${=EDITOR} ~/.zshrc` | Quickly access the ~/.zshrc file |
| dud | `du -d 1 -h` | Display the size of files at depth 1 in current location in human-readable form |
| duf\* | `du -sh` | Display the size of files in current location in human-readable form |
| t | `tail -f` | Shorthand for tail which outputs the last part of a file |
\* Only if the [`duf`](https://github.com/muesli/duf) command isn't installed.
### find and grep
| Alias | Command | Description |
|-------|-----------------------------------------------------|-----------------------------------------|
| fd\* | `find . -type d -name` | Find a directory with the given name |
| ff | `find . -type f -name` | Find a file with the given name |
| grep | `grep --color` | Searches for a query string |
| sgrep | `grep -R -n -H -C 5 --exclude-dir={.git,.svn,CVS}` | Useful for searching within files |
| Alias | Command | Description |
| ----- | -------------------------------------------------- | ------------------------------------ |
| fd\* | `find . -type d -name` | Find a directory with the given name |
| ff | `find . -type f -name` | Find a file with the given name |
| grep | `grep --color` | Searches for a query string |
| sgrep | `grep -R -n -H -C 5 --exclude-dir={.git,.svn,CVS}` | Useful for searching within files |
\* Only if the [`fd`](https://github.com/sharkdp/fd) command isn't installed.
### Other Aliases
| Alias | Command | Description |
|-----------|---------------------|-------------------------------------------------------------|
| h | `history` | Lists all recently used commands |
| hgrep | `fc -El 0 \| grep` | Searches for a word in the list of previously used commands |
| help | `man` | Opens up the man page for a command |
| p | `ps -f` | Displays currently executing processes |
| sortnr | `sort -n -r` | Used to sort the lines of a text file |
| unexport | `unset` | Used to unset an environment variable |
| Alias | Command | Description |
| -------- | ------------------ | ----------------------------------------------------------- |
| h | `history` | Lists all recently used commands |
| hgrep | `fc -El 0 \| grep` | Searches for a word in the list of previously used commands |
| help | `man` | Opens up the man page for a command |
| p | `ps -f` | Displays currently executing processes |
| sortnr | `sort -n -r` | Used to sort the lines of a text file |
| unexport | `unset` | Used to unset an environment variable |
## Global aliases
@ -64,12 +68,15 @@ These aliases are expanded in any position in the command line, meaning you can
end of the command you've typed. Examples:
Quickly pipe to less:
```zsh
$ ls -l /var/log L
# will run
$ ls -l /var/log | less
```
Silences stderr output:
```zsh
$ find . -type f NE
# will run
@ -77,7 +84,7 @@ $ find . -type f 2>/dev/null
```
| Alias | Command | Description |
|-------|-----------------------------|-------------------------------------------------------------|
| ----- | --------------------------- | ----------------------------------------------------------- |
| H | `\| head` | Pipes output to head which outputs the first part of a file |
| T | `\| tail` | Pipes output to tail which outputs the last part of a file |
| G | `\| grep` | Pipes output to grep to search for some word |
@ -97,23 +104,23 @@ that file will be open with `acroread`.
### Reading Docs
| Alias | Command | Description |
|-------|-------------|-------------------------------------|
| pdf | `acroread` | Opens up a document using acroread |
| ps | `gv` | Opens up a .ps file using gv |
| dvi | `xdvi` | Opens up a .dvi file using xdvi |
| chm | `xchm` | Opens up a .chm file using xchm |
| djvu | `djview` | Opens up a .djvu file using djview |
| Alias | Command | Description |
| ----- | ---------- | ---------------------------------- |
| pdf | `acroread` | Opens up a document using acroread |
| ps | `gv` | Opens up a .ps file using gv |
| dvi | `xdvi` | Opens up a .dvi file using xdvi |
| chm | `xchm` | Opens up a .chm file using xchm |
| djvu | `djview` | Opens up a .djvu file using djview |
### Listing files inside a packed file
| Alias | Command | Description |
|---------|-------------|-------------------------------------|
| zip | `unzip -l` | Lists files inside a .zip file |
| rar | `unrar l` | Lists files inside a .rar file |
| tar | `tar tf` | Lists files inside a .tar file |
| tar.gz | `echo` | Lists files inside a .tar.gz file |
| ace | `unace l` | Lists files inside a .ace file |
| Alias | Command | Description |
| ------ | ---------- | --------------------------------- |
| zip | `unzip -l` | Lists files inside a .zip file |
| rar | `unrar l` | Lists files inside a .rar file |
| tar | `tar tf` | Lists files inside a .tar file |
| tar.gz | `echo` | Lists files inside a .tar.gz file |
| ace | `unace l` | Lists files inside a .ace file |
### Some other features

View file

@ -12,6 +12,8 @@ alias ldot='ls -ld .*'
alias lS='ls -1FSsh'
alias lart='ls -1Fcart'
alias lrt='ls -1Fcrt'
alias lsr='ls -lARFh' # Recursive list of files and directories
alias lsn='ls -1' # List files and directories in a single column
alias zshrc='${=EDITOR} ${ZDOTDIR:-$HOME}/.zshrc' # Quick access to the .zshrc file
@ -33,7 +35,7 @@ alias -g NUL="> /dev/null 2>&1"
alias -g P="2>&1| pygmentize -l pytb"
alias dud='du -d 1 -h'
alias duf='du -sh *'
(( $+commands[duf] )) || alias duf='du -sh *'
(( $+commands[fd] )) || alias fd='find . -type d -name'
alias ff='find . -type f -name'

View file

@ -10,22 +10,26 @@ To use it add `composer` to the plugins array in your zshrc file.
plugins=(... composer)
```
Original author: Daniel Gomes <me@danielcsgomes.com>
## Aliases
| Alias | Command | Description |
| ------ | ------------------------------------------- | --------------------------------------------------------------------------------------- |
| `c` | `composer` | Starts composer |
| `csu` | `composer self-update` | Updates composer to the latest version |
| `cu` | `composer update` | Updates composer dependencies and `composer.lock` file |
| `cr` | `composer require` | Adds new packages to `composer.json` |
| `crm` | `composer remove` | Removes packages from `composer.json` |
| `ci` | `composer install` | Resolves and installs dependencies from `composer.json` |
| `ccp` | `composer create-project` | Create new project from an existing package |
| `cdu` | `composer dump-autoload` | Updates the autoloader |
| `cdo` | `composer dump-autoload -o` | Converts PSR-0/4 autoloading to classmap for a faster autoloader (good for production) |
| `cgu` | `composer global update` | Allows update command to run on COMPOSER_HOME directory |
| `cgr` | `composer global require` | Allows require command to run on COMPOSER_HOME directory |
| `cgrm` | `composer global remove` | Allows remove command to run on COMPOSER_HOME directory |
| `cget` | `curl -s https://getcomposer.org/installer` | Installs composer in the current directory |
| `co` | `composer outdated` | Shows a list of installed packages with available updates |
| `cod` | `composer outdated --direct` | Shows a list of installed packages with available updates which are direct dependencies |
| Alias | Command | Description |
| ------ | ---------------------------------- | --------------------------------------------------------------------------------------- |
| `c` | `composer` | Starts composer |
| `ccp` | `composer create-project` | Create new project from an existing package |
| `cdo` | `composer dump-autoload -o` | Converts PSR-0/4 autoloading to classmap for a faster autoloader (good for production) |
| `cdu` | `composer dump-autoload` | Updates the autoloader |
| `cget` | `curl -s <installer> \| php` | Installs composer in the current directory |
| `cgr` | `composer global require` | Allows require command to run on COMPOSER_HOME directory |
| `cgrm` | `composer global remove` | Allows remove command to run on COMPOSER_HOME directory |
| `cgu` | `composer global update` | Allows update command to run on COMPOSER_HOME directory |
| `ci` | `composer install` | Resolves and installs dependencies from `composer.json` |
| `co` | `composer outdated` | Shows a list of installed packages with available updates |
| `cod` | `composer outdated --direct` | Shows a list of installed packages with available updates which are direct dependencies |
| `cr` | `composer require` | Adds new packages to `composer.json` |
| `crm` | `composer remove` | Removes packages from `composer.json` |
| `cs` | `composer show` | Lists available packages, with optional filtering |
| `csu` | `composer self-update` | Updates composer to the latest version |
| `cu` | `composer update` | Updates composer dependencies and `composer.lock` file |
| `cuh` | `composer update -d <config-home>` | Updates globally installed packages |
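For example, a quick sketch using a few of these aliases (the package name is only an illustration):

```zsh
cgr friendsofphp/php-cs-fixer   # composer global require <package>
co                              # list installed packages with available updates
cu                              # update dependencies and composer.lock
```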

View file

@ -1,70 +1,76 @@
# ------------------------------------------------------------------------------
# FILE: composer.plugin.zsh
# DESCRIPTION: oh-my-zsh composer plugin file.
# AUTHOR: Daniel Gomes (me@danielcsgomes.com)
# VERSION: 1.0.0
# ------------------------------------------------------------------------------
# Composer basic command completion
_composer_get_command_list () {
$_comp_command1 --no-ansi 2>/dev/null | sed "1,/Available commands/d" | awk '/^[ \t]*[a-z]+/ { print $1 }'
}
_composer_get_required_list () {
$_comp_command1 show -s --no-ansi 2>/dev/null | sed '1,/requires/d' | awk 'NF > 0 && !/^requires \(dev\)/{ print $1 }'
}
_composer () {
## Basic Composer command completion
# Since Zsh 5.7, an improved composer command completion is provided
if ! is-at-least 5.7; then
_composer () {
local curcontext="$curcontext" state line
typeset -A opt_args
_arguments \
'*:: :->subcmds'
_arguments '*:: :->subcmds'
if (( CURRENT == 1 )) || ( ((CURRENT == 2)) && [ "$words[1]" = "global" ] ) ; then
compadd $(_composer_get_command_list)
if (( CURRENT == 1 )) || ( (( CURRENT == 2 )) && [[ "$words[1]" = "global" ]] ); then
# Command list
local -a subcmds
subcmds=("${(@f)"$($_comp_command1 --no-ansi 2>/dev/null | awk '
/Available commands/{ r=1 }
r == 1 && /^[ \t]*[a-z]+/{
gsub(/^[ \t]+/, "")
gsub(/ +/, ":")
print $0
}
')"}")
_describe -t commands 'composer command' subcmds
else
compadd $(_composer_get_required_list)
# Required list
compadd $($_comp_command1 show -s --no-ansi 2>/dev/null \
| sed '1,/requires/d' \
| awk 'NF > 0 && !/^requires \(dev\)/{ print $1 }')
fi
}
}
compdef _composer composer
compdef _composer composer.phar
compdef _composer composer
compdef _composer composer.phar
fi
# Aliases
## Aliases
alias c='composer'
alias csu='composer self-update'
alias cu='composer update'
alias cr='composer require'
alias crm='composer remove'
alias ci='composer install'
alias ccp='composer create-project'
alias cdu='composer dump-autoload'
alias cdo='composer dump-autoload -o'
alias cgu='composer global update'
alias cdu='composer dump-autoload'
alias cget='curl -s https://getcomposer.org/installer | php'
alias cgr='composer global require'
alias cgrm='composer global remove'
alias cgu='composer global update'
alias ci='composer install'
alias co='composer outdated'
alias cod='composer outdated --direct'
alias cr='composer require'
alias crm='composer remove'
alias cs='composer show'
alias csu='composer self-update'
alias cu='composer update'
alias cuh='composer update --working-dir=$(composer config -g home)'
# install composer in the current directory
alias cget='curl -s https://getcomposer.org/installer | php'
# Add Composer's global binaries to PATH, using Composer if available.
if (( $+commands[composer] )); then
autoload -Uz _store_cache _retrieve_cache _cache_invalid
## If Composer not found, try to add known directories to $PATH
if (( ! $+commands[composer] )); then
[[ -d "$HOME/.composer/vendor/bin" ]] && export PATH="$PATH:$HOME/.composer/vendor/bin"
[[ -d "$HOME/.config/composer/vendor/bin" ]] && export PATH="$PATH:$HOME/.config/composer/vendor/bin"
_retrieve_cache composer
if [[ -z $__composer_bin_dir ]]; then
__composer_bin_dir=$(composer global config bin-dir --absolute 2>/dev/null)
_store_cache composer __composer_bin_dir
fi
# Add Composer's global binaries to PATH
export PATH="$PATH:$__composer_bin_dir"
unset __composer_bin_dir
else
[ -d $HOME/.composer/vendor/bin ] && export PATH=$PATH:$HOME/.composer/vendor/bin
[ -d $HOME/.config/composer/vendor/bin ] && export PATH=$PATH:$HOME/.config/composer/vendor/bin
# If still not found, don't do the rest of the script
(( $+commands[composer] )) || return 0
fi
## Add Composer's global binaries to PATH
autoload -Uz _store_cache _retrieve_cache _cache_invalid
_retrieve_cache composer
if [[ -z $__composer_bin_dir ]]; then
__composer_bin_dir=$(composer global config bin-dir --absolute 2>/dev/null)
_store_cache composer __composer_bin_dir
fi
# Add Composer's global binaries to PATH
export PATH="$PATH:$__composer_bin_dir"
unset __composer_bin_dir

View file

@ -1,7 +1,7 @@
# `copybuffer` plugin
This plugin binds the ctrl-o keyboard shortcut to a command that copies the text
that is currently typed in the command line ($BUFFER) to the system clipboard.
This plugin adds the <kbd>ctrl-o</kbd> keyboard shortcut to copy the current text
in the command line to the system clipboard.
This is useful if you have typed a command and, before hitting enter to execute it,
want to copy it so you can paste it into a script, a gist, or anywhere else.

View file

@ -5,10 +5,12 @@ copybuffer () {
if which clipcopy &>/dev/null; then
printf "%s" "$BUFFER" | clipcopy
else
echo "clipcopy function not found. Please make sure you have Oh My Zsh installed correctly."
zle -M "clipcopy not found. Please make sure you have Oh My Zsh installed correctly."
fi
}
zle -N copybuffer
bindkey "^O" copybuffer
bindkey -M emacs "^O" copybuffer
bindkey -M viins "^O" copybuffer
bindkey -M vicmd "^O" copybuffer

View file

@ -1,10 +0,0 @@
# copydir plugin
Copies the path of your current folder to the system clipboard.
To use, add `copydir` to your plugins array:
```
plugins=(... copydir)
```
Then use the command `copydir` to copy the $PWD.

View file

@ -1,5 +0,0 @@
# Copies the pathname of the current directory to the system or X Windows clipboard
function copydir {
emulate -L zsh
print -n $PWD | clipcopy
}

View file

@ -0,0 +1,15 @@
# copypath plugin
Copies the path of a given directory or file to the system clipboard.
To use it, add `copypath` to the plugins array in your zshrc file:
```zsh
plugins=(... copypath)
```
## Usage
- `copypath`: copies the absolute path of the current directory.
- `copypath <file_or_directory>`: copies the absolute path of the given file or directory (see the example below).
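A short example (the path is hypothetical):

```zsh
copypath ~/projects/notes.md   # copies the absolute path of the file
copypath                       # copies the absolute path of the current directory
```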

View file

@ -0,0 +1,15 @@
# Copies the path of a given directory or file to the system or X Windows clipboard.
# Copies the current directory if no argument is given.
function copypath {
# If no argument passed, use current directory
local file="${1:-.}"
# If argument is not an absolute path, prepend $PWD
[[ $file = /* ]] || file="$PWD/$file"
# Copy the absolute path without resolving symlinks
# If clipcopy fails, exit the function with an error
print -n "${file:a}" | clipcopy || return 1
echo ${(%):-"%B${file:a}%b copied to clipboard."}
}

View file

@ -25,7 +25,7 @@ The enabled options for rsync are:
* `-hhh`: outputs numbers in human-readable format, in units of 1024 (K, M, G, T).
* `--backup-dir=/tmp/rsync`: move backup copies to "/tmp/rsync".
* `--backup-dir="/tmp/rsync-$USERNAME"`: move backup copies to "/tmp/rsync-$USERNAME".
* `-e /dev/null`: only work on local files (disable remote shells).
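For example (paths are hypothetical):

```zsh
cpv big-backup.tar.gz /mnt/storage/   # copy locally with an rsync progress bar
```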

View file

@ -1,4 +1,4 @@
cpv() {
rsync -pogbr -hhh --backup-dir=/tmp/rsync -e /dev/null --progress "$@"
rsync -pogbr -hhh --backup-dir="/tmp/rsync-${USERNAME}" -e /dev/null --progress "$@"
}
compdef _files cpv

View file

@ -36,7 +36,7 @@ arguments=(
'--reinstall[Reinstall the distribution even if you already have the latest version installed]'
'--interactive[Turn on interactive configure]'
'--scandeps[Scan the depencencies of given modules and output the tree in a text format]'
'--scandeps[Scan the dependencies of given modules and output the tree in a text format]'
'--format[Specify what format to display the scanned dependency tree]:scandeps format:(tree json yaml dists)'
'--save-dists[Specify the optional directory path to copy downloaded tarballs]'

View file

@ -1,80 +1,84 @@
# Usage: dash [keyword:]query
dash() { open dash://"$*" }
dash() { open -a Dash.app dash://"$*" }
compdef _dash dash
_dash() {
# No sense doing this for anything except the 2nd position and if we haven't
# specified which docset to query against
if [[ $CURRENT -eq 2 && ! "$words[2]" =~ ":" ]]; then
local -a _all_docsets
_all_docsets=()
# Use defaults to get the array of docsets from preferences
# Have to smash it into one big line so that each docset is an element of
# our DOCSETS array
DOCSETS=("${(@f)$(defaults read com.kapeli.dashdoc docsets | tr -d '\n' | grep -oE '\{.*?\}')}")
if [[ $CURRENT -ne 2 || "$words[2]" =~ ":" ]]; then
return
fi
# remove all newlines since defaults prints so pretty like
# Now get each docset and output each on their own line
for doc in "$DOCSETS[@]"; do
# Only output docsets that are actually enabled
if [[ "`echo $doc | grep -Eo \"isEnabled = .*?;\" | sed 's/[^01]//g'`" == "0" ]]; then
local -aU docsets
docsets=()
# Use defaults to get the array of docsets from preferences
# Have to smash it into one big line so that each docset is an element of our docsets array
# Only output docsets that are actually enabled
local -a enabled_docsets
enabled_docsets=("${(@f)$(defaults read com.kapeli.dashdoc docsets \
| tr -d '\n' | grep -oE '\{.*?\}' | grep -E 'isEnabled = 1;')}")
local docset name keyword
# Now get each docset and output each on their own line
for docset in "$enabled_docsets[@]"; do
keyword=''
# Order of preference as explained to me by @kapeli via email
for locator in keyword suggestedKeyword platform; do
# Echo the docset, try to find the appropriate keyword
# Strip doublequotes and colon from any keyword so that everything has the
# same format when output (we'll add the colon in the completion)
if [[ "$docset" =~ "$locator = ([^;]*);" ]]; then
keyword="${match[1]//[\":]}"
fi
if [[ -z "$keyword" ]]; then
continue
fi
keyword=''
# Order of preference as explained to me by @kapeli via email
KEYWORD_LOCATORS=(keyword suggestedKeyword platform)
for locator in "$KEYWORD_LOCATORS[@]"; do
# Echo the docset, try to find the appropriate keyword
# Strip doublequotes and colon from any keyword so that everything has the
# same format when output (we'll add the colon in the completion)
keyword=`echo $doc | grep -Eo "$locator = .*?;" | sed -e "s/$locator = \(.*\);/\1/" -e "s/[\":]//g"`
if [[ ! -z "$keyword" ]]; then
# if we fall back to platform, we should do some checking per @kapeli
if [[ "$locator" == "platform" ]]; then
# Since these are the only special cases right now, let's not do the
# expensive processing unless we have to
if [[ "$keyword" = (python|java|qt|cocos2d) ]]; then
docsetName=`echo $doc | grep -Eo "docsetName = .*?;" | sed -e "s/docsetName = \(.*\);/\1/" -e "s/[\":]//g"`
case "$keyword" in
python)
case "$docsetName" in
"Python 2") keyword="python2" ;;
"Python 3") keyword="python3" ;;
esac ;;
java)
case "$docsetName" in
"Java SE7") keyword="java7" ;;
"Java SE6") keyword="java6" ;;
"Java SE8") keyword="java8" ;;
esac ;;
qt)
case "$docsetName" in
"Qt 5") keyword="qt5" ;;
"Qt 4"|Qt) keyword="qt4" ;;
esac ;;
cocos2d)
case "$docsetName" in
Cocos3D) keyword="cocos3d" ;;
esac ;;
esac
fi
# if we fall back to platform, we should do some checking per @kapeli
if [[ "$locator" == "platform" ]]; then
# Since these are the only special cases right now, let's not do the
# expensive processing unless we have to
if [[ "$keyword" = (python|java|qt|cocos2d) ]]; then
if [[ "$docset" =~ "docsetName = ([^;]*);" ]]; then
name="${match[1]//[\":]}"
case "$keyword" in
python)
case "$name" in
"Python 2") keyword="python2" ;;
"Python 3") keyword="python3" ;;
esac ;;
java)
case "$name" in
"Java SE7") keyword="java7" ;;
"Java SE6") keyword="java6" ;;
"Java SE8") keyword="java8" ;;
esac ;;
qt)
case "$name" in
"Qt 5") keyword="qt5" ;;
"Qt 4"|Qt) keyword="qt4" ;;
esac ;;
cocos2d)
case "$name" in
Cocos3D) keyword="cocos3d" ;;
esac ;;
esac
fi
# Bail once we have a match
break
fi
done
# If we have a keyword, add it to the list!
if [[ ! -z "$keyword" ]]; then
_all_docsets+=($keyword)
fi
# Bail once we have a match
break
done
# special thanks to [arx] on #zsh for getting me sorted on this piece
compadd -qS: -- "$_all_docsets[@]"
return
fi
# If we have a keyword, add it to the list!
if [[ -n "$keyword" ]]; then
docsets+=($keyword)
fi
done
# special thanks to [arx] on #zsh for getting me sorted on this piece
compadd -qS: -- "$docsets[@]"
}

29
plugins/dbt/README.md Normal file
View file

@ -0,0 +1,29 @@
# dbt plugin
## Introduction
The `dbt` plugin adds several [aliases](#aliases) for useful [dbt](https://docs.getdbt.com/) commands
(see the usage sketch after the alias table).
To use it, add `dbt` to the plugins array of your zshrc file:
```
plugins=(... dbt)
```
## Aliases
| Alias | Command | Description |
| ------ | ------------------------------------------------ | ---------------------------------------------------- |
| dbtlm | `dbt ls -s state:modified` | List modified models only |
| dbtrm | `dbt run -s state:modified` | Run modified models only |
| dbttm | `dbt test -m state:modified` | Test modified models only |
| dbtrtm | `dbtrm && dbttm` | Run and test modified models only |
| dbtrs | `dbt clean; dbt deps; dbt seed` | Re-seed data |
| dbtfrt | `dbtrs; dbt run --full-refresh; dbt test` | Perform a full fresh run with tests |
| dbtcds | `dbt docs generate; dbt docs serve`              | Generate and serve docs                               |
| dbtds | `dbt docs generate --no-compile; dbt docs serve` | Generate and serve docs skipping doc. re-compilation |
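For example, a typical loop over modified models might look like this (a sketch; it assumes your dbt project has state artifacts available for the `state:modified` selector):

```zsh
dbtrs     # dbt clean; dbt deps; dbt seed (re-seed the project)
dbtrtm    # run, then test, only the modified models
```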
## Maintainer
### [msempere](https://github.com/msempere)

View file

@ -0,0 +1,23 @@
# list modified models only
alias dbtlm="dbt ls -s state:modified"
# run modified models only
alias dbtrm="dbt run -s state:modified"
# test modified models only
alias dbttm="dbt test -m state:modified"
# run and test modified models only
alias dbtrtm="dbtrm && dbttm"
# re-seed data
alias dbtrs="dbt clean; dbt deps; dbt seed"
# perform a full fresh run with tests
alias dbtfrt="dbtrs; dbt run --full-refresh; dbt test"
# generate and serve docs
alias dbtcds="dbt docs generate; dbt docs serve"
# generate and serve docs skipping doc. re-compilation
alias dbtds="dbt docs generate --no-compile; dbt docs serve"

View file

@ -10,7 +10,7 @@ plugins=(... debian)
## Settings
- `$apt_pref`: use apt or aptitude if installed, fallback is apt-get.
- `$apt_pref`: use aptitude or apt if installed, fallback is apt-get.
- `$apt_upgr`: use upgrade or safe-upgrade (for aptitude).
Set `$apt_pref` and `$apt_upgr` to whatever command you want (before sourcing Oh My Zsh) to override this behavior.
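For instance, a minimal `~/.zshrc` sketch that forces `aptitude` with `safe-upgrade` (the values here are just an example):

```zsh
# must be set before Oh My Zsh is sourced
apt_pref='aptitude'
apt_upgr='safe-upgrade'

source "$ZSH/oh-my-zsh.sh"
```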
@ -21,7 +21,7 @@ Set `$apt_pref` and `$apt_upgr` to whatever command you want (before sourcing Oh
| ------ | ---------------------------------------------------------------------- | ---------------------------------------------------------- |
| `age` | `apt-get` | Command line tool for handling packages |
| `api` | `aptitude` | Same functionality as `apt-get`, provides extra options |
| `acs` | `apt-cache search` | Command line tool for searching apt software package cache |
| `acse` | `apt-cache search` | Command line tool for searching apt software package cache |
| `aps` | `aptitude search` | Searches installed packages using aptitude |
| `as` | `aptitude -F '* %p -> %d \n(%v/%V)' --no-gui --disable-columns search` | Print searched packages using a custom format |
| `afs` | `apt-file search --regexp` | Search file in packages |
@ -30,30 +30,32 @@ Set `$apt_pref` and `$apt_upgr` to whatever command you want (before sourcing Oh
## Superuser Operations Aliases
| Alias | Command | Description |
| -------- | -------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------- |
| `aac` | `sudo $apt_pref autoclean` | Clears out the local repository of retrieved package files |
| `abd` | `sudo $apt_pref build-dep` | Installs all dependencies for building packages |
| `ac` | `sudo $apt_pref clean` | Clears out the local repository of retrieved package files except lock files |
| `ad` | `sudo $apt_pref update` | Updates the package lists for upgrades for packages |
| `adg` | `sudo $apt_pref update && sudo $apt_pref $apt_upgr` | Update and upgrade packages |
| `adu` | `sudo $apt_pref update && sudo $apt_pref dist-upgrade` | Smart upgrade that handles dependencies |
| `afu` | `sudo apt-file update` | Update the files in packages |
| `au` | `sudo $apt_pref $apt_upgr` | Install package upgrades |
| `ai` | `sudo $apt_pref install` | Command-line tool to install package |
| `ail` | `sed -e 's/ */ /g' -e 's/ *//' \| cut -s -d ' ' -f 1 \| xargs sudo $apt_pref install` | Install all packages given on the command line while using only the first word of each line |
| `ap` | `sudo $apt_pref purge` | Removes packages along with configuration files |
| `ar` | `sudo $apt_pref remove` | Removes packages, keeps the configuration files |
| `ads` | `sudo apt-get dselect-upgrade` | Installs packages from list and removes all not in the list |
| `dia` | `sudo dpkg -i ./*.deb` | Install all .deb files in the current directory |
| `di` | `sudo dpkg -i` | Install all .deb files in the current directory |
| `kclean` | `sudo aptitude remove -P ?and(~i~nlinux-(ima\|hea) ?not(~n$(uname -r)))` | Remove ALL kernel images and headers EXCEPT the one in use |
| Alias | Command | Description |
| -------- | ------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------- |
| `aac` | `sudo $apt_pref autoclean` | Clears out the local repository of retrieved package files |
| `aar` | `sudo $apt_pref autoremove` | Removes packages installed automatically that are no longer needed |
| `abd` | `sudo $apt_pref build-dep` | Installs all dependencies for building packages |
| `ac` | `sudo $apt_pref clean` | Clears out the local repository of retrieved package files except lock files |
| `ad` | `sudo $apt_pref update` | Updates the package lists for upgrades for packages |
| `adg` | `sudo $apt_pref update && sudo $apt_pref $apt_upgr` | Update and upgrade packages |
| `ads` | `sudo apt-get dselect-upgrade` | Installs packages from list and removes all not in the list |
| `adu` | `sudo $apt_pref update && sudo $apt_pref dist-upgrade` | Smart upgrade that handles dependencies |
| `afu` | `sudo apt-file update` | Update the files in packages |
| `ai` | `sudo $apt_pref install` | Command-line tool to install package |
| `ail` | `sed -e 's/ */ /g' -e 's/ *//' \| cut -s -d ' ' -f 1 \| xargs sudo $apt_pref install` | Install all packages given on the command line while using only the first word of each line |
| `alu` | `sudo apt update && apt list -u && sudo apt upgrade` | Update, list and upgrade packages |
| `ap` | `sudo $apt_pref purge` | Removes packages along with configuration files |
| `au` | `sudo $apt_pref $apt_upgr` | Install package upgrades |
| `di`     | `sudo dpkg -i`                                                                          | Install the given .deb package file(s)                                                        |
| `dia` | `sudo dpkg -i ./*.deb` | Install all .deb files in the current directory |
| `kclean` | `sudo aptitude remove -P ?and(~i~nlinux-(ima\|hea) ?not(~n$(uname -r)))` | Remove ALL kernel images and headers EXCEPT the one in use |
## Aliases - Commands using `su`
| Alias | Command |
| ----- | --------------------------------------------------------- |
| `aac` | `su -ls "$apt_pref autoclean" root` |
| `aar` | `su -ls "$apt_pref autoremove" root` |
| `ac` | `su -ls "$apt_pref clean" root` |
| `ad` | `su -lc "$apt_pref update" root` |
| `adg` | `su -lc "$apt_pref update && aptitude $apt_upgr" root` |
@ -75,8 +77,8 @@ Set `$apt_pref` and `$apt_upgr` to whatever command you want (before sourcing Oh
| ------------------- | --------------------------------------------------------------- |
| `apt-copy` | Create a simple script that can be used to 'duplicate' a system |
| `apt-history` | Displays apt history for a command |
| `kerndeb` | Builds kernel packages |
| `apt-list-packages` | List packages by size |
| `kerndeb` | Builds kernel packages |
## Authors

View file

@ -1,13 +1,13 @@
# Use apt or aptitude if installed, fallback is apt-get
# Use aptitude or apt if installed, fallback is apt-get
# You can just set apt_pref='apt-get' to override it.
if [[ -z $apt_pref || -z $apt_upgr ]]; then
if [[ -e $commands[apt] ]]; then
apt_pref='apt'
apt_upgr='upgrade'
elif [[ -e $commands[aptitude] ]]; then
if [[ -e $commands[aptitude] ]]; then
apt_pref='aptitude'
apt_upgr='safe-upgrade'
elif [[ -e $commands[apt] ]]; then
apt_pref='apt'
apt_upgr='upgrade'
else
apt_pref='apt-get'
apt_upgr='upgrade'
@ -26,7 +26,7 @@ alias age='apt-get'
alias api='aptitude'
# Some self-explanatory aliases
alias acs="apt-cache search"
alias acse="apt-cache search"
alias aps='aptitude search'
alias as="aptitude -F '* %p -> %d \n(%v/%V)' --no-gui --disable-columns search"
@ -51,14 +51,18 @@ if [[ $use_sudo -eq 1 ]]; then
alias au="sudo $apt_pref $apt_upgr"
alias ai="sudo $apt_pref install"
# Install all packages given on the command line while using only the first word of each line:
# acs ... | ail
# acse ... | ail
alias ail="sed -e 's/ */ /g' -e 's/ *//' | cut -s -d ' ' -f 1 | xargs sudo $apt_pref install"
alias ap="sudo $apt_pref purge"
alias ar="sudo $apt_pref remove"
alias aar="sudo $apt_pref autoremove"
# apt-get only
alias ads="sudo apt-get dselect-upgrade"
# apt only
alias alu="sudo apt update && apt list -u && sudo apt upgrade"
# Install all .deb files in the current directory.
# Warning: you will need to put the glob in single quotes if you use:
# glob_subst
@ -84,21 +88,20 @@ else
alias afu="su -lc '$apt-file update'"
alias au="su -lc '$apt_pref $apt_upgr' root"
function ai() {
cmd="su -lc 'aptitude -P install $@' root"
cmd="su -lc '$apt_pref install $@' root"
print "$cmd"
eval "$cmd"
}
function ap() {
cmd="su -lc '$apt_pref -P purge $@' root"
cmd="su -lc '$apt_pref purge $@' root"
print "$cmd"
eval "$cmd"
}
function ar() {
cmd="su -lc '$apt_pref -P remove $@' root"
function aar() {
cmd="su -lc '$apt_pref autoremove $@' root"
print "$cmd"
eval "$cmd"
}
# Install all .deb files in the current directory
# Assumes glob_subst is off
alias dia='su -lc "dpkg -i ./*.deb" root'
@ -138,7 +141,7 @@ apt_pref_compdef au "$apt_upgr"
apt_pref_compdef ai "install"
apt_pref_compdef ail "install"
apt_pref_compdef ap "purge"
apt_pref_compdef ar "remove"
apt_pref_compdef aar "autoremove"
apt_pref_compdef ads "dselect-upgrade"
# Misc. #####################################################################

View file

@ -1 +0,0 @@
_deno

View file

@ -4,15 +4,17 @@ This plugin sets up completion and aliases for [Deno](https://deno.land).
## Aliases
| Alias | Full command |
| ----- | ---------------- |
| db | deno bundle |
| dc | deno compile |
| dca | deno cache |
| dfmt | deno fmt |
| dh | deno help |
| dli | deno lint |
| drn | deno run |
| drw | deno run --watch |
| dts | deno test |
| dup | deno upgrade |
| Alias | Full command |
| ----- | ------------------- |
| db | deno bundle |
| dc | deno compile |
| dca | deno cache |
| dfmt | deno fmt |
| dh | deno help |
| dli | deno lint |
| drn | deno run |
| drA | deno run -A |
| drw | deno run --watch |
| dru | deno run --unstable |
| dts | deno test |
| dup | deno upgrade |
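A quick usage sketch (the script name is hypothetical):

```zsh
drA server.ts   # deno run -A server.ts: run with all permissions granted
drw server.ts   # deno run --watch server.ts: restart on file changes
```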

View file

@ -6,18 +6,23 @@ alias dfmt='deno fmt'
alias dh='deno help'
alias dli='deno lint'
alias drn='deno run'
alias drA='deno run -A'
alias drw='deno run --watch'
alias dru='deno run --unstable'
alias dts='deno test'
alias dup='deno upgrade'
# COMPLETION FUNCTION
if (( $+commands[deno] )); then
if [[ ! -f $ZSH_CACHE_DIR/deno_version ]] \
|| [[ "$(deno --version)" != "$(< "$ZSH_CACHE_DIR/deno_version")" ]] \
|| [[ ! -f $ZSH/plugins/deno/_deno ]]; then
deno completions zsh > $ZSH/plugins/deno/_deno
deno --version > $ZSH_CACHE_DIR/deno_version
fi
if (( ! $+commands[deno] )); then
return
fi
# If the completion file doesn't exist yet, we need to autoload it and
# bind it to `deno`. Otherwise, compinit will have already done that.
if [[ ! -f "$ZSH_CACHE_DIR/completions/_deno" ]]; then
typeset -g -A _comps
autoload -Uz _deno
_comps[deno]=_deno
fi
deno completions zsh >| "$ZSH_CACHE_DIR/completions/_deno" &|

View file

@ -12,16 +12,27 @@ plugins=(... dirhistory)
| Shortcut | Description |
|-----------------------------------|-----------------------------------------------------------|
| <kbd>alt</kbd> + <kbd>left</kbd> | Go to previous directory |
| <kbd>alt</kbd> + <kbd>right</kbd> | Undo <kbd>alt</kbd> + <kbd>left</kbd> |
| <kbd>alt</kbd> + <kbd>up</kbd> | Move into the parent directory |
| <kbd>alt</kbd> + <kbd>down</kbd> | Move into the first child directory by alphabetical order |
| <kbd>Alt</kbd> + <kbd>Left</kbd> | Go to previous directory |
| <kbd>Alt</kbd> + <kbd>Right</kbd> | Go to next directory |
| <kbd>Alt</kbd> + <kbd>Up</kbd> | Move into the parent directory |
| <kbd>Alt</kbd> + <kbd>Down</kbd> | Move into the first child directory by alphabetical order |
**For macOS: use the Option key (<kbd>⌥</kbd>) instead of <kbd>Alt</kbd>**.
> NOTE: some terminals might override the <kbd>Alt</kbd> + Arrows key bindings (e.g. Windows Terminal).
> If these don't work, check your terminal settings and change them to a different keyboard shortcut (a rebinding sketch follows this note).
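A minimal rebinding sketch; the widget names come from the plugin itself, while the escape sequences shown (Ctrl+Left / Ctrl+Right on many xterm-compatible terminals) are an assumption that may not match your terminal:

```zsh
# assumed Ctrl+Left / Ctrl+Right escape sequences; adjust to what your terminal actually sends
bindkey "^[[1;5D" dirhistory_zle_dirhistory_back
bindkey "^[[1;5C" dirhistory_zle_dirhistory_future
```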
## Usage
This plugin allows you to navigate the history of previous current-working-directories using ALT-LEFT and ALT-RIGHT. ALT-LEFT moves back to directories that the user has changed to in the past, and ALT-RIGHT undoes ALT-LEFT. MAC users may alternately use OPT-LEFT and OPT-RIGHT.
This plugin allows you to navigate the history of previous working directories using <kbd>Alt</kbd> + <kbd>Left</kbd>
and <kbd>Alt</kbd> + <kbd>Right</kbd>. <kbd>Alt</kbd> + <kbd>Left</kbd> moves to past directories, and
<kbd>Alt</kbd> + <kbd>Right</kbd> goes back to recent directories.
Also, navigate directory **hierarchy** using ALT-UP and ALT-DOWN. (mac keybindings not yet implemented). ALT-UP moves to higher hierarchy (shortcut for 'cd ..'). ALT-DOWN moves into the first directory found in alphabetical order (useful to navigate long empty directories e.g. java packages)
**NOTE: the maximum directory history size is 30.**
You can also navigate **directory hierarchies** using <kbd>Alt</kbd> + <kbd>Up</kbd> and <kbd>Alt</kbd> + <kbd>Down</kbd>.
<kbd>Alt</kbd> + <kbd>Up</kbd> moves to the parent directory, while <kbd>Alt</kbd> + <kbd>Down</kbd> moves into the first
child directory found in alphabetical order (useful to navigate long empty directories, e.g. Java packages).
For example, if the shell was started, and the following commands were entered:
@ -32,8 +43,20 @@ cd share
cd doc
```
Then entering ALT-LEFT at the prompt would change directory from /usr/share/doc to /usr/share, then if pressed again to /usr/, then ~. If ALT-RIGHT were pressed the directory would be changed to /usr/ again.
the directory stack (`dirs -v`) would look like this:
After that, ALT-DOWN will probably go to /usr/bin (depends on your /usr structure), ALT-UP will return to /usr, then ALT-UP will get you to /
```console
$ dirs -v
0 /usr/share/doc
1 /usr/share
2 /usr
3 ~
```
**Currently the max history size is 30**. The navigation should work for xterm, PuTTY xterm mode, GNU screen, and on MAC with alternate keys as mentioned above.
then entering <kbd>Alt</kbd> + <kbd>Left</kbd> at the prompt would change directory from `/usr/share/doc` to `/usr/share`,
then if pressed again to `/usr`, then `~`. If <kbd>Alt</kbd> + <kbd>Right</kbd> were pressed the directory would be changed
to `/usr` again.
After that, <kbd>Alt</kbd> + <kbd>Down</kbd> will probably go to `/usr/bin` if `bin` is the first directory in alphabetical
order (depends on your `/usr` folder structure). <kbd>Alt</kbd> + <kbd>Up</kbd> will return to `/usr`, and once more will get
you to the root folder (`/`).

View file

@ -1,7 +1,7 @@
##
# Navigate directory history using ALT-LEFT and ALT-RIGHT. ALT-LEFT moves back to directories
##
# Navigate directory history using ALT-LEFT and ALT-RIGHT. ALT-LEFT moves back to directories
# that the user has changed to in the past, and ALT-RIGHT undoes ALT-LEFT.
#
#
# Navigate directory hierarchy using ALT-UP and ALT-DOWN.
# ALT-UP moves to higher hierarchy (cd ..)
# ALT-DOWN moves into the first directory found in alphabetical order
@ -14,27 +14,30 @@ export dirhistory_future
export DIRHISTORY_SIZE=30
# Pop the last element of dirhistory_past.
# Pass the name of the variable to return the result in.
# Pop the last element of dirhistory_past.
# Pass the name of the variable to return the result in.
# Returns the element if the array was not empty,
# otherwise returns empty string.
function pop_past() {
eval "$1='$dirhistory_past[$#dirhistory_past]'"
setopt localoptions no_ksh_arrays
if [[ $#dirhistory_past -gt 0 ]]; then
typeset -g $1="${dirhistory_past[$#dirhistory_past]}"
dirhistory_past[$#dirhistory_past]=()
fi
}
function pop_future() {
eval "$1='$dirhistory_future[$#dirhistory_future]'"
setopt localoptions no_ksh_arrays
if [[ $#dirhistory_future -gt 0 ]]; then
typeset -g $1="${dirhistory_future[$#dirhistory_future]}"
dirhistory_future[$#dirhistory_future]=()
fi
}
# Push a new element onto the end of dirhistory_past. If the size of the array
# Push a new element onto the end of dirhistory_past. If the size of the array
# is >= DIRHISTORY_SIZE, the array is shifted
function push_past() {
setopt localoptions no_ksh_arrays
if [[ $#dirhistory_past -ge $DIRHISTORY_SIZE ]]; then
shift dirhistory_past
fi
@ -44,6 +47,7 @@ function push_past() {
}
function push_future() {
setopt localoptions no_ksh_arrays
if [[ $#dirhistory_future -ge $DIRHISTORY_SIZE ]]; then
shift dirhistory_future
fi
@ -76,7 +80,7 @@ function dirhistory_back() {
local d=""
# Last element in dirhistory_past is the cwd.
pop_past cw
pop_past cw
if [[ "" == "$cw" ]]; then
# Someone overwrote our variable. Recover it.
dirhistory_past=($PWD)
@ -121,40 +125,43 @@ function dirhistory_zle_dirhistory_future() {
}
zle -N dirhistory_zle_dirhistory_back
# xterm in normal mode
bindkey "\e[3D" dirhistory_zle_dirhistory_back
bindkey "\e[1;3D" dirhistory_zle_dirhistory_back
# Terminal.app
if [[ "$TERM_PROGRAM" == "Apple_Terminal" ]]; then
bindkey "^[b" dirhistory_zle_dirhistory_back
fi
# iTerm2
if [[ "$TERM_PROGRAM" == "iTerm.app" ]]; then
bindkey "^[^[[D" dirhistory_zle_dirhistory_back
fi
# Putty:
bindkey "\e\e[D" dirhistory_zle_dirhistory_back
# GNU screen:
bindkey "\eO3D" dirhistory_zle_dirhistory_back
zle -N dirhistory_zle_dirhistory_future
bindkey "\e[3C" dirhistory_zle_dirhistory_future
bindkey "\e[1;3C" dirhistory_zle_dirhistory_future
# Terminal.app
if [[ "$TERM_PROGRAM" == "Apple_Terminal" ]]; then
bindkey "^[f" dirhistory_zle_dirhistory_future
fi
# iTerm2
if [[ "$TERM_PROGRAM" == "iTerm.app" ]]; then
bindkey "^[^[[C" dirhistory_zle_dirhistory_future
fi
bindkey "\e\e[C" dirhistory_zle_dirhistory_future
bindkey "\eO3C" dirhistory_zle_dirhistory_future
for keymap in emacs vicmd viins; do
# dirhistory_back
bindkey -M $keymap "\e[3D" dirhistory_zle_dirhistory_back # xterm in normal mode
bindkey -M $keymap "\e[1;3D" dirhistory_zle_dirhistory_back # xterm in normal mode
bindkey -M $keymap "\e\e[D" dirhistory_zle_dirhistory_back # Putty
bindkey -M $keymap "\eO3D" dirhistory_zle_dirhistory_back # GNU screen
#
case "$TERM_PROGRAM" in
Apple_Terminal) bindkey -M $keymap "^[b" dirhistory_zle_dirhistory_back ;; # Terminal.app
iTerm.app) bindkey -M $keymap "^[^[[D" dirhistory_zle_dirhistory_back ;; # iTerm2
esac
if (( ${+terminfo[kcub1]} )); then
bindkey -M $keymap "^[${terminfo[kcub1]}" dirhistory_zle_dirhistory_back # urxvt
fi
# dirhistory_future
bindkey -M $keymap "\e[3C" dirhistory_zle_dirhistory_future # xterm in normal mode
bindkey -M $keymap "\e[1;3C" dirhistory_zle_dirhistory_future # xterm in normal mode
bindkey -M $keymap "\e\e[C" dirhistory_zle_dirhistory_future # Putty
bindkey -M $keymap "\eO3C" dirhistory_zle_dirhistory_future # GNU screen
case "$TERM_PROGRAM" in
Apple_Terminal) bindkey -M $keymap "^[f" dirhistory_zle_dirhistory_future ;; # Terminal.app
iTerm.app) bindkey -M $keymap "^[^[[C" dirhistory_zle_dirhistory_future ;; # iTerm2
esac
if (( ${+terminfo[kcuf1]} )); then
bindkey -M $keymap "^[${terminfo[kcuf1]}" dirhistory_zle_dirhistory_future # urxvt
fi
done
#
# HIERARCHY Implemented in this section, in case someone wants to split it to another plugin if it clashes bindings
#
#
# Move up in hierarchy
function dirhistory_up() {
@ -181,22 +188,38 @@ function dirhistory_zle_dirhistory_down() {
}
zle -N dirhistory_zle_dirhistory_up
# xterm in normal mode
bindkey "\e[3A" dirhistory_zle_dirhistory_up
bindkey "\e[1;3A" dirhistory_zle_dirhistory_up
if [[ "$TERM_PROGRAM" == "Apple_Terminal" || "$TERM_PROGRAM" == "iTerm.app" ]]; then
bindkey "^[[A" dirhistory_zle_dirhistory_up
fi
# Putty:
bindkey "\e\e[A" dirhistory_zle_dirhistory_up
# GNU screen:
bindkey "\eO3A" dirhistory_zle_dirhistory_up
zle -N dirhistory_zle_dirhistory_down
bindkey "\e[3B" dirhistory_zle_dirhistory_down
bindkey "\e[1;3B" dirhistory_zle_dirhistory_down
if [[ "$TERM_PROGRAM" == "Apple_Terminal" || "$TERM_PROGRAM" == "iTerm.app" ]]; then
bindkey "^[[B" dirhistory_zle_dirhistory_down
fi
bindkey "\e\e[B" dirhistory_zle_dirhistory_down
bindkey "\eO3B" dirhistory_zle_dirhistory_down
for keymap in emacs vicmd viins; do
# dirhistory_up
bindkey -M $keymap "\e[3A" dirhistory_zle_dirhistory_up # xterm in normal mode
bindkey -M $keymap "\e[1;3A" dirhistory_zle_dirhistory_up # xterm in normal mode
bindkey -M $keymap "\e\e[A" dirhistory_zle_dirhistory_up # Putty
bindkey -M $keymap "\eO3A" dirhistory_zle_dirhistory_up # GNU screen
case "$TERM_PROGRAM" in
Apple_Terminal) bindkey -M $keymap "^[[A" dirhistory_zle_dirhistory_up ;; # Terminal.app
iTerm.app) bindkey -M $keymap "^[^[[A" dirhistory_zle_dirhistory_up ;; # iTerm2
esac
if (( ${+terminfo[kcuu1]} )); then
bindkey -M $keymap "^[${terminfo[kcuu1]}" dirhistory_zle_dirhistory_up # urxvt
fi
# dirhistory_down
bindkey -M $keymap "\e[3B" dirhistory_zle_dirhistory_down # xterm in normal mode
bindkey -M $keymap "\e[1;3B" dirhistory_zle_dirhistory_down # xterm in normal mode
bindkey -M $keymap "\e\e[B" dirhistory_zle_dirhistory_down # Putty
bindkey -M $keymap "\eO3B" dirhistory_zle_dirhistory_down # GNU screen
case "$TERM_PROGRAM" in
Apple_Terminal) bindkey -M $keymap "^[[B" dirhistory_zle_dirhistory_down ;; # Terminal.app
iTerm.app) bindkey -M $keymap "^[^[[B" dirhistory_zle_dirhistory_down ;; # iTerm2
esac
if (( ${+terminfo[kcud1]} )); then
bindkey -M $keymap "^[${terminfo[kcud1]}" dirhistory_zle_dirhistory_down # urxvt
fi
done
unset keymap

View file

@ -1,36 +0,0 @@
# Django plugin
This plugin adds completion and hints for the [Django Project](https://www.djangoproject.com/) `manage.py` commands
and options.
To use it, add `django` to the plugins array in your zshrc file:
```zsh
plugins=(... django)
```
## Usage
```zsh
$> python manage.py (press <TAB> here)
```
Would result in:
```zsh
cleanup -- remove old data from the database
compilemessages -- compile .po files to .mo for use with gettext
createcachetable -- creates table for SQL cache backend
createsuperuser -- create a superuser
dbshell -- run command-line client for the current database
diffsettings -- display differences between the current settings and Django defaults
dumpdata -- output contents of database as a fixture
flush -- execute 'sqlflush' on the current database
inspectdb -- output Django model module for tables in database
loaddata -- install the named fixture(s) in the database
makemessages -- pull out all strings marked for translation
reset -- executes 'sqlreset' for the given app(s)
runfcgi -- run this project as a fastcgi
runserver -- start a lightweight web server for development
...
```

View file

@ -1,404 +0,0 @@
#compdef manage.py
typeset -ga nul_args
nul_args=(
'--verbosity=-[verbosity level; 0=minimal output, 1=normal output, 2=all output.]:Verbosity:((0\:minimal 1\:normal 2\:all))'
'--settings=-[the Python path to a settings module.]:file:_files'
'--pythonpath=-[a directory to add to the Python path.]:directory:_directories'
'--traceback[print traceback on exception.]'
"--no-color[Don't colorize the command output.]"
"--version[show program's version number and exit.]"
{-h,--help}'[show this help message and exit.]'
)
typeset -ga start_args
start_args=(
'--template=-[The path or URL to load the template from.]:directory:_directories'
'--extension=-[The file extension(s) to render (default: "py").]'
'--name=-[The file name(s) to render.]:file:_files'
)
typeset -ga db_args
db_args=(
'--database=-[Nominates a database. Defaults to the "default" database.]'
)
typeset -ga noinput_args
noinput_args=(
'--noinput[tells Django to NOT prompt the user for input of any kind.]'
)
typeset -ga no_init_data_args
no_init_data_args=(
'--no-initial-data[Tells Django not to load any initial data after database synchronization.]'
)
typeset -ga tag_args
tag_args=(
'--tag=-[Run only checks labeled with given tag.]'
'--list-tags[List available tags.]'
)
_managepy-check(){
_arguments -s : \
$tag_args \
$nul_args && ret=0
}
_managepy-changepassword(){
_arguments -s : \
$db_args \
$nul_args && ret=0
}
_managepy-createcachetable(){
_arguments -s : \
$db_args \
$nul_args && ret=0
}
_managepy-createsuperuser(){
_arguments -s : \
'--username=-[Specifies the login for the superuser.]' \
'--email=-[Specifies the email for the superuser.]' \
$noinput_args \
$db_args \
$nul_args && ret=0
}
_managepy-collectstatic(){
_arguments -s : \
'--link[Create a symbolic link to each file instead of copying.]' \
'--no-post-process[Do NOT post process collected files.]' \
'--ignore=-[Ignore files or directories matching this glob-style pattern. Use multiple times to ignore more.]' \
'--dry-run[Do everything except modify the filesystem.]' \
'--clear[Clear the existing files using the storage before trying to copy or link the original file.]' \
'--link[Create a symbolic link to each file instead of copying.]' \
'--no-default-ignore[Do not ignore the common private glob-style patterns "CVS", ".*" and "*~".]' \
$noinput_args \
$nul_args && ret=0
}
_managepy-dbshell(){
_arguments -s : \
$db_args \
$nul_args && ret=0
}
_managepy-diffsettings(){
_arguments -s : \
"--all[Display all settings, regardless of their value.]"
$nul_args && ret=0
}
_managepy-dumpdata(){
_arguments -s : \
'--format=-[Specifies the output serialization format for fixtures.]:format:(json yaml xml)' \
'--indent=-[Specifies the indent level to use when pretty-printing output.]' \
'--exclude=-[An app_label or app_label.ModelName to exclude (use multiple --exclude to exclude multiple apps/models).]' \
'--natural-foreign[Use natural foreign keys if they are available.]' \
'--natural-primary[Use natural primary keys if they are available.]' \
"--all[Use Django's base manager to dump all models stored in the database.]" \
'--pks=-[Only dump objects with given primary keys.]' \
$db_args \
$nul_args \
'*::appname:_applist' && ret=0
}
_managepy-flush(){
_arguments -s : \
$no_init_data_args \
$db_args \
$noinput_args \
$nul_args && ret=0
}
_managepy-help(){
_arguments -s : \
'*:command:_managepy_cmds' \
$nul_args && ret=0
}
_managepy_cmds(){
local line
local -a cmd
_call_program help-command ./manage.py help \
|& sed -n '/^ /s/[(), ]/ /gp' \
| while read -A line; do cmd=($line $cmd) done
_describe -t managepy-command 'manage.py command' cmd
}
_managepy-inspectdb(){
_arguments -s : \
$db_args \
$nul_args && ret=0
}
_managepy-loaddata(){
_arguments -s : \
'--ignorenonexistent[Ignores entries in the serialized data for fields that do not currently exist on the model.]' \
'--app=-[Only look for fixtures in the specified app.]:appname:_applist' \
'*::file:_files' \
$db_args \
$nul_args && ret=0
}
_managepy-makemessages(){
_arguments -s : \
'--locale=-[Creates or updates the message files for the given locale(s) (e.g. pt_BR).]' \
'--domain=-[The domain of the message files (default: "django").]' \
'--all[Updates the message files for all existing locales.]' \
'--extension=-[The file extension(s) to examine (default: "html,txt", or "js" if the domain is "djangojs").]' \
'--symlinks[Follows symlinks to directories when examining source code and templates for translation strings.]' \
'--ignore=-[Ignore files or directories matching this glob-style pattern.]' \
"--no-default-ignore[Don't ignore the common glob-style patterns 'CVS', '.*', '*~' and '*.pyc'.]" \
"--no-wrap[Don't break long message lines into several lines.]" \
"--no-location[Don't write '#: filename:line' lines.]" \
'--no-obsolete[Remove obsolete message strings.]' \
'--keep-pot[Keep .pot file after making messages.]' \
$nul_args && ret=0
}
_managepy-makemigrations(){
_arguments -s : \
'--dry-run[Just show what migrations would be made]' \
'--merge[Enable fixing of migration conflicts.]' \
'--empty[Create an empty migration.]' \
$noinput_args \
$nul_args && ret=0
}
_managepy-migrate(){
_arguments -s : \
'--fake[Mark migrations as run without actually running them]' \
'--list[Show a list of all known migrations and which are applied]' \
$no_init_data_args \
$noinput_args \
$db_args \
$nul_args && ret=0
}
_managepy-runfcgi(){
local state
local fcgi_opts
fcgi_opts=(
'protocol[fcgi, scgi, ajp, ... (default fcgi)]:protocol:(fcgi scgi ajp)'
'host[hostname to listen on..]:'
'port[port to listen on.]:'
'socket[UNIX socket to listen on.]:file:_files'
'method[prefork or threaded (default prefork)]:method:(prefork threaded)'
'maxrequests[number of requests a child handles before it is killed and a new child is forked (0 = no limit).]:'
'maxspare[max number of spare processes / threads.]:'
'minspare[min number of spare processes / threads.]:'
'maxchildren[hard limit number of processes / threads.]:'
'daemonize[whether to detach from terminal.]:boolean:(False True)'
'pidfile[write the spawned process-id to this file.]:file:_files'
'workdir[change to this directory when daemonizing.]:directory:_files'
'outlog[write stdout to this file.]:file:_files'
'errlog[write stderr to this file.]:file:_files'
)
_arguments -s : \
$nul_args \
'*: :_values "FCGI Setting" $fcgi_opts' && ret=0
}
_managepy-runserver(){
_arguments -s : \
'--ipv6[Tells Django to use an IPv6 address.]' \
'--nothreading[Tells Django to NOT use threading.]' \
'--noreload[Tells Django to NOT use the auto-reloader.]' \
'--nostatic[Tells Django to NOT automatically serve static files at STATIC_URL.]' \
'--insecure[Allows serving static files even if DEBUG is False.]' \
$nul_args && ret=0
}
_managepy-shell(){
_arguments -s : \
'--plain[Tells Django to use plain Python, not IPython.]' \
'--no-startup[When using plain Python, ignore the PYTHONSTARTUP environment variable and ~/.pythonrc.py script.]' \
'--interface=-[Specify an interactive interpreter interface.]:INTERFACE:((ipython bpython))' \
$nul_args && ret=0
}
_managepy-sql(){
_arguments -s : \
$db_args \
$nul_args && ret=0
}
_managepy-sqlall(){
_arguments -s : \
$db_args \
$nul_args && ret=0
}
_managepy-sqlclear(){
_arguments -s : \
$db_args \
$nul_args && ret=0
}
_managepy-sqlcustom(){
_arguments -s : \
$db_args \
$nul_args && ret=0
}
_managepy-dropindexes(){
_arguments -s : \
$db_args \
$nul_args && ret=0
}
_managepy-sqlflush(){
_arguments -s : \
$db_args \
$nul_args && ret=0
}
_managepy-sqlindexes(){
_arguments -s : \
$db_args \
$nul_args && ret=0
}
_managepy-sqlinitialdata(){
_arguments -s : \
$nul_args && ret=0
}
_managepy-sqlsequencereset(){
_arguments -s : \
$db_args \
$nul_args && ret=0
}
_managepy-squashmigrations(){
_arguments -s : \
'--no-optimize[Do not try to optimize the squashed operations.]' \
$noinput_args \
$nul_args && ret=0
}
_managepy-startapp(){
_arguments -s : \
$start_args \
$nul_args && ret=0
}
_managepy-startproject(){
_arguments -s : \
$start_args \
$nul_args && ret=0
}
_managepy-syncdb() {
_arguments -s : \
$noinput_args \
$no_init_data_args \
$db_args \
$nul_args && ret=0
}
_managepy-test() {
_arguments -s : \
'--failfast[Tells Django to stop running the test suite after first failed test.]' \
'--testrunner=-[Tells Django to use specified test runner class instead of the one specified by the TEST_RUNNER setting.]' \
'--liveserver=-[Overrides the default address where the live server (used with LiveServerTestCase) is expected to run from. The default value is localhost:8081.]' \
'--top-level-directory=-[Top level of project for unittest discovery.]' \
'--pattern=-[The test matching pattern. Defaults to test*.py.]:' \
$noinput_args \
'*::appname:_applist' \
$nul_args && ret=0
}
_managepy-testserver() {
_arguments -s : \
'--addrport=-[port number or ipaddr:port to run the server on.]' \
'--ipv6[Tells Django to use an IPv6 address.]' \
$noinput_args \
'*::fixture:_files' \
$nul_args && ret=0
}
_managepy-validate() {
_arguments -s : \
$tag_args \
$nul_args && ret=0
}
_managepy-commands() {
local -a commands
commands=(
"changepassword:Change a user's password for django.contrib.auth."
'check:Checks the entire Django project for potential problems.'
'compilemessages:Compiles .po files to .mo files for use with builtin gettext support.'
'createcachetable:Creates the table needed to use the SQL cache backend.'
'createsuperuser:Used to create a superuser.'
'collectstatic:Collect static files in a single location.'
'dbshell:Runs the command-line client for the current DATABASE_ENGINE.'
"diffsettings:Displays differences between the current settings.py and Django's default settings."
'dumpdata:Output the contents of the database as a fixture of the given format.'
'flush:Executes ``sqlflush`` on the current database.'
'help:manage.py help.'
'inspectdb:Introspects the database tables in the given database and outputs a Django model module.'
'loaddata:Installs the named fixture(s) in the database.'
'makemessages:Runs over the entire source tree of the current directory and pulls out all strings marked for translation.'
'makemigrations:Creates new migration(s) for apps.'
'migrate:Updates database schema. Manages both apps with migrations and those without.'
    'runfcgi:Run this project as a fastcgi (or some other protocol supported by flup) application.'
'runserver:Starts a lightweight Web server for development.'
'shell:Runs a Python interactive interpreter.'
'showmigrations:Shows all available migrations for the current project.'
'sql:Prints the CREATE TABLE SQL statements for the given app name(s).'
'sqlall:Prints the CREATE TABLE, custom SQL and CREATE INDEX SQL statements for the given model module name(s).'
'sqlclear:Prints the DROP TABLE SQL statements for the given app name(s).'
'sqlcustom:Prints the custom table modifying SQL statements for the given app name(s).'
'sqldropindexes:Prints the DROP INDEX SQL statements for the given model module name(s).'
'sqlflush:Returns a list of the SQL statements required to return all tables in the database to the state they were in just after they were installed.'
'sqlindexes:Prints the CREATE INDEX SQL statements for the given model module name(s).'
"sqlinitialdata:RENAMED: see 'sqlcustom'"
'sqlsequencereset:Prints the SQL statements for resetting sequences for the given app name(s).'
'squashmigrations:Squashes an existing set of migrations (from first until specified) into a single new one.'
"startapp:Creates a Django app directory structure for the given app name in this project's directory."
"startproject:Creates a Django project directory structure for the given project name in this current directory."
"syncdb:Create the database tables for all apps in INSTALLED_APPS whose tables haven't already been created."
'test:Runs the test suite for the specified applications, or the entire site if no apps are specified.'
'testserver:Runs a development server with data from the given fixture(s).'
'validate:Validates all installed models.'
)
_describe -t commands 'manage.py command' commands && ret=0
}
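# Completes Django application names by reading INSTALLED_APPS from the project's settings.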
_applist() {
local line
local -a apps
_call_program help-command "python -c \"import sys; del sys.path[0];\\
import os.path as op, re, django.conf;\\
bn=op.basename(op.abspath(op.curdir));[sys\\
.stdout.write(str(re.sub(r'^%s\.(.*?)$' %
bn, r'\1', i)) + '\n') for i in django.conf.settings.\\
INSTALLED_APPS if re.match(r'^%s' % bn, i)]\"" \
    | while read -A line; do apps=($line $apps); done
_values 'Application' $apps && ret=0
}
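# Top-level dispatcher: completes the subcommand name first, then delegates to the matching _managepy-<command> function.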
_managepy() {
local curcontext=$curcontext ret=1
if ((CURRENT == 2)); then
_managepy-commands
else
shift words
(( CURRENT -- ))
curcontext="${curcontext%:*:*}:managepy-$words[1]:"
_call_function ret _managepy-$words[1]
fi
}
compdef _managepy manage.py
compdef _managepy django
compdef _managepy django-admin
compdef _managepy django-admin.py
compdef _managepy django-manage

View file

@ -1,6 +1,6 @@
# Dnote Plugin
This plugin adds auto-completion for [Dnote](https://dnote.io) project.
This plugin adds auto-completion for [Dnote](https://www.getdnote.com/), a simple command line notebook.
To use it, add `dnote` to the plugins array in your zshrc file:

View file

@ -11,22 +11,23 @@ plugins=(... docker-compose)
## Aliases
| Alias | Command | Description |
|-----------|--------------------------------|------------------------------------------------------------------|
| dco | `docker-compose` | Docker-compose main command |
| dcb | `docker-compose build` | Build containers |
| dce | `docker-compose exec` | Execute command inside a container |
| dcps | `docker-compose ps` | List containers |
| dcrestart | `docker-compose restart` | Restart container |
| dcrm | `docker-compose rm` | Remove container |
| dcr | `docker-compose run` | Run a command in container |
| dcstop | `docker-compose stop` | Stop a container |
| dcup | `docker-compose up` | Build, (re)create, start, and attach to containers for a service |
| dcupb | `docker-compose up --build` | Same as `dcup`, but build images before starting containers |
| dcupd | `docker-compose up -d` | Same as `dcup`, but starts as daemon |
| dcdn | `docker-compose down` | Stop and remove containers |
| dcl | `docker-compose logs` | Show logs of container |
| dclf | `docker-compose logs -f` | Show logs and follow output |
| dcpull | `docker-compose pull` | Pull image of a service |
| dcstart | `docker-compose start` | Start a container |
| dck | `docker-compose kill` | Kills containers |
| Alias | Command | Description |
|-----------|--------------------------------|----------------------------------------------------------------------------------|
| dco | `docker-compose` | Docker-compose main command |
| dcb | `docker-compose build` | Build containers |
| dce | `docker-compose exec` | Execute command inside a container |
| dcps | `docker-compose ps` | List containers |
| dcrestart | `docker-compose restart` | Restart container |
| dcrm | `docker-compose rm` | Remove container |
| dcr | `docker-compose run` | Run a command in container |
| dcstop | `docker-compose stop` | Stop a container |
| dcup | `docker-compose up` | Build, (re)create, start, and attach to containers for a service |
| dcupb | `docker-compose up --build` | Same as `dcup`, but build images before starting containers |
| dcupd | `docker-compose up -d` | Same as `dcup`, but starts as daemon |
| dcupdb | `docker-compose up -d --build` | Same as `dcup`, but build images before starting containers and starts as daemon |
| dcdn | `docker-compose down` | Stop and remove containers |
| dcl | `docker-compose logs` | Show logs of container |
| dclf | `docker-compose logs -f` | Show logs and follow output |
| dcpull | `docker-compose pull` | Pull image of a service |
| dcstart | `docker-compose start` | Start a container |
| dck | `docker-compose kill` | Kills containers |
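
For example, a typical build-and-teardown cycle with these aliases (an illustrative sketch, not part of the plugin):

```sh
dcupdb    # docker-compose up -d --build
dclf      # docker-compose logs -f
dcdn      # docker-compose down
```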

View file

@ -121,12 +121,6 @@ __docker-compose_subcommand() {
'--parallel[Build images in parallel.]' \
'*:services:__docker-compose_services_from_build' && ret=0
;;
(bundle)
_arguments \
$opts_help \
'--push-images[Automatically push images for any services which have a `build` option specified.]' \
'(--output -o)'{--output,-o}'[Path to write the bundle file to. Defaults to "<project name>.dab".]:file:_files' && ret=0
;;
(config)
_arguments \
$opts_help \
@ -290,7 +284,7 @@ __docker-compose_subcommand() {
(up)
_arguments \
$opts_help \
'(--abort-on-container-exit)-d[Detached mode: Run containers in the background, print new container names. Incompatible with --abort-on-container-exit.]' \
'(--abort-on-container-exit)-d[Detached mode: Run containers in the background, print new container names. Incompatible with --abort-on-container-exit and --attach-dependencies.]' \
$opts_no_color \
$opts_no_deps \
$opts_force_recreate \
@ -298,6 +292,7 @@ __docker-compose_subcommand() {
$opts_no_build \
"(--no-build)--build[Build images before starting containers.]" \
"(-d)--abort-on-container-exit[Stops all containers if any container was stopped. Incompatible with -d.]" \
"(-d)--attach-dependencies[Attach to dependent containers. Incompatible with -d.]" \
'(-t --timeout)'{-t,--timeout}"[Use this timeout in seconds for container shutdown when attached or when containers are already running. (default: 10)]:seconds: " \
'--scale[SERVICE=NUM Scale SERVICE to NUM instances. Overrides the `scale` setting in the Compose file if present.]:service scale SERVICE=NUM: ' \
'--exit-code-from=[Return the exit code of the selected service container. Implies --abort-on-container-exit]:service:__docker-compose_services' \
@ -341,11 +336,13 @@ _docker-compose() {
'(- :)'{-h,--help}'[Get help]' \
'*'{-f,--file}"[${file_description}]:file:_files -g '*.yml'" \
'(-p --project-name)'{-p,--project-name}'[Specify an alternate project name (default: directory name)]:project name:' \
'--env-file[Specify an alternate environment file (default: .env)]:env-file:_files' \
"--compatibility[If set, Compose will attempt to convert keys in v3 files to their non-Swarm equivalent]" \
'(- :)'{-v,--version}'[Print version and exit]' \
'--verbose[Show more output]' \
'--log-level=[Set log level]:level:(DEBUG INFO WARNING ERROR CRITICAL)' \
'--no-ansi[Do not print ANSI control characters]' \
'--ansi=[Control when to print ANSI control characters]:when:(never always auto)' \
'(-H --host)'{-H,--host}'[Daemon socket to connect to]:host:' \
'--tls[Use TLS; implied by --tlsverify]' \
'--tlscacert=[Trust certs signed only by this CA]:ca path:' \
@ -359,6 +356,7 @@ _docker-compose() {
local -a relevant_compose_flags relevant_compose_repeatable_flags relevant_docker_flags compose_options docker_options
relevant_compose_flags=(
"--env-file"
"--file" "-f"
"--host" "-H"
"--project-name" "-p"

View file

@ -1,28 +1,23 @@
# Authors:
# https://github.com/tristola
#
# Docker-compose related zsh aliases
# support Compose v2 as docker CLI plugin
(( ${+commands[docker-compose]} )) && dccmd='docker-compose' || dccmd='docker compose'
# Aliases ###################################################################
alias dco="$dccmd"
alias dcb="$dccmd build"
alias dce="$dccmd exec"
alias dcps="$dccmd ps"
alias dcrestart="$dccmd restart"
alias dcrm="$dccmd rm"
alias dcr="$dccmd run"
alias dcstop="$dccmd stop"
alias dcup="$dccmd up"
alias dcupb="$dccmd up --build"
alias dcupd="$dccmd up -d"
alias dcupdb="$dccmd up -d --build"
alias dcdn="$dccmd down"
alias dcl="$dccmd logs"
alias dclf="$dccmd logs -f"
alias dcpull="$dccmd pull"
alias dcstart="$dccmd start"
alias dck="$dccmd kill"
# Use dco as alias for docker-compose, since dc on *nix is 'dc - an arbitrary precision calculator'
# https://www.gnu.org/software/bc/manual/dc-1.05/html_mono/dc.html
alias dco='docker-compose'
alias dcb='docker-compose build'
alias dce='docker-compose exec'
alias dcps='docker-compose ps'
alias dcrestart='docker-compose restart'
alias dcrm='docker-compose rm'
alias dcr='docker-compose run'
alias dcstop='docker-compose stop'
alias dcup='docker-compose up'
alias dcupb='docker-compose up --build'
alias dcupd='docker-compose up -d'
alias dcdn='docker-compose down'
alias dcl='docker-compose logs'
alias dclf='docker-compose logs -f'
alias dcpull='docker-compose pull'
alias dcstart='docker-compose start'
alias dck='docker-compose kill'
unset dccmd

View file

@ -90,7 +90,7 @@ __docker-machine_filters() {
}
__get_swarm_discovery() {
declare -a masters serivces
declare -a masters services
local service
services=()
masters=($(docker-machine ls -f {{.Swarm}} |grep '(master)' |awk '{print $1}'))
@ -169,7 +169,7 @@ __get_create_argument() {
__docker-machine_subcommand() {
local -a opts_help
opts_help=("(- :)--help[Print usage]")
local -a opts_only_host opts_driver opts_storage_driver opts_stragery
local -a opts_only_host opts_driver opts_storage_driver opts_state
opts_only_host=(
"$opts_help"
"*:host:__docker-machine_hosts_all"
@ -330,14 +330,14 @@ _docker-machine() {
_arguments -C \
"(- :)"{-h,--help}"[Show help]" \
"(-D --debug)"{-D,--debug}"[Enable debug mode]" \
'(-s --stroage-path)'{-s,--storage-path}'[Configures storage path]:file:_files' \
'(-s --storage-path)'{-s,--storage-path}'[Configures storage path]:file:_files' \
'--tls-ca-cert[CA to verify remotes against]:file:_files' \
'--tls-ca-key[Private key to generate certificates]:file:_files' \
'--tls-client-cert[Client cert to use for TLS]:file:_files' \
'--tls-client-key[Private key used in client TLS auth]:file:_files' \
'--github-api-token[Token to use for requests to the Github API]' \
'--github-api-token[Token to use for requests to the GitHub API]' \
'--native-ssh[Use the native (Go-based) SSH implementation.]' \
'--bugsnag-api-token[BugSnag API token for crash reporting]' \
'--bugsnag-api-token[Bugsnag API token for crash reporting]' \
'(- :)'{-v,--version}'[Print the version]' \
"(-): :->command" \
"(-)*:: :->option-or-argument" && ret=0

View file

@ -1,6 +1,6 @@
# Docker plugin
This plugin adds auto-completion for [docker](https://www.docker.com/).
This plugin adds auto-completion and aliases for [docker](https://www.docker.com/).
To use it add `docker` to the plugins array in your zshrc file.
@ -13,22 +13,57 @@ https://github.com/docker/cli/blob/master/contrib/completion/zsh/_docker
## Settings
By default, the completion doesn't allow option-stacking, meaning if you try to
complete `docker run -it <TAB>` it won't work, because you're _stacking_ the
`-i` and `-t` options.
By default, the completion doesn't allow option-stacking, meaning if you try to complete
`docker run -it <TAB>` it won't work, because you're _stacking_ the `-i` and `-t` options.
[You can enable it](https://github.com/docker/cli/commit/b10fb43048) by **adding
the lines below to your zshrc file**, but be aware of the side effects:
[You can enable it](https://github.com/docker/cli/commit/b10fb43048) by **adding the lines below to your zshrc
file**, but be aware of the side effects:
> This enables Zsh to understand commands like `docker run -it
> ubuntu`. However, by enabling this, this also makes Zsh complete
> `docker run -u<tab>` with `docker run -uapprox` which is not valid. The
> users have to put the space or the equal sign themselves before trying
> to complete.
> This enables Zsh to understand commands like `docker run -it ubuntu`. However, by enabling this, this also
> makes Zsh complete `docker run -u<tab>` with `docker run -uapprox` which is not valid. The users have to put
> the space or the equal sign themselves before trying to complete.
>
> Therefore, this behavior is disabled by default. To enable it:
>
> ```
> ```sh
> zstyle ':completion:*:*:docker:*' option-stacking yes
> zstyle ':completion:*:*:docker-*:*' option-stacking yes
> ```
## Aliases
| Alias | Command | Description |
| :------ | :---------------------------- | :--------------------------------------------------------------------------------------- |
| dbl | `docker build` | Build an image from a Dockerfile |
| dcin | `docker container inspect` | Display detailed information on one or more containers |
| dcls | `docker container ls` | List all the running docker containers |
| dclsa | `docker container ls -a` | List all running and stopped containers |
| dib | `docker image build` | Build an image from a Dockerfile (same as docker build) |
| dii | `docker image inspect` | Display detailed information on one or more images |
| dils | `docker image ls` | List docker images |
| dipu | `docker image push` | Push an image or repository to a remote registry |
| dirm | `docker image rm` | Remove one or more images |
| dit | `docker image tag` | Add a name and tag to a particular image |
| dlo | `docker container logs` | Fetch the logs of a docker container |
| dnc | `docker network create` | Create a new network |
| dncn | `docker network connect` | Connect a container to a network |
| dndcn | `docker network disconnect` | Disconnect a container from a network |
| dni | `docker network inspect` | Return information about one or more networks |
| dnls | `docker network ls` | List all networks the engine daemon knows about, including those spanning multiple hosts |
| dnrm | `docker network rm` | Remove one or more networks |
| dpo | `docker container port` | List port mappings or a specific mapping for the container |
| dpu | `docker pull` | Pull an image or a repository from a registry |
| dr | `docker container run` | Create a new container and start it using the specified command |
| drit | `docker container run -it` | Create a new container and start it in an interactive shell |
| drm | `docker container rm` | Remove the specified container(s) |
| drm! | `docker container rm -f` | Force the removal of a running container (uses SIGKILL) |
| dst | `docker container start` | Start one or more stopped containers |
| drs     | `docker container restart`    | Restart one or more containers                                                             |
| dsta | `docker stop $(docker ps -q)` | Stop all running containers |
| dstp | `docker container stop` | Stop one or more running containers |
| dtop | `docker top` | Display the running processes of a container |
| dvi | `docker volume inspect` | Display detailed information about one or more volumes |
| dvls | `docker volume ls` | List all the volumes known to docker |
| dvprune | `docker volume prune` | Cleanup dangling volumes |
| dxc | `docker container exec` | Run a new command in a running container |
| dxcit | `docker container exec -it` | Run a new command in a running container in an interactive shell |
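
For instance, a short session using a few of these aliases (an illustrative sketch; `nginx` is just an example image):

```sh
drit nginx sh    # docker container run -it nginx sh
dclsa            # docker container ls -a
dsta             # docker stop $(docker ps -q)
```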

View file

@ -567,7 +567,7 @@ __docker_container_commands() {
"cp:Copy files/folders between a container and the local filesystem"
"create:Create a new container"
"diff:Inspect changes on a container's filesystem"
"exec:Run a command in a running container"
"exec:Execute a command in a running container"
"export:Export a container's filesystem as a tar archive"
"inspect:Display detailed information on one or more containers"
"kill:Kill one or more running containers"
@ -579,7 +579,7 @@ __docker_container_commands() {
"rename:Rename a container"
"restart:Restart one or more containers"
"rm:Remove one or more containers"
"run:Run a command in a new container"
"run:Create and run a new container from an image"
"start:Start one or more stopped containers"
"stats:Display a live stream of container(s) resource usage statistics"
"stop:Stop one or more running containers"
@ -602,6 +602,7 @@ __docker_container_subcommand() {
opts_create_run=(
"($help -a --attach)"{-a=,--attach=}"[Attach to stdin, stdout or stderr]:device:(STDIN STDOUT STDERR)"
"($help)*--add-host=[Add a custom host-to-IP mapping]:host\:ip mapping: "
"($help)*--annotation=[Add an annotation to the container (passed through to the OCI runtime)]:annotations: "
"($help)*--blkio-weight-device=[Block IO (relative device weight)]:device:Block IO weight: "
"($help)*--cap-add=[Add Linux capabilities]:capability: "
"($help)*--cap-drop=[Drop Linux capabilities]:capability: "
@ -650,6 +651,7 @@ __docker_container_subcommand() {
"($help)*"{-p=,--publish=}"[Expose a container's port to the host]:port:_ports"
"($help)--pid=[PID namespace to use]:PID namespace:__docker_complete_pid"
"($help)--privileged[Give extended privileges to this container]"
"($help -q --quiet)"{-q,--quiet}"[Suppress the pull output]"
"($help)--read-only[Mount the container's root filesystem as read only]"
"($help)*--security-opt=[Security options]:security option: "
"($help)*--shm-size=[Size of '/dev/shm' (format is '<number><unit>')]:shm size: "
@ -661,7 +663,7 @@ __docker_container_subcommand() {
"($help)*--ulimit=[ulimit options]:ulimit: "
"($help)--userns=[Container user namespace]:user namespace:(host)"
"($help)--tmpfs[mount tmpfs]"
"($help)*-v[Bind mount a volume]:volume: "
"($help)*-v[Bind mount a volume]:volume:_directories -W / -P '/' -S '\:' -r '/ '"
"($help)--volume-driver=[Optional volume driver for the container]:volume driver:(local)"
"($help)*--volumes-from=[Mount volumes from the specified container]:volume: "
"($help -w --workdir)"{-w=,--workdir=}"[Working directory inside the container]:directory:_directories"
@ -802,7 +804,7 @@ __docker_container_subcommand() {
"($help -a --all)"{-a,--all}"[Show all containers]" \
"($help)--before=[Show only container created before...]:containers:__docker_complete_containers" \
"($help)*"{-f=,--filter=}"[Filter values]:filter:__docker_complete_ps_filters" \
"($help)--format=[Pretty-print containers using a Go template]:template: " \
"($help)--format=[Format the output using the given Go template]:template: " \
"($help -l --latest)"{-l,--latest}"[Show only the latest created container]" \
"($help -n --last)"{-n=,--last=}"[Show n last created containers (includes all states)]:n:(1 5 10 25 50)" \
"($help)--no-trunc[Do not truncate output]" \
@ -907,7 +909,7 @@ __docker_container_subcommand() {
_arguments $(__docker_arguments) \
$opts_help \
"($help -a --all)"{-a,--all}"[Show all containers (default shows just running)]" \
"($help)--format=[Pretty-print images using a Go template]:template: " \
"($help)--format=[Format the output using the given Go template]:template: " \
"($help)--no-stream[Disable streaming stats and only pull the first result]" \
"($help)--no-trunc[Do not truncate output]" \
"($help -)*:containers:__docker_complete_running_containers" && ret=0
@ -973,8 +975,8 @@ __docker_image_commands() {
"load:Load an image from a tar archive or STDIN"
"ls:List images"
"prune:Remove unused images"
"pull:Pull an image or a repository from a registry"
"push:Push an image or a repository to a registry"
"pull:Download an image from a registry"
"push:Upload an image to a registry"
"rm:Remove one or more images"
"save:Save one or more images to a tar archive (streamed to STDOUT by default)"
"tag:Tag an image into a repository"
@ -1060,7 +1062,7 @@ __docker_image_subcommand() {
"($help -a --all)"{-a,--all}"[Show all images]" \
"($help)--digests[Show digests]" \
"($help)*"{-f=,--filter=}"[Filter values]:filter:__docker_complete_images_filters" \
"($help)--format=[Pretty-print images using a Go template]:template: " \
"($help)--format=[Format the output using the given Go template]:template: " \
"($help)--no-trunc[Do not truncate output]" \
"($help -q --quiet)"{-q,--quiet}"[Only show image IDs]" \
"($help -): :__docker_complete_repositories" && ret=0
@ -1082,7 +1084,7 @@ __docker_image_subcommand() {
(push)
_arguments $(__docker_arguments) \
$opts_help \
"($help -a --all-tags)"{-a,--all-tags}"[Push all tagged images in the repository]" \
"($help -a --all-tags)"{-a,--all-tags}"[Push all tags of an image to the repository]" \
"($help)--disable-content-trust[Skip image signing]" \
"($help -): :__docker_complete_images" && ret=0
;;
@ -1292,7 +1294,7 @@ __docker_network_subcommand() {
$opts_help \
"($help)--no-trunc[Do not truncate the output]" \
"($help)*"{-f=,--filter=}"[Provide filter values]:filter:__docker_network_complete_ls_filters" \
"($help)--format=[Pretty-print networks using a Go template]:template: " \
"($help)--format=[Format the output using the given Go template]:template: " \
"($help -q --quiet)"{-q,--quiet}"[Only display network IDs]" && ret=0
;;
(prune)
@ -1343,7 +1345,7 @@ __docker_node_complete_ls_filters() {
;;
esac
else
opts=('id' 'label' 'membership' 'name' 'role')
opts=('id' 'label' 'membership' 'name' 'node.label' 'role')
_describe -t filter-opts "filter options" opts -qS "=" && ret=0
fi
@ -2050,7 +2052,7 @@ __docker_service_subcommand() {
_arguments $(__docker_arguments) \
$opts_help \
"($help)*"{-f=,--filter=}"[Filter output based on conditions provided]:filter:__docker_service_complete_ls_filters" \
"($help)--format=[Pretty-print services using a Go template]:template: " \
"($help)--format=[Format the output using the given Go template]:template: " \
"($help -q --quiet)"{-q,--quiet}"[Only display IDs]" && ret=0
;;
(rm|remove)
@ -2253,7 +2255,7 @@ __docker_stack_subcommand() {
_arguments $(__docker_arguments) \
$opts_help \
"($help)*"{-f=,--filter=}"[Filter output based on conditions provided]:filter:__docker_stack_complete_services_filters" \
"($help)--format=[Pretty-print services using a Go template]:template: " \
"($help)--format=[Format the output using the given Go template]:template: " \
"($help -q --quiet)"{-q,--quiet}"[Only display IDs]" \
"($help -):stack:__docker_complete_stacks" && ret=0
;;
@ -2520,12 +2522,14 @@ __docker_volume_subcommand() {
_arguments $(__docker_arguments) \
$opts_help \
"($help)*"{-f=,--filter=}"[Provide filter values]:filter:__docker_volume_complete_ls_filters" \
"($help)--format=[Pretty-print volumes using a Go template]:template: " \
"($help)--format=[Format the output using the given Go template]:template: " \
"($help -q --quiet)"{-q,--quiet}"[Only display volume names]" && ret=0
;;
(prune)
_arguments $(__docker_arguments) \
$opts_help \
"($help -a --all)"{-a,--all}"[Remove all unused local volumes, not just anonymous ones]" \
"($help)*--filter=[Filter values]:filter:__docker_complete_prune_filters" \
"($help -f --force)"{-f,--force}"[Do not prompt for confirmation]" && ret=0
;;
(rm)
@ -2544,6 +2548,78 @@ __docker_volume_subcommand() {
# EO volume
# BO context
__docker_complete_contexts() {
[[ $PREFIX = -* ]] && return 1
integer ret=1
declare -a contexts
contexts=(${(f)${:-"$(_call_program commands docker $docker_options context ls -q)"$'\n'}})
_describe -t context-list "context" contexts && ret=0
return ret
}
__docker_context_commands() {
local -a _docker_context_subcommands
_docker_context_subcommands=(
"create:Create new context"
"inspect:Display detailed information on one or more contexts"
"list:List available contexts"
"rm:Remove one or more contexts"
"show:Print the current context"
"update:Update a context"
"use:Set the default context"
)
_describe -t docker-context-commands "docker context command" _docker_context_subcommands
}
__docker_context_subcommand() {
local -a _command_args opts_help
local expl help="--help"
integer ret=1
opts_help=("(: -)--help[Print usage]")
case "$words[1]" in
(create)
_arguments $(__docker_arguments) \
$opts_help \
"($help)--description=[Description of the context]:description:" \
"($help)--docker=[Set the docker endpoint]:docker:" \
"($help)--from=[Create context from a named context]:from:__docker_complete_contexts" \
"($help -):name: " && ret=0
;;
(use)
_arguments $(__docker_arguments) \
$opts_help \
"($help -)1:context:__docker_complete_contexts" && ret=0
;;
(inspect)
_arguments $(__docker_arguments) \
$opts_help \
"($help -)1:context:__docker_complete_contexts" && ret=0
;;
(rm)
_arguments $(__docker_arguments) \
$opts_help \
"($help -)1:context:__docker_complete_contexts" && ret=0
;;
(update)
_arguments $(__docker_arguments) \
$opts_help \
"($help)--description=[Description of the context]:description:" \
"($help)--docker=[Set the docker endpoint]:docker:" \
"($help -):name:" && ret=0
;;
esac
return ret
}
# EO context
__docker_caching_policy() {
oldp=( "$1"(Nmh+1) ) # 1 hour
(( $#oldp ))
@ -2576,7 +2652,7 @@ __docker_commands() {
then
local -a lines
lines=(${(f)"$(_call_program commands docker 2>&1)"})
_docker_subcommands=(${${${(M)${lines[$((${lines[(i)*Commands:]} + 1)),-1]}:# *}## #}/ ##/:})
_docker_subcommands=(${${${(M)${lines[$((${lines[(i)*Commands:]} + 1)),-1]}:# *}## #}/\*# ##/:})
_docker_subcommands=($_docker_subcommands 'daemon:Enable daemon mode' 'help:Show help for a command')
(( $#_docker_subcommands > 2 )) && _store_cache docker_subcommands _docker_subcommands
fi
@ -2631,6 +2707,23 @@ __docker_subcommand() {
;;
esac
;;
(context)
local curcontext="$curcontext" state
_arguments $(__docker_arguments) \
$opts_help \
"($help -): :->command" \
"($help -)*:: :->option-or-argument" && ret=0
case $state in
(command)
__docker_context_commands && ret=0
;;
(option-or-argument)
curcontext=${curcontext%:*:*}:docker-${words[-1]}:
__docker_context_subcommand && ret=0
;;
esac
;;
(daemon)
_arguments $(__docker_arguments) \
$opts_help \
@ -2641,9 +2734,6 @@ __docker_subcommand() {
"($help -b --bridge)"{-b=,--bridge=}"[Attach containers to a network bridge]:bridge:_net_interfaces" \
"($help)--bip=[Network bridge IP]:IP address: " \
"($help)--cgroup-parent=[Parent cgroup for all containers]:cgroup: " \
"($help)--cluster-advertise=[Address or interface name to advertise]:Instance to advertise (host\:port): " \
"($help)--cluster-store=[URL of the distributed storage backend]:Cluster Store:->cluster-store" \
"($help)*--cluster-store-opt=[Cluster store options]:Cluster options:->cluster-store-options" \
"($help)--config-file=[Path to daemon configuration file]:Config File:_files" \
"($help)--containerd=[Path to containerd socket]:socket:_files -g \"*.sock\"" \
"($help)--containerd-namespace=[Containerd namespace to use]:containerd namespace:" \
@ -2678,16 +2768,16 @@ __docker_subcommand() {
"($help)--live-restore[Enable live restore of docker when containers are still running]" \
"($help)--log-driver=[Default driver for container logs]:logging driver:__docker_complete_log_drivers" \
"($help)*--log-opt=[Default log driver options for containers]:log driver options:__docker_complete_log_options" \
"($help)--max-concurrent-downloads[Set the max concurrent downloads for each pull]" \
"($help)--max-concurrent-uploads[Set the max concurrent uploads for each push]" \
"($help)--max-concurrent-downloads[Set the max concurrent downloads]" \
"($help)--max-concurrent-uploads[Set the max concurrent uploads]" \
"($help)--max-download-attempts[Set the max download attempts for each pull]" \
"($help)--mtu=[Network MTU]:mtu:(0 576 1420 1500 9000)" \
"($help)--oom-score-adjust=[Set the oom_score_adj for the daemon]:oom-score:(-500)" \
"($help -p --pidfile)"{-p=,--pidfile=}"[Path to use for daemon PID file]:PID file:_files" \
"($help)--raw-logs[Full timestamps without ANSI coloring]" \
"($help)*--registry-mirror=[Preferred Docker registry mirror]:registry mirror: " \
"($help)*--registry-mirror=[Preferred registry mirror]:registry mirror: " \
"($help)--seccomp-profile=[Path to seccomp profile]:path:_files -g \"*.json\"" \
"($help -s --storage-driver)"{-s=,--storage-driver=}"[Storage driver to use]:driver:(aufs btrfs devicemapper overlay overlay2 vfs zfs)" \
"($help -s --storage-driver)"{-s=,--storage-driver=}"[Storage driver to use]:driver:(btrfs devicemapper overlay2 vfs zfs)" \
"($help)--selinux-enabled[Enable selinux support]" \
"($help)--shutdown-timeout=[Set the shutdown timeout value in seconds]:time: " \
"($help)*--storage-opt=[Storage driver options]:storage driver options: " \
@ -2698,25 +2788,10 @@ __docker_subcommand() {
"($help)--tlsverify[Use TLS and verify the remote]" \
"($help)--userns-remap=[User/Group setting for user namespaces]:user\:group:->users-groups" \
"($help)--userland-proxy[Use userland proxy for loopback traffic]" \
"($help)--userland-proxy-path=[Path to the userland proxy binary]:binary:_files" && ret=0
"($help)--userland-proxy-path=[Path to the userland proxy binary]:binary:_files" \
"($help)--validate[Validate daemon configuration and exit]" && ret=0
case $state in
(cluster-store)
if compset -P '*://'; then
_message 'host:port' && ret=0
else
store=('consul' 'etcd' 'zk')
_describe -t cluster-store "Cluster Store" store -qS "://" && ret=0
fi
;;
(cluster-store-options)
if compset -P '*='; then
_files && ret=0
else
opts=('discovery.heartbeat' 'discovery.ttl' 'kv.cacertfile' 'kv.certfile' 'kv.keyfile' 'kv.path')
_describe -t cluster-store-opts "Cluster Store Options" opts -qS "=" && ret=0
fi
;;
(users-groups)
if compset -P '*:'; then
_groups && ret=0
@ -3001,6 +3076,7 @@ _docker() {
_arguments $(__docker_arguments) -C \
"(: -)"{-h,--help}"[Print usage]" \
"($help)--config[Location of client config files]:path:_directories" \
"($help -c --context)"{-c=,--context=}"[Execute the command in a docker context]:context:__docker_complete_contexts" \
"($help -D --debug)"{-D,--debug}"[Enable debug mode]" \
"($help -H --host)"{-H=,--host=}"[tcp://host:port to bind/connect to]:host: " \
"($help -l --log-level)"{-l=,--log-level=}"[Logging level]:level:(debug info warn error fatal)" \
@ -3016,7 +3092,8 @@ _docker() {
local host=${opt_args[-H]}${opt_args[--host]}
local config=${opt_args[--config]}
local docker_options="${host:+--host $host} ${config:+--config $config}"
local context=${opt_args[-c]}${opt_args[--context]}
local docker_options="${host:+--host $host} ${config:+--config $config} ${context:+--context $context} "
case $state in
(command)

View file

@ -0,0 +1,61 @@
alias dbl='docker build'
alias dcin='docker container inspect'
alias dcls='docker container ls'
alias dclsa='docker container ls -a'
alias dib='docker image build'
alias dii='docker image inspect'
alias dils='docker image ls'
alias dipu='docker image push'
alias dirm='docker image rm'
alias dit='docker image tag'
alias dlo='docker container logs'
alias dnc='docker network create'
alias dncn='docker network connect'
alias dndcn='docker network disconnect'
alias dni='docker network inspect'
alias dnls='docker network ls'
alias dnrm='docker network rm'
alias dpo='docker container port'
alias dpu='docker pull'
alias dr='docker container run'
alias drit='docker container run -it'
alias drm='docker container rm'
alias 'drm!'='docker container rm -f'
alias dst='docker container start'
alias drs='docker container restart'
alias dsta='docker stop $(docker ps -q)'
alias dstp='docker container stop'
alias dtop='docker top'
alias dvi='docker volume inspect'
alias dvls='docker volume ls'
alias dvprune='docker volume prune'
alias dxc='docker container exec'
alias dxcit='docker container exec -it'
if (( ! $+commands[docker] )); then
return
fi
# Standardized $0 handling
# https://zdharma-continuum.github.io/Zsh-100-Commits-Club/Zsh-Plugin-Standard.html
0="${${ZERO:-${0:#$ZSH_ARGZERO}}:-${(%):-%N}}"
0="${${(M)0:#/*}:-$PWD/$0}"
{
# docker version returns `Docker version 24.0.2, build cb74dfcd85`
# with `s:,:` remove the comma after the version, and select third word of it
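  # e.g. "Docker version 24.0.2, build cb74dfcd85" -> words: Docker version 24.0.2 build cb74dfcd85 -> third word "24.0.2"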
local _docker_version=${${(s:,:z)"$(command docker --version)"}[3]}
# `docker completion` is only available from 23.0.0 on
if is-at-least 23.0.0 $_docker_version; then
# If the completion file doesn't exist yet, we need to autoload it and
# bind it to `docker`. Otherwise, compinit will have already done that.
if [[ ! -f "$ZSH_CACHE_DIR/completions/_docker" ]]; then
typeset -g -A _comps
autoload -Uz _docker
_comps[docker]=_docker
fi
command docker completion zsh >| "$ZSH_CACHE_DIR/completions/_docker"
else
command cp "${0:h}/completions/_docker" "$ZSH_CACHE_DIR/completions/_docker"
fi
} &|

Some files were not shown because too many files have changed in this diff